path: root/examples/network
Commit message  Author  Age  Files  Lines
* Fix build on Symbian: make sure sym_iap_util.h can be found.  Frans Englich  2009-10-09  3  -3/+3
* Symbian fix: rename examples ftp and musicplayer to start with q.  Frans Englich  2009-10-09  10  -5/+5
* Demos and applications have a nice Qt logo when installed on Symbian  Espen Riskedal  2009-10-02  3  -3/+3
* Threaded Fortune server example: fix displayed IP  Peter Hartmann  2009-10-01  2  -3/+14
* Update license headers again.  Jason McDonald  2009-09-09  84  -336/+336
* FortuneServer/Client example: fix displayed IP  Peter Hartmann  2009-09-03  2  -8/+26
* Update tech preview license header for files that are new in 4.6.  Jason McDonald  2009-08-31  1  -13/+13
* Merge branch '4.5' into 4.6  Thiago Macieira  2009-08-31  83  -1079/+1079
|\
| * Update tech preview license header.  Jason McDonald  2009-08-31  83  -1079/+1079
| * Update URL for Qt FTP site.  Jason McDonald  2009-08-11  1  -1/+1
| * Update license headers.  Jason McDonald  2009-08-11  83  -83/+83
* | Examples: Warn about usage of QHttp  Markus Goetz  2009-08-24  2  -0/+2
* | renamed examplebase.pri to symbianpkgrules.pri  Espen Riskedal  2009-08-21  15  -23/+20
* | Remove commented-out code from examples.  axis  2009-08-21  1  -4/+0
* | Merge branch 'master' of git@scm.dev.nokia.troll.no:qt/qt  axis  2009-08-19  1  -4/+3
|\ \
| * | Added two convenience functions to QXmlStreamReader  Thorbjørn Lindeijer  2009-08-18  1  -4/+3
* | | Changed names and URLs to reflect name change.  axis  2009-08-19  1  -1/+1
* | | Merge commit 'qt/master'  Jason Barron  2009-08-13  83  -84/+84
|\ \ \
| |/ /
| * | Update obsolete URL's in code and docs.  Jason McDonald  2009-08-12  1  -1/+1
| * | Update contact URL in license headers.  Jason McDonald  2009-08-12  83  -83/+83
* | | Update license headers according to commit 858c70f768e.  axis  2009-08-06  1  -2/+2
* | | Fixed network examples building for Symbian  Miikka Heikkinen  2009-08-06  1  -1/+2
* | | Merge commit 'origin/master'  Jason Barron  2009-08-06  3  -13/+13
|\ \ \
| * | | 'Trailing whitespace' and 'blank line' fixes for demos and examples  Janne Anttila  2009-08-04  3  -13/+13
* | | | Merge commit 'origin/master'  Jason Barron  2009-08-04  6  -7/+228
|\ \ \ \
| |/ / /
| * | | Adding some interesting functions that help finding  Aleksandar Sasha Babic  2009-07-27  2  -5/+197
| * | | Adaptation done to make network-chat work on S60.  Aleksandar Sasha Babic  2009-07-24  4  -2/+31
* | | | Merge commit 'qt/master-stable'  Jason Barron  2009-08-04  1  -1/+2
|\ \ \ \
| |/ / /
|/| / /
| |/ /
| * | Fix compilation errors in examples and demos on VxWorks and QNX.  Robert Griebl  2009-07-29  1  -1/+3
| |/
* | Forgot variable definitioin.  Aleksandar Sasha Babic  2009-07-06  1  -1/+2
* | Commenting some debug printf(...).  Aleksandar Sasha Babic  2009-07-06  1  -6/+11
* | Making fortuneserver example working on the S60.  Aleksandar Sasha Babic  2009-07-06  3  -2/+26
* | Adapting fortuneclient to work on S60 phones.  Aleksandar Sasha Babic  2009-07-06  4  -1/+29
* | Better strategy to use/select deafult IAP:  Aleksandar Sasha Babic  2009-07-06  2  -79/+172
* | We can store IAP selections in QSettings.  Aleksandar Sasha Babic  2009-07-04  1  -4/+81
* | TCommDbPrefs removed as they don't help for 3.1 FP1 devices(N95).  Aleksandar Sasha Babic  2009-07-03  1  -8/+3
* | By using TCommDbConnPref we are able to  Aleksandar Sasha Babic  2009-07-01  2  -2/+6
* | In loop have to close/reopen connection  Aleksandar Sasha Babic  2009-07-01  1  -2/+5
* | If branch when active connection not found was misplaced.  Aleksandar Sasha Babic  2009-07-01  1  -20/+40
* | RConnectionManager is not used any more.  Aleksandar Sasha Babic  2009-06-30  2  -41/+30
* | Merge branch '4.5' of git@scm.dev.nokia.troll.no:qt/qt  axis  2009-06-29  83  -166/+166
|\ \
| |/
| * Update license headers as requested by the marketing department.  Jason McDonald  2009-06-16  83  -166/+166
* | This changes will make firts FTP UI to be visible  Aleksandar Sasha Babic  2009-06-18  3  -2/+18
* | The native Symbian code that helps to select and set default IAP  Aleksandar Sasha Babic  2009-06-18  3  -87/+135
* | Fixed FTP default IAP setting. Accidentaly reverted by ce409be66.  Janne Anttila  2009-06-18  1  -5/+4
* | Making the writing in the status label in sync with the current state.  Aleksandar Sasha Babic  2009-06-17  1  -0/+1
* | getting ftp to build in RnD environments  mread  2009-06-16  1  -0/+1
* | Improving the code for setting the default IAP.  Aleksandar Sasha Babic  2009-06-15  2  -26/+75
* | Merge branch 'master' of git@scm.dev.troll.no:qt/qt-s60-public  Miikka Heikkinen  2009-06-10  2  -15/+12
|\ \
| * | Enabled default iap setting for FTP example.  Janne Anttila  2009-06-10  2  -15/+12
Diffstat between commits 9b3d4847e5e555ef4bab130969639d54ec78cf0e and 41914349adbee033ba48d6c52dc26a7d36cd78fa:
-rw-r--r--  Doc/library/binascii.rst  9
-rw-r--r--Doc/library/bz2.rst229
-rw-r--r--Doc/library/chunk.rst5
-rw-r--r--Doc/library/cmd.rst4
-rw-r--r--Doc/library/codecs.rst34
-rw-r--r--Doc/library/collections.abc.rst182
-rw-r--r--Doc/library/collections.rst366
-rw-r--r--Doc/library/concurrent.futures.rst19
-rw-r--r--Doc/library/copyreg.rst8
-rw-r--r--Doc/library/crypt.rst115
-rw-r--r--Doc/library/csv.rst18
-rw-r--r--Doc/library/ctypes.rst115
-rw-r--r--Doc/library/curses.rst31
-rw-r--r--Doc/library/datatypes.rst1
-rw-r--r--Doc/library/datetime.rst74
-rw-r--r--Doc/library/debug.rst3
-rw-r--r--  Doc/library/depgraph-output.png  bin 0 -> 24719 bytes
-rw-r--r--  Doc/library/difflib.rst  4
-rw-r--r--Doc/library/dis.rst12
-rw-r--r--Doc/library/distutils.rst20
-rw-r--r--Doc/library/email.generator.rst55
-rw-r--r--Doc/library/email.parser.rst78
-rw-r--r--Doc/library/email.policy.rst184
-rw-r--r--Doc/library/email.rst1
-rw-r--r--Doc/library/email.util.rst37
-rw-r--r--Doc/library/exceptions.rst241
-rw-r--r--Doc/library/faulthandler.rst136
-rw-r--r--Doc/library/fcntl.rst9
-rw-r--r--Doc/library/fileinput.rst9
-rw-r--r--Doc/library/ftplib.rst65
-rw-r--r--Doc/library/functions.rst53
-rw-r--r--Doc/library/functools.rst13
-rw-r--r--Doc/library/gettext.rst7
-rw-r--r--Doc/library/gzip.rst5
-rw-r--r--Doc/library/http.client.rst6
-rw-r--r--Doc/library/http.cookiejar.rst11
-rw-r--r--Doc/library/http.server.rst45
-rw-r--r--Doc/library/imaplib.rst11
-rw-r--r--Doc/library/importlib.rst52
-rw-r--r--Doc/library/inspect.rst12
-rw-r--r--Doc/library/io.rst65
-rw-r--r--Doc/library/itertools.rst48
-rw-r--r--Doc/library/logging.handlers.rst101
-rw-r--r--Doc/library/logging.rst41
-rw-r--r--Doc/library/lzma.rst349
-rw-r--r--Doc/library/math.rst13
-rw-r--r--Doc/library/mmap.rst12
-rw-r--r--Doc/library/msvcrt.rst15
-rw-r--r--Doc/library/multiprocessing.rst138
-rw-r--r--Doc/library/nntplib.rst16
-rw-r--r--Doc/library/os.rst1017
-rw-r--r--Doc/library/ossaudiodev.rst14
-rw-r--r--Doc/library/packaging-misc.rst27
-rw-r--r--Doc/library/packaging.command.rst111
-rw-r--r--Doc/library/packaging.compiler.rst681
-rw-r--r--Doc/library/packaging.database.rst345
-rw-r--r--Doc/library/packaging.depgraph.rst199
-rw-r--r--Doc/library/packaging.dist.rst108
-rw-r--r--Doc/library/packaging.fancy_getopt.rst75
-rw-r--r--Doc/library/packaging.install.rst112
-rw-r--r--Doc/library/packaging.metadata.rst122
-rw-r--r--Doc/library/packaging.pypi.dist.rst114
-rw-r--r--Doc/library/packaging.pypi.rst74
-rw-r--r--Doc/library/packaging.pypi.simple.rst218
-rw-r--r--Doc/library/packaging.pypi.xmlrpc.rst143
-rw-r--r--Doc/library/packaging.rst75
-rw-r--r--Doc/library/packaging.tests.pypi_server.rst105
-rw-r--r--Doc/library/packaging.util.rst155
-rw-r--r--Doc/library/packaging.version.rst104
-rw-r--r--Doc/library/pdb.rst5
-rw-r--r--Doc/library/pickle.rst61
-rw-r--r--Doc/library/platform.rst4
-rw-r--r--Doc/library/python.rst1
-rw-r--r--Doc/library/random.rst6
-rw-r--r--Doc/library/re.rst9
-rw-r--r--Doc/library/readline.rst4
-rw-r--r--Doc/library/resource.rst8
-rw-r--r--Doc/library/sched.rst66
-rw-r--r--Doc/library/select.rst114
-rw-r--r--Doc/library/shlex.rst37
-rw-r--r--Doc/library/shutil.rst118
-rw-r--r--Doc/library/signal.rst164
-rw-r--r--Doc/library/site.rst26
-rw-r--r--Doc/library/smtplib.rst62
-rw-r--r--Doc/library/socket.rst389
-rw-r--r--Doc/library/socketserver.rst16
-rw-r--r--Doc/library/sqlite3.rst20
-rw-r--r--Doc/library/ssl.rst218
-rw-r--r--Doc/library/stdtypes.rst385
-rw-r--r--Doc/library/struct.rst21
-rw-r--r--Doc/library/subprocess.rst151
-rw-r--r--Doc/library/sys.rst137
-rw-r--r--Doc/library/tarfile.rst31
-rw-r--r--Doc/library/telnetlib.rst6
-rw-r--r--Doc/library/tempfile.rst2
-rw-r--r--Doc/library/test.rst124
-rw-r--r--Doc/library/threading.rst56
-rw-r--r--Doc/library/time.rst116
-rw-r--r--Doc/library/tkinter.rst53
-rw-r--r--Doc/library/tokenize.rst108
-rw-r--r--Doc/library/unicodedata.rst13
-rw-r--r--Doc/library/unittest.rst80
-rw-r--r--Doc/library/urllib.error.rst12
-rw-r--r--Doc/library/urllib.request.rst158
-rw-r--r--Doc/library/warnings.rst3
-rw-r--r--Doc/library/webbrowser.rst97
-rw-r--r--Doc/library/winreg.rst48
-rw-r--r--Doc/library/xml.etree.elementtree.rst28
-rw-r--r--Doc/library/xmlrpc.client.rst53
-rw-r--r--Doc/library/xmlrpc.server.rst28
-rw-r--r--Doc/library/zipimport.rst5
-rw-r--r--Doc/library/zlib.rst39
-rw-r--r--Doc/license.rst2
-rw-r--r--Doc/packaging/builtdist.rst302
-rw-r--r--Doc/packaging/commandhooks.rst47
-rw-r--r--Doc/packaging/commandref.rst374
-rw-r--r--Doc/packaging/configfile.rst125
-rw-r--r--Doc/packaging/examples.rst334
-rw-r--r--Doc/packaging/extending.rst95
-rw-r--r--Doc/packaging/index.rst45
-rw-r--r--Doc/packaging/introduction.rst193
-rw-r--r--Doc/packaging/packageindex.rst104
-rw-r--r--Doc/packaging/setupcfg.rst890
-rw-r--r--Doc/packaging/setupscript.rst693
-rw-r--r--Doc/packaging/sourcedist.rst266
-rw-r--r--Doc/packaging/tutorial.rst112
-rw-r--r--Doc/packaging/uploading.rst80
-rw-r--r--Doc/reference/datamodel.rst50
-rw-r--r--Doc/reference/expressions.rst33
-rw-r--r--Doc/reference/lexical_analysis.rst33
-rw-r--r--Doc/reference/simple_stmts.rst27
-rw-r--r--Doc/tools/sphinxext/indexcontent.html8
-rw-r--r--Doc/tools/sphinxext/indexsidebar.html2
-rw-r--r--Doc/tools/sphinxext/pyspecific.py7
-rw-r--r--Doc/tools/sphinxext/susp-ignored.csv310
-rw-r--r--Doc/tutorial/classes.rst5
-rw-r--r--Doc/tutorial/datastructures.rst24
-rw-r--r--Doc/tutorial/interpreter.rst14
-rw-r--r--Doc/tutorial/stdlib.rst2
-rw-r--r--Doc/tutorial/stdlib2.rst6
-rw-r--r--Doc/using/cmdline.rst28
-rw-r--r--Doc/whatsnew/3.2.rst8
-rw-r--r--Doc/whatsnew/3.3.rst1182
-rw-r--r--Doc/whatsnew/index.rst1
-rw-r--r--Grammar/Grammar5
-rw-r--r--Include/Python-ast.h61
-rw-r--r--Include/Python.h38
-rw-r--r--Include/abstract.h16
-rw-r--r--Include/asdl.h1
-rw-r--r--Include/ast.h1
-rw-r--r--Include/code.h6
-rw-r--r--Include/codecs.h2
-rw-r--r--Include/compile.h14
-rw-r--r--Include/complexobject.h5
-rw-r--r--Include/datetime.h6
-rw-r--r--Include/descrobject.h1
-rw-r--r--Include/dictobject.h2
-rw-r--r--Include/errcode.h1
-rw-r--r--Include/fileutils.h2
-rw-r--r--Include/floatobject.h17
-rw-r--r--Include/frameobject.h44
-rw-r--r--Include/funcobject.h2
-rw-r--r--Include/genobject.h24
-rw-r--r--Include/graminit.h1
-rw-r--r--Include/import.h29
-rw-r--r--Include/intrcheck.h6
-rw-r--r--Include/listobject.h2
-rw-r--r--Include/longobject.h16
-rw-r--r--Include/memoryobject.h83
-rw-r--r--Include/methodobject.h3
-rw-r--r--Include/moduleobject.h4
-rw-r--r--Include/node.h1
-rw-r--r--Include/object.h52
-rw-r--r--Include/opcode.h188
-rw-r--r--Include/parsetok.h9
-rw-r--r--Include/patchlevel.h12
-rw-r--r--Include/py_curses.h1
-rw-r--r--Include/pydebug.h3
-rw-r--r--Include/pyerrors.h70
-rw-r--r--Include/pymacro.h55
-rw-r--r--Include/pymath.h6
-rw-r--r--Include/pyport.h2
-rw-r--r--Include/pystate.h10
-rw-r--r--Include/pythonrun.h4
-rw-r--r--Include/pythread.h4
-rw-r--r--Include/pytime.h24
-rw-r--r--Include/setobject.h2
-rw-r--r--Include/structmember.h10
-rw-r--r--Include/symtable.h11
-rw-r--r--Include/timefuncs.h25
-rw-r--r--Include/traceback.h38
-rw-r--r--Include/ucnhash.h6
-rw-r--r--Include/unicodeobject.h1021
-rw-r--r--LICENSE2
-rw-r--r--Lib/_dummy_thread.py6
-rw-r--r--Lib/_pyio.py139
-rw-r--r--Lib/abc.py27
-rw-r--r--Lib/aifc.py12
-rw-r--r--Lib/argparse.py50
-rw-r--r--Lib/ast.py1
-rw-r--r--Lib/asynchat.py2
-rw-r--r--Lib/asyncore.py18
-rwxr-xr-xLib/base64.py26
-rw-r--r--Lib/binhex.py1
-rw-r--r--Lib/bz2.py424
-rwxr-xr-xLib/cgi.py13
-rw-r--r--Lib/cgitb.py1
-rw-r--r--Lib/codecs.py14
-rw-r--r--Lib/collections/__init__.py (renamed from Lib/collections.py)98
-rw-r--r--Lib/collections/abc.py (renamed from Lib/_abcoll.py)37
-rw-r--r--Lib/concurrent/futures/_base.py23
-rw-r--r--Lib/concurrent/futures/process.py136
-rw-r--r--Lib/concurrent/futures/thread.py2
-rw-r--r--Lib/configparser.py3
-rw-r--r--Lib/contextlib.py1
-rw-r--r--Lib/copy.py77
-rw-r--r--Lib/crypt.py62
-rw-r--r--Lib/ctypes/test/test_callbacks.py2
-rw-r--r--Lib/ctypes/test/test_memfunctions.py2
-rw-r--r--Lib/ctypes/test/test_parameters.py9
-rw-r--r--Lib/ctypes/test/test_pep3118.py76
-rw-r--r--Lib/ctypes/test/test_python_api.py3
-rw-r--r--Lib/ctypes/test/test_refcounts.py3
-rw-r--r--Lib/ctypes/test/test_stringptr.py2
-rw-r--r--Lib/ctypes/util.py29
-rw-r--r--Lib/curses/__init__.py46
-rw-r--r--Lib/curses/wrapper.py50
-rw-r--r--Lib/datetime.py8
-rw-r--r--Lib/decimal.py143
-rw-r--r--Lib/difflib.py23
-rw-r--r--Lib/distutils/__init__.py2
-rw-r--r--Lib/distutils/command/bdist_wininst.py6
-rw-r--r--Lib/distutils/command/build_ext.py3
-rw-r--r--Lib/distutils/command/build_scripts.py7
-rw-r--r--Lib/distutils/tests/test_archive_util.py40
-rw-r--r--Lib/distutils/tests/test_bdist_rpm.py9
-rw-r--r--Lib/doctest.py9
-rw-r--r--Lib/email/_parseaddr.py27
-rw-r--r--Lib/email/errors.py5
-rw-r--r--Lib/email/feedparser.py31
-rw-r--r--Lib/email/generator.py67
-rw-r--r--Lib/email/parser.py43
-rw-r--r--Lib/email/policy.py174
-rw-r--r--Lib/email/utils.py76
-rw-r--r--Lib/encodings/cp65001.py40
-rw-r--r--Lib/encodings/idna.py22
-rw-r--r--Lib/fileinput.py3
-rw-r--r--Lib/fnmatch.py10
-rw-r--r--Lib/ftplib.py107
-rw-r--r--Lib/functools.py90
-rw-r--r--Lib/getopt.py19
-rw-r--r--Lib/getpass.py4
-rw-r--r--Lib/gzip.py45
-rw-r--r--Lib/http/client.py167
-rw-r--r--Lib/http/server.py50
-rw-r--r--Lib/idlelib/PyShell.py14
-rw-r--r--Lib/idlelib/configHandler.py3
-rw-r--r--Lib/idlelib/idlever.py2
-rw-r--r--Lib/idlelib/rpc.py7
-rw-r--r--Lib/idlelib/run.py37
-rw-r--r--Lib/imaplib.py33
-rw-r--r--Lib/importlib/__init__.py110
-rw-r--r--Lib/importlib/_bootstrap.py540
-rw-r--r--Lib/importlib/abc.py33
-rw-r--r--Lib/importlib/test/__main__.py19
-rw-r--r--Lib/importlib/test/benchmark.py157
-rw-r--r--Lib/importlib/test/extension/test_case_sensitivity.py6
-rw-r--r--Lib/importlib/test/import_/test___package__.py2
-rw-r--r--Lib/importlib/test/import_/test_api.py7
-rw-r--r--Lib/importlib/test/import_/test_path.py10
-rw-r--r--Lib/importlib/test/regrtest.py7
-rw-r--r--Lib/importlib/test/source/test_abc_loader.py17
-rw-r--r--Lib/importlib/test/source/test_case_sensitivity.py6
-rw-r--r--Lib/importlib/test/source/test_file_loader.py44
-rw-r--r--Lib/importlib/test/source/test_finder.py7
-rw-r--r--Lib/importlib/test/test_api.py28
-rw-r--r--Lib/importlib/test/test_util.py10
-rw-r--r--Lib/inspect.py127
-rw-r--r--Lib/json/decoder.py3
-rw-r--r--Lib/lib2to3/__main__.py4
-rw-r--r--Lib/lib2to3/fixer_base.py4
-rw-r--r--Lib/lib2to3/pytree.py20
-rw-r--r--Lib/lib2to3/refactor.py4
-rw-r--r--Lib/lib2to3/tests/test_pytree.py17
-rw-r--r--Lib/logging/__init__.py164
-rw-r--r--Lib/logging/config.py18
-rw-r--r--Lib/logging/handlers.py157
-rw-r--r--Lib/lzma.py401
-rw-r--r--Lib/mailbox.py7
-rw-r--r--Lib/mailcap.py6
-rw-r--r--Lib/mimetypes.py3
-rw-r--r--Lib/modulefinder.py16
-rw-r--r--Lib/multiprocessing/__init__.py9
-rw-r--r--Lib/multiprocessing/connection.py553
-rw-r--r--Lib/multiprocessing/dummy/__init__.py15
-rw-r--r--Lib/multiprocessing/forking.py64
-rw-r--r--Lib/multiprocessing/heap.py1
-rw-r--r--Lib/multiprocessing/managers.py9
-rw-r--r--Lib/multiprocessing/pool.py32
-rw-r--r--Lib/multiprocessing/process.py20
-rw-r--r--Lib/multiprocessing/queues.py19
-rw-r--r--Lib/multiprocessing/reduction.py36
-rw-r--r--Lib/multiprocessing/sharedctypes.py1
-rw-r--r--Lib/multiprocessing/synchronize.py5
-rw-r--r--Lib/multiprocessing/util.py24
-rw-r--r--Lib/nntplib.py18
-rw-r--r--Lib/numbers.py14
-rw-r--r--Lib/opcode.py2
-rw-r--r--Lib/optparse.py31
-rw-r--r--Lib/os.py103
-rw-r--r--Lib/packaging/__init__.py17
-rw-r--r--Lib/packaging/_trove.py571
-rw-r--r--Lib/packaging/command/__init__.py53
-rw-r--r--Lib/packaging/command/bdist.py141
-rw-r--r--Lib/packaging/command/bdist_dumb.py139
-rw-r--r--Lib/packaging/command/bdist_msi.py743
-rw-r--r--Lib/packaging/command/bdist_wininst.py345
-rw-r--r--Lib/packaging/command/build.py151
-rw-r--r--Lib/packaging/command/build_clib.py197
-rw-r--r--Lib/packaging/command/build_ext.py641
-rw-r--r--Lib/packaging/command/build_py.py392
-rw-r--r--Lib/packaging/command/build_scripts.py154
-rw-r--r--Lib/packaging/command/check.py88
-rw-r--r--Lib/packaging/command/clean.py76
-rw-r--r--Lib/packaging/command/cmd.py461
-rw-r--r--Lib/packaging/command/command_template35
-rw-r--r--Lib/packaging/command/config.py349
-rw-r--r--Lib/packaging/command/install_data.py79
-rw-r--r--Lib/packaging/command/install_dist.py605
-rw-r--r--Lib/packaging/command/install_distinfo.py143
-rw-r--r--Lib/packaging/command/install_headers.py43
-rw-r--r--Lib/packaging/command/install_lib.py188
-rw-r--r--Lib/packaging/command/install_scripts.py59
-rw-r--r--Lib/packaging/command/register.py263
-rw-r--r--Lib/packaging/command/sdist.py347
-rw-r--r--Lib/packaging/command/test.py80
-rw-r--r--Lib/packaging/command/upload.py168
-rw-r--r--Lib/packaging/command/upload_docs.py131
-rw-r--r--  Lib/packaging/command/wininst-10.0-amd64.exe  bin 0 -> 222208 bytes
-rw-r--r--  Lib/packaging/command/wininst-10.0.exe  bin 0 -> 190464 bytes
-rw-r--r--  Lib/packaging/command/wininst-6.0.exe  bin 0 -> 61440 bytes
-rw-r--r--  Lib/packaging/command/wininst-7.1.exe  bin 0 -> 65536 bytes
-rw-r--r--  Lib/packaging/command/wininst-8.0.exe  bin 0 -> 61440 bytes
-rw-r--r--  Lib/packaging/command/wininst-9.0-amd64.exe  bin 0 -> 223744 bytes
-rw-r--r--  Lib/packaging/command/wininst-9.0.exe  bin 0 -> 196096 bytes
-rw-r--r--  Lib/packaging/compat.py  50
-rw-r--r--Lib/packaging/compiler/__init__.py274
-rw-r--r--Lib/packaging/compiler/bcppcompiler.py355
-rw-r--r--Lib/packaging/compiler/ccompiler.py863
-rw-r--r--Lib/packaging/compiler/cygwinccompiler.py355
-rw-r--r--Lib/packaging/compiler/extension.py121
-rw-r--r--Lib/packaging/compiler/msvc9compiler.py720
-rw-r--r--Lib/packaging/compiler/msvccompiler.py635
-rw-r--r--Lib/packaging/compiler/unixccompiler.py339
-rw-r--r--Lib/packaging/config.py391
-rw-r--r--Lib/packaging/create.py682
-rw-r--r--Lib/packaging/database.py651
-rw-r--r--Lib/packaging/depgraph.py270
-rw-r--r--Lib/packaging/dist.py769
-rw-r--r--Lib/packaging/errors.py138
-rw-r--r--Lib/packaging/fancy_getopt.py388
-rw-r--r--Lib/packaging/install.py529
-rw-r--r--Lib/packaging/manifest.py381
-rw-r--r--Lib/packaging/markers.py189
-rw-r--r--Lib/packaging/metadata.py570
-rw-r--r--Lib/packaging/pypi/__init__.py9
-rw-r--r--Lib/packaging/pypi/base.py48
-rw-r--r--Lib/packaging/pypi/dist.py544
-rw-r--r--Lib/packaging/pypi/errors.py39
-rw-r--r--Lib/packaging/pypi/mirrors.py52
-rw-r--r--Lib/packaging/pypi/simple.py462
-rw-r--r--Lib/packaging/pypi/wrapper.py99
-rw-r--r--Lib/packaging/pypi/xmlrpc.py200
-rw-r--r--Lib/packaging/run.py663
-rw-r--r--Lib/packaging/tests/LONG_DESC.txt44
-rw-r--r--Lib/packaging/tests/PKG-INFO57
-rw-r--r--Lib/packaging/tests/SETUPTOOLS-PKG-INFO182
-rw-r--r--Lib/packaging/tests/SETUPTOOLS-PKG-INFO2183
-rw-r--r--Lib/packaging/tests/__init__.py28
-rw-r--r--Lib/packaging/tests/__main__.py24
-rw-r--r--Lib/packaging/tests/fake_dists/babar-0.1.dist-info/INSTALLER (renamed from Lib/email/test/__init__.py)0
-rw-r--r--Lib/packaging/tests/fake_dists/babar-0.1.dist-info/METADATA4
-rw-r--r--Lib/packaging/tests/fake_dists/babar-0.1.dist-info/RECORD0
-rw-r--r--Lib/packaging/tests/fake_dists/babar-0.1.dist-info/REQUESTED0
-rw-r--r--Lib/packaging/tests/fake_dists/babar-0.1.dist-info/RESOURCES2
-rw-r--r--Lib/packaging/tests/fake_dists/babar.cfg1
-rw-r--r--Lib/packaging/tests/fake_dists/babar.png0
-rw-r--r--Lib/packaging/tests/fake_dists/bacon-0.1.egg-info/PKG-INFO6
-rw-r--r--Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/PKG-INFO18
-rw-r--r--Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/SOURCES.txt0
-rw-r--r--Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/dependency_links.txt1
-rw-r--r--Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/entry_points.txt3
-rw-r--r--Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/not-zip-safe1
-rw-r--r--Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/requires.txt6
-rw-r--r--Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/top_level.txt0
-rw-r--r--Lib/packaging/tests/fake_dists/cheese-2.0.2.egg-info5
-rw-r--r--Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/INSTALLER0
-rw-r--r--Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/METADATA9
-rw-r--r--Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/RECORD0
-rw-r--r--Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/REQUESTED0
-rw-r--r--Lib/packaging/tests/fake_dists/choxie-2.0.0.9/choxie/__init__.py1
-rw-r--r--Lib/packaging/tests/fake_dists/choxie-2.0.0.9/choxie/chocolate.py10
-rw-r--r--Lib/packaging/tests/fake_dists/choxie-2.0.0.9/truffles.py5
-rw-r--r--Lib/packaging/tests/fake_dists/coconuts-aster-10.3.egg-info/PKG-INFO5
-rw-r--r--Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/INSTALLER0
-rw-r--r--Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/METADATA5
-rw-r--r--Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/RECORD0
-rw-r--r--Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/REQUESTED0
-rw-r--r--Lib/packaging/tests/fake_dists/grammar-1.0a4/grammar/__init__.py1
-rw-r--r--Lib/packaging/tests/fake_dists/grammar-1.0a4/grammar/utils.py8
-rw-r--r--Lib/packaging/tests/fake_dists/nut-funkyversion.egg-info3
-rw-r--r--  Lib/packaging/tests/fake_dists/strawberry-0.6.egg  bin 0 -> 1402 bytes
-rw-r--r--  Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/INSTALLER  0
-rw-r--r--Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/METADATA7
-rw-r--r--Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/RECORD0
-rw-r--r--Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/REQUESTED0
-rw-r--r--Lib/packaging/tests/fake_dists/towel_stuff-0.1/towel_stuff/__init__.py18
-rw-r--r--Lib/packaging/tests/fake_dists/truffles-5.0.egg-info3
-rw-r--r--Lib/packaging/tests/fixer/__init__.py0
-rw-r--r--Lib/packaging/tests/fixer/fix_echo.py16
-rw-r--r--Lib/packaging/tests/fixer/fix_echo2.py16
-rw-r--r--Lib/packaging/tests/pypi_server.py449
-rw-r--r--Lib/packaging/tests/pypi_test_server.py59
-rw-r--r--  Lib/packaging/tests/pypiserver/downloads_with_md5/packages/source/f/foobar/foobar-0.1.tar.gz  bin 0 -> 110 bytes
-rw-r--r--  Lib/packaging/tests/pypiserver/downloads_with_md5/simple/badmd5/badmd5-0.1.tar.gz  0
-rw-r--r--Lib/packaging/tests/pypiserver/downloads_with_md5/simple/badmd5/index.html3
-rw-r--r--Lib/packaging/tests/pypiserver/downloads_with_md5/simple/foobar/index.html3
-rw-r--r--Lib/packaging/tests/pypiserver/downloads_with_md5/simple/index.html2
-rw-r--r--Lib/packaging/tests/pypiserver/foo_bar_baz/simple/bar/index.html6
-rw-r--r--Lib/packaging/tests/pypiserver/foo_bar_baz/simple/baz/index.html6
-rw-r--r--Lib/packaging/tests/pypiserver/foo_bar_baz/simple/foo/index.html6
-rw-r--r--Lib/packaging/tests/pypiserver/foo_bar_baz/simple/index.html3
-rw-r--r--Lib/packaging/tests/pypiserver/project_list/simple/index.html5
-rw-r--r--Lib/packaging/tests/pypiserver/test_found_links/simple/foobar/index.html6
-rw-r--r--Lib/packaging/tests/pypiserver/test_found_links/simple/index.html1
-rw-r--r--Lib/packaging/tests/pypiserver/test_pypi_server/external/index.html1
-rw-r--r--Lib/packaging/tests/pypiserver/test_pypi_server/simple/index.html1
-rw-r--r--Lib/packaging/tests/pypiserver/with_externals/external/external.html3
-rw-r--r--Lib/packaging/tests/pypiserver/with_externals/simple/foobar/index.html4
-rw-r--r--Lib/packaging/tests/pypiserver/with_externals/simple/index.html1
-rw-r--r--Lib/packaging/tests/pypiserver/with_norel_links/external/homepage.html7
-rw-r--r--Lib/packaging/tests/pypiserver/with_norel_links/external/nonrel.html1
-rw-r--r--Lib/packaging/tests/pypiserver/with_norel_links/simple/foobar/index.html6
-rw-r--r--Lib/packaging/tests/pypiserver/with_norel_links/simple/index.html1
-rw-r--r--Lib/packaging/tests/pypiserver/with_real_externals/simple/foobar/index.html4
-rw-r--r--Lib/packaging/tests/pypiserver/with_real_externals/simple/index.html1
-rw-r--r--Lib/packaging/tests/support.py400
-rw-r--r--Lib/packaging/tests/test_ccompiler.py15
-rw-r--r--Lib/packaging/tests/test_command_bdist.py61
-rw-r--r--Lib/packaging/tests/test_command_bdist_dumb.py91
-rw-r--r--Lib/packaging/tests/test_command_bdist_msi.py25
-rw-r--r--Lib/packaging/tests/test_command_bdist_wininst.py32
-rw-r--r--Lib/packaging/tests/test_command_build.py56
-rw-r--r--Lib/packaging/tests/test_command_build_clib.py141
-rw-r--r--Lib/packaging/tests/test_command_build_ext.py394
-rw-r--r--Lib/packaging/tests/test_command_build_py.py146
-rw-r--r--Lib/packaging/tests/test_command_build_scripts.py109
-rw-r--r--Lib/packaging/tests/test_command_check.py161
-rw-r--r--Lib/packaging/tests/test_command_clean.py46
-rw-r--r--Lib/packaging/tests/test_command_cmd.py102
-rw-r--r--Lib/packaging/tests/test_command_config.py76
-rw-r--r--Lib/packaging/tests/test_command_install_data.py148
-rw-r--r--Lib/packaging/tests/test_command_install_dist.py241
-rw-r--r--Lib/packaging/tests/test_command_install_distinfo.py252
-rw-r--r--Lib/packaging/tests/test_command_install_headers.py38
-rw-r--r--Lib/packaging/tests/test_command_install_lib.py110
-rw-r--r--Lib/packaging/tests/test_command_install_scripts.py75
-rw-r--r--Lib/packaging/tests/test_command_register.py260
-rw-r--r--Lib/packaging/tests/test_command_sdist.py394
-rw-r--r--Lib/packaging/tests/test_command_test.py224
-rw-r--r--Lib/packaging/tests/test_command_upload.py159
-rw-r--r--Lib/packaging/tests/test_command_upload_docs.py186
-rw-r--r--Lib/packaging/tests/test_compiler.py66
-rw-r--r--Lib/packaging/tests/test_config.py519
-rw-r--r--Lib/packaging/tests/test_create.py233
-rw-r--r--Lib/packaging/tests/test_cygwinccompiler.py88
-rw-r--r--Lib/packaging/tests/test_database.py686
-rw-r--r--Lib/packaging/tests/test_depgraph.py310
-rw-r--r--Lib/packaging/tests/test_dist.py264
-rw-r--r--Lib/packaging/tests/test_extension.py15
-rw-r--r--Lib/packaging/tests/test_install.py391
-rw-r--r--Lib/packaging/tests/test_manifest.py331
-rw-r--r--Lib/packaging/tests/test_markers.py75
-rw-r--r--Lib/packaging/tests/test_metadata.py454
-rw-r--r--Lib/packaging/tests/test_mixin2to3.py87
-rw-r--r--Lib/packaging/tests/test_msvc9compiler.py140
-rw-r--r--Lib/packaging/tests/test_pypi_dist.py287
-rw-r--r--Lib/packaging/tests/test_pypi_server.py88
-rw-r--r--Lib/packaging/tests/test_pypi_simple.py353
-rw-r--r--Lib/packaging/tests/test_pypi_xmlrpc.py101
-rw-r--r--Lib/packaging/tests/test_run.py92
-rw-r--r--Lib/packaging/tests/test_support.py78
-rw-r--r--Lib/packaging/tests/test_uninstall.py124
-rw-r--r--Lib/packaging/tests/test_unixccompiler.py132
-rw-r--r--Lib/packaging/tests/test_util.py1013
-rw-r--r--Lib/packaging/tests/test_version.py271
-rw-r--r--Lib/packaging/util.py1480
-rw-r--r--Lib/packaging/version.py451
-rwxr-xr-xLib/pdb.py95
-rw-r--r--Lib/pickle.py18
-rw-r--r--Lib/pickletools.py7
-rw-r--r--Lib/pipes.py23
-rw-r--r--Lib/pkgutil.py10
-rw-r--r--Lib/plat-linux/CDROM.py (renamed from Lib/plat-linux2/CDROM.py)0
-rw-r--r--Lib/plat-linux/DLFCN.py (renamed from Lib/plat-linux2/DLFCN.py)0
-rw-r--r--Lib/plat-linux/IN.py (renamed from Lib/plat-linux2/IN.py)0
-rw-r--r--Lib/plat-linux/TYPES.py (renamed from Lib/plat-linux2/TYPES.py)0
-rwxr-xr-xLib/plat-linux/regen (renamed from Lib/plat-linux2/regen)0
-rwxr-xr-xLib/platform.py110
-rw-r--r--Lib/plistlib.py10
-rw-r--r--Lib/poplib.py17
-rw-r--r--Lib/pstats.py3
-rw-r--r--Lib/py_compile.py7
-rwxr-xr-xLib/pydoc.py289
-rw-r--r--Lib/pydoc_data/topics.py29
-rw-r--r--Lib/queue.py137
-rw-r--r--Lib/random.py2
-rw-r--r--Lib/re.py15
-rw-r--r--Lib/sched.py82
-rw-r--r--Lib/shlex.py20
-rw-r--r--Lib/shutil.py240
-rw-r--r--Lib/site.py19
-rwxr-xr-xLib/smtpd.py46
-rw-r--r--Lib/smtplib.py152
-rw-r--r--Lib/socket.py11
-rw-r--r--Lib/socketserver.py20
-rw-r--r--Lib/sqlite3/test/dbapi.py2
-rw-r--r--Lib/sqlite3/test/factory.py4
-rw-r--r--Lib/sqlite3/test/hooks.py54
-rw-r--r--Lib/sqlite3/test/regression.py2
-rw-r--r--Lib/sqlite3/test/transactions.py2
-rw-r--r--Lib/sqlite3/test/types.py4
-rw-r--r--Lib/sqlite3/test/userfunctions.py2
-rw-r--r--Lib/sre_compile.py4
-rw-r--r--Lib/sre_parse.py2
-rw-r--r--Lib/ssl.py73
-rw-r--r--Lib/string.py24
-rw-r--r--Lib/subprocess.py689
-rw-r--r--Lib/sysconfig.cfg111
-rw-r--r--Lib/sysconfig.py305
-rwxr-xr-xLib/tabnanny.py3
-rw-r--r--Lib/tarfile.py163
-rw-r--r--Lib/tempfile.py56
-rw-r--r--Lib/test/buffer_tests.py8
-rw-r--r--Lib/test/crashers/README4
-rw-r--r--Lib/test/crashers/borrowed_ref_1.py29
-rw-r--r--Lib/test/crashers/borrowed_ref_2.py38
-rw-r--r--Lib/test/crashers/compiler_recursion.py12
-rw-r--r--Lib/test/crashers/loosing_mro_ref.py35
-rw-r--r--Lib/test/crashers/nasty_eq_vs_dict.py47
-rw-r--r--Lib/test/datetimetester.py57
-rw-r--r--Lib/test/decimaltestdata/extra.decTest13
-rw-r--r--Lib/test/dh512.pem9
-rw-r--r--Lib/test/exception_hierarchy.txt21
-rw-r--r--Lib/test/fork_wait.py10
-rw-r--r--Lib/test/future_test1.py (renamed from Lib/test/test_future1.py)0
-rw-r--r--Lib/test/future_test2.py (renamed from Lib/test/test_future2.py)0
-rw-r--r--Lib/test/json_tests/test_dump.py19
-rw-r--r--Lib/test/json_tests/test_scanstring.py11
-rw-r--r--Lib/test/keycert.passwd.pem33
-rw-r--r--Lib/test/list_tests.py41
-rw-r--r--Lib/test/lock_tests.py6
-rw-r--r--Lib/test/mailcap.txt39
-rw-r--r--Lib/test/math_testcases.txt114
-rw-r--r--Lib/test/mock_socket.py3
-rw-r--r--Lib/test/pickletester.py150
-rwxr-xr-xLib/test/regrtest.py283
-rw-r--r--Lib/test/script_helper.py5
-rw-r--r--Lib/test/ssl_key.passwd.pem18
-rw-r--r--Lib/test/ssl_servers.py16
-rw-r--r--Lib/test/string_tests.py73
-rw-r--r--Lib/test/support.py324
-rw-r--r--Lib/test/test__locale.py36
-rw-r--r--Lib/test/test_abc.py196
-rw-r--r--Lib/test/test_abstract_numbers.py2
-rw-r--r--Lib/test/test_aifc.py13
-rw-r--r--Lib/test/test_argparse.py94
-rwxr-xr-xLib/test/test_array.py32
-rw-r--r--Lib/test/test_ast.py423
-rw-r--r--Lib/test/test_asyncore.py178
-rw-r--r--Lib/test/test_base64.py165
-rw-r--r--Lib/test/test_bigmem.py223
-rw-r--r--Lib/test/test_binascii.py34
-rw-r--r--Lib/test/test_bool.py10
-rw-r--r--Lib/test/test_buffer.py3628
-rw-r--r--Lib/test/test_builtin.py44
-rw-r--r--Lib/test/test_bytes.py196
-rw-r--r--Lib/test/test_bz2.py424
-rw-r--r--Lib/test/test_calendar.py2
-rw-r--r--Lib/test/test_capi.py5
-rw-r--r--Lib/test/test_cgi.py8
-rw-r--r--Lib/test/test_cgitb.py55
-rw-r--r--Lib/test/test_cmd_line.py16
-rw-r--r--Lib/test/test_code.py2
-rw-r--r--Lib/test/test_codeccallbacks.py159
-rw-r--r--Lib/test/test_codecencodings_cn.py21
-rw-r--r--Lib/test/test_codecencodings_hk.py4
-rw-r--r--Lib/test/test_codecencodings_jp.py96
-rw-r--r--Lib/test/test_codecencodings_kr.py25
-rw-r--r--Lib/test/test_codecencodings_tw.py4
-rw-r--r--Lib/test/test_codecmaps_tw.py3
-rw-r--r--Lib/test/test_codecs.py401
-rw-r--r--Lib/test/test_collections.py96
-rw-r--r--Lib/test/test_compile.py40
-rw-r--r--Lib/test/test_concurrent_futures.py62
-rw-r--r--Lib/test/test_configparser.py (renamed from Lib/test/test_cfgparser.py)0
-rw-r--r--Lib/test/test_copy.py187
-rw-r--r--Lib/test/test_cprofile.py23
-rw-r--r--Lib/test/test_crashers.py38
-rw-r--r--Lib/test/test_crypt.py19
-rw-r--r--Lib/test/test_curses.py52
-rw-r--r--Lib/test/test_dbm.py4
-rw-r--r--Lib/test/test_decimal.py26
-rw-r--r--Lib/test/test_descr.py177
-rw-r--r--Lib/test/test_descrtut.py3
-rw-r--r--Lib/test/test_devpoll.py94
-rw-r--r--Lib/test/test_dict.py29
-rw-r--r--Lib/test/test_dis.py107
-rw-r--r--Lib/test/test_doctest.py444
-rw-r--r--Lib/test/test_dummy_thread.py4
-rw-r--r--Lib/test/test_email.py14
-rw-r--r--Lib/test/test_email/__init__.py55
-rw-r--r--Lib/test/test_email/__main__.py3
-rw-r--r--  Lib/test/test_email/data/PyBanner048.gif (renamed from Lib/email/test/data/PyBanner048.gif)  bin 954 -> 954 bytes
-rw-r--r--  Lib/test/test_email/data/audiotest.au (renamed from Lib/email/test/data/audiotest.au)  bin 28144 -> 28144 bytes
-rw-r--r--  Lib/test/test_email/data/msg_01.txt (renamed from Lib/email/test/data/msg_01.txt)  0
-rw-r--r--Lib/test/test_email/data/msg_02.txt (renamed from Lib/email/test/data/msg_02.txt)0
-rw-r--r--Lib/test/test_email/data/msg_03.txt (renamed from Lib/email/test/data/msg_03.txt)0
-rw-r--r--Lib/test/test_email/data/msg_04.txt (renamed from Lib/email/test/data/msg_04.txt)0
-rw-r--r--Lib/test/test_email/data/msg_05.txt (renamed from Lib/email/test/data/msg_05.txt)0
-rw-r--r--Lib/test/test_email/data/msg_06.txt (renamed from Lib/email/test/data/msg_06.txt)0
-rw-r--r--Lib/test/test_email/data/msg_07.txt (renamed from Lib/email/test/data/msg_07.txt)0
-rw-r--r--Lib/test/test_email/data/msg_08.txt (renamed from Lib/email/test/data/msg_08.txt)0
-rw-r--r--Lib/test/test_email/data/msg_09.txt (renamed from Lib/email/test/data/msg_09.txt)0
-rw-r--r--Lib/test/test_email/data/msg_10.txt (renamed from Lib/email/test/data/msg_10.txt)0
-rw-r--r--Lib/test/test_email/data/msg_11.txt (renamed from Lib/email/test/data/msg_11.txt)0
-rw-r--r--Lib/test/test_email/data/msg_12.txt (renamed from Lib/email/test/data/msg_12.txt)0
-rw-r--r--Lib/test/test_email/data/msg_12a.txt (renamed from Lib/email/test/data/msg_12a.txt)0
-rw-r--r--Lib/test/test_email/data/msg_13.txt (renamed from Lib/email/test/data/msg_13.txt)0
-rw-r--r--Lib/test/test_email/data/msg_14.txt (renamed from Lib/email/test/data/msg_14.txt)0
-rw-r--r--Lib/test/test_email/data/msg_15.txt (renamed from Lib/email/test/data/msg_15.txt)0
-rw-r--r--Lib/test/test_email/data/msg_16.txt (renamed from Lib/email/test/data/msg_16.txt)0
-rw-r--r--Lib/test/test_email/data/msg_17.txt (renamed from Lib/email/test/data/msg_17.txt)0
-rw-r--r--Lib/test/test_email/data/msg_18.txt (renamed from Lib/email/test/data/msg_18.txt)0
-rw-r--r--Lib/test/test_email/data/msg_19.txt (renamed from Lib/email/test/data/msg_19.txt)0
-rw-r--r--Lib/test/test_email/data/msg_20.txt (renamed from Lib/email/test/data/msg_20.txt)0
-rw-r--r--Lib/test/test_email/data/msg_21.txt (renamed from Lib/email/test/data/msg_21.txt)0
-rw-r--r--Lib/test/test_email/data/msg_22.txt (renamed from Lib/email/test/data/msg_22.txt)0
-rw-r--r--Lib/test/test_email/data/msg_23.txt (renamed from Lib/email/test/data/msg_23.txt)0
-rw-r--r--Lib/test/test_email/data/msg_24.txt (renamed from Lib/email/test/data/msg_24.txt)0
-rw-r--r--Lib/test/test_email/data/msg_25.txt (renamed from Lib/email/test/data/msg_25.txt)0
-rw-r--r--Lib/test/test_email/data/msg_26.txt (renamed from Lib/email/test/data/msg_26.txt)0
-rw-r--r--Lib/test/test_email/data/msg_27.txt (renamed from Lib/email/test/data/msg_27.txt)0
-rw-r--r--Lib/test/test_email/data/msg_28.txt (renamed from Lib/email/test/data/msg_28.txt)0
-rw-r--r--Lib/test/test_email/data/msg_29.txt (renamed from Lib/email/test/data/msg_29.txt)0
-rw-r--r--Lib/test/test_email/data/msg_30.txt (renamed from Lib/email/test/data/msg_30.txt)0
-rw-r--r--Lib/test/test_email/data/msg_31.txt (renamed from Lib/email/test/data/msg_31.txt)0
-rw-r--r--Lib/test/test_email/data/msg_32.txt (renamed from Lib/email/test/data/msg_32.txt)0
-rw-r--r--Lib/test/test_email/data/msg_33.txt (renamed from Lib/email/test/data/msg_33.txt)0
-rw-r--r--Lib/test/test_email/data/msg_34.txt (renamed from Lib/email/test/data/msg_34.txt)0
-rw-r--r--Lib/test/test_email/data/msg_35.txt (renamed from Lib/email/test/data/msg_35.txt)0
-rw-r--r--Lib/test/test_email/data/msg_36.txt (renamed from Lib/email/test/data/msg_36.txt)0
-rw-r--r--Lib/test/test_email/data/msg_37.txt (renamed from Lib/email/test/data/msg_37.txt)0
-rw-r--r--Lib/test/test_email/data/msg_38.txt (renamed from Lib/email/test/data/msg_38.txt)0
-rw-r--r--Lib/test/test_email/data/msg_39.txt (renamed from Lib/email/test/data/msg_39.txt)0
-rw-r--r--Lib/test/test_email/data/msg_40.txt (renamed from Lib/email/test/data/msg_40.txt)0
-rw-r--r--Lib/test/test_email/data/msg_41.txt (renamed from Lib/email/test/data/msg_41.txt)0
-rw-r--r--Lib/test/test_email/data/msg_42.txt (renamed from Lib/email/test/data/msg_42.txt)0
-rw-r--r--Lib/test/test_email/data/msg_43.txt (renamed from Lib/email/test/data/msg_43.txt)0
-rw-r--r--Lib/test/test_email/data/msg_44.txt (renamed from Lib/email/test/data/msg_44.txt)0
-rw-r--r--Lib/test/test_email/data/msg_45.txt (renamed from Lib/email/test/data/msg_45.txt)0
-rw-r--r--Lib/test/test_email/data/msg_46.txt (renamed from Lib/email/test/data/msg_46.txt)0
-rw-r--r--Lib/test/test_email/test_asian_codecs.py (renamed from Lib/email/test/test_email_codecs.py)15
-rw-r--r--Lib/test/test_email/test_email.py (renamed from Lib/email/test/test_email.py)344
-rw-r--r--Lib/test/test_email/test_generator.py136
-rw-r--r--Lib/test/test_email/test_policy.py150
-rw-r--r--Lib/test/test_email/test_utils.py45
-rw-r--r--Lib/test/test_email/torture_test.py (renamed from Lib/email/test/test_email_torture.py)0
-rw-r--r--Lib/test/test_epoll.py3
-rw-r--r--Lib/test/test_exceptions.py77
-rw-r--r--Lib/test/test_extcall.py87
-rw-r--r--Lib/test/test_faulthandler.py554
-rw-r--r--Lib/test/test_fileinput.py630
-rw-r--r--Lib/test/test_fileio.py5
-rw-r--r--Lib/test/test_float.py2
-rw-r--r--Lib/test/test_format.py44
-rw-r--r--Lib/test/test_ftplib.py180
-rw-r--r--Lib/test/test_funcattrs.py55
-rw-r--r--Lib/test/test_functools.py93
-rw-r--r--Lib/test/test_future.py12
-rw-r--r--Lib/test/test_gc.py6
-rw-r--r--Lib/test/test_gdb.py34
-rw-r--r--Lib/test/test_generators.py42
-rw-r--r--Lib/test/test_genericpath.py27
-rw-r--r--Lib/test/test_genexps.py8
-rw-r--r--Lib/test/test_getargs2.py9
-rw-r--r--Lib/test/test_glob.py6
-rw-r--r--Lib/test/test_grammar.py123
-rw-r--r--Lib/test/test_gzip.py25
-rw-r--r--Lib/test/test_hash.py14
-rw-r--r--Lib/test/test_http_cookiejar.py11
-rw-r--r--Lib/test/test_httplib.py140
-rw-r--r--Lib/test/test_httpservers.py73
-rw-r--r--Lib/test/test_imaplib.py55
-rw-r--r--Lib/test/test_imp.py22
-rw-r--r--Lib/test/test_import.py40
-rw-r--r--Lib/test/test_importhooks.py9
-rw-r--r--Lib/test/test_inspect.py2
-rw-r--r--Lib/test/test_io.py66
-rw-r--r--Lib/test/test_itertools.py33
-rw-r--r--Lib/test/test_keywordonlyarg.py2
-rw-r--r--Lib/test/test_lib2to3.py4
-rw-r--r--Lib/test/test_locale.py2
-rw-r--r--Lib/test/test_logging.py1711
-rw-r--r--Lib/test/test_long.py80
-rw-r--r--Lib/test/test_lzma.py1344
-rw-r--r--Lib/test/test_mailbox.py9
-rw-r--r--Lib/test/test_mailcap.py221
-rw-r--r--Lib/test/test_marshal.py17
-rw-r--r--Lib/test/test_math.py43
-rw-r--r--Lib/test/test_memoryview.py75
-rw-r--r--Lib/test/test_metaclass.py16
-rw-r--r--Lib/test/test_minidom.py72
-rw-r--r--Lib/test/test_mmap.py32
-rw-r--r--Lib/test/test_module.py8
-rw-r--r--Lib/test/test_modulefinder.py48
-rw-r--r--Lib/test/test_multibytecodec.py16
-rw-r--r--Lib/test/test_multibytecodec_support.py15
-rw-r--r--Lib/test/test_multiprocessing.py327
-rw-r--r--Lib/test/test_mutants.py291
-rw-r--r--Lib/test/test_nntplib.py27
-rw-r--r--Lib/test/test_ntpath.py7
-rw-r--r--Lib/test/test_optparse.py4
-rw-r--r--Lib/test/test_os.py701
-rw-r--r--Lib/test/test_ossaudiodev.py16
-rw-r--r--Lib/test/test_osx_env.py3
-rw-r--r--Lib/test/test_packaging.py5
-rw-r--r--Lib/test/test_parser.py11
-rw-r--r--Lib/test/test_pdb.py3
-rw-r--r--Lib/test/test_peepholer.py47
-rw-r--r--Lib/test/test_pep277.py66
-rw-r--r--Lib/test/test_pep292.py33
-rw-r--r--Lib/test/test_pep3131.py7
-rw-r--r--Lib/test/test_pep3151.py201
-rw-r--r--Lib/test/test_pep380.py951
-rw-r--r--Lib/test/test_pickle.py28
-rw-r--r--Lib/test/test_pipes.py15
-rw-r--r--Lib/test/test_pkgimport.py4
-rw-r--r--Lib/test/test_platform.py43
-rw-r--r--Lib/test/test_poplib.py15
-rw-r--r--Lib/test/test_posix.py586
-rw-r--r--Lib/test/test_posixpath.py12
-rw-r--r--Lib/test/test_print.py26
-rw-r--r--Lib/test/test_property.py23
-rw-r--r--Lib/test/test_pty.py19
-rw-r--r--Lib/test/test_pulldom.py347
-rw-r--r--Lib/test/test_pydoc.py12
-rw-r--r--Lib/test/test_raise.py89
-rw-r--r--Lib/test/test_range.py81
-rw-r--r--Lib/test/test_re.py31
-rw-r--r--Lib/test/test_reprlib.py40
-rw-r--r--Lib/test/test_richcmp.py1
-rw-r--r--Lib/test/test_runpy.py14
-rw-r--r--Lib/test/test_sax.py4
-rw-r--r--Lib/test/test_sched.py36
-rw-r--r--Lib/test/test_scope.py19
-rw-r--r--Lib/test/test_select.py22
-rw-r--r--Lib/test/test_shelve.py6
-rw-r--r--Lib/test/test_shlex.py21
-rw-r--r--Lib/test/test_shutil.py569
-rw-r--r--Lib/test/test_signal.py469
-rw-r--r--Lib/test/test_smtpd.py44
-rw-r--r--Lib/test/test_smtplib.py60
-rw-r--r--Lib/test/test_smtpnet.py42
-rw-r--r--Lib/test/test_socket.py2683
-rw-r--r--Lib/test/test_ssl.py302
-rw-r--r--Lib/test/test_string.py68
-rw-r--r--Lib/test/test_strlit.py23
-rw-r--r--Lib/test/test_struct.py66
-rw-r--r--Lib/test/test_subprocess.py181
-rw-r--r--Lib/test/test_super.py10
-rw-r--r--Lib/test/test_support.py188
-rw-r--r--Lib/test/test_sys.py76
-rw-r--r--Lib/test/test_sys_settrace.py10
-rw-r--r--Lib/test/test_sysconfig.py88
-rw-r--r--Lib/test/test_tarfile.py91
-rw-r--r--Lib/test/test_telnetlib.py1
-rw-r--r--Lib/test/test_tempfile.py263
-rw-r--r--Lib/test/test_threaded_import.py13
-rw-r--r--Lib/test/test_threading.py19
-rw-r--r--Lib/test/test_threadsignals.py6
-rw-r--r--Lib/test/test_time.py356
-rw-r--r--Lib/test/test_tokenize.py89
-rw-r--r--Lib/test/test_trace.py11
-rw-r--r--Lib/test/test_traceback.py15
-rw-r--r--Lib/test/test_ucn.py88
-rw-r--r--Lib/test/test_unicode.py313
-rw-r--r--Lib/test/test_unicode_file.py17
-rw-r--r--Lib/test/test_unicodedata.py16
-rw-r--r--Lib/test/test_urllib.py63
-rw-r--r--Lib/test/test_urllib2.py67
-rw-r--r--Lib/test/test_urllib2net.py13
-rw-r--r--Lib/test/test_userlist.py6
-rwxr-xr-xLib/test/test_userstring.py4
-rw-r--r--Lib/test/test_uuid.py4
-rw-r--r--Lib/test/test_wait3.py7
-rw-r--r--Lib/test/test_warnings.py14
-rw-r--r--Lib/test/test_xml_etree.py317
-rw-r--r--Lib/test/test_xml_etree_c.py47
-rw-r--r--Lib/test/test_xmlrpc.py103
-rw-r--r--Lib/test/test_xmlrpc_net.py4
-rw-r--r--Lib/test/test_zipfile.py76
-rw-r--r--Lib/test/test_zipfile64.py20
-rw-r--r--Lib/test/test_zipimport.py24
-rw-r--r--Lib/test/test_zipimport_support.py5
-rw-r--r--Lib/test/test_zlib.py43
-rw-r--r--Lib/test/threaded_import_hangers.py13
-rw-r--r--Lib/test/tokenize_tests.txt8
-rw-r--r--Lib/textwrap.py2
-rw-r--r--Lib/threading.py217
-rw-r--r--Lib/timeit.py8
-rw-r--r--Lib/tkinter/__init__.py131
-rw-r--r--Lib/tkinter/filedialog.py2
-rw-r--r--Lib/tkinter/test/test_tkinter/test_variables.py165
-rw-r--r--Lib/tokenize.py209
-rw-r--r--Lib/traceback.py8
-rw-r--r--Lib/turtle.py1
-rw-r--r--Lib/unittest/case.py131
-rw-r--r--Lib/unittest/main.py77
-rw-r--r--Lib/unittest/mock.py2137
-rw-r--r--Lib/unittest/result.py1
-rw-r--r--Lib/unittest/test/__init__.py1
-rw-r--r--Lib/unittest/test/_test_warnings.py1
-rw-r--r--Lib/unittest/test/test_assertions.py73
-rw-r--r--Lib/unittest/test/test_case.py63
-rw-r--r--Lib/unittest/test/test_loader.py4
-rw-r--r--Lib/unittest/test/test_program.py17
-rw-r--r--Lib/unittest/test/testmock/__init__.py17
-rw-r--r--Lib/unittest/test/testmock/support.py23
-rw-r--r--Lib/unittest/test/testmock/testcallable.py147
-rw-r--r--Lib/unittest/test/testmock/testhelpers.py835
-rw-r--r--Lib/unittest/test/testmock/testmagicmethods.py382
-rw-r--r--Lib/unittest/test/testmock/testmock.py1258
-rw-r--r--Lib/unittest/test/testmock/testpatch.py1652
-rw-r--r--Lib/unittest/test/testmock/testsentinel.py28
-rw-r--r--Lib/unittest/test/testmock/testwith.py176
-rw-r--r--Lib/urllib/error.py3
-rw-r--r--Lib/urllib/request.py191
-rw-r--r--Lib/urllib/response.py7
-rw-r--r--Lib/webbrowser.py17
-rw-r--r--Lib/wsgiref.egg-info8
-rw-r--r--Lib/xdrlib.py6
-rw-r--r--Lib/xml/dom/__init__.py1
-rw-r--r--Lib/xml/dom/domreg.py2
-rw-r--r--Lib/xml/dom/expatbuilder.py42
-rw-r--r--Lib/xml/dom/minidom.py271
-rw-r--r--Lib/xml/dom/pulldom.py6
-rw-r--r--Lib/xml/etree/ElementTree.py173
-rw-r--r--Lib/xml/etree/cElementTree.py4
-rw-r--r--Lib/xml/parsers/expat.py2
-rw-r--r--Lib/xmlrpc/client.py132
-rw-r--r--Lib/xmlrpc/server.py34
-rw-r--r--Makefile.pre.in402
-rw-r--r--Misc/ACKS159
-rw-r--r--Misc/NEWS2174
-rw-r--r--Misc/README1
-rw-r--r--Misc/RPM/python-3.3.spec (renamed from Misc/RPM/python-3.2.spec)4
-rw-r--r--Misc/python.man40
-rw-r--r--Misc/svnmap.txt72546
-rw-r--r--Misc/valgrind-python.supp67
-rw-r--r--Modules/Setup.dist5
-rw-r--r--Modules/_bisectmodule.c8
-rw-r--r--Modules/_bz2module.c590
-rw-r--r--Modules/_codecsmodule.c255
-rw-r--r--Modules/_collectionsmodule.c26
-rw-r--r--Modules/_cryptmodule.c (renamed from Modules/cryptmodule.c)4
-rw-r--r--Modules/_csv.c149
-rw-r--r--Modules/_ctypes/_ctypes.c46
-rw-r--r--Modules/_ctypes/callproc.c18
-rw-r--r--Modules/_ctypes/cfield.c84
-rw-r--r--Modules/_ctypes/stgdict.c12
-rw-r--r--Modules/_cursesmodule.c701
-rw-r--r--Modules/_datetimemodule.c328
-rw-r--r--Modules/_dbmmodule.c12
-rw-r--r--Modules/_elementtree.c724
-rw-r--r--Modules/_functoolsmodule.c210
-rw-r--r--Modules/_gestalt.c2
-rw-r--r--Modules/_hashopenssl.c8
-rw-r--r--Modules/_io/_iomodule.c163
-rw-r--r--Modules/_io/_iomodule.h17
-rw-r--r--Modules/_io/bufferedio.c253
-rw-r--r--Modules/_io/bytesio.c2
-rw-r--r--Modules/_io/fileio.c139
-rw-r--r--Modules/_io/iobase.c51
-rw-r--r--Modules/_io/stringio.c184
-rw-r--r--Modules/_io/textio.c681
-rw-r--r--Modules/_json.c436
-rw-r--r--Modules/_localemodule.c98
-rw-r--r--Modules/_lsprof.c45
-rw-r--r--Modules/_lzmamodule.c1111
-rw-r--r--Modules/_multiprocessing/connection.h527
-rw-r--r--Modules/_multiprocessing/multiprocessing.c198
-rw-r--r--Modules/_multiprocessing/multiprocessing.h80
-rw-r--r--Modules/_multiprocessing/pipe_connection.c149
-rw-r--r--Modules/_multiprocessing/semaphore.c56
-rw-r--r--Modules/_multiprocessing/socket_connection.c202
-rw-r--r--Modules/_multiprocessing/win32_functions.c569
-rw-r--r--Modules/_pickle.c306
-rw-r--r--Modules/_posixsubprocess.c13
-rw-r--r--Modules/_sqlite/cache.c21
-rw-r--r--Modules/_sqlite/connection.c99
-rw-r--r--Modules/_sqlite/connection.h3
-rw-r--r--Modules/_sqlite/cursor.c31
-rw-r--r--Modules/_sqlite/microprotocols.c8
-rw-r--r--Modules/_sqlite/module.c19
-rw-r--r--Modules/_sqlite/module.h2
-rw-r--r--Modules/_sqlite/row.c10
-rw-r--r--Modules/_sqlite/statement.c12
-rw-r--r--Modules/_sqlite/statement.h4
-rw-r--r--Modules/_sre.c621
-rw-r--r--Modules/_ssl.c487
-rw-r--r--Modules/_struct.c90
-rw-r--r--Modules/_testbuffer.c2813
-rw-r--r--Modules/_testcapimodule.c267
-rw-r--r--Modules/_threadmodule.c22
-rw-r--r--Modules/_time.c28
-rw-r--r--Modules/_time.h3
-rw-r--r--Modules/_tkinter.c83
-rw-r--r--Modules/arraymodule.c222
-rw-r--r--Modules/audioop.c4
-rw-r--r--Modules/binascii.c58
-rw-r--r--Modules/bz2module.c2169
-rw-r--r--Modules/cjkcodecs/_codecs_cn.c14
-rw-r--r--Modules/cjkcodecs/_codecs_hk.c2
-rw-r--r--Modules/cjkcodecs/_codecs_iso2022.c2
-rw-r--r--Modules/cjkcodecs/_codecs_jp.c34
-rw-r--r--Modules/cjkcodecs/_codecs_kr.c18
-rw-r--r--Modules/cjkcodecs/_codecs_tw.c4
-rw-r--r--Modules/cjkcodecs/multibytecodec.c48
-rw-r--r--Modules/errnomodule.c55
-rw-r--r--Modules/faulthandler.c1332
-rw-r--r--Modules/fcntlmodule.c5
-rw-r--r--Modules/fpectlmodule.c11
-rw-r--r--Modules/gc_weakref.txt6
-rw-r--r--Modules/gcmodule.c11
-rw-r--r--Modules/getbuildinfo.c18
-rw-r--r--Modules/getpath.c16
-rw-r--r--Modules/itertoolsmodule.c30
-rw-r--r--Modules/main.c45
-rw-r--r--Modules/mathmodule.c87
-rw-r--r--Modules/md5module.c20
-rw-r--r--Modules/mmapmodule.c90
-rw-r--r--Modules/nismodule.c2
-rw-r--r--Modules/operator.c27
-rw-r--r--Modules/ossaudiodev.c101
-rw-r--r--Modules/parsermodule.c95
-rw-r--r--Modules/posixmodule.c4238
-rw-r--r--Modules/pyexpat.c43
-rw-r--r--Modules/readline.c13
-rw-r--r--Modules/resource.c16
-rw-r--r--Modules/selectmodule.c489
-rw-r--r--Modules/sha1module.c20
-rw-r--r--Modules/sha256module.c18
-rw-r--r--Modules/sha512module.c18
-rw-r--r--Modules/signalmodule.c439
-rw-r--r--Modules/socketmodule.c1470
-rw-r--r--Modules/socketmodule.h27
-rw-r--r--Modules/sre.h4
-rw-r--r--Modules/syslogmodule.c14
-rw-r--r--Modules/termios.c5
-rw-r--r--Modules/testcapi_long.h29
-rw-r--r--Modules/timemodule.c503
-rw-r--r--Modules/tkappinit.c6
-rw-r--r--Modules/unicodedata.c303
-rw-r--r--Modules/unicodedata_db.h7148
-rw-r--r--Modules/unicodename_db.h38401
-rw-r--r--Modules/xxlimited.c3
-rw-r--r--Modules/zipimport.c511
-rw-r--r--Modules/zlibmodule.c27
-rw-r--r--Objects/abstract.c157
-rw-r--r--Objects/bytearrayobject.c337
-rw-r--r--Objects/bytes_methods.c12
-rw-r--r--Objects/bytesobject.c262
-rw-r--r--Objects/classobject.c14
-rw-r--r--Objects/codeobject.c116
-rw-r--r--Objects/complexobject.c40
-rw-r--r--Objects/descrobject.c112
-rw-r--r--Objects/dictobject.c86
-rw-r--r--Objects/enumobject.c4
-rw-r--r--Objects/exceptions.c820
-rw-r--r--Objects/fileobject.c34
-rw-r--r--Objects/floatobject.c32
-rw-r--r--Objects/frameobject.c10
-rw-r--r--Objects/funcobject.c150
-rw-r--r--Objects/genobject.c201
-rw-r--r--Objects/listobject.c72
-rw-r--r--Objects/longobject.c144
-rw-r--r--Objects/memoryobject.c2837
-rw-r--r--Objects/methodobject.c47
-rw-r--r--Objects/moduleobject.c106
-rw-r--r--Objects/object.c523
-rw-r--r--Objects/obmalloc.c63
-rw-r--r--Objects/rangeobject.c145
-rw-r--r--Objects/setobject.c136
-rw-r--r--Objects/sliceobject.c71
-rw-r--r--Objects/stringlib/asciilib.h29
-rw-r--r--Objects/stringlib/codecs.h353
-rw-r--r--Objects/stringlib/count.h9
-rw-r--r--Objects/stringlib/eq.h23
-rw-r--r--Objects/stringlib/fastsearch.h76
-rw-r--r--Objects/stringlib/find.h89
-rw-r--r--Objects/stringlib/find_max_char.h136
-rw-r--r--Objects/stringlib/formatter.h1518
-rw-r--r--Objects/stringlib/localeutil.h98
-rw-r--r--Objects/stringlib/partition.h12
-rw-r--r--Objects/stringlib/split.h26
-rw-r--r--Objects/stringlib/stringdefs.h8
-rw-r--r--Objects/stringlib/ucs1lib.h30
-rw-r--r--Objects/stringlib/ucs2lib.h29
-rw-r--r--Objects/stringlib/ucs4lib.h29
-rw-r--r--Objects/stringlib/undef.h11
-rw-r--r--Objects/stringlib/unicode_format.h (renamed from Objects/stringlib/string_format.h)401
-rw-r--r--Objects/stringlib/unicodedefs.h8
-rw-r--r--Objects/tupleobject.c17
-rw-r--r--Objects/typeobject.c831
-rw-r--r--Objects/typeslots.inc2
-rw-r--r--Objects/typeslots.py2
-rw-r--r--Objects/unicodectype.c125
-rw-r--r--Objects/unicodeobject.c11178
-rw-r--r--Objects/unicodetype_db.h4718
-rw-r--r--Objects/weakrefobject.c61
-rw-r--r--PC/VC6/_multiprocessing.dsp8
-rw-r--r--PC/VC6/pythoncore.dsp4
-rw-r--r--PC/VC6/readme.txt4
-rw-r--r--PC/VS7.1/pythoncore.vcproj24
-rw-r--r--PC/VS7.1/readme.txt4
-rw-r--r--PC/VS8.0/_multiprocessing.vcproj12
-rw-r--r--PC/VS8.0/build_ssl.bat4
-rw-r--r--PC/VS8.0/kill_python.c2
-rw-r--r--PC/VS8.0/pyproject.vsprops2
-rw-r--r--PC/_msi.c18
-rw-r--r--PC/_subprocess.c85
-rw-r--r--PC/config.c2
-rw-r--r--PC/example_nt/example.vcproj4
-rw-r--r--PC/frozen_dllmain.c6
-rw-r--r--PC/getpathp.c6
-rw-r--r--PC/import_nt.c131
-rwxr-xr-xPC/msvcrtmodule.c12
-rw-r--r--PC/os2emx/Makefile2
-rw-r--r--PC/os2emx/README.os2emx2
-rw-r--r--PC/os2emx/pyconfig.h2
-rw-r--r--PC/os2emx/python33.def (renamed from PC/os2emx/python27.def)250
-rw-r--r--PC/pyconfig.h11
-rw-r--r--PC/python3.def1374
-rw-r--r--PC/python3.mak12
-rw-r--r--PC/python33gen.py (renamed from PC/python32gen.py)9
-rw-r--r--PC/python33stub.def (renamed from PC/python32stub.def)2
-rw-r--r--PC/python_nt.rc2
-rw-r--r--PC/winreg.c86
-rw-r--r--PC/winsound.c2
-rw-r--r--PCbuild/_bz2.vcproj (renamed from PCbuild/bz2.vcproj)4
-rw-r--r--PCbuild/_lzma.vcproj537
-rw-r--r--PCbuild/_multiprocessing.vcproj12
-rw-r--r--PCbuild/_testbuffer.vcproj521
-rw-r--r--PCbuild/build_ssl.bat4
-rw-r--r--PCbuild/kill_python.c2
-rw-r--r--PCbuild/pcbuild.sln56
-rw-r--r--PCbuild/pyproject.vsprops6
-rw-r--r--PCbuild/pythoncore.vcproj16
-rw-r--r--PCbuild/readme.txt17
-rw-r--r--Parser/Python.asdl203
-rw-r--r--Parser/asdl.py25
-rwxr-xr-xParser/asdl_c.py85
-rw-r--r--Parser/intrcheck.c174
-rw-r--r--Parser/parsetok.c67
-rw-r--r--Parser/parsetok_pgen.c2
-rw-r--r--Parser/pgenmain.c2
-rw-r--r--Parser/tokenizer.c99
-rw-r--r--Parser/tokenizer.h9
-rw-r--r--Python/Python-ast.c1251
-rw-r--r--Python/_warnings.c97
-rw-r--r--Python/ast.c745
-rw-r--r--Python/bltinmodule.c131
-rw-r--r--Python/ceval.c744
-rw-r--r--Python/codecs.c220
-rw-r--r--Python/compile.c338
-rw-r--r--Python/dtoa.c4
-rw-r--r--Python/dynload_aix.c6
-rw-r--r--Python/dynload_dl.c15
-rw-r--r--Python/dynload_hpux.c3
-rw-r--r--Python/dynload_next.c5
-rw-r--r--Python/dynload_os2.c2
-rw-r--r--Python/dynload_shlib.c8
-rw-r--r--Python/dynload_win.c55
-rw-r--r--Python/errors.c89
-rw-r--r--Python/fileutils.c88
-rw-r--r--Python/formatter_unicode.c1510
-rw-r--r--Python/getargs.c347
-rw-r--r--Python/getcopyright.c2
-rw-r--r--Python/graminit.c190
-rw-r--r--Python/import.c2603
-rw-r--r--Python/importdl.c84
-rw-r--r--Python/importdl.h2
-rwxr-xr-xPython/makeopcodetargets.py3
-rw-r--r--Python/marshal.c58
-rw-r--r--Python/modsupport.c16
-rw-r--r--Python/opcode_targets.h2
-rw-r--r--Python/peephole.c189
-rw-r--r--Python/pystrtod.c2
-rw-r--r--Python/pythonrun.c189
-rw-r--r--Python/pytime.c133
-rw-r--r--Python/random.c11
-rw-r--r--Python/symtable.c185
-rw-r--r--Python/sysmodule.c108
-rw-r--r--Python/thread.c126
-rw-r--r--Python/thread_cthread.h112
-rw-r--r--Python/thread_lwp.h113
-rw-r--r--Python/thread_pth.h3
-rw-r--r--Python/thread_pthread.h10
-rw-r--r--Python/thread_sgi.h259
-rw-r--r--Python/thread_solaris.h130
-rw-r--r--Python/thread_wince.h136
-rw-r--r--Python/traceback.c289
-rw-r--r--README30
-rw-r--r--Tools/buildbot/external-common.bat5
-rw-r--r--Tools/gdb/libpython.py95
-rw-r--r--Tools/iobench/iobench.py19
-rw-r--r--Tools/msi/msi.py160
-rw-r--r--Tools/msi/msilib.py18
-rw-r--r--Tools/msi/uuids.py98
-rwxr-xr-xTools/pybench/pybench.py1
-rw-r--r--Tools/scripts/README3
-rwxr-xr-xTools/scripts/findnocoding.py4
-rwxr-xr-xTools/scripts/patchcheck.py53
-rwxr-xr-xTools/scripts/pysetup34
-rwxr-xr-xTools/scripts/pysource.py2
-rwxr-xr-xTools/scripts/reindent.py18
-rwxr-xr-xTools/scripts/run_tests.py47
-rw-r--r--Tools/unicode/comparecodecs.py2
-rw-r--r--Tools/unicode/makeunicodedata.py380
-rw-r--r--Tools/unittestgui/unittestgui.py1
-rwxr-xr-xconfig.guess1530
-rwxr-xr-xconfig.sub1773
-rwxr-xr-xconfigure1045
-rw-r--r--configure.ac420
-rw-r--r--pyconfig.h.in217
-rw-r--r--setup.py78
1199 files changed, 234988 insertions, 57572 deletions
diff --git a/.gitignore b/.gitignore
index 7dfc5ad..8efcd2e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -12,6 +12,7 @@ Doc/tools/jinja2/
Doc/tools/pygments/
Doc/tools/sphinx/
Lib/lib2to3/*.pickle
+Lib/_sysconfigdata.py
Makefile
Makefile.pre
Misc/python.pc
diff --git a/.hgeol b/.hgeol
index ed13171..afb1e6b 100644
--- a/.hgeol
+++ b/.hgeol
@@ -28,8 +28,9 @@
Lib/email/test/data/msg_26.txt = BIN
Lib/test/cjkencodings/* = BIN
-Lib/test/sndhdrdata/sndhdr.* = BIN
Lib/test/decimaltestdata/*.decTest = BIN
+Lib/test/sndhdrdata/sndhdr.* = BIN
+Lib/test/test_email/data/msg_26.txt = BIN
# All other files (which presumably are human-editable) are "native".
# This must be the last rule!
diff --git a/.hgignore b/.hgignore
index 24df3b9..0fd8562 100644
--- a/.hgignore
+++ b/.hgignore
@@ -5,7 +5,8 @@ Makefile$
Makefile.pre$
TAGS$
autom4te.cache$
-build/
+^build/
+^Doc/build/
buildno$
config.cache
config.log
@@ -32,6 +33,7 @@ Modules/config.c
Modules/ld_so_aix$
Parser/pgen$
Parser/pgen.stamp$
+PCbuild/amd64/
^core
^python-gdb.py
^python.exe-gdb.py
@@ -47,6 +49,7 @@ libpython*.so*
*.pyd
*.cover
*~
+Lib/_sysconfigdata.py
Lib/lib2to3/*.pickle
Lib/test/data/*
Misc/*.wpu
@@ -62,7 +65,10 @@ PCbuild/*.exp
PCbuild/*.o
PCbuild/*.ncb
PCbuild/*.bsc
+PCbuild/*.user
+PCbuild/*.suo
PCbuild/Win32-temp-*
+PCbuild/x64-temp-*
__pycache__
Modules/_testembed
.coverage
diff --git a/.hgtags b/.hgtags
index 0830aaf..0414826 100644
--- a/.hgtags
+++ b/.hgtags
@@ -95,3 +95,4 @@ ac1f7e5c05104d557d5acd922e95625ba5d1fe10 v3.2.1
c860feaa348d663e598986894ee4680480577e15 v3.2.2rc1
137e45f15c0bd262c9ad4c032d97425bc0589456 v3.2.2
7085403daf439adb3f9e70ef13f6bedb1c447376 v3.2.3rc1
+f1a9a6505731714f0e157453ff850e3b71615c45 v3.3.0a1
diff --git a/Doc/ACKS.txt b/Doc/ACKS.txt
index 70063c0..b64c650 100644
--- a/Doc/ACKS.txt
+++ b/Doc/ACKS.txt
@@ -62,6 +62,7 @@ docs@python.org), and we'll be glad to correct the problem.
* Stefan Franke
* Jim Fulton
* Peter Funk
+ * Ethan Furman
* Lele Gaifax
* Matthew Gallagher
* Gabriel Genellina
@@ -210,6 +211,7 @@ docs@python.org), and we'll be glad to correct the problem.
* David Turner
* Sandro Tosi
* Ville Vainio
+ * Nadeem Vawda
* Martijn Vries
* Charles G. Waldman
* Greg Ward
diff --git a/Doc/c-api/arg.rst b/Doc/c-api/arg.rst
index d4dda7c..196aa77 100644
--- a/Doc/c-api/arg.rst
+++ b/Doc/c-api/arg.rst
@@ -146,7 +146,7 @@ Unless otherwise stated, buffers are not NUL-terminated.
Like ``u#``, but the Python object may also be ``None``, in which case the
:c:type:`Py_UNICODE` pointer is set to *NULL*.
-``U`` (:class:`str`) [PyUnicodeObject \*]
+``U`` (:class:`str`) [PyObject \*]
Requires that the Python object is a Unicode object, without attempting
any conversion. Raises :exc:`TypeError` if the object is not a Unicode
object. The C variable may also be declared as :c:type:`PyObject\*`.
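
[Editor's note: a minimal sketch of a C extension function consuming the ``U`` format unit described in the hunk above. The function name ``count_chars`` is hypothetical, not part of the patch; the variable is declared as PyObject*, matching the corrected signature, and PyUnicode_GET_LENGTH assumes the 3.3 API this changeset targets.]

    static PyObject *
    count_chars(PyObject *self, PyObject *args)
    {
        PyObject *text;

        /* "U" accepts only an existing str object; no conversion is
           attempted and TypeError is set for anything else. */
        if (!PyArg_ParseTuple(args, "U", &text))
            return NULL;
        return PyLong_FromSsize_t(PyUnicode_GET_LENGTH(text));
    }
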
@@ -260,9 +260,11 @@ Numbers
``n`` (:class:`int`) [Py_ssize_t]
Convert a Python integer to a C :c:type:`Py_ssize_t`.
-``c`` (:class:`bytes` of length 1) [char]
- Convert a Python byte, represented as a :class:`bytes` object of length 1,
- to a C :c:type:`char`.
+``c`` (:class:`bytes` or :class:`bytearray` of length 1) [char]
+ Convert a Python byte, represented as a :class:`bytes` or
+ :class:`bytearray` object of length 1, to a C :c:type:`char`.
+
+ .. versionchanged:: 3.3 Allow :class:`bytearray` objects
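+
+ As a minimal sketch of a module function consuming this format unit
+ (the function name ``get_byte`` is hypothetical, not part of the API)::
+
+    static PyObject *
+    get_byte(PyObject *self, PyObject *args)
+    {
+        char c;
+        /* Accepts b"x" and, as of 3.3, bytearray(b"x"). */
+        if (!PyArg_ParseTuple(args, "c", &c))
+            return NULL;
+        return PyLong_FromLong((unsigned char)c);
+    }
+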
``C`` (:class:`str` of length 1) [int]
Convert a Python character, represented as a :class:`str` object of
diff --git a/Doc/c-api/buffer.rst b/Doc/c-api/buffer.rst
index d98ece3..d636935 100644
--- a/Doc/c-api/buffer.rst
+++ b/Doc/c-api/buffer.rst
@@ -7,6 +7,7 @@ Buffer Protocol
.. sectionauthor:: Greg Stein <gstein@lyra.org>
.. sectionauthor:: Benjamin Peterson
+.. sectionauthor:: Stefan Krah
.. index::
@@ -20,7 +21,7 @@ as image processing or numeric analysis.
While each of these types have their own semantics, they share the common
characteristic of being backed by a possibly large memory buffer. It is
-then desireable, in some situations, to access that buffer directly and
+then desirable, in some situations, to access that buffer directly and
without intermediate copying.
Python provides such a facility at the C level in the form of the *buffer
@@ -60,8 +61,10 @@ isn't needed anymore. Failure to do so could lead to various issues such as
resource leaks.
-The buffer structure
-====================
+.. _buffer-structure:
+
+Buffer structure
+================
Buffer structures (or simply "buffers") are useful as a way to expose the
binary data from another object to the Python programmer. They can also be
@@ -78,249 +81,411 @@ allows them to be created and copied very simply. When a generic wrapper
around a buffer is needed, a :ref:`memoryview <memoryview-objects>` object
can be created.
+For short instructions on how to write an exporting object, see
+:ref:`Buffer Object Structures <buffer-structs>`. For obtaining
+a buffer, see :c:func:`PyObject_GetBuffer`.
.. c:type:: Py_buffer
- .. c:member:: void *buf
+ .. c:member:: void \*obj
+
+ A new reference to the exporting object. The reference is owned by
+ the consumer and automatically decremented and set to *NULL* by
+ :c:func:`PyBuffer_Release`. The field is the equivalent of the return
+ value of any standard C-API function.
+
+ As a special case, for *temporary* buffers that are wrapped by
+ :c:func:`PyMemoryView_FromBuffer` or :c:func:`PyBuffer_FillInfo`
+ this field is *NULL*. In general, exporting objects MUST NOT
+ use this scheme.
- A pointer to the start of the memory for the object.
+ .. c:member:: void \*buf
+
+ A pointer to the start of the logical structure described by the buffer
+ fields. This can be any location within the underlying physical memory
+ block of the exporter. For example, with negative :c:member:`~Py_buffer.strides`
+ the value may point to the end of the memory block.
+
+ For contiguous arrays, the value points to the beginning of the memory
+ block.
.. c:member:: Py_ssize_t len
- :noindex:
- The total length of the memory in bytes.
+ ``product(shape) * itemsize``. For contiguous arrays, this is the length
+ of the underlying memory block. For non-contiguous arrays, it is the length
+ that the logical structure would have if it were copied to a contiguous
+ representation.
+
+ Accessing ``((char *)buf)[0]`` up to ``((char *)buf)[len-1]`` is only
+ valid if the buffer has been obtained by a request that guarantees
+ contiguity. In most cases such a request will be :c:macro:`PyBUF_SIMPLE`
+ or :c:macro:`PyBUF_WRITABLE`.
.. c:member:: int readonly
- An indicator of whether the buffer is read only.
+ An indicator of whether the buffer is read-only. This field is controlled
+ by the :c:macro:`PyBUF_WRITABLE` flag.
+
+ .. c:member:: Py_ssize_t itemsize
+
+ Item size in bytes of a single element. Same as the value of :func:`struct.calcsize`
+ called on non-NULL :c:member:`~Py_buffer.format` values.
+
+ Important exception: If a consumer requests a buffer without the
+ :c:macro:`PyBUF_FORMAT` flag, :c:member:`~Py_buffer.format` will
+ be set to *NULL*, but :c:member:`~Py_buffer.itemsize` still has
+ the value for the original format.
+
+ If :c:member:`~Py_buffer.shape` is present, the equality
+ ``product(shape) * itemsize == len`` still holds and the consumer
+ can use :c:member:`~Py_buffer.itemsize` to navigate the buffer.
+
+ If :c:member:`~Py_buffer.shape` is *NULL* as a result of a :c:macro:`PyBUF_SIMPLE`
+ or a :c:macro:`PyBUF_WRITABLE` request, the consumer must disregard
+ :c:member:`~Py_buffer.itemsize` and assume ``itemsize == 1``.
- .. c:member:: const char *format
- :noindex:
+ .. c:member:: const char \*format
- A *NULL* terminated string in :mod:`struct` module style syntax giving
- the contents of the elements available through the buffer. If this is
- *NULL*, ``"B"`` (unsigned bytes) is assumed.
+ A *NUL* terminated string in :mod:`struct` module style syntax describing
+ the contents of a single item. If this is *NULL*, ``"B"`` (unsigned bytes)
+ is assumed.
+
+ This field is controlled by the :c:macro:`PyBUF_FORMAT` flag.
.. c:member:: int ndim
- The number of dimensions the memory represents as a multi-dimensional
- array. If it is 0, :c:data:`strides` and :c:data:`suboffsets` must be
- *NULL*.
-
- .. c:member:: Py_ssize_t *shape
-
- An array of :c:type:`Py_ssize_t`\s the length of :c:data:`ndim` giving the
- shape of the memory as a multi-dimensional array. Note that
- ``((*shape)[0] * ... * (*shape)[ndims-1])*itemsize`` should be equal to
- :c:data:`len`.
-
- .. c:member:: Py_ssize_t *strides
-
- An array of :c:type:`Py_ssize_t`\s the length of :c:data:`ndim` giving the
- number of bytes to skip to get to a new element in each dimension.
-
- .. c:member:: Py_ssize_t *suboffsets
-
- An array of :c:type:`Py_ssize_t`\s the length of :c:data:`ndim`. If these
- suboffset numbers are greater than or equal to 0, then the value stored
- along the indicated dimension is a pointer and the suboffset value
- dictates how many bytes to add to the pointer after de-referencing. A
- suboffset value that it negative indicates that no de-referencing should
- occur (striding in a contiguous memory block).
-
- Here is a function that returns a pointer to the element in an N-D array
- pointed to by an N-dimensional index when there are both non-NULL strides
- and suboffsets::
-
- void *get_item_pointer(int ndim, void *buf, Py_ssize_t *strides,
- Py_ssize_t *suboffsets, Py_ssize_t *indices) {
- char *pointer = (char*)buf;
- int i;
- for (i = 0; i < ndim; i++) {
- pointer += strides[i] * indices[i];
- if (suboffsets[i] >=0 ) {
- pointer = *((char**)pointer) + suboffsets[i];
- }
- }
- return (void*)pointer;
- }
+ The number of dimensions the memory represents as an n-dimensional array.
+ If it is 0, :c:member:`~Py_buffer.buf` points to a single item representing
+ a scalar. In this case, :c:member:`~Py_buffer.shape`, :c:member:`~Py_buffer.strides`
+ and :c:member:`~Py_buffer.suboffsets` MUST be *NULL*.
+ The macro :c:macro:`PyBUF_MAX_NDIM` limits the maximum number of dimensions
+ to 64. Exporters MUST respect this limit; consumers of multi-dimensional
+ buffers SHOULD be able to handle up to :c:macro:`PyBUF_MAX_NDIM` dimensions.
- .. c:member:: Py_ssize_t itemsize
+ .. c:member:: Py_ssize_t \*shape
+
+ An array of :c:type:`Py_ssize_t` of length :c:member:`~Py_buffer.ndim`
+ indicating the shape of the memory as an n-dimensional array. Note that
+ ``shape[0] * ... * shape[ndim-1] * itemsize`` MUST be equal to
+ :c:member:`~Py_buffer.len`.
+
+ Shape values are restricted to ``shape[n] >= 0``. The case
+ ``shape[n] == 0`` requires special attention. See `complex arrays`_
+ for further information.
+
+ The shape array is read-only for the consumer.
+
+ .. c:member:: Py_ssize_t \*strides
+
+ An array of :c:type:`Py_ssize_t` of length :c:member:`~Py_buffer.ndim`
+ giving the number of bytes to skip to get to a new element in each
+ dimension.
+
+ Stride values can be any integer. For regular arrays, strides are
+ usually positive, but a consumer MUST be able to handle the case
+ ``strides[n] <= 0``. See `complex arrays`_ for further information.
+
+ The strides array is read-only for the consumer.
+
+ .. c:member:: Py_ssize_t \*suboffsets
+
+ An array of :c:type:`Py_ssize_t` of length :c:member:`~Py_buffer.ndim`.
+ If ``suboffsets[n] >= 0``, the values stored along the nth dimension are
+ pointers and the suboffset value dictates how many bytes to add to each
+ pointer after de-referencing. A suboffset value that is negative
+ indicates that no de-referencing should occur (striding in a contiguous
+ memory block).
- This is a storage for the itemsize (in bytes) of each element of the
- shared memory. It is technically un-necessary as it can be obtained
- using :c:func:`PyBuffer_SizeFromFormat`, however an exporter may know
- this information without parsing the format string and it is necessary
- to know the itemsize for proper interpretation of striding. Therefore,
- storing it is more convenient and faster.
+ This type of array representation is used by the Python Imaging Library
+ (PIL). See `complex arrays`_ for further information on how to access
+ elements of such an array.
- .. c:member:: void *internal
+ The suboffsets array is read-only for the consumer.
+
+ .. c:member:: void \*internal
This is for use internally by the exporting object. For example, this
might be re-cast as an integer by the exporter and used to store flags
about whether or not the shape, strides, and suboffsets arrays must be
- freed when the buffer is released. The consumer should never alter this
+ freed when the buffer is released. The consumer MUST NOT alter this
value.
+.. _buffer-request-types:
-Buffer-related functions
-========================
+Buffer request types
+====================
+Buffers are usually obtained by sending a buffer request to an exporting
+object via :c:func:`PyObject_GetBuffer`. Since the complexity of the logical
+structure of the memory can vary drastically, the consumer uses the *flags*
+argument to specify the exact buffer type it can handle.
-.. c:function:: int PyObject_CheckBuffer(PyObject *obj)
+All :c:data:`Py_buffer` fields are unambiguously defined by the request
+type.
+
+request-independent fields
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+The following fields are not influenced by *flags* and must always be filled in
+with the correct values: :c:member:`~Py_buffer.obj`, :c:member:`~Py_buffer.buf`,
+:c:member:`~Py_buffer.len`, :c:member:`~Py_buffer.itemsize`, :c:member:`~Py_buffer.ndim`.
- Return 1 if *obj* supports the buffer interface otherwise 0. When 1 is
- returned, it doesn't guarantee that :c:func:`PyObject_GetBuffer` will
- succeed.
+readonly, format
+~~~~~~~~~~~~~~~~
-.. c:function:: int PyObject_GetBuffer(PyObject *obj, Py_buffer *view, int flags)
+ .. c:macro:: PyBUF_WRITABLE
- Export a view over some internal data from the target object *obj*.
- *obj* must not be NULL, and *view* must point to an existing
- :c:type:`Py_buffer` structure allocated by the caller (most uses of
- this function will simply declare a local variable of type
- :c:type:`Py_buffer`). The *flags* argument is a bit field indicating
- what kind of buffer is requested. The buffer interface allows
- for complicated memory layout possibilities; however, some callers
- won't want to handle all the complexity and instead request a simple
- view of the target object (using :c:macro:`PyBUF_SIMPLE` for a read-only
- view and :c:macro:`PyBUF_WRITABLE` for a read-write view).
+ Controls the :c:member:`~Py_buffer.readonly` field. If set, the exporter
+ MUST provide a writable buffer or else report failure. Otherwise, the
+ exporter MAY provide either a read-only or writable buffer, but the choice
+ MUST be consistent for all consumers.
- Some exporters may not be able to share memory in every possible way and
- may need to raise errors to signal to some consumers that something is
- just not possible. These errors should be a :exc:`BufferError` unless
- there is another error that is actually causing the problem. The
- exporter can use flags information to simplify how much of the
- :c:data:`Py_buffer` structure is filled in with non-default values and/or
- raise an error if the object can't support a simpler view of its memory.
+ .. c:macro:: PyBUF_FORMAT
- On success, 0 is returned and the *view* structure is filled with useful
- values. On error, -1 is returned and an exception is raised; the *view*
- is left in an undefined state.
+ Controls the :c:member:`~Py_buffer.format` field. If set, this field MUST
+ be filled in correctly. Otherwise, this field MUST be *NULL*.
- The following are the possible values to the *flags* arguments.
- .. c:macro:: PyBUF_SIMPLE
+:c:macro:`PyBUF_WRITABLE` can be \|'d to any of the flags in the next section.
+Since :c:macro:`PyBUF_SIMPLE` is defined as 0, :c:macro:`PyBUF_WRITABLE`
+can be used as a stand-alone flag to request a simple writable buffer.
- This is the default flag. The returned buffer exposes a read-only
- memory area. The format of data is assumed to be raw unsigned bytes,
- without any particular structure. This is a "stand-alone" flag
- constant. It never needs to be '|'d to the others. The exporter will
- raise an error if it cannot provide such a contiguous buffer of bytes.
+:c:macro:`PyBUF_FORMAT` can be \|'d to any of the flags except :c:macro:`PyBUF_SIMPLE`.
+The latter already implies format ``B`` (unsigned bytes).
- .. c:macro:: PyBUF_WRITABLE
- Like :c:macro:`PyBUF_SIMPLE`, but the returned buffer is writable. If
- the exporter doesn't support writable buffers, an error is raised.
+shape, strides, suboffsets
+~~~~~~~~~~~~~~~~~~~~~~~~~~
- .. c:macro:: PyBUF_STRIDES
+The flags that control the logical structure of the memory are listed
+in decreasing order of complexity. Note that each flag contains all bits
+of the flags below it.
- This implies :c:macro:`PyBUF_ND`. The returned buffer must provide
- strides information (i.e. the strides cannot be NULL). This would be
- used when the consumer can handle strided, discontiguous arrays.
- Handling strides automatically assumes you can handle shape. The
- exporter can raise an error if a strided representation of the data is
- not possible (i.e. without the suboffsets).
- .. c:macro:: PyBUF_ND
++-----------------------------+-------+---------+------------+
+| Request | shape | strides | suboffsets |
++=============================+=======+=========+============+
+| .. c:macro:: PyBUF_INDIRECT | yes | yes | if needed |
++-----------------------------+-------+---------+------------+
+| .. c:macro:: PyBUF_STRIDES | yes | yes | NULL |
++-----------------------------+-------+---------+------------+
+| .. c:macro:: PyBUF_ND | yes | NULL | NULL |
++-----------------------------+-------+---------+------------+
+| .. c:macro:: PyBUF_SIMPLE | NULL | NULL | NULL |
++-----------------------------+-------+---------+------------+
- The returned buffer must provide shape information. The memory will be
- assumed C-style contiguous (last dimension varies the fastest). The
- exporter may raise an error if it cannot provide this kind of
- contiguous buffer. If this is not given then shape will be *NULL*.
- .. c:macro:: PyBUF_C_CONTIGUOUS
- PyBUF_F_CONTIGUOUS
- PyBUF_ANY_CONTIGUOUS
+contiguity requests
+~~~~~~~~~~~~~~~~~~~
- These flags indicate that the contiguity returned buffer must be
- respectively, C-contiguous (last dimension varies the fastest), Fortran
- contiguous (first dimension varies the fastest) or either one. All of
- these flags imply :c:macro:`PyBUF_STRIDES` and guarantee that the
- strides buffer info structure will be filled in correctly.
+C or Fortran contiguity can be explicitly requested, with and without stride
+information. Without stride information, the buffer must be C-contiguous.
- .. c:macro:: PyBUF_INDIRECT
++-----------------------------------+-------+---------+------------+--------+
+| Request | shape | strides | suboffsets | contig |
++===================================+=======+=========+============+========+
+| .. c:macro:: PyBUF_C_CONTIGUOUS | yes | yes | NULL | C |
++-----------------------------------+-------+---------+------------+--------+
+| .. c:macro:: PyBUF_F_CONTIGUOUS | yes | yes | NULL | F |
++-----------------------------------+-------+---------+------------+--------+
+| .. c:macro:: PyBUF_ANY_CONTIGUOUS | yes | yes | NULL | C or F |
++-----------------------------------+-------+---------+------------+--------+
+| .. c:macro:: PyBUF_ND | yes | NULL | NULL | C |
++-----------------------------------+-------+---------+------------+--------+
- This flag indicates the returned buffer must have suboffsets
- information (which can be NULL if no suboffsets are needed). This can
- be used when the consumer can handle indirect array referencing implied
- by these suboffsets. This implies :c:macro:`PyBUF_STRIDES`.
- .. c:macro:: PyBUF_FORMAT
+compound requests
+~~~~~~~~~~~~~~~~~
- The returned buffer must have true format information if this flag is
- provided. This would be used when the consumer is going to be checking
- for what 'kind' of data is actually stored. An exporter should always
- be able to provide this information if requested. If format is not
- explicitly requested then the format must be returned as *NULL* (which
- means ``'B'``, or unsigned bytes).
+All possible requests are fully defined by some combination of the flags in
+the previous section. For convenience, the buffer protocol provides frequently
+used combinations as single flags.
- .. c:macro:: PyBUF_STRIDED
+In the following table *U* stands for undefined contiguity. The consumer would
+have to call :c:func:`PyBuffer_IsContiguous` to determine contiguity.
- This is equivalent to ``(PyBUF_STRIDES | PyBUF_WRITABLE)``.
- .. c:macro:: PyBUF_STRIDED_RO
- This is equivalent to ``(PyBUF_STRIDES)``.
++-------------------------------+-------+---------+------------+--------+----------+--------+
+| Request | shape | strides | suboffsets | contig | readonly | format |
++===============================+=======+=========+============+========+==========+========+
+| .. c:macro:: PyBUF_FULL | yes | yes | if needed | U | 0 | yes |
++-------------------------------+-------+---------+------------+--------+----------+--------+
+| .. c:macro:: PyBUF_FULL_RO | yes | yes | if needed | U | 1 or 0 | yes |
++-------------------------------+-------+---------+------------+--------+----------+--------+
+| .. c:macro:: PyBUF_RECORDS | yes | yes | NULL | U | 0 | yes |
++-------------------------------+-------+---------+------------+--------+----------+--------+
+| .. c:macro:: PyBUF_RECORDS_RO | yes | yes | NULL | U | 1 or 0 | yes |
++-------------------------------+-------+---------+------------+--------+----------+--------+
+| .. c:macro:: PyBUF_STRIDED | yes | yes | NULL | U | 0 | NULL |
++-------------------------------+-------+---------+------------+--------+----------+--------+
+| .. c:macro:: PyBUF_STRIDED_RO | yes | yes | NULL | U | 1 or 0 | NULL |
++-------------------------------+-------+---------+------------+--------+----------+--------+
+| .. c:macro:: PyBUF_CONTIG | yes | NULL | NULL | C | 0 | NULL |
++-------------------------------+-------+---------+------------+--------+----------+--------+
+| .. c:macro:: PyBUF_CONTIG_RO | yes | NULL | NULL | C | 1 or 0 | NULL |
++-------------------------------+-------+---------+------------+--------+----------+--------+
- .. c:macro:: PyBUF_RECORDS
- This is equivalent to ``(PyBUF_STRIDES | PyBUF_FORMAT |
- PyBUF_WRITABLE)``.
+Complex arrays
+==============
- .. c:macro:: PyBUF_RECORDS_RO
+NumPy-style: shape and strides
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The logical structure of NumPy-style arrays is defined by :c:member:`~Py_buffer.itemsize`,
+:c:member:`~Py_buffer.ndim`, :c:member:`~Py_buffer.shape` and :c:member:`~Py_buffer.strides`.
+
+If ``ndim == 0``, the memory location pointed to by :c:member:`~Py_buffer.buf` is
+interpreted as a scalar of size :c:member:`~Py_buffer.itemsize`. In that case,
+both :c:member:`~Py_buffer.shape` and :c:member:`~Py_buffer.strides` are *NULL*.
+
+If :c:member:`~Py_buffer.strides` is *NULL*, the array is interpreted as
+a standard n-dimensional C-array. Otherwise, the consumer must access an
+n-dimensional array as follows:
+
+ ``ptr = (char *)buf + indices[0] * strides[0] + ... + indices[n-1] * strides[n-1]``
+ ``item = *((typeof(item) *)ptr);``
+
+
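+A strides-only lookup (no suboffsets) might look like the following C
+sketch; ``ptr_from_index`` is a hypothetical helper, not an API function::
+
+   static char *
+   ptr_from_index(const Py_buffer *view, const Py_ssize_t *indices)
+   {
+       char *ptr = (char *)view->buf;
+       int i;
+       for (i = 0; i < view->ndim; i++)
+           ptr += indices[i] * view->strides[i];  /* strides may be negative */
+       return ptr;
+   }
+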
+As noted above, :c:member:`~Py_buffer.buf` can point to any location within
+the actual memory block. An exporter can check the validity of a buffer with
+this function:
+
+.. code-block:: python
+
+   def verify_structure(memlen, itemsize, ndim, shape, strides, offset):
+       """Verify that the parameters represent a valid array within
+          the bounds of the allocated memory:
+              char *mem: start of the physical memory block
+              memlen: length of the physical memory block
+              offset: (char *)buf - mem
+       """
+       if offset % itemsize:
+           return False
+       if offset < 0 or offset+itemsize > memlen:
+           return False
+       if any(v % itemsize for v in strides):
+           return False
+
+       if ndim <= 0:
+           return ndim == 0 and not shape and not strides
+       if 0 in shape:
+           return True
+
+       imin = sum(strides[j]*(shape[j]-1) for j in range(ndim)
+                  if strides[j] <= 0)
+       imax = sum(strides[j]*(shape[j]-1) for j in range(ndim)
+                  if strides[j] > 0)
+
+       return 0 <= offset+imin and offset+imax+itemsize <= memlen
+
+
+PIL-style: shape, strides and suboffsets
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+In addition to the regular items, PIL-style arrays can contain pointers
+that must be followed in order to get to the next element in a dimension.
+For example, the regular three-dimensional C-array ``char v[2][2][3]`` can
+also be viewed as an array of 2 pointers to 2 two-dimensional arrays:
+``char (*v[2])[2][3]``. In suboffsets representation, those two pointers
+can be embedded at the start of :c:member:`~Py_buffer.buf`, pointing
+to two ``char x[2][3]`` arrays that can be located anywhere in memory.
+
+
+Here is a function that returns a pointer to the element in an N-D array
+pointed to by an N-dimensional index when there are both non-NULL strides
+and suboffsets::
+
+   void *get_item_pointer(int ndim, void *buf, Py_ssize_t *strides,
+                          Py_ssize_t *suboffsets, Py_ssize_t *indices) {
+       char *pointer = (char *)buf;
+       int i;
+       for (i = 0; i < ndim; i++) {
+           pointer += strides[i] * indices[i];
+           if (suboffsets[i] >= 0) {
+               pointer = *((char **)pointer) + suboffsets[i];
+           }
+       }
+       return (void *)pointer;
+   }
- This is equivalent to ``(PyBUF_STRIDES | PyBUF_FORMAT)``.
- .. c:macro:: PyBUF_FULL
+Buffer-related functions
+========================
- This is equivalent to ``(PyBUF_INDIRECT | PyBUF_FORMAT |
- PyBUF_WRITABLE)``.
+.. c:function:: int PyObject_CheckBuffer(PyObject *obj)
- .. c:macro:: PyBUF_FULL_RO
+ Return 1 if *obj* supports the buffer interface, otherwise 0. Note that
+ a return value of 1 does not guarantee that :c:func:`PyObject_GetBuffer`
+ will succeed.
- This is equivalent to ``(PyBUF_INDIRECT | PyBUF_FORMAT)``.
- .. c:macro:: PyBUF_CONTIG
+.. c:function:: int PyObject_GetBuffer(PyObject *exporter, Py_buffer *view, int flags)
- This is equivalent to ``(PyBUF_ND | PyBUF_WRITABLE)``.
+ Send a request to *exporter* to fill in *view* as specified by *flags*.
+ If the exporter cannot provide a buffer of the exact type, it MUST raise
+ :c:data:`PyExc_BufferError`, set :c:member:`view->obj` to *NULL* and
+ return -1.
- .. c:macro:: PyBUF_CONTIG_RO
+ On success, fill in *view*, set :c:member:`view->obj` to a new reference
+ to *exporter* and return 0. In the case of chained buffer providers
+ that redirect requests to a single object, :c:member:`view->obj` MAY
+ refer to this object instead of *exporter* (See :ref:`Buffer Object Structures <buffer-structs>`).
- This is equivalent to ``(PyBUF_ND)``.
+ Successful calls to :c:func:`PyObject_GetBuffer` must be paired with calls
+ to :c:func:`PyBuffer_Release`, similar to :c:func:`malloc` and :c:func:`free`.
+ Thus, after the consumer is done with the buffer, :c:func:`PyBuffer_Release`
+ must be called exactly once.
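+
+ A minimal consumer sketch (``sum_bytes`` is a hypothetical function, not
+ part of the API)::
+
+    static PyObject *
+    sum_bytes(PyObject *self, PyObject *obj)
+    {
+        Py_buffer view;
+        Py_ssize_t i;
+        long total = 0;
+        /* PyBUF_SIMPLE requests a contiguous buffer of unsigned bytes. */
+        if (PyObject_GetBuffer(obj, &view, PyBUF_SIMPLE) < 0)
+            return NULL;
+        for (i = 0; i < view.len; i++)
+            total += ((unsigned char *)view.buf)[i];
+        PyBuffer_Release(&view);   /* pair with the successful GetBuffer */
+        return PyLong_FromLong(total);
+    }
+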
.. c:function:: void PyBuffer_Release(Py_buffer *view)
- Release the buffer *view*. This should be called when the buffer is no
- longer being used as it may free memory from it.
+ Release the buffer *view* and decrement the reference count for
+ :c:member:`view->obj`. This function MUST be called when the buffer
+ is no longer being used, otherwise reference leaks may occur.
+
+ It is an error to call this function on a buffer that was not obtained via
+ :c:func:`PyObject_GetBuffer`.
.. c:function:: Py_ssize_t PyBuffer_SizeFromFormat(const char *)
- Return the implied :c:data:`~Py_buffer.itemsize` from the struct-stype
- :c:data:`~Py_buffer.format`.
+ Return the implied :c:data:`~Py_buffer.itemsize` from :c:data:`~Py_buffer.format`.
+ This function is not yet implemented.
-.. c:function:: int PyBuffer_IsContiguous(Py_buffer *view, char fortran)
+.. c:function:: int PyBuffer_IsContiguous(Py_buffer *view, char order)
- Return 1 if the memory defined by the *view* is C-style (*fortran* is
- ``'C'``) or Fortran-style (*fortran* is ``'F'``) contiguous or either one
- (*fortran* is ``'A'``). Return 0 otherwise.
+ Return 1 if the memory defined by the *view* is C-style (*order* is
+ ``'C'``) or Fortran-style (*order* is ``'F'``) contiguous or either one
+ (*order* is ``'A'``). Return 0 otherwise.
-.. c:function:: void PyBuffer_FillContiguousStrides(int ndim, Py_ssize_t *shape, Py_ssize_t *strides, Py_ssize_t itemsize, char fortran)
+.. c:function:: void PyBuffer_FillContiguousStrides(int ndim, Py_ssize_t *shape, Py_ssize_t *strides, Py_ssize_t itemsize, char order)
Fill the *strides* array with byte-strides of a contiguous (C-style if
- *fortran* is ``'C'`` or Fortran-style if *fortran* is ``'F'``) array of the
+ *order* is ``'C'`` or Fortran-style if *order* is ``'F'``) array of the
given shape with the given number of bytes per element.
-.. c:function:: int PyBuffer_FillInfo(Py_buffer *view, PyObject *obj, void *buf, Py_ssize_t len, int readonly, int infoflags)
+.. c:function:: int PyBuffer_FillInfo(Py_buffer *view, PyObject *exporter, void *buf, Py_ssize_t len, int readonly, int flags)
+
+ Handle buffer requests for an exporter that wants to expose *buf* of size *len*
+ with writability set according to *readonly*. *buf* is interpreted as a sequence
+ of unsigned bytes.
+
+ The *flags* argument indicates the request type. This function always fills in
+ *view* as specified by flags, unless *buf* has been designated as read-only
+ and :c:macro:`PyBUF_WRITABLE` is set in *flags*.
+
+ On success, set :c:member:`view->obj` to a new reference to *exporter* and
+ return 0. Otherwise, raise :c:data:`PyExc_BufferError`, set
+ :c:member:`view->obj` to *NULL* and return -1.
+
+ If this function is used as part of a :ref:`getbufferproc <buffer-structs>`,
+ *exporter* MUST be set to the exporting object. Otherwise, *exporter* MUST
+ be *NULL*.
+
- Fill in a buffer-info structure, *view*, correctly for an exporter that can
- only share a contiguous chunk of memory of "unsigned bytes" of the given
- length. Return 0 on success and -1 (with raising an error) on error.
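+
+ As a sketch of the non-:ref:`getbufferproc <buffer-structs>` case, a
+ temporary buffer over static memory can be filled in with a *NULL*
+ exporter and wrapped in a memoryview (``wrap_static_data`` is a
+ hypothetical function)::
+
+    static PyObject *
+    wrap_static_data(void)
+    {
+        static char data[] = "abc";
+        Py_buffer view;
+        if (PyBuffer_FillInfo(&view, NULL, data, sizeof(data) - 1,
+                              1 /* readonly */, PyBUF_SIMPLE) < 0)
+            return NULL;
+        return PyMemoryView_FromBuffer(&view);
+    }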
diff --git a/Doc/c-api/datetime.rst b/Doc/c-api/datetime.rst
index fcd1395..39542bd 100644
--- a/Doc/c-api/datetime.rst
+++ b/Doc/c-api/datetime.rst
@@ -170,6 +170,31 @@ and the type is not checked:
Return the microsecond, as an int from 0 through 999999.
+Macros to extract fields from time delta objects. The argument must be an
+instance of :c:data:`PyDateTime_Delta`, including subclasses. The argument must
+not be *NULL*, and the type is not checked:
+
+.. c:function:: int PyDateTime_DELTA_GET_DAYS(PyDateTime_Delta *o)
+
+ Return the number of days, as an int from -999999999 to 999999999.
+
+ .. versionadded:: 3.3
+
+
+.. c:function:: int PyDateTime_DELTA_GET_SECONDS(PyDateTime_Delta *o)
+
+ Return the number of seconds, as an int from 0 through 86399.
+
+ .. versionadded:: 3.3
+
+
+.. c:function:: int PyDateTime_DELTA_GET_MICROSECONDS(PyDateTime_Delta *o)
+
+ Return the number of microseconds, as an int from 0 through 999999.
+
+ .. versionadded:: 3.3
+
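+As an illustrative sketch (assuming ``datetime.h`` has been included),
+the three macros combine into a total microsecond count as follows;
+``delta_total_microseconds`` is a hypothetical helper::
+
+   static long long
+   delta_total_microseconds(PyDateTime_Delta *d)
+   {
+       long long us = PyDateTime_DELTA_GET_DAYS(d) * 86400LL;
+       us = (us + PyDateTime_DELTA_GET_SECONDS(d)) * 1000000LL
+            + PyDateTime_DELTA_GET_MICROSECONDS(d);
+       return us;
+   }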
+
Macros for the convenience of modules implementing the DB API:
.. c:function:: PyObject* PyDateTime_FromTimestamp(PyObject *args)
diff --git a/Doc/c-api/dict.rst b/Doc/c-api/dict.rst
index 6df84e0..ac714a6 100644
--- a/Doc/c-api/dict.rst
+++ b/Doc/c-api/dict.rst
@@ -209,3 +209,10 @@ Dictionary Objects
   for key, value in seq2:
       if override or key not in a:
           a[key] = value
+
+
+.. c:function:: int PyDict_ClearFreeList()
+
+ Clear the free list. Return the total number of freed items.
+
+ .. versionadded:: 3.3
diff --git a/Doc/c-api/exceptions.rst b/Doc/c-api/exceptions.rst
index 6f13c80..fd7aee7 100644
--- a/Doc/c-api/exceptions.rst
+++ b/Doc/c-api/exceptions.rst
@@ -421,17 +421,24 @@ Exception Objects
.. c:function:: PyObject* PyException_GetCause(PyObject *ex)
- Return the cause (another exception instance set by ``raise ... from ...``)
- associated with the exception as a new reference, as accessible from Python
- through :attr:`__cause__`. If there is no cause associated, this returns
- *NULL*.
+ Return the cause (either an exception instance, or :const:`None`,
+ set by ``raise ... from ...``) associated with the exception as a new
+ reference, as accessible from Python through :attr:`__cause__`.
+
+ If there is no cause associated, this returns *NULL* (at the Python
+ level, ``__cause__`` is then ``Ellipsis``). If the cause is :const:`None`,
+ the default exception display routines stop showing the context chain.
.. c:function:: void PyException_SetCause(PyObject *ex, PyObject *ctx)
Set the cause associated with the exception to *ctx*. Use *NULL* to clear
- it. There is no type check to make sure that *ctx* is an exception instance.
- This steals a reference to *ctx*.
+ it. There is no type check to make sure that *ctx* is either an exception
+ instance or :const:`None`. This steals a reference to *ctx*.
+
+ If the cause is set to :const:`None` the default exception display
+ routines will not display this exception's context, and will not follow the
+ chain any further.
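+
+ A minimal sketch of the C equivalent of ``raise new_exc from cause``,
+ assuming *new_exc* and *cause* are already-created exception instances::
+
+    Py_INCREF(cause);                    /* SetCause steals a reference */
+    PyException_SetCause(new_exc, cause);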
.. _unicodeexceptions:
@@ -525,7 +532,7 @@ recursion depth automatically).
Marks a point where a recursive C-level call is about to be performed.
- If :const:`USE_STACKCHECK` is defined, this function checks if the the OS
+ If :const:`USE_STACKCHECK` is defined, this function checks if the OS
stack overflowed using :c:func:`PyOS_CheckStack`. If this is the case, it
sets a :exc:`MemoryError` and returns a nonzero value.
@@ -582,65 +589,116 @@ All standard Python exceptions are available as global variables whose names are
:c:type:`PyObject\*`; they are all class objects. For completeness, here are all
the variables:
-+-------------------------------------+----------------------------+----------+
-| C Name | Python Name | Notes |
-+=====================================+============================+==========+
-| :c:data:`PyExc_BaseException` | :exc:`BaseException` | \(1) |
-+-------------------------------------+----------------------------+----------+
-| :c:data:`PyExc_Exception` | :exc:`Exception` | \(1) |
-+-------------------------------------+----------------------------+----------+
-| :c:data:`PyExc_ArithmeticError` | :exc:`ArithmeticError` | \(1) |
-+-------------------------------------+----------------------------+----------+
-| :c:data:`PyExc_LookupError` | :exc:`LookupError` | \(1) |
-+-------------------------------------+----------------------------+----------+
-| :c:data:`PyExc_AssertionError` | :exc:`AssertionError` | |
-+-------------------------------------+----------------------------+----------+
-| :c:data:`PyExc_AttributeError` | :exc:`AttributeError` | |
-+-------------------------------------+----------------------------+----------+
-| :c:data:`PyExc_EOFError` | :exc:`EOFError` | |
-+-------------------------------------+----------------------------+----------+
-| :c:data:`PyExc_EnvironmentError` | :exc:`EnvironmentError` | \(1) |
-+-------------------------------------+----------------------------+----------+
-| :c:data:`PyExc_FloatingPointError` | :exc:`FloatingPointError` | |
-+-------------------------------------+----------------------------+----------+
-| :c:data:`PyExc_IOError` | :exc:`IOError` | |
-+-------------------------------------+----------------------------+----------+
-| :c:data:`PyExc_ImportError` | :exc:`ImportError` | |
-+-------------------------------------+----------------------------+----------+
-| :c:data:`PyExc_IndexError` | :exc:`IndexError` | |
-+-------------------------------------+----------------------------+----------+
-| :c:data:`PyExc_KeyError` | :exc:`KeyError` | |
-+-------------------------------------+----------------------------+----------+
-| :c:data:`PyExc_KeyboardInterrupt` | :exc:`KeyboardInterrupt` | |
-+-------------------------------------+----------------------------+----------+
-| :c:data:`PyExc_MemoryError` | :exc:`MemoryError` | |
-+-------------------------------------+----------------------------+----------+
-| :c:data:`PyExc_NameError` | :exc:`NameError` | |
-+-------------------------------------+----------------------------+----------+
-| :c:data:`PyExc_NotImplementedError` | :exc:`NotImplementedError` | |
-+-------------------------------------+----------------------------+----------+
-| :c:data:`PyExc_OSError` | :exc:`OSError` | |
-+-------------------------------------+----------------------------+----------+
-| :c:data:`PyExc_OverflowError` | :exc:`OverflowError` | |
-+-------------------------------------+----------------------------+----------+
-| :c:data:`PyExc_ReferenceError` | :exc:`ReferenceError` | \(2) |
-+-------------------------------------+----------------------------+----------+
-| :c:data:`PyExc_RuntimeError` | :exc:`RuntimeError` | |
-+-------------------------------------+----------------------------+----------+
-| :c:data:`PyExc_SyntaxError` | :exc:`SyntaxError` | |
-+-------------------------------------+----------------------------+----------+
-| :c:data:`PyExc_SystemError` | :exc:`SystemError` | |
-+-------------------------------------+----------------------------+----------+
-| :c:data:`PyExc_SystemExit` | :exc:`SystemExit` | |
-+-------------------------------------+----------------------------+----------+
-| :c:data:`PyExc_TypeError` | :exc:`TypeError` | |
-+-------------------------------------+----------------------------+----------+
-| :c:data:`PyExc_ValueError` | :exc:`ValueError` | |
-+-------------------------------------+----------------------------+----------+
-| :c:data:`PyExc_WindowsError` | :exc:`WindowsError` | \(3) |
-+-------------------------------------+----------------------------+----------+
-| :c:data:`PyExc_ZeroDivisionError` | :exc:`ZeroDivisionError` | |
-+-------------------------------------+----------------------------+----------+
++-----------------------------------------+---------------------------------+----------+
+| C Name | Python Name | Notes |
++=========================================+=================================+==========+
+| :c:data:`PyExc_BaseException` | :exc:`BaseException` | \(1) |
++-----------------------------------------+---------------------------------+----------+
+| :c:data:`PyExc_Exception` | :exc:`Exception` | \(1) |
++-----------------------------------------+---------------------------------+----------+
+| :c:data:`PyExc_ArithmeticError` | :exc:`ArithmeticError` | \(1) |
++-----------------------------------------+---------------------------------+----------+
+| :c:data:`PyExc_LookupError` | :exc:`LookupError` | \(1) |
++-----------------------------------------+---------------------------------+----------+
+| :c:data:`PyExc_AssertionError` | :exc:`AssertionError` | |
++-----------------------------------------+---------------------------------+----------+
+| :c:data:`PyExc_AttributeError` | :exc:`AttributeError` | |
++-----------------------------------------+---------------------------------+----------+
+| :c:data:`PyExc_BlockingIOError` | :exc:`BlockingIOError` | |
++-----------------------------------------+---------------------------------+----------+
+| :c:data:`PyExc_BrokenPipeError` | :exc:`BrokenPipeError` | |
++-----------------------------------------+---------------------------------+----------+
+| :c:data:`PyExc_ChildProcessError` | :exc:`ChildProcessError` | |
++-----------------------------------------+---------------------------------+----------+
+| :c:data:`PyExc_ConnectionError` | :exc:`ConnectionError` | |
++-----------------------------------------+---------------------------------+----------+
+| :c:data:`PyExc_ConnectionAbortedError` | :exc:`ConnectionAbortedError` | |
++-----------------------------------------+---------------------------------+----------+
+| :c:data:`PyExc_ConnectionRefusedError` | :exc:`ConnectionRefusedError` | |
++-----------------------------------------+---------------------------------+----------+
+| :c:data:`PyExc_ConnectionResetError` | :exc:`ConnectionResetError` | |
++-----------------------------------------+---------------------------------+----------+
+| :c:data:`PyExc_FileExistsError` | :exc:`FileExistsError` | |
++-----------------------------------------+---------------------------------+----------+
+| :c:data:`PyExc_FileNotFoundError` | :exc:`FileNotFoundError` | |
++-----------------------------------------+---------------------------------+----------+
+| :c:data:`PyExc_EOFError` | :exc:`EOFError` | |
++-----------------------------------------+---------------------------------+----------+
+| :c:data:`PyExc_FloatingPointError` | :exc:`FloatingPointError` | |
++-----------------------------------------+---------------------------------+----------+
+| :c:data:`PyExc_ImportError` | :exc:`ImportError` | |
++-----------------------------------------+---------------------------------+----------+
+| :c:data:`PyExc_IndexError` | :exc:`IndexError` | |
++-----------------------------------------+---------------------------------+----------+
+| :c:data:`PyExc_InterruptedError` | :exc:`InterruptedError` | |
++-----------------------------------------+---------------------------------+----------+
+| :c:data:`PyExc_IsADirectoryError` | :exc:`IsADirectoryError` | |
++-----------------------------------------+---------------------------------+----------+
+| :c:data:`PyExc_KeyError` | :exc:`KeyError` | |
++-----------------------------------------+---------------------------------+----------+
+| :c:data:`PyExc_KeyboardInterrupt` | :exc:`KeyboardInterrupt` | |
++-----------------------------------------+---------------------------------+----------+
+| :c:data:`PyExc_MemoryError` | :exc:`MemoryError` | |
++-----------------------------------------+---------------------------------+----------+
+| :c:data:`PyExc_NameError` | :exc:`NameError` | |
++-----------------------------------------+---------------------------------+----------+
+| :c:data:`PyExc_NotADirectoryError` | :exc:`NotADirectoryError` | |
++-----------------------------------------+---------------------------------+----------+
+| :c:data:`PyExc_NotImplementedError` | :exc:`NotImplementedError` | |
++-----------------------------------------+---------------------------------+----------+
+| :c:data:`PyExc_OSError` | :exc:`OSError` | \(1) |
++-----------------------------------------+---------------------------------+----------+
+| :c:data:`PyExc_OverflowError` | :exc:`OverflowError` | |
++-----------------------------------------+---------------------------------+----------+
+| :c:data:`PyExc_PermissionError` | :exc:`PermissionError` | |
++-----------------------------------------+---------------------------------+----------+
+| :c:data:`PyExc_ProcessLookupError` | :exc:`ProcessLookupError` | |
++-----------------------------------------+---------------------------------+----------+
+| :c:data:`PyExc_ReferenceError` | :exc:`ReferenceError` | \(2) |
++-----------------------------------------+---------------------------------+----------+
+| :c:data:`PyExc_RuntimeError` | :exc:`RuntimeError` | |
++-----------------------------------------+---------------------------------+----------+
+| :c:data:`PyExc_SyntaxError` | :exc:`SyntaxError` | |
++-----------------------------------------+---------------------------------+----------+
+| :c:data:`PyExc_SystemError` | :exc:`SystemError` | |
++-----------------------------------------+---------------------------------+----------+
+| :c:data:`PyExc_TimeoutError` | :exc:`TimeoutError` | |
++-----------------------------------------+---------------------------------+----------+
+| :c:data:`PyExc_SystemExit` | :exc:`SystemExit` | |
++-----------------------------------------+---------------------------------+----------+
+| :c:data:`PyExc_TypeError` | :exc:`TypeError` | |
++-----------------------------------------+---------------------------------+----------+
+| :c:data:`PyExc_ValueError` | :exc:`ValueError` | |
++-----------------------------------------+---------------------------------+----------+
+| :c:data:`PyExc_ZeroDivisionError` | :exc:`ZeroDivisionError` | |
++-----------------------------------------+---------------------------------+----------+
+
+.. versionadded:: 3.3
+ :c:data:`PyExc_BlockingIOError`, :c:data:`PyExc_BrokenPipeError`,
+ :c:data:`PyExc_ChildProcessError`, :c:data:`PyExc_ConnectionError`,
+ :c:data:`PyExc_ConnectionAbortedError`, :c:data:`PyExc_ConnectionRefusedError`,
+ :c:data:`PyExc_ConnectionResetError`, :c:data:`PyExc_FileExistsError`,
+ :c:data:`PyExc_FileNotFoundError`, :c:data:`PyExc_InterruptedError`,
+ :c:data:`PyExc_IsADirectoryError`, :c:data:`PyExc_NotADirectoryError`,
+ :c:data:`PyExc_PermissionError`, :c:data:`PyExc_ProcessLookupError`
+ and :c:data:`PyExc_TimeoutError` were introduced following :pep:`3151`.
+
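+A sketch of reacting to one of the new subclasses from C (the fallback
+logic is hypothetical)::
+
+   if (PyErr_ExceptionMatches(PyExc_FileNotFoundError)) {
+       PyErr_Clear();
+       /* e.g. fall back to a built-in default configuration */
+   }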
+
+These are compatibility aliases to :c:data:`PyExc_OSError`:
+
++-------------------------------------+----------+
+| C Name | Notes |
++=====================================+==========+
+| :c:data:`PyExc_EnvironmentError` | |
++-------------------------------------+----------+
+| :c:data:`PyExc_IOError` | |
++-------------------------------------+----------+
+| :c:data:`PyExc_WindowsError` | \(3) |
++-------------------------------------+----------+
+
+.. versionchanged:: 3.3
+ These aliases used to be separate exception types.
+
.. index::
single: PyExc_BaseException
@@ -649,28 +707,42 @@ the variables:
single: PyExc_LookupError
single: PyExc_AssertionError
single: PyExc_AttributeError
+ single: PyExc_BlockingIOError
+ single: PyExc_BrokenPipeError
+ single: PyExc_ConnectionError
+ single: PyExc_ConnectionAbortedError
+ single: PyExc_ConnectionRefusedError
+ single: PyExc_ConnectionResetError
single: PyExc_EOFError
- single: PyExc_EnvironmentError
+ single: PyExc_FileExistsError
+ single: PyExc_FileNotFoundError
single: PyExc_FloatingPointError
- single: PyExc_IOError
single: PyExc_ImportError
single: PyExc_IndexError
+ single: PyExc_InterruptedError
+ single: PyExc_IsADirectoryError
single: PyExc_KeyError
single: PyExc_KeyboardInterrupt
single: PyExc_MemoryError
single: PyExc_NameError
+ single: PyExc_NotADirectoryError
single: PyExc_NotImplementedError
single: PyExc_OSError
single: PyExc_OverflowError
+ single: PyExc_PermissionError
+ single: PyExc_ProcessLookupError
single: PyExc_ReferenceError
single: PyExc_RuntimeError
single: PyExc_SyntaxError
single: PyExc_SystemError
single: PyExc_SystemExit
+ single: PyExc_TimeoutError
single: PyExc_TypeError
single: PyExc_ValueError
- single: PyExc_WindowsError
single: PyExc_ZeroDivisionError
+ single: PyExc_EnvironmentError
+ single: PyExc_IOError
+ single: PyExc_WindowsError
Notes:
diff --git a/Doc/c-api/function.rst b/Doc/c-api/function.rst
index 31805fd..ad98322 100644
--- a/Doc/c-api/function.rst
+++ b/Doc/c-api/function.rst
@@ -38,6 +38,16 @@ There are a few functions specific to Python functions.
object, the argument defaults and closure are set to *NULL*.
+.. c:function:: PyObject* PyFunction_NewWithQualName(PyObject *code, PyObject *globals, PyObject *qualname)
+
+ As :c:func:`PyFunction_New`, but also allows setting the function object's
+ ``__qualname__`` attribute. *qualname* should be a unicode object or *NULL*;
+ if *NULL*, the ``__qualname__`` attribute is set to the same value as its
+ ``__name__`` attribute.
+
+ .. versionadded:: 3.3
+
+
.. c:function:: PyObject* PyFunction_GetCode(PyObject *op)
Return the code object associated with the function object *op*.
diff --git a/Doc/c-api/import.rst b/Doc/c-api/import.rst
index cf48363..b168751 100644
--- a/Doc/c-api/import.rst
+++ b/Doc/c-api/import.rst
@@ -57,7 +57,7 @@ Importing Modules
:c:func:`PyImport_ImportModule`.
-.. c:function:: PyObject* PyImport_ImportModuleLevel(char *name, PyObject *globals, PyObject *locals, PyObject *fromlist, int level)
+.. c:function:: PyObject* PyImport_ImportModuleLevelObject(PyObject *name, PyObject *globals, PyObject *locals, PyObject *fromlist, int level)
Import a module. This is best described by referring to the built-in Python
function :func:`__import__`, as the standard :func:`__import__` function calls
@@ -68,6 +68,13 @@ Importing Modules
the return value when a submodule of a package was requested is normally the
top-level package, unless a non-empty *fromlist* was given.
+ .. versionadded:: 3.3
+
+
+.. c:function:: PyObject* PyImport_ImportModuleLevel(char *name, PyObject *globals, PyObject *locals, PyObject *fromlist, int level)
+
+ Similar to :c:func:`PyImport_ImportModuleLevelObject`, but the name is a
+ UTF-8 encoded string instead of a Unicode object.
.. c:function:: PyObject* PyImport_Import(PyObject *name)
@@ -86,7 +93,7 @@ Importing Modules
an exception set on failure (the module still exists in this case).
-.. c:function:: PyObject* PyImport_AddModule(const char *name)
+.. c:function:: PyObject* PyImport_AddModuleObject(PyObject *name)
Return the module object corresponding to a module name. The *name* argument
may be of the form ``package.module``. First check the modules dictionary if
@@ -100,6 +107,14 @@ Importing Modules
or one of its variants to import a module. Package structures implied by a
dotted name for *name* are not created if not already present.
+ .. versionadded:: 3.3
+
+
+.. c:function:: PyObject* PyImport_AddModule(const char *name)
+
+ Similar to :c:func:`PyImport_AddModuleObject`, but the name is a UTF-8
+ encoded string instead of a Unicode object.
+
.. c:function:: PyObject* PyImport_ExecCodeModule(char *name, PyObject *co)
@@ -136,14 +151,23 @@ Importing Modules
See also :c:func:`PyImport_ExecCodeModuleWithPathnames`.
-.. c:function:: PyObject* PyImport_ExecCodeModuleWithPathnames(char *name, PyObject *co, char *pathname, char *cpathname)
+.. c:function:: PyObject* PyImport_ExecCodeModuleObject(PyObject *name, PyObject *co, PyObject *pathname, PyObject *cpathname)
Like :c:func:`PyImport_ExecCodeModuleEx`, but the :attr:`__cached__`
attribute of the module object is set to *cpathname* if it is
non-``NULL``. Of the three functions, this is the preferred one to use.
+ .. versionadded:: 3.3
+
+
+.. c:function:: PyObject* PyImport_ExecCodeModuleWithPathnames(char *name, PyObject *co, char *pathname, char *cpathname)
+
+ Like :c:func:`PyImport_ExecCodeModuleObject`, but *name*, *pathname* and
+ *cpathname* are UTF-8 encoded strings.
+
.. versionadded:: 3.2
+
.. c:function:: long PyImport_GetMagicNumber()
Return the magic number for Python bytecode files (a.k.a. :file:`.pyc` and
@@ -200,7 +224,7 @@ Importing Modules
For internal use only.
-.. c:function:: int PyImport_ImportFrozenModule(char *name)
+.. c:function:: int PyImport_ImportFrozenModuleObject(PyObject *name)
Load a frozen module named *name*. Return ``1`` for success, ``0`` if the
module is not found, and ``-1`` with an exception set if the initialization
@@ -208,6 +232,14 @@ Importing Modules
:c:func:`PyImport_ImportModule`. (Note the misnomer --- this function would
reload the module if it was already imported.)
+ .. versionadded:: 3.3
+
+
+.. c:function:: int PyImport_ImportFrozenModule(char *name)
+
+ Similar to :c:func:`PyImport_ImportFrozenModuleObject`, but the name is a
+ UTF-8 encoded string instead of a Unicode object.
+
.. c:type:: struct _frozen
@@ -247,13 +279,13 @@ Importing Modules
Structure describing a single entry in the list of built-in modules. Each of
these structures gives the name and initialization function for a module built
- into the interpreter. Programs which embed Python may use an array of these
- structures in conjunction with :c:func:`PyImport_ExtendInittab` to provide
- additional built-in modules. The structure is defined in
- :file:`Include/import.h` as::
+ into the interpreter. The name is an ASCII encoded string. Programs which
+ embed Python may use an array of these structures in conjunction with
+ :c:func:`PyImport_ExtendInittab` to provide additional built-in modules.
+ The structure is defined in :file:`Include/import.h` as::
   struct _inittab {
-      char *name;
+      char *name;               /* ASCII encoded string */
       PyObject* (*initfunc)(void);
   };
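+
+ A sketch of extending the table before initialization (``spam`` and
+ ``PyInit_spam`` are hypothetical)::
+
+    static struct _inittab extra_modules[] = {
+        {"spam", PyInit_spam},
+        {NULL, NULL}              /* sentinel */
+    };
+
+    /* before Py_Initialize() */
+    PyImport_ExtendInittab(extra_modules);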
diff --git a/Doc/c-api/list.rst b/Doc/c-api/list.rst
index feb9015..5b263a7 100644
--- a/Doc/c-api/list.rst
+++ b/Doc/c-api/list.rst
@@ -142,3 +142,10 @@ List Objects
Return a new tuple object containing the contents of *list*; equivalent to
``tuple(list)``.
+
+
+.. c:function:: int PyList_ClearFreeList()
+
+ Clear the free list. Return the total number of freed items.
+
+ .. versionadded:: 3.3
diff --git a/Doc/c-api/long.rst b/Doc/c-api/long.rst
index b2295e0..4c295fa 100644
--- a/Doc/c-api/long.rst
+++ b/Doc/c-api/long.rst
@@ -100,6 +100,20 @@ All integers are implemented as "long" integer objects of arbitrary size.
string is first encoded to a byte string using :c:func:`PyUnicode_EncodeDecimal`
and then converted using :c:func:`PyLong_FromString`.
+ .. deprecated-removed:: 3.3 4.0
+ Part of the old-style :c:type:`Py_UNICODE` API; please migrate to using
+ :c:func:`PyLong_FromUnicodeObject`.
+
+
+.. c:function:: PyObject* PyLong_FromUnicodeObject(PyObject *u, int base)
+
+ Convert a sequence of Unicode digits in the string *u* to a Python integer
+ value. The Unicode string is first encoded to a byte string using
+ :c:func:`PyUnicode_EncodeDecimal` and then converted using
+ :c:func:`PyLong_FromString`.
+
+ .. versionadded:: 3.3
+
.. c:function:: PyObject* PyLong_FromVoidPtr(void *p)
diff --git a/Doc/c-api/memoryview.rst b/Doc/c-api/memoryview.rst
index 6b49cdf..5e50977 100644
--- a/Doc/c-api/memoryview.rst
+++ b/Doc/c-api/memoryview.rst
@@ -17,16 +17,21 @@ any other object.
Create a memoryview object from an object that provides the buffer interface.
If *obj* supports writable buffer exports, the memoryview object will be
- readable and writable, otherwise it will be read-only.
+ read/write, otherwise it may be either read-only or read/write at the
+ discretion of the exporter.
+.. c:function:: PyObject *PyMemoryView_FromMemory(char *mem, Py_ssize_t size, int flags)
+
+ Create a memoryview object using *mem* as the underlying buffer.
+ *flags* can be one of :c:macro:`PyBUF_READ` or :c:macro:`PyBUF_WRITE`.
+
+ .. versionadded:: 3.3
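+
+ A minimal sketch; the memory block must remain valid for the lifetime of
+ the resulting memoryview::
+
+    static char data[4] = {0, 1, 2, 3};
+    PyObject *mv = PyMemoryView_FromMemory(data, sizeof(data), PyBUF_READ);
+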
.. c:function:: PyObject *PyMemoryView_FromBuffer(Py_buffer *view)
Create a memoryview object wrapping the given buffer structure *view*.
- The memoryview object then owns the buffer represented by *view*, which
- means you shouldn't try to call :c:func:`PyBuffer_Release` yourself: it
- will be done on deallocation of the memoryview object.
-
+ For simple byte buffers, :c:func:`PyMemoryView_FromMemory` is the preferred
+ function.
.. c:function:: PyObject *PyMemoryView_GetContiguous(PyObject *obj, int buffertype, char order)
@@ -43,10 +48,16 @@ any other object.
currently allowed to create subclasses of :class:`memoryview`.
-.. c:function:: Py_buffer *PyMemoryView_GET_BUFFER(PyObject *obj)
+.. c:function:: Py_buffer *PyMemoryView_GET_BUFFER(PyObject *mview)
+
+ Return a pointer to the memoryview's private copy of the exporter's buffer.
+ *mview* **must** be a memoryview instance; this macro doesn't check its
+ type, so you must do it yourself or you will risk crashes.
+
+.. c:function:: PyObject *PyMemoryView_GET_BASE(PyObject *mview)
- Return a pointer to the buffer structure wrapped by the given
- memoryview object. The object **must** be a memoryview instance;
- this macro doesn't check its type, you must do it yourself or you
- will risk crashes.
+ Return either a pointer to the exporting object that the memoryview is based
+ on or *NULL* if the memoryview has been created by one of the functions
+ :c:func:`PyMemoryView_FromMemory` or :c:func:`PyMemoryView_FromBuffer`.
+ *mview* **must** be a memoryview instance.
diff --git a/Doc/c-api/module.rst b/Doc/c-api/module.rst
index ffd68e3..32587be 100644
--- a/Doc/c-api/module.rst
+++ b/Doc/c-api/module.rst
@@ -29,7 +29,7 @@ There are only a few functions special to module objects.
:c:data:`PyModule_Type`.
-.. c:function:: PyObject* PyModule_New(const char *name)
+.. c:function:: PyObject* PyModule_NewObject(PyObject *name)
.. index::
single: __name__ (module attribute)
@@ -40,6 +40,14 @@ There are only a few functions special to module objects.
Only the module's :attr:`__doc__` and :attr:`__name__` attributes are filled in;
the caller is responsible for providing a :attr:`__file__` attribute.
+ .. versionadded:: 3.3
+
+
+.. c:function:: PyObject* PyModule_New(const char *name)
+
+ Similar to :c:func:`PyModule_NewObject`, but the name is a UTF-8 encoded
+ string instead of a Unicode object.
+
.. c:function:: PyObject* PyModule_GetDict(PyObject *module)
@@ -52,7 +60,7 @@ There are only a few functions special to module objects.
manipulate a module's :attr:`__dict__`.
-.. c:function:: char* PyModule_GetName(PyObject *module)
+.. c:function:: PyObject* PyModule_GetNameObject(PyObject *module)
.. index::
single: __name__ (module attribute)
@@ -61,15 +69,13 @@ There are only a few functions special to module objects.
Return *module*'s :attr:`__name__` value. If the module does not provide one,
or if it is not a string, :exc:`SystemError` is raised and *NULL* is returned.
+ .. versionadded:: 3.3
-.. c:function:: char* PyModule_GetFilename(PyObject *module)
- Similar to :c:func:`PyModule_GetFilenameObject` but return the filename
- encoded to 'utf-8'.
+.. c:function:: char* PyModule_GetName(PyObject *module)
- .. deprecated:: 3.2
- :c:func:`PyModule_GetFilename` raises :c:type:`UnicodeEncodeError` on
- unencodable filenames, use :c:func:`PyModule_GetFilenameObject` instead.
+ Similar to :c:func:`PyModule_GetNameObject` but return the name encoded to
+ ``'utf-8'``.
.. c:function:: PyObject* PyModule_GetFilenameObject(PyObject *module)
@@ -81,11 +87,21 @@ There are only a few functions special to module objects.
Return the name of the file from which *module* was loaded using *module*'s
:attr:`__file__` attribute. If this is not defined, or if it is not a
unicode string, raise :exc:`SystemError` and return *NULL*; otherwise return
- a reference to a :c:type:`PyUnicodeObject`.
+ a reference to a Unicode object.
.. versionadded:: 3.2
+.. c:function:: char* PyModule_GetFilename(PyObject *module)
+
+ Similar to :c:func:`PyModule_GetFilenameObject` but return the filename
+ encoded to ``'utf-8'``.
+
+ .. deprecated:: 3.2
+ :c:func:`PyModule_GetFilename` raises :c:type:`UnicodeEncodeError` on
+ unencodable filenames, use :c:func:`PyModule_GetFilenameObject` instead.
+
+
.. c:function:: void* PyModule_GetState(PyObject *module)
Return the "state" of the module, that is, a pointer to the block of memory
diff --git a/Doc/c-api/object.rst b/Doc/c-api/object.rst
index d0d45ad..d895547 100644
--- a/Doc/c-api/object.rst
+++ b/Doc/c-api/object.rst
@@ -6,6 +6,19 @@ Object Protocol
===============
+.. c:var:: PyObject* Py_NotImplemented
+
+ The ``NotImplemented`` singleton, used to signal that an operation is
+ not implemented for the given type combination.
+
+
+.. c:macro:: Py_RETURN_NOTIMPLEMENTED
+
+ Properly handle returning :c:data:`Py_NotImplemented` from within a C
+ function (that is, increment the reference count of NotImplemented and
+ return it).
+
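+ A typical sketch inside a rich comparison handler; ``MyObject_Check`` and
+ the ``value`` field are hypothetical::
+
+    static PyObject *
+    myobj_richcompare(PyObject *a, PyObject *b, int op)
+    {
+        if (op != Py_EQ || !MyObject_Check(b))
+            Py_RETURN_NOTIMPLEMENTED;  /* let Python try alternatives */
+        return PyBool_FromLong(((MyObject *)a)->value ==
+                               ((MyObject *)b)->value);
+    }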
+
.. c:function:: int PyObject_Print(PyObject *o, FILE *fp, int flags)
Print an object *o*, on file *fp*. Returns ``-1`` on error. The flags argument
@@ -88,6 +101,22 @@ Object Protocol
This is the equivalent of the Python statement ``del o.attr_name``.
+.. c:function:: PyObject* PyType_GenericGetDict(PyObject *o, void *context)
+
+ A generic implementation for the getter of a ``__dict__`` descriptor. It
+ creates the dictionary if necessary.
+
+ .. versionadded:: 3.3
+
+
+.. c:function:: int PyType_GenericSetDict(PyObject *o, void *context)
+
+ A generic implementation for the setter of a ``__dict__`` descriptor. This
+ implementation does not allow the dictionary to be deleted.
+
+ .. versionadded:: 3.3
+
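+ A sketch of wiring both into a type's ``__dict__`` descriptor through a
+ :c:type:`PyGetSetDef` table (``myobj_getsets`` is a hypothetical name)::
+
+    static PyGetSetDef myobj_getsets[] = {
+        {"__dict__", PyType_GenericGetDict, PyType_GenericSetDict, NULL, NULL},
+        {NULL}                    /* sentinel */
+    };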
+
.. c:function:: PyObject* PyObject_RichCompare(PyObject *o1, PyObject *o2, int opid)
Compare the values of *o1* and *o2* using the operation specified by *opid*,
diff --git a/Doc/c-api/set.rst b/Doc/c-api/set.rst
index 66b47c4..5f0ef90 100644
--- a/Doc/c-api/set.rst
+++ b/Doc/c-api/set.rst
@@ -157,3 +157,10 @@ subtypes but not for instances of :class:`frozenset` or its subtypes.
.. c:function:: int PySet_Clear(PyObject *set)
Empty an existing set of all elements.
+
+
+.. c:function:: int PySet_ClearFreeList()
+
+ Clear the free list. Return the total number of freed items.
+
+ .. versionadded:: 3.3
diff --git a/Doc/c-api/type.rst b/Doc/c-api/type.rst
index b3386ea..cfd0d78 100644
--- a/Doc/c-api/type.rst
+++ b/Doc/c-api/type.rst
@@ -75,8 +75,8 @@ Type Objects
.. c:function:: PyObject* PyType_GenericNew(PyTypeObject *type, PyObject *args, PyObject *kwds)
- XXX: Document.
-
+ Generic handler for the :attr:`tp_new` slot of a type object. Initialize
+ all instance variables to *NULL*.
.. c:function:: int PyType_Ready(PyTypeObject *type)
diff --git a/Doc/c-api/typeobj.rst b/Doc/c-api/typeobj.rst
index 68ca9ad..ea1a0ad 100644
--- a/Doc/c-api/typeobj.rst
+++ b/Doc/c-api/typeobj.rst
@@ -1198,46 +1198,88 @@ Buffer Object Structures
.. sectionauthor:: Greg J. Stein <greg@lyra.org>
.. sectionauthor:: Benjamin Peterson
+.. sectionauthor:: Stefan Krah
+.. c:type:: PyBufferProcs
-The :ref:`buffer interface <bufferobjects>` exports a model where an object can expose its internal
-data.
+ This structure holds pointers to the functions required by the
+ :ref:`Buffer protocol <bufferobjects>`. The protocol defines how
+ an exporter object can expose its internal data to consumer objects.
-If an object does not export the buffer interface, then its :attr:`tp_as_buffer`
-member in the :c:type:`PyTypeObject` structure should be *NULL*. Otherwise, the
-:attr:`tp_as_buffer` will point to a :c:type:`PyBufferProcs` structure.
+.. c:member:: getbufferproc PyBufferProcs.bf_getbuffer
+ The signature of this function is::
-.. c:type:: PyBufferProcs
+ int (PyObject *exporter, Py_buffer *view, int flags);
+
+ Handle a request to *exporter* to fill in *view* as specified by *flags*.
+ Except for point (3), an implementation of this function MUST take these
+ steps:
+
+ (1) Check if the request can be met. If not, raise :c:data:`PyExc_BufferError`,
+ set :c:data:`view->obj` to *NULL* and return -1.
+
+ (2) Fill in the requested fields.
+
+ (3) Increment an internal counter for the number of exports.
+
+ (4) Set :c:data:`view->obj` to *exporter* and increment :c:data:`view->obj`.
+
+ (5) Return 0.
+
+ If *exporter* is part of a chain or tree of buffer providers, two main
+ schemes can be used:
+
+ * Re-export: Each member of the tree acts as the exporting object and
+ sets :c:data:`view->obj` to a new reference to itself.
+
+ * Redirect: The buffer request is redirected to the root object of the
+ tree. Here, :c:data:`view->obj` will be a new reference to the root
+ object.
+
+ The individual fields of *view* are described in section
+ :ref:`Buffer structure <buffer-structure>`; the rules for how an exporter
+ must react to specific requests are in section
+ :ref:`Buffer request types <buffer-request-types>`.
+
+ All memory pointed to in the :c:type:`Py_buffer` structure belongs to
+ the exporter and must remain valid until there are no consumers left.
+ :c:member:`~Py_buffer.format`, :c:member:`~Py_buffer.shape`,
+ :c:member:`~Py_buffer.strides`, :c:member:`~Py_buffer.suboffsets`
+ and :c:member:`~Py_buffer.internal`
+ are read-only for the consumer.
+
+ :c:func:`PyBuffer_FillInfo` provides an easy way of exposing a simple
+ bytes buffer while dealing correctly with all request types.
+
+ :c:func:`PyObject_GetBuffer` is the interface for the consumer that
+ wraps this function.
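+
+ A minimal exporter sketch for a simple contiguous byte buffer (``MyObject``
+ and its ``data``/``len`` fields are hypothetical)::
+
+    static int
+    myobj_getbuffer(PyObject *exporter, Py_buffer *view, int flags)
+    {
+        MyObject *self = (MyObject *)exporter;
+        /* PyBuffer_FillInfo() fills in *view*, sets view->obj to a new
+           reference to *exporter* and returns 0, or raises BufferError
+           and returns -1 if the request cannot be met. */
+        return PyBuffer_FillInfo(view, exporter, self->data, self->len,
+                                 /* readonly */ 1, flags);
+    }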
+
+.. c:member:: releasebufferproc PyBufferProcs.bf_releasebuffer
+
+ The signature of this function is::
+
+ void (PyObject *exporter, Py_buffer *view);
- Structure used to hold the function pointers which define an implementation of
- the buffer protocol.
+ Handle a request to release the resources of the buffer. If no resources
+ need to be released, :c:member:`PyBufferProcs.bf_releasebuffer` may be
+ *NULL*. Otherwise, a standard implementation of this function will take
+ these optional steps:
- .. c:member:: getbufferproc bf_getbuffer
+ (1) Decrement an internal counter for the number of exports.
- This should fill a :c:type:`Py_buffer` with the necessary data for
- exporting the type. The signature of :data:`getbufferproc` is ``int
- (PyObject *obj, Py_buffer *view, int flags)``. *obj* is the object to
- export, *view* is the :c:type:`Py_buffer` struct to fill, and *flags* gives
- the conditions the caller wants the memory under. (See
- :c:func:`PyObject_GetBuffer` for all flags.) :c:member:`bf_getbuffer` is
- responsible for filling *view* with the appropriate information.
- (:c:func:`PyBuffer_FillView` can be used in simple cases.) See
- :c:type:`Py_buffer`\s docs for what needs to be filled in.
+ (2) If the counter is 0, free all memory associated with *view*.
+ The exporter MUST use the :c:member:`~Py_buffer.internal` field to keep
+ track of buffer-specific resources. This field is guaranteed to remain
+ constant, while a consumer MAY pass a copy of the original buffer as the
+ *view* argument.
- .. c:member:: releasebufferproc bf_releasebuffer
- This should release the resources of the buffer. The signature of
- :c:data:`releasebufferproc` is ``void (PyObject *obj, Py_buffer *view)``.
- If the :c:data:`bf_releasebuffer` function is not provided (i.e. it is
- *NULL*), then it does not ever need to be called.
+ This function MUST NOT decrement :c:data:`view->obj`, since that is
+ done automatically in :c:func:`PyBuffer_Release` (this scheme is
+ useful for breaking reference cycles).
- The exporter of the buffer interface must make sure that any memory
- pointed to in the :c:type:`Py_buffer` structure remains valid until
- releasebuffer is called. Exporters will need to define a
- :c:data:`bf_releasebuffer` function if they can re-allocate their memory,
- strides, shape, suboffsets, or format variables which they might share
- through the struct bufferinfo.
- See :c:func:`PyBuffer_Release`.
+ :c:func:`PyBuffer_Release` is the interface for the consumer that
+ wraps this function.
diff --git a/Doc/c-api/unicode.rst b/Doc/c-api/unicode.rst
index 3500654..a74a73d 100644
--- a/Doc/c-api/unicode.rst
+++ b/Doc/c-api/unicode.rst
@@ -6,38 +6,72 @@ Unicode Objects and Codecs
--------------------------
.. sectionauthor:: Marc-Andre Lemburg <mal@lemburg.com>
+.. sectionauthor:: Georg Brandl <georg@python.org>
Unicode Objects
^^^^^^^^^^^^^^^
+Since the implementation of :pep:`393` in Python 3.3, Unicode objects internally
+use a variety of representations, in order to allow handling the complete range
+of Unicode characters while staying memory efficient. There are special cases
+for strings where all code points are below 128, 256, or 65536; otherwise, code
+points must be below 1114112 (which is the full Unicode range).
+
+:c:type:`Py_UNICODE*` and UTF-8 representations are created on demand and cached
+in the Unicode object. The :c:type:`Py_UNICODE*` representation is deprecated
+and inefficient; it should be avoided in performance- or memory-sensitive
+situations.
+
+Due to the transition between the old APIs and the new APIs, unicode objects
+can internally be in two states depending on how they were created:
+
+* "canonical" unicode objects are all objects created by a non-deprecated
+ unicode API. They use the most efficient representation allowed by the
+ implementation.
+
+* "legacy" unicode objects have been created through one of the deprecated
+ APIs (typically :c:func:`PyUnicode_FromUnicode`) and only bear the
+ :c:type:`Py_UNICODE*` representation; you will have to call
+ :c:func:`PyUnicode_READY` on them before calling any other API.
+
+
Unicode Type
""""""""""""
These are the basic Unicode object types used for the Unicode implementation in
Python:
+.. c:type:: Py_UCS4
+ Py_UCS2
+ Py_UCS1
+
+ These types are typedefs for unsigned integer types wide enough to contain
+ characters of 32 bits, 16 bits and 8 bits, respectively. When dealing with
+ single Unicode characters, use :c:type:`Py_UCS4`.
+
+ .. versionadded:: 3.3
+
.. c:type:: Py_UNICODE
- This type represents the storage type which is used by Python internally as
- basis for holding Unicode ordinals. Python's default builds use a 16-bit type
- for :c:type:`Py_UNICODE` and store Unicode values internally as UCS2. It is also
- possible to build a UCS4 version of Python (most recent Linux distributions come
- with UCS4 builds of Python). These builds then use a 32-bit type for
- :c:type:`Py_UNICODE` and store Unicode data internally as UCS4. On platforms
- where :c:type:`wchar_t` is available and compatible with the chosen Python
- Unicode build variant, :c:type:`Py_UNICODE` is a typedef alias for
- :c:type:`wchar_t` to enhance native platform compatibility. On all other
- platforms, :c:type:`Py_UNICODE` is a typedef alias for either :c:type:`unsigned
- short` (UCS2) or :c:type:`unsigned long` (UCS4).
+ This is a typedef of :c:type:`wchar_t`, which is a 16-bit type or 32-bit type
+ depending on the platform.
-Note that UCS2 and UCS4 Python builds are not binary compatible. Please keep
-this in mind when writing extensions or interfaces.
+ .. versionchanged:: 3.3
+ In previous versions, this was a 16-bit type or a 32-bit type depending on
+ whether you selected a "narrow" or "wide" Unicode version of Python at
+ build time.
-.. c:type:: PyUnicodeObject
+.. c:type:: PyASCIIObject
+ PyCompactUnicodeObject
+ PyUnicodeObject
- This subtype of :c:type:`PyObject` represents a Python Unicode object.
+ These subtypes of :c:type:`PyObject` represent a Python Unicode object. In
+ almost all cases, they shouldn't be used directly, since all API functions
+ that deal with Unicode objects take and return :c:type:`PyObject` pointers.
+
+ .. versionadded:: 3.3
.. c:var:: PyTypeObject PyUnicode_Type
@@ -45,10 +79,10 @@ this in mind when writing extensions or interfaces.
This instance of :c:type:`PyTypeObject` represents the Python Unicode type. It
is exposed to Python code as ``str``.
+
The following APIs are really C macros and can be used to do fast checks and to
access internal read-only data of Unicode objects:
-
.. c:function:: int PyUnicode_Check(PyObject *o)
Return true if the object *o* is a Unicode object or an instance of a Unicode
@@ -61,28 +95,106 @@ access internal read-only data of Unicode objects:
subtype.
-.. c:function:: Py_ssize_t PyUnicode_GET_SIZE(PyObject *o)
+.. c:function:: int PyUnicode_READY(PyObject *o)
- Return the size of the object. *o* has to be a :c:type:`PyUnicodeObject` (not
- checked).
+ Ensure the string object *o* is in the "canonical" representation. This is
+ required before using any of the access macros described below.
+ .. XXX expand on when it is not required
-.. c:function:: Py_ssize_t PyUnicode_GET_DATA_SIZE(PyObject *o)
+ Returns 0 on success and -1 with an exception set on failure, which in
+ particular happens if memory allocation fails.
- Return the size of the object's internal buffer in bytes. *o* has to be a
- :c:type:`PyUnicodeObject` (not checked).
+ .. versionadded:: 3.3
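+
+ A typical call pattern, as a minimal sketch (``obj`` is any Unicode
+ object)::
+
+    if (PyUnicode_READY(obj) == -1)
+        return NULL;    /* exception set, e.g. MemoryError */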
-.. c:function:: Py_UNICODE* PyUnicode_AS_UNICODE(PyObject *o)
+.. c:function:: Py_ssize_t PyUnicode_GET_LENGTH(PyObject *o)
+
+ Return the length of the Unicode string, in code points. *o* has to be a
+ Unicode object in the "canonical" representation (not checked).
+
+ .. versionadded:: 3.3
+
+
+.. c:function:: Py_UCS1* PyUnicode_1BYTE_DATA(PyObject *o)
+ Py_UCS2* PyUnicode_2BYTE_DATA(PyObject *o)
+ Py_UCS4* PyUnicode_4BYTE_DATA(PyObject *o)
+
+ Return a pointer to the canonical representation cast to UCS1, UCS2 or UCS4
+ integer types for direct character access. No checks are performed to ensure
+ that the canonical representation has the correct character size; use
+ :c:func:`PyUnicode_KIND` to select the right macro. Make sure
+ :c:func:`PyUnicode_READY` has been called before accessing this.
+
+ .. versionadded:: 3.3
+
+
+.. c:macro:: PyUnicode_WCHAR_KIND
+ PyUnicode_1BYTE_KIND
+ PyUnicode_2BYTE_KIND
+ PyUnicode_4BYTE_KIND
+
+ Return values of the :c:func:`PyUnicode_KIND` macro.
+
+ .. versionadded:: 3.3
+
+
+.. c:function:: int PyUnicode_KIND(PyObject *o)
+
+ Return one of the PyUnicode kind constants (see above) that indicate how many
+ bytes per character this Unicode object uses to store its data. *o* has to
+ be a Unicode object in the "canonical" representation (not checked).
+
+ .. XXX document "0" return value?
+
+ .. versionadded:: 3.3
+
+
+.. c:function:: void* PyUnicode_DATA(PyObject *o)
+
+ Return a void pointer to the raw unicode buffer. *o* has to be a Unicode
+ object in the "canonical" representation (not checked).
+
+ .. versionadded:: 3.3
+
+
+.. c:function:: void PyUnicode_WRITE(int kind, void *data, Py_ssize_t index, \
+ Py_UCS4 value)
+
+ Write into a canonical representation *data* (as obtained with
+ :c:func:`PyUnicode_DATA`). This macro does not do any sanity checks and is
+ intended for usage in loops. The caller should cache the *kind* value and
+ *data* pointer as obtained from other macro calls. *index* is the index in
+ the string (starts at 0) and *value* is the new code point value which should
+ be written to that location.
+
+ .. versionadded:: 3.3
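+
+ As a sketch of the intended loop idiom (``src`` and ``dst`` are assumed to
+ be "ready" Unicode objects of the same length, ``dst`` freshly created with
+ :c:func:`PyUnicode_New`)::
+
+    int skind = PyUnicode_KIND(src);
+    int dkind = PyUnicode_KIND(dst);
+    void *sdata = PyUnicode_DATA(src);
+    void *ddata = PyUnicode_DATA(dst);
+    Py_ssize_t i, n = PyUnicode_GET_LENGTH(src);
+    for (i = 0; i < n; i++)
+        PyUnicode_WRITE(dkind, ddata, i, PyUnicode_READ(skind, sdata, i));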
+
+
+.. c:function:: Py_UCS4 PyUnicode_READ(int kind, void *data, Py_ssize_t index)
+
+ Read a code point from a canonical representation *data* (as obtained with
+ :c:func:`PyUnicode_DATA`). No checks or ready calls are performed.
+
+ .. versionadded:: 3.3
+
+
+.. c:function:: Py_UCS4 PyUnicode_READ_CHAR(PyObject *o, Py_ssize_t index)
+
+ Read a character from a Unicode object *o*, which must be in the "canonical"
+ representation. This is less efficient than :c:func:`PyUnicode_READ` if you
+ do multiple consecutive reads.
+
+ .. versionadded:: 3.3
- Return a pointer to the internal :c:type:`Py_UNICODE` buffer of the object. *o*
- has to be a :c:type:`PyUnicodeObject` (not checked).
+.. c:function:: Py_UCS4 PyUnicode_MAX_CHAR_VALUE(PyObject *o)
-.. c:function:: const char* PyUnicode_AS_DATA(PyObject *o)
+ Return the maximum code point that is suitable for creating another string
+ based on *o*, which must be in the "canonical" representation. This is
+ always an approximation but more efficient than iterating over the string.
- Return a pointer to the internal buffer of the object. *o* has to be a
- :c:type:`PyUnicodeObject` (not checked).
+ .. versionadded:: 3.3
.. c:function:: int PyUnicode_ClearFreeList()
@@ -90,6 +202,46 @@ access internal read-only data of Unicode objects:
Clear the free list. Return the total number of freed items.
+.. c:function:: Py_ssize_t PyUnicode_GET_SIZE(PyObject *o)
+
+ Return the size of the deprecated :c:type:`Py_UNICODE` representation, in
+ code units (this includes surrogate pairs as 2 units). *o* has to be a
+ Unicode object (not checked).
+
+ .. deprecated-removed:: 3.3 4.0
+ Part of the old-style Unicode API; please migrate to using
+ :c:func:`PyUnicode_GET_LENGTH`.
+
+
+.. c:function:: Py_ssize_t PyUnicode_GET_DATA_SIZE(PyObject *o)
+
+ Return the size of the deprecated :c:type:`Py_UNICODE` representation in
+ bytes. *o* has to be a Unicode object (not checked).
+
+ .. deprecated-removed:: 3.3 4.0
+ Part of the old-style Unicode API; please migrate to using
+ :c:func:`PyUnicode_GET_LENGTH`.
+
+
+.. c:function:: Py_UNICODE* PyUnicode_AS_UNICODE(PyObject *o)
+ const char* PyUnicode_AS_DATA(PyObject *o)
+
+ Return a pointer to a :c:type:`Py_UNICODE` representation of the object. The
+ ``AS_DATA`` form casts the pointer to :c:type:`const char *`. *o* has to be
+ a Unicode object (not checked).
+
+ .. versionchanged:: 3.3
+ This macro is now inefficient -- because in many cases the
+ :c:type:`Py_UNICODE` representation does not exist and needs to be created
+ -- and can fail (return *NULL* with an exception set). Try to port the
+ code to use the new :c:func:`PyUnicode_nBYTE_DATA` macros or use
+ :c:func:`PyUnicode_WRITE` or :c:func:`PyUnicode_READ`.
+
+ .. deprecated-removed:: 3.3 4.0
+ Part of the old-style Unicode API; please migrate to using the
+ :c:func:`PyUnicode_nBYTE_DATA` family of macros.
+
+
Unicode Character Properties
""""""""""""""""""""""""""""
@@ -166,16 +318,25 @@ These APIs can be used for fast direct character conversions:
Return the character *ch* converted to lower case.
+ .. deprecated:: 3.3
+ This function uses simple case mappings.
+
.. c:function:: Py_UNICODE Py_UNICODE_TOUPPER(Py_UNICODE ch)
Return the character *ch* converted to upper case.
+ .. deprecated:: 3.3
+ This function uses simple case mappings.
+
.. c:function:: Py_UNICODE Py_UNICODE_TOTITLE(Py_UNICODE ch)
Return the character *ch* converted to title case.
+ .. deprecated:: 3.3
+ This function uses simple case mappings.
+
.. c:function:: int Py_UNICODE_TODECIMAL(Py_UNICODE ch)
@@ -195,31 +356,66 @@ These APIs can be used for fast direct character conversions:
possible. This macro does not raise exceptions.
-Plain Py_UNICODE
-""""""""""""""""
+These APIs can be used to work with surrogates:
+
+.. c:macro:: Py_UNICODE_IS_SURROGATE(ch)
+
+ Check if *ch* is a surrogate (``0xD800 <= ch <= 0xDFFF``).
+
+.. c:macro:: Py_UNICODE_IS_HIGH_SURROGATE(ch)
+
+ Check if *ch* is a high surrogate (``0xD800 <= ch <= 0xDBFF``).
+
+.. c:macro:: Py_UNICODE_IS_LOW_SURROGATE(ch)
+
+ Check if *ch* is a low surrogate (``0xDC00 <= ch <= 0xDFFF``).
+
+.. c:macro:: Py_UNICODE_JOIN_SURROGATES(high, low)
+
+ Join two surrogate characters and return a single :c:type:`Py_UCS4` value.
+ *high* and *low* are respectively the leading and trailing surrogates in a
+ surrogate pair.
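+
+ For example (a sketch; ``u`` is assumed to be a :c:type:`Py_UNICODE` buffer
+ with ``i + 1`` a valid index)::
+
+    Py_UCS4 ch = u[i];
+    if (Py_UNICODE_IS_HIGH_SURROGATE(u[i]) &&
+        Py_UNICODE_IS_LOW_SURROGATE(u[i + 1]))
+        ch = Py_UNICODE_JOIN_SURROGATES(u[i], u[i + 1]);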
+
+
+Creating and accessing Unicode strings
+""""""""""""""""""""""""""""""""""""""
To create Unicode objects and access their basic sequence properties, use these
APIs:
+.. c:function:: PyObject* PyUnicode_New(Py_ssize_t size, Py_UCS4 maxchar)
-.. c:function:: PyObject* PyUnicode_FromUnicode(const Py_UNICODE *u, Py_ssize_t size)
+ Create a new Unicode object. *maxchar* should be the true maximum code point
+ to be placed in the string. As an approximation, it can be rounded up to the
+ nearest value in the sequence 127, 255, 65535, 1114111.
- Create a Unicode object from the Py_UNICODE buffer *u* of the given size. *u*
- may be *NULL* which causes the contents to be undefined. It is the user's
- responsibility to fill in the needed data. The buffer is copied into the new
- object. If the buffer is not *NULL*, the return value might be a shared object.
- Therefore, modification of the resulting Unicode object is only allowed when *u*
- is *NULL*.
+ This is the recommended way to allocate a new Unicode object. Objects
+ created using this function are not resizable.
+
+ .. versionadded:: 3.3
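+
+ For instance, a pure-ASCII three-character string could be built like this
+ (a sketch; only the allocation error is handled)::
+
+    PyObject *s = PyUnicode_New(3, 127);
+    if (s == NULL)
+        return NULL;
+    PyUnicode_WRITE(PyUnicode_KIND(s), PyUnicode_DATA(s), 0, 'a');
+    PyUnicode_WRITE(PyUnicode_KIND(s), PyUnicode_DATA(s), 1, 'b');
+    PyUnicode_WRITE(PyUnicode_KIND(s), PyUnicode_DATA(s), 2, 'c');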
+
+
+.. c:function:: PyObject* PyUnicode_FromKindAndData(int kind, const void *buffer, \
+ Py_ssize_t size)
+
+ Create a new Unicode object with the given *kind* (possible values are
+ :c:macro:`PyUnicode_1BYTE_KIND` etc., as returned by
+ :c:func:`PyUnicode_KIND`). The *buffer* must point to an array of *size*
+ units of 1, 2 or 4 bytes per character, as given by the kind.
+
+ .. versionadded:: 3.3
.. c:function:: PyObject* PyUnicode_FromStringAndSize(const char *u, Py_ssize_t size)
- Create a Unicode object from the char buffer *u*. The bytes will be interpreted
- as being UTF-8 encoded. *u* may also be *NULL* which
- causes the contents to be undefined. It is the user's responsibility to fill in
- the needed data. The buffer is copied into the new object. If the buffer is not
- *NULL*, the return value might be a shared object. Therefore, modification of
- the resulting Unicode object is only allowed when *u* is *NULL*.
+ Create a Unicode object from the char buffer *u*. The bytes will be
+ interpreted as being UTF-8 encoded. The buffer is copied into the new
+ object. If the buffer is not *NULL*, the return value might be a shared
+ object, i.e. modification of the data is not allowed.
+
+ If *u* is *NULL*, this function behaves like :c:func:`PyUnicode_FromUnicode`
+ with the buffer set to *NULL*. This usage is deprecated in favor of
+ :c:func:`PyUnicode_New`.
.. c:function:: PyObject *PyUnicode_FromString(const char *u)
@@ -260,18 +456,27 @@ APIs:
| :attr:`%ld` | long | Exactly equivalent to |
| | | ``printf("%ld")``. |
+-------------------+---------------------+--------------------------------+
+ | :attr:`%li` | long | Exactly equivalent to |
+ | | | ``printf("%li")``. |
+ +-------------------+---------------------+--------------------------------+
| :attr:`%lu` | unsigned long | Exactly equivalent to |
| | | ``printf("%lu")``. |
+-------------------+---------------------+--------------------------------+
| :attr:`%lld` | long long | Exactly equivalent to |
| | | ``printf("%lld")``. |
+-------------------+---------------------+--------------------------------+
+ | :attr:`%lli` | long long | Exactly equivalent to |
+ | | | ``printf("%lli")``. |
+ +-------------------+---------------------+--------------------------------+
| :attr:`%llu` | unsigned long long | Exactly equivalent to |
| | | ``printf("%llu")``. |
+-------------------+---------------------+--------------------------------+
| :attr:`%zd` | Py_ssize_t | Exactly equivalent to |
| | | ``printf("%zd")``. |
+-------------------+---------------------+--------------------------------+
+ | :attr:`%zi` | Py_ssize_t | Exactly equivalent to |
+ | | | ``printf("%zi")``. |
+ +-------------------+---------------------+--------------------------------+
| :attr:`%zu` | size_t | Exactly equivalent to |
| | | ``printf("%zu")``. |
+-------------------+---------------------+--------------------------------+
@@ -322,27 +527,178 @@ APIs:
.. versionchanged:: 3.2
Support for ``"%lld"`` and ``"%llu"`` added.
+ .. versionchanged:: 3.3
+ Support for ``"%li"``, ``"%lli"`` and ``"%zi"`` added.
+
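+ For example, building a message from C values (a sketch; the
+ :c:type:`Py_ssize_t` variables ``done`` and ``total`` and the UTF-8 encoded
+ ``utf8_name`` are illustrative)::
+
+    PyObject *msg = PyUnicode_FromFormat("processed %zd of %zd items (%s)",
+                                         done, total, utf8_name);
+
+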
.. c:function:: PyObject* PyUnicode_FromFormatV(const char *format, va_list vargs)
Identical to :c:func:`PyUnicode_FromFormat` except that it takes exactly two
arguments.
+
+.. c:function:: PyObject* PyUnicode_FromEncodedObject(PyObject *obj, \
+ const char *encoding, const char *errors)
+
+ Coerce an encoded object *obj* to a Unicode object and return a reference with
+ incremented refcount.
+
+ :class:`bytes`, :class:`bytearray` and other char buffer compatible objects
+ are decoded according to the given *encoding* and using the error handling
+ defined by *errors*. Both can be *NULL* to have the interface use the default
+ values (see the next section for details).
+
+ All other objects, including Unicode objects, cause a :exc:`TypeError` to be
+ set.
+
+ The API returns *NULL* if there was an error. The caller is responsible for
+ decref'ing the returned objects.
+
+
+.. c:function:: Py_ssize_t PyUnicode_GetLength(PyObject *unicode)
+
+ Return the length of the Unicode object, in code points.
+
+ .. versionadded:: 3.3
+
+
+.. c:function:: int PyUnicode_CopyCharacters(PyObject *to, Py_ssize_t to_start, \
+ PyObject *from, Py_ssize_t from_start, Py_ssize_t how_many)
+
+ Copy characters from one Unicode object into another. This function performs
+ character conversion when necessary and falls back to :c:func:`memcpy` if
+ possible. Returns ``-1`` and sets an exception on error, otherwise returns
+ ``0``.
+
+ .. versionadded:: 3.3
+
+
+.. c:function:: Py_ssize_t PyUnicode_Fill(PyObject *unicode, Py_ssize_t start, \
+ Py_ssize_t length, Py_UCS4 fill_char)
+
+ Fill a string with a character: write *fill_char* into
+ ``unicode[start:start+length]``.
+
+ Fail if *fill_char* is bigger than the string maximum character, or if the
+ string has more than 1 reference.
+
+ Return the number of written characters, or return ``-1`` and raise an
+ exception on error.
+
+ .. versionadded:: 3.3
+
+
+.. c:function:: int PyUnicode_WriteChar(PyObject *unicode, Py_ssize_t index, \
+ Py_UCS4 character)
+
+ Write a character to a string. The string must have been created through
+ :c:func:`PyUnicode_New`. Since Unicode strings are supposed to be immutable,
+ the string must not be shared and must not have been hashed yet.
+
+ This function checks that *unicode* is a Unicode object, that the index is
+ not out of bounds, and that the object can be modified safely (i.e. that it
+ its reference count is one), in contrast to the macro version
+ :c:func:`PyUnicode_WRITE_CHAR`.
+
+ .. versionadded:: 3.3
+
+
+.. c:function:: Py_UCS4 PyUnicode_ReadChar(PyObject *unicode, Py_ssize_t index)
+
+ Read a character from a string. This function checks that *unicode* is a
+ Unicode object and the index is not out of bounds, in contrast to the macro
+ version :c:func:`PyUnicode_READ_CHAR`.
+
+ .. versionadded:: 3.3
+
+
+.. c:function:: PyObject* PyUnicode_Substring(PyObject *str, Py_ssize_t start, \
+ Py_ssize_t end)
+
+ Return a substring of *str*, from character index *start* (included) to
+ character index *end* (excluded). Negative indices are not supported.
+
+ .. versionadded:: 3.3
+
+
+.. c:function:: Py_UCS4* PyUnicode_AsUCS4(PyObject *u, Py_UCS4 *buffer, \
+ Py_ssize_t buflen, int copy_null)
+
+ Copy the string *u* into a UCS4 buffer, including a null character, if
+ *copy_null* is set. Returns *NULL* and sets an exception on error (in
+ particular, a :exc:`ValueError` if *buflen* is smaller than the length of
+ *u*). *buffer* is returned on success.
+
+ .. versionadded:: 3.3
+
+
+.. c:function:: Py_UCS4* PyUnicode_AsUCS4Copy(PyObject *u)
+
+ Copy the string *u* into a new UCS4 buffer that is allocated using
+ :c:func:`PyMem_Malloc`. If this fails, *NULL* is returned with a
+ :exc:`MemoryError` set.
+
+ .. versionadded:: 3.3
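+
+ A typical use, as a sketch::
+
+    Py_UCS4 *buf = PyUnicode_AsUCS4Copy(u);
+    if (buf == NULL)
+        return NULL;    /* MemoryError set */
+    /* ... use buf[0] through buf[PyUnicode_GET_LENGTH(u) - 1] ... */
+    PyMem_Free(buf);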
+
+
+Deprecated Py_UNICODE APIs
+""""""""""""""""""""""""""
+
+.. deprecated-removed:: 3.3 4.0
+
+These API functions are deprecated with the implementation of :pep:`393`.
+Extension modules can continue using them, as they will not be removed in Python
+3.x, but need to be aware that their use can now cause performance and memory hits.
+
+
+.. c:function:: PyObject* PyUnicode_FromUnicode(const Py_UNICODE *u, Py_ssize_t size)
+
+ Create a Unicode object from the Py_UNICODE buffer *u* of the given size. *u*
+ may be *NULL* which causes the contents to be undefined. It is the user's
+ responsibility to fill in the needed data. The buffer is copied into the new
+ object.
+
+ If the buffer is not *NULL*, the return value might be a shared object.
+ Therefore, modification of the resulting Unicode object is only allowed when
+ *u* is *NULL*.
+
+ If the buffer is *NULL*, :c:func:`PyUnicode_READY` must be called once the
+ string content has been filled before using any of the access macros such as
+ :c:func:`PyUnicode_KIND`.
+
+ Please migrate to using :c:func:`PyUnicode_FromKindAndData` or
+ :c:func:`PyUnicode_New`.
+
+
+.. c:function:: Py_UNICODE* PyUnicode_AsUnicode(PyObject *unicode)
+
+ Return a read-only pointer to the Unicode object's internal
+ :c:type:`Py_UNICODE` buffer, or *NULL* on error. This will create the
+ :c:type:`Py_UNICODE*` representation of the object if it is not yet
+ available. Note that the resulting :c:type:`Py_UNICODE` string may contain
+ embedded null characters, which would cause the string to be truncated when
+ used in most C functions.
+
+ Please migrate to using :c:func:`PyUnicode_AsUCS4`,
+ :c:func:`PyUnicode_Substring`, :c:func:`PyUnicode_ReadChar` or similar new
+ APIs.
+
+
.. c:function:: PyObject* PyUnicode_TransformDecimalToASCII(Py_UNICODE *s, Py_ssize_t size)
Create a Unicode object by replacing all decimal digits in
:c:type:`Py_UNICODE` buffer of the given *size* by ASCII digits 0--9
- according to their decimal value. Return *NULL* if an exception
- occurs.
+ according to their decimal value. Return *NULL* if an exception occurs.
-.. c:function:: Py_UNICODE* PyUnicode_AsUnicode(PyObject *unicode)
+.. c:function:: Py_UNICODE* PyUnicode_AsUnicodeAndSize(PyObject *unicode, Py_ssize_t *size)
- Return a read-only pointer to the Unicode object's internal
- :c:type:`Py_UNICODE` buffer, *NULL* if *unicode* is not a Unicode object.
- Note that the resulting :c:type:`Py_UNICODE*` string may contain embedded
- null characters, which would cause the string to be truncated when used in
- most C functions.
+ Like :c:func:`PyUnicode_AsUnicode`, but also saves the :c:type:`Py_UNICODE`
+ array length in *size*. Note that the resulting :c:type:`Py_UNICODE*` string
+ may contain embedded null characters, which would cause the string to be
+ truncated when used in most C functions.
+
+ .. versionadded:: 3.3
.. c:function:: Py_UNICODE* PyUnicode_AsUnicodeCopy(PyObject *unicode)
@@ -350,44 +706,76 @@ APIs:
Create a copy of a Unicode string ending with a nul character. Return *NULL*
and raise a :exc:`MemoryError` exception on memory allocation failure,
otherwise return a new allocated buffer (use :c:func:`PyMem_Free` to free
- the buffer). Note that the resulting :c:type:`Py_UNICODE*` string may contain
- embedded null characters, which would cause the string to be truncated when
- used in most C functions.
+ the buffer). Note that the resulting :c:type:`Py_UNICODE*` string may
+ contain embedded null characters, which would cause the string to be
+ truncated when used in most C functions.
.. versionadded:: 3.2
+ Please migrate to using :c:func:`PyUnicode_AsUCS4Copy` or similar new APIs.
+
.. c:function:: Py_ssize_t PyUnicode_GetSize(PyObject *unicode)
- Return the length of the Unicode object.
+ Return the size of the deprecated :c:type:`Py_UNICODE` representation, in
+ code units (this includes surrogate pairs as 2 units).
+ Please migrate to using :c:func:`PyUnicode_GetLength`.
-.. c:function:: PyObject* PyUnicode_FromEncodedObject(PyObject *obj, const char *encoding, const char *errors)
- Coerce an encoded object *obj* to an Unicode object and return a reference with
- incremented refcount.
+.. c:function:: PyObject* PyUnicode_FromObject(PyObject *obj)
- :class:`bytes`, :class:`bytearray` and other char buffer compatible objects
- are decoded according to the given *encoding* and using the error handling
- defined by *errors*. Both can be *NULL* to have the interface use the default
- values (see the next section for details).
+ Shortcut for ``PyUnicode_FromEncodedObject(obj, NULL, "strict")`` which is used
+ throughout the interpreter whenever coercion to Unicode is needed.
- All other objects, including Unicode objects, cause a :exc:`TypeError` to be
- set.
- The API returns *NULL* if there was an error. The caller is responsible for
- decref'ing the returned objects.
+Locale Encoding
+"""""""""""""""
+The current locale encoding can be used to decode text from the operating
+system.
-.. c:function:: PyObject* PyUnicode_FromObject(PyObject *obj)
+.. c:function:: PyObject* PyUnicode_DecodeLocaleAndSize(const char *str, Py_ssize_t len, int surrogateescape)
+
+ Decode a string from the current locale encoding. The decoder is strict if
+ *surrogateescape* is equal to zero; otherwise, it uses the
+ ``'surrogateescape'`` error handler (:pep:`383`) to escape undecodable
+ bytes, so a byte sequence that would otherwise decode to a surrogate
+ character is likewise escaped instead of being decoded. *str* must end
+ with a null character but cannot contain embedded null characters.
+
+ .. seealso::
+
+ Use :c:func:`PyUnicode_DecodeFSDefaultAndSize` to decode a string from
+ :c:data:`Py_FileSystemDefaultEncoding` (the locale encoding read at
+ Python startup).
+
+ .. versionadded:: 3.3
+
+
+.. c:function:: PyObject* PyUnicode_DecodeLocale(const char *str, int surrogateescape)
+
+ Similar to :c:func:`PyUnicode_DecodeLocaleAndSize`, but compute the string
+ length using :c:func:`strlen`.
+
+ .. versionadded:: 3.3
- Shortcut for ``PyUnicode_FromEncodedObject(obj, NULL, "strict")`` which is used
- throughout the interpreter whenever coercion to Unicode is needed.
-If the platform supports :c:type:`wchar_t` and provides a header file wchar.h,
-Python can interface directly to this type using the following functions.
-Support is optimized if Python's own :c:type:`Py_UNICODE` type is identical to
-the system's :c:type:`wchar_t`.
+.. c:function:: PyObject* PyUnicode_EncodeLocale(PyObject *unicode, int surrogateescape)
+
+ Encode a Unicode object to the current locale encoding. The encoder is
+ strict if *surrogateescape* is equal to zero; otherwise, it uses the
+ ``'surrogateescape'`` error handler (:pep:`383`). Return a :class:`bytes`
+ object. *unicode* cannot contain embedded null characters.
+
+ .. seealso::
+
+ Use :c:func:`PyUnicode_EncodeFSDefault` to encode a string to
+ :c:data:`Py_FileSystemDefaultEncoding` (the locale encoding read at
+ Python startup).
+
+ .. versionadded:: 3.3
File System Encoding
@@ -430,6 +818,13 @@ used, passing :c:func:`PyUnicode_FSDecoder` as the conversion function:
If :c:data:`Py_FileSystemDefaultEncoding` is not set, fall back to the
locale encoding.
+ .. seealso::
+
+ :c:data:`Py_FileSystemDefaultEncoding` is initialized at startup from the
+ locale encoding and cannot be modified later. If you need to decode a
+ string from the current locale encoding, use
+ :c:func:`PyUnicode_DecodeLocaleAndSize`.
+
.. versionchanged:: 3.2
Use ``'strict'`` error handler on Windows.
@@ -458,6 +853,13 @@ used, passing :c:func:`PyUnicode_FSDecoder` as the conversion function:
If :c:data:`Py_FileSystemDefaultEncoding` is not set, fall back to the
locale encoding.
+ .. seealso::
+
+ :c:data:`Py_FileSystemDefaultEncoding` is initialized at startup from the
+ locale encoding and cannot be modified later. If you need to encode a
+ string to the current locale encoding, use
+ :c:func:`PyUnicode_EncodeLocale`.
+
.. versionadded:: 3.2
@@ -479,9 +881,9 @@ wchar_t Support
Copy the Unicode object contents into the :c:type:`wchar_t` buffer *w*. At most
*size* :c:type:`wchar_t` characters are copied (excluding a possibly trailing
0-termination character). Return the number of :c:type:`wchar_t` characters
- copied or -1 in case of an error. Note that the resulting :c:type:`wchar_t`
+ copied or -1 in case of an error. Note that the resulting :c:type:`wchar_t*`
string may or may not be 0-terminated. It is the responsibility of the caller
- to make sure that the :c:type:`wchar_t` string is 0-terminated in case this is
+ to make sure that the :c:type:`wchar_t*` string is 0-terminated in case this is
required by the application. Also, note that the :c:type:`wchar_t*` string
might contain null characters, which would cause the string to be truncated
when used with most C functions.
@@ -497,12 +899,32 @@ wchar_t Support
Returns a buffer allocated by :c:func:`PyMem_Malloc` (use
:c:func:`PyMem_Free` to free it) on success. On error, returns *NULL*,
*\*size* is undefined and raises a :exc:`MemoryError`. Note that the
- resulting :c:type:`wchar_t*` string might contain null characters, which
+ resulting :c:type:`wchar_t` string might contain null characters, which
would cause the string to be truncated when used with most C functions.
.. versionadded:: 3.2
+UCS4 Support
+""""""""""""
+
+.. versionadded:: 3.3
+
+.. XXX are these meant to be public?
+
+.. c:function:: size_t Py_UCS4_strlen(const Py_UCS4 *u)
+ Py_UCS4* Py_UCS4_strcpy(Py_UCS4 *s1, const Py_UCS4 *s2)
+ Py_UCS4* Py_UCS4_strncpy(Py_UCS4 *s1, const Py_UCS4 *s2, size_t n)
+ Py_UCS4* Py_UCS4_strcat(Py_UCS4 *s1, const Py_UCS4 *s2)
+ int Py_UCS4_strcmp(const Py_UCS4 *s1, const Py_UCS4 *s2)
+ int Py_UCS4_strncmp(const Py_UCS4 *s1, const Py_UCS4 *s2, size_t n)
+ Py_UCS4* Py_UCS4_strchr(const Py_UCS4 *s, Py_UCS4 c)
+ Py_UCS4* Py_UCS4_strrchr(const Py_UCS4 *s, Py_UCS4 c)
+
+ These utility functions work on strings of :c:type:`Py_UCS4` characters and
+ otherwise behave like the C standard library functions with the same name.
+
+
.. _builtincodecs:
Built-in Codecs
@@ -537,7 +959,8 @@ Generic Codecs
These are the generic codec APIs:
-.. c:function:: PyObject* PyUnicode_Decode(const char *s, Py_ssize_t size, const char *encoding, const char *errors)
+.. c:function:: PyObject* PyUnicode_Decode(const char *s, Py_ssize_t size, \
+ const char *encoding, const char *errors)
Create a Unicode object by decoding *size* bytes of the encoded string *s*.
*encoding* and *errors* have the same meaning as the parameters of the same name
@@ -546,7 +969,18 @@ These are the generic codec APIs:
the codec.
-.. c:function:: PyObject* PyUnicode_Encode(const Py_UNICODE *s, Py_ssize_t size, const char *encoding, const char *errors)
+.. c:function:: PyObject* PyUnicode_AsEncodedString(PyObject *unicode, \
+ const char *encoding, const char *errors)
+
+ Encode a Unicode object and return the result as Python bytes object.
+ *encoding* and *errors* have the same meaning as the parameters of the same
+ name in the Unicode :meth:`encode` method. The codec to be used is looked up
+ using the Python codec registry. Return *NULL* if an exception was raised by
+ the codec.
+
+
+.. c:function:: PyObject* PyUnicode_Encode(const Py_UNICODE *s, Py_ssize_t size, \
+ const char *encoding, const char *errors)
Encode the :c:type:`Py_UNICODE` buffer *s* of the given *size* and return a Python
bytes object. *encoding* and *errors* have the same meaning as the
@@ -554,14 +988,9 @@ These are the generic codec APIs:
to be used is looked up using the Python codec registry. Return *NULL* if an
exception was raised by the codec.
-
-.. c:function:: PyObject* PyUnicode_AsEncodedString(PyObject *unicode, const char *encoding, const char *errors)
-
- Encode a Unicode object and return the result as Python bytes object.
- *encoding* and *errors* have the same meaning as the parameters of the same
- name in the Unicode :meth:`encode` method. The codec to be used is looked up
- using the Python codec registry. Return *NULL* if an exception was raised by
- the codec.
+ .. deprecated-removed:: 3.3 4.0
+ Part of the old-style :c:type:`Py_UNICODE` API; please migrate to using
+ :c:func:`PyUnicode_AsEncodedString`.
UTF-8 Codecs
@@ -576,7 +1005,8 @@ These are the UTF-8 codec APIs:
*s*. Return *NULL* if an exception was raised by the codec.
-.. c:function:: PyObject* PyUnicode_DecodeUTF8Stateful(const char *s, Py_ssize_t size, const char *errors, Py_ssize_t *consumed)
+.. c:function:: PyObject* PyUnicode_DecodeUTF8Stateful(const char *s, Py_ssize_t size, \
+ const char *errors, Py_ssize_t *consumed)
If *consumed* is *NULL*, behave like :c:func:`PyUnicode_DecodeUTF8`. If
*consumed* is not *NULL*, trailing incomplete UTF-8 byte sequences will not be
@@ -584,18 +1014,45 @@ These are the UTF-8 codec APIs:
that have been decoded will be stored in *consumed*.
+.. c:function:: PyObject* PyUnicode_AsUTF8String(PyObject *unicode)
+
+ Encode a Unicode object using UTF-8 and return the result as Python bytes
+ object. Error handling is "strict". Return *NULL* if an exception was
+ raised by the codec.
+
+
+.. c:function:: char* PyUnicode_AsUTF8AndSize(PyObject *unicode, Py_ssize_t *size)
+
+ Return a pointer to the UTF-8 encoding of the Unicode object, and store the
+ size of the encoded representation (in bytes) in *size*. *size* may be
+ *NULL*; in that case, no size is stored.
+
+ In the case of an error, *NULL* is returned with an exception set and no
+ *size* is stored.
+
+ This caches the UTF-8 representation of the string in the Unicode object, and
+ subsequent calls will return a pointer to the same buffer. The caller is not
+ responsible for deallocating the buffer.
+
+ .. versionadded:: 3.3
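+
+ A typical use, as a sketch::
+
+    Py_ssize_t size;
+    char *utf8 = PyUnicode_AsUTF8AndSize(obj, &size);
+    if (utf8 == NULL)
+        return NULL;    /* exception set */
+    /* "utf8" is owned by "obj" and stays valid while "obj" is alive;
+       do not free it. */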
+
+
+.. c:function:: char* PyUnicode_AsUTF8(PyObject *unicode)
+
+ As :c:func:`PyUnicode_AsUTF8AndSize`, but does not store the size.
+
+ .. versionadded:: 3.3
+
+
.. c:function:: PyObject* PyUnicode_EncodeUTF8(const Py_UNICODE *s, Py_ssize_t size, const char *errors)
Encode the :c:type:`Py_UNICODE` buffer *s* of the given *size* using UTF-8 and
return a Python bytes object. Return *NULL* if an exception was raised by
the codec.
-
-.. c:function:: PyObject* PyUnicode_AsUTF8String(PyObject *unicode)
-
- Encode a Unicode object using UTF-8 and return the result as Python bytes
- object. Error handling is "strict". Return *NULL* if an exception was
- raised by the codec.
+ .. deprecated-removed:: 3.3 4.0
+ Part of the old-style :c:type:`Py_UNICODE` API; please migrate to using
+ :c:func:`PyUnicode_AsUTF8String` or :c:func:`PyUnicode_AsUTF8AndSize`.
UTF-32 Codecs
@@ -604,7 +1061,8 @@ UTF-32 Codecs
These are the UTF-32 codec APIs:
-.. c:function:: PyObject* PyUnicode_DecodeUTF32(const char *s, Py_ssize_t size, const char *errors, int *byteorder)
+.. c:function:: PyObject* PyUnicode_DecodeUTF32(const char *s, Py_ssize_t size, \
+ const char *errors, int *byteorder)
Decode *size* bytes from a UTF-32 encoded buffer string and return the
corresponding Unicode object. *errors* (if non-*NULL*) defines the error
@@ -632,7 +1090,8 @@ These are the UTF-32 codec APIs:
Return *NULL* if an exception was raised by the codec.
-.. c:function:: PyObject* PyUnicode_DecodeUTF32Stateful(const char *s, Py_ssize_t size, const char *errors, int *byteorder, Py_ssize_t *consumed)
+.. c:function:: PyObject* PyUnicode_DecodeUTF32Stateful(const char *s, Py_ssize_t size, \
+ const char *errors, int *byteorder, Py_ssize_t *consumed)
If *consumed* is *NULL*, behave like :c:func:`PyUnicode_DecodeUTF32`. If
*consumed* is not *NULL*, :c:func:`PyUnicode_DecodeUTF32Stateful` will not treat
@@ -641,7 +1100,15 @@ These are the UTF-32 codec APIs:
that have been decoded will be stored in *consumed*.
-.. c:function:: PyObject* PyUnicode_EncodeUTF32(const Py_UNICODE *s, Py_ssize_t size, const char *errors, int byteorder)
+.. c:function:: PyObject* PyUnicode_AsUTF32String(PyObject *unicode)
+
+ Return a Python byte string using the UTF-32 encoding in native byte
+ order. The string always starts with a BOM mark. Error handling is "strict".
+ Return *NULL* if an exception was raised by the codec.
+
+
+.. c:function:: PyObject* PyUnicode_EncodeUTF32(const Py_UNICODE *s, Py_ssize_t size, \
+ const char *errors, int byteorder)
Return a Python bytes object holding the UTF-32 encoded value of the Unicode
data in *s*. Output is written according to the following byte order::
@@ -658,12 +1125,9 @@ These are the UTF-32 codec APIs:
Return *NULL* if an exception was raised by the codec.
-
-.. c:function:: PyObject* PyUnicode_AsUTF32String(PyObject *unicode)
-
- Return a Python byte string using the UTF-32 encoding in native byte
- order. The string always starts with a BOM mark. Error handling is "strict".
- Return *NULL* if an exception was raised by the codec.
+ .. deprecated-removed:: 3.3 4.0
+ Part of the old-style :c:type:`Py_UNICODE` API; please migrate to using
+ :c:func:`PyUnicode_AsUTF32String`.
UTF-16 Codecs
@@ -672,7 +1136,8 @@ UTF-16 Codecs
These are the UTF-16 codec APIs:
-.. c:function:: PyObject* PyUnicode_DecodeUTF16(const char *s, Py_ssize_t size, const char *errors, int *byteorder)
+.. c:function:: PyObject* PyUnicode_DecodeUTF16(const char *s, Py_ssize_t size, \
+ const char *errors, int *byteorder)
Decode *size* bytes from a UTF-16 encoded buffer string and return the
corresponding Unicode object. *errors* (if non-*NULL*) defines the error
@@ -699,7 +1164,8 @@ These are the UTF-16 codec APIs:
Return *NULL* if an exception was raised by the codec.
-.. c:function:: PyObject* PyUnicode_DecodeUTF16Stateful(const char *s, Py_ssize_t size, const char *errors, int *byteorder, Py_ssize_t *consumed)
+.. c:function:: PyObject* PyUnicode_DecodeUTF16Stateful(const char *s, Py_ssize_t size, \
+ const char *errors, int *byteorder, Py_ssize_t *consumed)
If *consumed* is *NULL*, behave like :c:func:`PyUnicode_DecodeUTF16`. If
*consumed* is not *NULL*, :c:func:`PyUnicode_DecodeUTF16Stateful` will not treat
@@ -708,7 +1174,15 @@ These are the UTF-16 codec APIs:
number of bytes that have been decoded will be stored in *consumed*.
-.. c:function:: PyObject* PyUnicode_EncodeUTF16(const Py_UNICODE *s, Py_ssize_t size, const char *errors, int byteorder)
+.. c:function:: PyObject* PyUnicode_AsUTF16String(PyObject *unicode)
+
+ Return a Python byte string using the UTF-16 encoding in native byte
+ order. The string always starts with a BOM mark. Error handling is "strict".
+ Return *NULL* if an exception was raised by the codec.
+
+
+.. c:function:: PyObject* PyUnicode_EncodeUTF16(const Py_UNICODE *s, Py_ssize_t size, \
+ const char *errors, int byteorder)
Return a Python bytes object holding the UTF-16 encoded value of the Unicode
data in *s*. Output is written according to the following byte order::
@@ -726,12 +1200,9 @@ These are the UTF-16 codec APIs:
Return *NULL* if an exception was raised by the codec.
-
-.. c:function:: PyObject* PyUnicode_AsUTF16String(PyObject *unicode)
-
- Return a Python byte string using the UTF-16 encoding in native byte
- order. The string always starts with a BOM mark. Error handling is "strict".
- Return *NULL* if an exception was raised by the codec.
+ .. deprecated-removed:: 3.3 4.0
+ Part of the old-style :c:type:`Py_UNICODE` API; please migrate to using
+ :c:func:`PyUnicode_AsUTF16String`.
UTF-7 Codecs
@@ -746,7 +1217,8 @@ These are the UTF-7 codec APIs:
*s*. Return *NULL* if an exception was raised by the codec.
-.. c:function:: PyObject* PyUnicode_DecodeUTF7Stateful(const char *s, Py_ssize_t size, const char *errors, Py_ssize_t *consumed)
+.. c:function:: PyObject* PyUnicode_DecodeUTF7Stateful(const char *s, Py_ssize_t size, \
+ const char *errors, Py_ssize_t *consumed)
If *consumed* is *NULL*, behave like :c:func:`PyUnicode_DecodeUTF7`. If
*consumed* is not *NULL*, trailing incomplete UTF-7 base-64 sections will not
@@ -754,7 +1226,8 @@ These are the UTF-7 codec APIs:
bytes that have been decoded will be stored in *consumed*.
-.. c:function:: PyObject* PyUnicode_EncodeUTF7(const Py_UNICODE *s, Py_ssize_t size, int base64SetO, int base64WhiteSpace, const char *errors)
+.. c:function:: PyObject* PyUnicode_EncodeUTF7(const Py_UNICODE *s, Py_ssize_t size, \
+ int base64SetO, int base64WhiteSpace, const char *errors)
Encode the :c:type:`Py_UNICODE` buffer of the given size using UTF-7 and
return a Python bytes object. Return *NULL* if an exception was raised by
@@ -765,6 +1238,11 @@ These are the UTF-7 codec APIs:
nonzero, whitespace will be encoded in base-64. Both are set to zero for the
Python "utf-7" codec.
+ .. deprecated-removed:: 3.3 4.0
+ Part of the old-style :c:type:`Py_UNICODE` API.
+
+ .. XXX replace with what?
+
Unicode-Escape Codecs
"""""""""""""""""""""
@@ -772,24 +1250,29 @@ Unicode-Escape Codecs
These are the "Unicode Escape" codec APIs:
-.. c:function:: PyObject* PyUnicode_DecodeUnicodeEscape(const char *s, Py_ssize_t size, const char *errors)
+.. c:function:: PyObject* PyUnicode_DecodeUnicodeEscape(const char *s, \
+ Py_ssize_t size, const char *errors)
Create a Unicode object by decoding *size* bytes of the Unicode-Escape encoded
string *s*. Return *NULL* if an exception was raised by the codec.
+.. c:function:: PyObject* PyUnicode_AsUnicodeEscapeString(PyObject *unicode)
+
+ Encode a Unicode object using Unicode-Escape and return the result as Python
+ string object. Error handling is "strict". Return *NULL* if an exception was
+ raised by the codec.
+
+
.. c:function:: PyObject* PyUnicode_EncodeUnicodeEscape(const Py_UNICODE *s, Py_ssize_t size)
Encode the :c:type:`Py_UNICODE` buffer of the given *size* using Unicode-Escape and
return a Python string object. Return *NULL* if an exception was raised by the
codec.
-
-.. c:function:: PyObject* PyUnicode_AsUnicodeEscapeString(PyObject *unicode)
-
- Encode a Unicode object using Unicode-Escape and return the result as Python
- string object. Error handling is "strict". Return *NULL* if an exception was
- raised by the codec.
+ .. deprecated-removed:: 3.3 4.0
+ Part of the old-style :c:type:`Py_UNICODE` API; please migrate to using
+ :c:func:`PyUnicode_AsUnicodeEscapeString`.
Raw-Unicode-Escape Codecs
@@ -798,19 +1281,13 @@ Raw-Unicode-Escape Codecs
These are the "Raw Unicode Escape" codec APIs:
-.. c:function:: PyObject* PyUnicode_DecodeRawUnicodeEscape(const char *s, Py_ssize_t size, const char *errors)
+.. c:function:: PyObject* PyUnicode_DecodeRawUnicodeEscape(const char *s, \
+ Py_ssize_t size, const char *errors)
Create a Unicode object by decoding *size* bytes of the Raw-Unicode-Escape
encoded string *s*. Return *NULL* if an exception was raised by the codec.
-.. c:function:: PyObject* PyUnicode_EncodeRawUnicodeEscape(const Py_UNICODE *s, Py_ssize_t size, const char *errors)
-
- Encode the :c:type:`Py_UNICODE` buffer of the given *size* using Raw-Unicode-Escape
- and return a Python string object. Return *NULL* if an exception was raised by
- the codec.
-
-
.. c:function:: PyObject* PyUnicode_AsRawUnicodeEscapeString(PyObject *unicode)
Encode a Unicode object using Raw-Unicode-Escape and return the result as
@@ -818,6 +1295,18 @@ These are the "Raw Unicode Escape" codec APIs:
was raised by the codec.
+.. c:function:: PyObject* PyUnicode_EncodeRawUnicodeEscape(const Py_UNICODE *s, \
+ Py_ssize_t size, const char *errors)
+
+ Encode the :c:type:`Py_UNICODE` buffer of the given *size* using Raw-Unicode-Escape
+ and return a Python string object. Return *NULL* if an exception was raised by
+ the codec.
+
+ .. deprecated-removed:: 3.3 4.0
+ Part of the old-style :c:type:`Py_UNICODE` API; please migrate to using
+ :c:func:`PyUnicode_AsRawUnicodeEscapeString`.
+
+
Latin-1 Codecs
""""""""""""""
@@ -831,18 +1320,22 @@ ordinals and only these are accepted by the codecs during encoding.
*s*. Return *NULL* if an exception was raised by the codec.
+.. c:function:: PyObject* PyUnicode_AsLatin1String(PyObject *unicode)
+
+ Encode a Unicode object using Latin-1 and return the result as Python bytes
+ object. Error handling is "strict". Return *NULL* if an exception was
+ raised by the codec.
+
+
.. c:function:: PyObject* PyUnicode_EncodeLatin1(const Py_UNICODE *s, Py_ssize_t size, const char *errors)
Encode the :c:type:`Py_UNICODE` buffer of the given *size* using Latin-1 and
return a Python bytes object. Return *NULL* if an exception was raised by
the codec.
-
-.. c:function:: PyObject* PyUnicode_AsLatin1String(PyObject *unicode)
-
- Encode a Unicode object using Latin-1 and return the result as Python bytes
- object. Error handling is "strict". Return *NULL* if an exception was
- raised by the codec.
+ .. deprecated-removed:: 3.3 4.0
+ Part of the old-style :c:type:`Py_UNICODE` API; please migrate to using
+ :c:func:`PyUnicode_AsLatin1String`.
ASCII Codecs
@@ -858,18 +1351,22 @@ codes generate errors.
*s*. Return *NULL* if an exception was raised by the codec.
+.. c:function:: PyObject* PyUnicode_AsASCIIString(PyObject *unicode)
+
+ Encode a Unicode object using ASCII and return the result as Python bytes
+ object. Error handling is "strict". Return *NULL* if an exception was
+ raised by the codec.
+
+
.. c:function:: PyObject* PyUnicode_EncodeASCII(const Py_UNICODE *s, Py_ssize_t size, const char *errors)
Encode the :c:type:`Py_UNICODE` buffer of the given *size* using ASCII and
return a Python bytes object. Return *NULL* if an exception was raised by
the codec.
-
-.. c:function:: PyObject* PyUnicode_AsASCIIString(PyObject *unicode)
-
- Encode a Unicode object using ASCII and return the result as Python bytes
- object. Error handling is "strict". Return *NULL* if an exception was
- raised by the codec.
+ .. deprecated-removed:: 3.3 4.0
+ Part of the old-style :c:type:`Py_UNICODE` API; please migrate to using
+ :c:func:`PyUnicode_AsASCIIString`.
Character Map Codecs
@@ -898,7 +1395,8 @@ characters to different code points.
These are the mapping codec APIs:
-.. c:function:: PyObject* PyUnicode_DecodeCharmap(const char *s, Py_ssize_t size, PyObject *mapping, const char *errors)
+.. c:function:: PyObject* PyUnicode_DecodeCharmap(const char *s, Py_ssize_t size, \
+ PyObject *mapping, const char *errors)
Create a Unicode object by decoding *size* bytes of the encoded string *s* using
the given *mapping* object. Return *NULL* if an exception was raised by the
@@ -908,13 +1406,6 @@ These are the mapping codec APIs:
treated as "undefined mapping".
-.. c:function:: PyObject* PyUnicode_EncodeCharmap(const Py_UNICODE *s, Py_ssize_t size, PyObject *mapping, const char *errors)
-
- Encode the :c:type:`Py_UNICODE` buffer of the given *size* using the given
- *mapping* object and return a Python string object. Return *NULL* if an
- exception was raised by the codec.
-
-
.. c:function:: PyObject* PyUnicode_AsCharmapString(PyObject *unicode, PyObject *mapping)
Encode a Unicode object using the given *mapping* object and return the result
@@ -924,7 +1415,8 @@ These are the mapping codec APIs:
The following codec API is special in that it maps Unicode to Unicode.
-.. c:function:: PyObject* PyUnicode_TranslateCharmap(const Py_UNICODE *s, Py_ssize_t size, PyObject *table, const char *errors)
+.. c:function:: PyObject* PyUnicode_TranslateCharmap(const Py_UNICODE *s, Py_ssize_t size, \
+ PyObject *table, const char *errors)
Translate a :c:type:`Py_UNICODE` buffer of the given *size* by applying a
character mapping *table* to it and return the resulting Unicode object. Return
@@ -937,6 +1429,22 @@ The following codec API is special in that maps Unicode to Unicode.
and sequences work well. Unmapped character ordinals (ones which cause a
:exc:`LookupError`) are left untouched and are copied as-is.
+ .. deprecated-removed:: 3.3 4.0
+ Part of the old-style :c:type:`Py_UNICODE` API.
+
+ .. XXX replace with what?
+
+
+.. c:function:: PyObject* PyUnicode_EncodeCharmap(const Py_UNICODE *s, Py_ssize_t size, \
+ PyObject *mapping, const char *errors)
+
+ Encode the :c:type:`Py_UNICODE` buffer of the given *size* using the given
+ *mapping* object and return a Python string object. Return *NULL* if an
+ exception was raised by the codec.
+
+ .. deprecated-removed:: 3.3 4.0
+ Part of the old-style :c:type:`Py_UNICODE` API; please migrate to using
+ :c:func:`PyUnicode_AsCharmapString`.
MBCS codecs for Windows
@@ -953,7 +1461,8 @@ the user settings on the machine running the codec.
Return *NULL* if an exception was raised by the codec.
-.. c:function:: PyObject* PyUnicode_DecodeMBCSStateful(const char *s, int size, const char *errors, int *consumed)
+.. c:function:: PyObject* PyUnicode_DecodeMBCSStateful(const char *s, int size, \
+ const char *errors, int *consumed)
If *consumed* is *NULL*, behave like :c:func:`PyUnicode_DecodeMBCS`. If
*consumed* is not *NULL*, :c:func:`PyUnicode_DecodeMBCSStateful` will not decode
@@ -961,18 +1470,31 @@ the user settings on the machine running the codec.
in *consumed*.
+.. c:function:: PyObject* PyUnicode_AsMBCSString(PyObject *unicode)
+
+ Encode a Unicode object using MBCS and return the result as Python bytes
+ object. Error handling is "strict". Return *NULL* if an exception was
+ raised by the codec.
+
+
+.. c:function:: PyObject* PyUnicode_EncodeCodePage(int code_page, PyObject *unicode, const char *errors)
+
+ Encode the Unicode object using the specified code page and return a Python
+ bytes object. Return *NULL* if an exception was raised by the codec. Use
+ the :c:data:`CP_ACP` code page to get the MBCS encoder.
+
+ .. versionadded:: 3.3
+
+
.. c:function:: PyObject* PyUnicode_EncodeMBCS(const Py_UNICODE *s, Py_ssize_t size, const char *errors)
Encode the :c:type:`Py_UNICODE` buffer of the given *size* using MBCS and return
a Python bytes object. Return *NULL* if an exception was raised by the
codec.
-
-.. c:function:: PyObject* PyUnicode_AsMBCSString(PyObject *unicode)
-
- Encode a Unicode object using MBCS and return the result as Python bytes
- object. Error handling is "strict". Return *NULL* if an exception was
- raised by the codec.
+ .. deprecated-removed:: 3.3 4.0
+ Part of the old-style :c:type:`Py_UNICODE` API; please migrate to using
+ :c:func:`PyUnicode_AsMBCSString` or :c:func:`PyUnicode_EncodeCodePage`.
Methods & Slots
@@ -1011,7 +1533,8 @@ They all return *NULL* or ``-1`` if an exception occurs.
characters are not included in the resulting strings.
-.. c:function:: PyObject* PyUnicode_Translate(PyObject *str, PyObject *table, const char *errors)
+.. c:function:: PyObject* PyUnicode_Translate(PyObject *str, PyObject *table, \
+ const char *errors)
Translate a string by applying a character mapping table to it and return the
resulting Unicode object.
@@ -1033,14 +1556,16 @@ They all return *NULL* or ``-1`` if an exception occurs.
Unicode string.
-.. c:function:: int PyUnicode_Tailmatch(PyObject *str, PyObject *substr, Py_ssize_t start, Py_ssize_t end, int direction)
+.. c:function:: int PyUnicode_Tailmatch(PyObject *str, PyObject *substr, \
+ Py_ssize_t start, Py_ssize_t end, int direction)
Return 1 if *substr* matches ``str[start:end]`` at the given tail end
(*direction* == -1 means to do a prefix match, *direction* == 1 a suffix match),
0 otherwise. Return ``-1`` if an error occurred.
-.. c:function:: Py_ssize_t PyUnicode_Find(PyObject *str, PyObject *substr, Py_ssize_t start, Py_ssize_t end, int direction)
+.. c:function:: Py_ssize_t PyUnicode_Find(PyObject *str, PyObject *substr, \
+ Py_ssize_t start, Py_ssize_t end, int direction)
Return the first position of *substr* in ``str[start:end]`` using the given
*direction* (*direction* == 1 means to do a forward search, *direction* == -1 a
@@ -1049,13 +1574,27 @@ They all return *NULL* or ``-1`` if an exception occurs.
occurred and an exception has been set.
-.. c:function:: Py_ssize_t PyUnicode_Count(PyObject *str, PyObject *substr, Py_ssize_t start, Py_ssize_t end)
+.. c:function:: Py_ssize_t PyUnicode_FindChar(PyObject *str, Py_UCS4 ch, \
+ Py_ssize_t start, Py_ssize_t end, int direction)
+
+ Return the first position of the character *ch* in ``str[start:end]`` using
+ the given *direction* (*direction* == 1 means to do a forward search,
+ *direction* == -1 a backward search). The return value is the index of the
+ first match; a value of ``-1`` indicates that no match was found, and ``-2``
+ indicates that an error occurred and an exception has been set.
+
+ .. versionadded:: 3.3
+
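+   A pure-Python model of the documented return convention (an illustrative
+   sketch; errors surface as exceptions in Python, so the ``-2`` case has no
+   direct analogue here)::
+
+      def find_char(s, ch, start, end, direction):
+          # direction == 1: forward search; direction == -1: backward search
+          if direction == 1:
+              return s.find(ch, start, end)    # -1 when not found
+          return s.rfind(ch, start, end)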
+
+.. c:function:: Py_ssize_t PyUnicode_Count(PyObject *str, PyObject *substr, \
+ Py_ssize_t start, Py_ssize_t end)
Return the number of non-overlapping occurrences of *substr* in
``str[start:end]``. Return ``-1`` if an error occurred.
-.. c:function:: PyObject* PyUnicode_Replace(PyObject *str, PyObject *substr, PyObject *replstr, Py_ssize_t maxcount)
+.. c:function:: PyObject* PyUnicode_Replace(PyObject *str, PyObject *substr, \
+ PyObject *replstr, Py_ssize_t maxcount)
Replace at most *maxcount* occurrences of *substr* in *str* with *replstr* and
return the resulting Unicode object. *maxcount* == -1 means replace all
@@ -1103,8 +1642,8 @@ They all return *NULL* or ``-1`` if an exception occurs.
Check whether *element* is contained in *container* and return true or false
accordingly.
- *element* has to coerce to a one element Unicode string. ``-1`` is returned if
- there was an error.
+ *element* has to coerce to a one-element Unicode string. ``-1`` is returned
+ if there was an error.
.. c:function:: void PyUnicode_InternInPlace(PyObject **string)
@@ -1123,7 +1662,6 @@ They all return *NULL* or ``-1`` if an exception occurs.
.. c:function:: PyObject* PyUnicode_InternFromString(const char *v)
A combination of :c:func:`PyUnicode_FromString` and
- :c:func:`PyUnicode_InternInPlace`, returning either a new unicode string object
- that has been interned, or a new ("owned") reference to an earlier interned
- string object with the same value.
-
+ :c:func:`PyUnicode_InternInPlace`, returning either a new unicode string
+ object that has been interned, or a new ("owned") reference to an earlier
+ interned string object with the same value.
diff --git a/Doc/contents.rst b/Doc/contents.rst
index c0c6af3..cc5c8e3 100644
--- a/Doc/contents.rst
+++ b/Doc/contents.rst
@@ -11,7 +11,7 @@
library/index.rst
extending/index.rst
c-api/index.rst
- distutils/index.rst
+ packaging/index.rst
install/index.rst
howto/index.rst
faq/index.rst
diff --git a/Doc/data/refcounts.dat b/Doc/data/refcounts.dat
index c7d7bd1..a1004ad 100644
--- a/Doc/data/refcounts.dat
+++ b/Doc/data/refcounts.dat
@@ -465,6 +465,11 @@ PyFunction_New:PyObject*::+1:
PyFunction_New:PyObject*:code:+1:
PyFunction_New:PyObject*:globals:+1:
+PyFunction_NewWithQualName:PyObject*::+1:
+PyFunction_NewWithQualName:PyObject*:code:+1:
+PyFunction_NewWithQualName:PyObject*:globals:+1:
+PyFunction_NewWithQualName:PyObject*:qualname:+1:
+
PyFunction_SetClosure:int:::
PyFunction_SetClosure:PyObject*:op:0:
PyFunction_SetClosure:PyObject*:closure:+1:
diff --git a/Doc/distutils/apiref.rst b/Doc/distutils/apiref.rst
index e15dc76..71702e5 100644
--- a/Doc/distutils/apiref.rst
+++ b/Doc/distutils/apiref.rst
@@ -160,7 +160,7 @@ the full reference.
.. class:: Extension
The Extension class describes a single C or C++ extension module in a setup
- script. It accepts the following keyword arguments in its constructor
+ script. It accepts the following keyword arguments in its constructor:
+------------------------+--------------------------------+---------------------------+
| argument name | value | type |
@@ -1157,12 +1157,11 @@ other utility module.
.. function:: grok_environment_error(exc[, prefix='error: '])
- Generate a useful error message from an :exc:`EnvironmentError` (:exc:`IOError`
- or :exc:`OSError`) exception object. Handles Python 1.5.1 and later styles,
- and does what it can to deal with exception objects that don't have a filename
- (which happens when the error is due to a two-file operation, such as
- :func:`rename` or :func:`link`). Returns the error message as a string
- prefixed with *prefix*.
+ Generate a useful error message from an :exc:`OSError` exception object.
+ Handles Python 1.5.1 and later styles, and does what it can to deal with
+ exception objects that don't have a filename (which happens when the error
+ is due to a two-file operation, such as :func:`rename` or :func:`link`).
+ Returns the error message as a string prefixed with *prefix*.
.. function:: split_quoted(s)
diff --git a/Doc/distutils/index.rst b/Doc/distutils/index.rst
index ace8280..c8dd9f4 100644
--- a/Doc/distutils/index.rst
+++ b/Doc/distutils/index.rst
@@ -14,9 +14,12 @@ the module developer's point of view, describing how to use the Distutils to
make Python modules and extensions easily available to a wider audience with
very little overhead for build/release/install mechanics.
+.. deprecated:: 3.3
+ :mod:`packaging` replaces Distutils. See :ref:`packaging-index` and
+ :ref:`packaging-install-index`.
+
.. toctree::
:maxdepth: 2
- :numbered:
introduction.rst
setupscript.rst
@@ -29,3 +32,10 @@ very little overhead for build/release/install mechanics.
extending.rst
commandref.rst
apiref.rst
+
+Another document describes how to install modules and extensions packaged
+following the above guidelines:
+
+.. toctree::
+
+ install.rst
diff --git a/Doc/distutils/install.rst b/Doc/distutils/install.rst
new file mode 100644
index 0000000..b20f1fb
--- /dev/null
+++ b/Doc/distutils/install.rst
@@ -0,0 +1,1086 @@
+.. highlightlang:: none
+
+.. _install-index:
+
+*****************************
+ Installing Python Modules
+*****************************
+
+:Author: Greg Ward
+:Release: |version|
+:Date: |today|
+
+.. TODO: Fill in XXX comments
+
+.. The audience for this document includes people who don't know anything
+ about Python and aren't about to learn the language just in order to
+ install and maintain it for their users, i.e. system administrators.
+ Thus, I have to be sure to explain the basics at some point:
+ sys.path and PYTHONPATH at least. Should probably give pointers to
+ other docs on "import site", PYTHONSTARTUP, PYTHONHOME, etc.
+
+ Finally, it might be useful to include all the material from my "Care
+ and Feeding of a Python Installation" talk in here somewhere. Yow!
+
+.. topic:: Abstract
+
+ This document describes the Python Distribution Utilities ("Distutils") from the
+ end-user's point of view, describing how to extend the capabilities of a
+ standard Python installation by building and installing third-party Python
+ modules and extensions.
+
+
+.. _inst-intro:
+
+Introduction
+============
+
+Although Python's extensive standard library covers many programming needs,
+there often comes a time when you need to add some new functionality to your
+Python installation in the form of third-party modules. This might be necessary
+to support your own programming, or to support an application that you want to
+use and that happens to be written in Python.
+
+In the past, there has been little support for adding third-party modules to an
+existing Python installation. With the introduction of the Python Distribution
+Utilities (Distutils for short) in Python 2.0, this changed.
+
+This document is aimed primarily at the people who need to install third-party
+Python modules: end-users and system administrators who just need to get some
+Python application running, and existing Python programmers who want to add some
+new goodies to their toolbox. You don't need to know Python to read this
+document; there will be some brief forays into using Python's interactive mode
+to explore your installation, but that's it. If you're looking for information
+on how to distribute your own Python modules so that others may use them, see
+the :ref:`distutils-index` manual.
+
+
+.. _inst-trivial-install:
+
+Best case: trivial installation
+-------------------------------
+
+In the best case, someone will have prepared a special version of the module
+distribution you want to install that is targeted specifically at your platform
+and is installed just like any other software on your platform. For example,
+the module developer might make an executable installer available for Windows
+users, an RPM package for users of RPM-based Linux systems (Red Hat, SuSE,
+Mandrake, and many others), a Debian package for users of Debian-based Linux
+systems, and so forth.
+
+In that case, you would download the installer appropriate to your platform and
+do the obvious thing with it: run it if it's an executable installer, ``rpm
+--install`` it if it's an RPM, etc. You don't need to run Python or a setup
+script, you don't need to compile anything---you might not even need to read any
+instructions (although it's always a good idea to do so anyway).
+
+Of course, things will not always be that easy. You might be interested in a
+module distribution that doesn't have an easy-to-use installer for your
+platform. In that case, you'll have to start with the source distribution
+released by the module's author/maintainer. Installing from a source
+distribution is not too hard, as long as the modules are packaged in the
+standard way. The bulk of this document is about building and installing
+modules from standard source distributions.
+
+
+.. _inst-new-standard:
+
+The new standard: Distutils
+---------------------------
+
+If you download a module source distribution, you can tell pretty quickly if it
+was packaged and distributed in the standard way, i.e. using the Distutils.
+First, the distribution's name and version number will be featured prominently
+in the name of the downloaded archive, e.g. :file:`foo-1.0.tar.gz` or
+:file:`widget-0.9.7.zip`. Next, the archive will unpack into a similarly-named
+directory: :file:`foo-1.0` or :file:`widget-0.9.7`. Additionally, the
+distribution will contain a setup script :file:`setup.py`, and a file named
+:file:`README.txt` or possibly just :file:`README`, which should explain that
+building and installing the module distribution is a simple matter of running
+one command from a terminal::
+
+ python setup.py install
+
+For Windows, this command should be run from a command prompt window
+(:menuselection:`Start --> Accessories`)::
+
+ setup.py install
+
+If all these things are true, then you already know how to build and install the
+modules you've just downloaded: Run the command above. Unless you need to
+install things in a non-standard way or customize the build process, you don't
+really need this manual. Or rather, the above command is everything you need to
+get out of this manual.
+
+
+.. _inst-standard-install:
+
+Standard Build and Install
+==========================
+
+As described in section :ref:`inst-new-standard`, building and installing a module
+distribution using the Distutils is usually one simple command to run from a
+terminal::
+
+ python setup.py install
+
+
+.. _inst-platform-variations:
+
+Platform variations
+-------------------
+
+You should always run the setup command from the distribution root directory,
+i.e. the top-level subdirectory that the module source distribution unpacks
+into. For example, if you've just downloaded a module source distribution
+:file:`foo-1.0.tar.gz` onto a Unix system, the normal thing to do is::
+
+ gunzip -c foo-1.0.tar.gz | tar xf - # unpacks into directory foo-1.0
+ cd foo-1.0
+ python setup.py install
+
+On Windows, you'd probably download :file:`foo-1.0.zip`. If you downloaded the
+archive file to :file:`C:\\Temp`, then it would unpack into
+:file:`C:\\Temp\\foo-1.0`; you can use either an archive manipulator with a
+graphical user interface (such as WinZip) or a command-line tool (such as
+:program:`unzip` or :program:`pkunzip`) to unpack the archive. Then, open a
+command prompt window and run::
+
+ cd c:\Temp\foo-1.0
+ python setup.py install
+
+
+.. _inst-splitting-up:
+
+Splitting the job up
+--------------------
+
+Running ``setup.py install`` builds and installs all modules in one run. If you
+prefer to work incrementally---especially useful if you want to customize the
+build process, or if things are going wrong---you can use the setup script to do
+one thing at a time. This is particularly helpful when the build and install
+will be done by different users---for example, you might want to build a module
+distribution and hand it off to a system administrator for installation (or do
+it yourself, with super-user privileges).
+
+For example, you can build everything in one step, and then install everything
+in a second step, by invoking the setup script twice::
+
+ python setup.py build
+ python setup.py install
+
+If you do this, you will notice that running the :command:`install` command
+first runs the :command:`build` command, which---in this case---quickly notices
+that it has nothing to do, since everything in the :file:`build` directory is
+up-to-date.
+
+You may not need this ability to break things down often if all you do is
+install modules downloaded off the 'net, but it's very handy for more advanced
+tasks. If you get into distributing your own Python modules and extensions,
+you'll run lots of individual Distutils commands on their own.
+
+
+.. _inst-how-build-works:
+
+How building works
+------------------
+
+As implied above, the :command:`build` command is responsible for putting the
+files to install into a *build directory*. By default, this is :file:`build`
+under the distribution root; if you're excessively concerned with speed, or want
+to keep the source tree pristine, you can change the build directory with the
+:option:`--build-base` option. For example::
+
+ python setup.py build --build-base=/tmp/pybuild/foo-1.0
+
+(Or you could do this permanently with a directive in your system or personal
+Distutils configuration file; see section :ref:`inst-config-files`.) Normally, this
+isn't necessary.
+
+The default layout for the build tree is as follows::
+
+ --- build/ --- lib/
+ or
+ --- build/ --- lib.<plat>/
+ temp.<plat>/
+
+where ``<plat>`` expands to a brief description of the current OS/hardware
+platform and Python version. The first form, with just a :file:`lib` directory,
+is used for "pure module distributions"---that is, module distributions that
+include only pure Python modules. If a module distribution contains any
+extensions (modules written in C/C++), then the second form, with two ``<plat>``
+directories, is used. In that case, the :file:`temp.{plat}` directory holds
+temporary files generated by the compile/link process that don't actually get
+installed. In either case, the :file:`lib` (or :file:`lib.{plat}`) directory
+contains all Python modules (pure Python and extensions) that will be installed.
+
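+You can check what ``<plat>`` expands to on your machine (a quick,
+illustrative probe; the directory name as actually created also embeds the
+Python version, e.g. :file:`lib.linux-x86_64-3.2`)::
+
+   >>> from distutils.util import get_platform
+   >>> get_platform()   # value varies by OS and hardware
+   'linux-x86_64'
+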
+In the future, more directories will be added to handle Python scripts,
+documentation, binary executables, and whatever else is needed to handle the job
+of installing Python modules and applications.
+
+
+.. _inst-how-install-works:
+
+How installation works
+----------------------
+
+After the :command:`build` command runs (whether you run it explicitly, or the
+:command:`install` command does it for you), the work of the :command:`install`
+command is relatively simple: all it has to do is copy everything under
+:file:`build/lib` (or :file:`build/lib.{plat}`) to your chosen installation
+directory.
+
+If you don't choose an installation directory---i.e., if you just run ``setup.py
+install``\ ---then the :command:`install` command installs to the standard
+location for third-party Python modules. This location varies by platform and
+by how you built/installed Python itself. On Unix (and Mac OS X, which is also
+Unix-based), it also depends on whether the module distribution being installed
+is pure Python or contains extensions ("non-pure"):
+
++-----------------+-----------------------------------------------------+--------------------------------------------------+-------+
+| Platform | Standard installation location | Default value | Notes |
++=================+=====================================================+==================================================+=======+
+| Unix (pure) | :file:`{prefix}/lib/python{X.Y}/site-packages` | :file:`/usr/local/lib/python{X.Y}/site-packages` | \(1) |
++-----------------+-----------------------------------------------------+--------------------------------------------------+-------+
+| Unix (non-pure) | :file:`{exec-prefix}/lib/python{X.Y}/site-packages` | :file:`/usr/local/lib/python{X.Y}/site-packages` | \(1) |
++-----------------+-----------------------------------------------------+--------------------------------------------------+-------+
+| Windows | :file:`{prefix}\\Lib\\site-packages` | :file:`C:\\Python{XY}\\Lib\\site-packages` | \(2) |
++-----------------+-----------------------------------------------------+--------------------------------------------------+-------+
+
+Notes:
+
+(1)
+ Most Linux distributions include Python as a standard part of the system, so
+ :file:`{prefix}` and :file:`{exec-prefix}` are usually both :file:`/usr` on
+ Linux. If you build Python yourself on Linux (or any Unix-like system), the
+ default :file:`{prefix}` and :file:`{exec-prefix}` are :file:`/usr/local`.
+
+(2)
+ The default installation directory on Windows was :file:`C:\\Program
+ Files\\Python` under Python 1.6a1, 1.5.2, and earlier.
+
+:file:`{prefix}` and :file:`{exec-prefix}` stand for the directories that Python
+is installed to, and where it finds its libraries at run-time. They are always
+the same under Windows, and very often the same under Unix and Mac OS X. You
+can find out what your Python installation uses for :file:`{prefix}` and
+:file:`{exec-prefix}` by running Python in interactive mode and typing a few
+simple commands. Under Unix, just type ``python`` at the shell prompt. Under
+Windows, choose :menuselection:`Start --> Programs --> Python X.Y -->
+Python (command line)`. Once the interpreter is started, you type Python code
+at the prompt. For example, on my Linux system, I type the three Python
+statements shown below, and get the output as shown, to find out my
+:file:`{prefix}` and :file:`{exec-prefix}`::
+
+ Python 2.4 (#26, Aug 7 2004, 17:19:02)
+ Type "help", "copyright", "credits" or "license" for more information.
+ >>> import sys
+ >>> sys.prefix
+ '/usr'
+ >>> sys.exec_prefix
+ '/usr'
+
+A few other placeholders are used in this document: :file:`{X.Y}` stands for the
+version of Python, for example ``3.2``; :file:`{abiflags}` will be replaced by
+the value of :data:`sys.abiflags` or the empty string for platforms which don't
+define ABI flags; :file:`{distname}` will be replaced by the name of the module
+distribution being installed. Dots and capitalization are important in the
+paths; for example, a value that uses ``python3.2`` on UNIX will typically use
+``Python32`` on Windows.
+
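+You can query the values these placeholders stand for from a running
+interpreter (a quick, illustrative check; the output shown will vary with
+your installation)::
+
+   >>> import sys, sysconfig
+   >>> sys.version_info[:2]   # the X.Y part
+   (3, 2)
+   >>> sys.abiflags           # POSIX only; not defined on Windows
+   'm'
+   >>> sysconfig.get_path('purelib')
+   '/usr/local/lib/python3.2/site-packages'
+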
+If you don't want to install modules to the standard location, or if you don't
+have permission to write there, then you need to read about alternate
+installations in section :ref:`inst-alt-install`. If you want to customize your
+installation directories more heavily, see section :ref:`inst-custom-install` on
+custom installations.
+
+
+.. _inst-alt-install:
+
+Alternate Installation
+======================
+
+Often, it is necessary or desirable to install modules to a location other than
+the standard location for third-party Python modules. For example, on a Unix
+system you might not have permission to write to the standard third-party module
+directory. Or you might wish to try out a module before making it a standard
+part of your local Python installation. This is especially true when upgrading
+a distribution already present: you want to make sure your existing base of
+scripts still works with the new version before actually upgrading.
+
+The Distutils :command:`install` command is designed to make installing module
+distributions to an alternate location simple and painless. The basic idea is
+that you supply a base directory for the installation, and the
+:command:`install` command picks a set of directories (called an *installation
+scheme*) under this base directory in which to install files. The details
+differ across platforms, so read whichever of the following sections applies to
+you.
+
+Note that the various alternate installation schemes are mutually exclusive: you
+can pass ``--user``, or ``--home``, or ``--prefix`` and ``--exec-prefix``, or
+``--install-base`` and ``--install-platbase``, but you can't mix from these
+groups.
+
+
+.. _inst-alt-install-user:
+
+Alternate installation: the user scheme
+---------------------------------------
+
+This scheme is designed to be the most convenient solution for users who don't
+have write permission to the global site-packages directory or don't want to
+install into it. It is enabled with a simple option::
+
+ python setup.py install --user
+
+Files will be installed into subdirectories of :data:`site.USER_BASE` (written
+as :file:`{userbase}` hereafter). This scheme installs pure Python modules and
+extension modules in the same location (also known as :data:`site.USER_SITE`).
+Here are the values for UNIX, including Mac OS X:
+
+=============== ===========================================================
+Type of file Installation directory
+=============== ===========================================================
+modules :file:`{userbase}/lib/python{X.Y}/site-packages`
+scripts :file:`{userbase}/bin`
+data :file:`{userbase}`
+C headers :file:`{userbase}/include/python{X.Y}{abiflags}/{distname}`
+=============== ===========================================================
+
+And here are the values used on Windows:
+
+=============== ===========================================================
+Type of file Installation directory
+=============== ===========================================================
+modules :file:`{userbase}\\Python{XY}\\site-packages`
+scripts :file:`{userbase}\\Scripts`
+data :file:`{userbase}`
+C headers :file:`{userbase}\\Python{XY}\\Include\\{distname}`
+=============== ===========================================================
+
+The advantage of using this scheme compared to the other ones described below is
+that the user site-packages directory is under normal conditions always included
+in :data:`sys.path` (see :mod:`site` for more information), which means that
+there is no additional step to perform after running the :file:`setup.py` script
+to finalize the installation.
+
+The :command:`build_ext` command also has a ``--user`` option to add
+:file:`{userbase}/include` to the compiler search path for header files and
+:file:`{userbase}/lib` to the compiler search path for libraries as well as to
+the runtime search path for shared C libraries (rpath).
+
+
+.. _inst-alt-install-home:
+
+Alternate installation: the home scheme
+---------------------------------------
+
+The idea behind the "home scheme" is that you build and maintain a personal
+stash of Python modules. This scheme's name is derived from the idea of a
+"home" directory on Unix, since it's not unusual for a Unix user to make their
+home directory have a layout similar to :file:`/usr/` or :file:`/usr/local/`.
+This scheme can be used by anyone, regardless of the operating system they
+are installing for.
+
+Installing a new module distribution is as simple as ::
+
+ python setup.py install --home=<dir>
+
+where you can supply any directory you like for the :option:`--home` option. On
+Unix, lazy typists can just type a tilde (``~``); the :command:`install` command
+will expand this to your home directory::
+
+ python setup.py install --home=~
+
+To make Python find the distributions installed with this scheme, you may have
+to :ref:`modify Python's search path <inst-search-path>` or edit
+:mod:`sitecustomize` (see :mod:`site`) to call :func:`site.addsitedir` or edit
+:data:`sys.path`.
+
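+For example, a minimal :mod:`sitecustomize` module might look like this (a
+sketch; the directory name is hypothetical and assumes ``--home=~`` for a
+user named ``me``)::
+
+   # sitecustomize.py -- imported automatically at interpreter startup
+   import site
+   site.addsitedir('/home/me/lib/python')   # also processes .pth files there
+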
+The :option:`--home` option defines the installation base directory. Files are
+installed to the following directories under the installation base as follows:
+
+=============== ===========================================================
+Type of file Installation directory
+=============== ===========================================================
+modules :file:`{home}/lib/python`
+scripts :file:`{home}/bin`
+data :file:`{home}`
+C headers :file:`{home}/include/python/{distname}`
+=============== ===========================================================
+
+(Mentally replace slashes with backslashes if you're on Windows.)
+
+
+.. _inst-alt-install-prefix-unix:
+
+Alternate installation: Unix (the prefix scheme)
+------------------------------------------------
+
+The "prefix scheme" is useful when you wish to use one Python installation to
+perform the build/install (i.e., to run the setup script), but install modules
+into the third-party module directory of a different Python installation (or
+something that looks like a different Python installation). If this sounds a
+trifle unusual, it is---that's why the user and home schemes were described
+first. However, there are at least two known cases where the prefix scheme
+will be useful.
+
+First, consider that many Linux distributions put Python in :file:`/usr`, rather
+than the more traditional :file:`/usr/local`. This is entirely appropriate,
+since in those cases Python is part of "the system" rather than a local add-on.
+However, if you are installing Python modules from source, you probably want
+them to go in :file:`/usr/local/lib/python2.{X}` rather than
+:file:`/usr/lib/python2.{X}`. This can be done with ::
+
+ /usr/bin/python setup.py install --prefix=/usr/local
+
+Another possibility is a network filesystem where the name used to write to a
+remote directory is different from the name used to read it: for example, the
+Python interpreter accessed as :file:`/usr/local/bin/python` might search for
+modules in :file:`/usr/local/lib/python2.{X}`, but those modules would have to
+be installed to, say, :file:`/mnt/{@server}/export/lib/python2.{X}`. This could
+be done with ::
+
+ /usr/local/bin/python setup.py install --prefix=/mnt/@server/export
+
+In either case, the :option:`--prefix` option defines the installation base, and
+the :option:`--exec-prefix` option defines the platform-specific installation
+base, which is used for platform-specific files. (Currently, this just means
+non-pure module distributions, but could be expanded to C libraries, binary
+executables, etc.) If :option:`--exec-prefix` is not supplied, it defaults to
+:option:`--prefix`. Files are installed as follows:
+
+================= ==========================================================
+Type of file Installation directory
+================= ==========================================================
+Python modules :file:`{prefix}/lib/python{X.Y}/site-packages`
+extension modules :file:`{exec-prefix}/lib/python{X.Y}/site-packages`
+scripts :file:`{prefix}/bin`
+data :file:`{prefix}`
+C headers :file:`{prefix}/include/python{X.Y}{abiflags}/{distname}`
+================= ==========================================================
+
+There is no requirement that :option:`--prefix` or :option:`--exec-prefix`
+actually point to an alternate Python installation; if the directories listed
+above do not already exist, they are created at installation time.
+
+Incidentally, the real reason the prefix scheme is important is simply that a
+standard Unix installation uses the prefix scheme, but with :option:`--prefix`
+and :option:`--exec-prefix` supplied by Python itself as ``sys.prefix`` and
+``sys.exec_prefix``. Thus, you might think you'll never use the prefix scheme,
+but every time you run ``python setup.py install`` without any other options,
+you're using it.
+
+Note that installing extensions to an alternate Python installation has no
+effect on how those extensions are built: in particular, the Python header files
+(:file:`Python.h` and friends) installed with the Python interpreter used to run
+the setup script will be used in compiling extensions. It is your
+responsibility to ensure that the interpreter used to run extensions installed
+in this way is compatible with the interpreter used to build them. The best way
+to do this is to ensure that the two interpreters are the same version of Python
+(possibly different builds, or possibly copies of the same build). (Of course,
+if your :option:`--prefix` and :option:`--exec-prefix` don't even point to an
+alternate Python installation, this is immaterial.)
+
+
+.. _inst-alt-install-prefix-windows:
+
+Alternate installation: Windows (the prefix scheme)
+---------------------------------------------------
+
+Windows has no concept of a user's home directory, and since the standard Python
+installation under Windows is simpler than under Unix, the :option:`--prefix`
+option has traditionally been used to install additional packages in separate
+locations on Windows. For example, you can use::
+
+ python setup.py install --prefix="\Temp\Python"
+
+to install modules to the :file:`\\Temp\\Python` directory on the current drive.
+
+The installation base is defined by the :option:`--prefix` option; the
+:option:`--exec-prefix` option is not supported under Windows, which means that
+pure Python modules and extension modules are installed into the same location.
+Files are installed as follows:
+
+=============== ==========================================================
+Type of file Installation directory
+=============== ==========================================================
+modules :file:`{prefix}\\Lib\\site-packages`
+scripts :file:`{prefix}\\Scripts`
+data :file:`{prefix}`
+C headers :file:`{prefix}\\Include\\{distname}`
+=============== ==========================================================
+
+
+.. _inst-custom-install:
+
+Custom Installation
+===================
+
+Sometimes, the alternate installation schemes described in section
+:ref:`inst-alt-install` just don't do what you want. You might want to tweak just
+one or two directories while keeping everything under the same base directory,
+or you might want to completely redefine the installation scheme. In either
+case, you're creating a *custom installation scheme*.
+
+To create a custom installation scheme, you start with one of the alternate
+schemes and override some of the installation directories used for the various
+types of files, using these options:
+
+====================== =======================
+Type of file Override option
+====================== =======================
+Python modules ``--install-purelib``
+extension modules ``--install-platlib``
+all modules ``--install-lib``
+scripts ``--install-scripts``
+data ``--install-data``
+C headers ``--install-headers``
+====================== =======================
+
+These override options can be relative, absolute,
+or explicitly defined in terms of one of the installation base directories.
+(There are two installation base directories, and they are normally the same---
+they only differ when you use the Unix "prefix scheme" and supply different
+``--prefix`` and ``--exec-prefix`` options; using ``--install-lib`` will
+override values computed or given for ``--install-purelib`` and
+``--install-platlib``, and is recommended for schemes that don't
+distinguish between Python and extension modules.)
+
+For example, say you're installing a module distribution to your home directory
+under Unix---but you want scripts to go in :file:`~/scripts` rather than
+:file:`~/bin`. As you might expect, you can override this directory with the
+:option:`--install-scripts` option; in this case, it makes most sense to supply
+a relative path, which will be interpreted relative to the installation base
+directory (your home directory, in this case)::
+
+ python setup.py install --home=~ --install-scripts=scripts
+
+Another Unix example: suppose your Python installation was built and installed
+with a prefix of :file:`/usr/local/python`, so under a standard installation
+scripts will wind up in :file:`/usr/local/python/bin`. If you want them in
+:file:`/usr/local/bin` instead, you would supply this absolute directory for the
+:option:`--install-scripts` option::
+
+ python setup.py install --install-scripts=/usr/local/bin
+
+(This performs an installation using the "prefix scheme," where the prefix is
+whatever your Python interpreter was installed with--- :file:`/usr/local/python`
+in this case.)
+
+If you maintain Python on Windows, you might want third-party modules to live in
+a subdirectory of :file:`{prefix}`, rather than right in :file:`{prefix}`
+itself. This is almost as easy as customizing the script installation directory
+---you just have to remember that there are two types of modules to worry about,
+Python and extension modules, which can conveniently both be controlled by
+one option::
+
+ python setup.py install --install-lib=Site
+
+The specified installation directory is relative to :file:`{prefix}`. Of
+course, you also have to ensure that this directory is in Python's module
+search path, such as by putting a :file:`.pth` file in a site directory (see
+:mod:`site`). See section :ref:`inst-search-path` to find out how to modify
+Python's search path.
+
+If you want to define an entire installation scheme, you just have to supply all
+of the installation directory options. The recommended way to do this is to
+supply relative paths; for example, if you want to maintain all Python
+module-related files under :file:`python` in your home directory, and you want a
+separate directory for each platform that you use your home directory from, you
+might define the following installation scheme::
+
+ python setup.py install --home=~ \
+ --install-purelib=python/lib \
+ --install-platlib=python/lib.$PLAT \
+ --install-scripts=python/scripts \
+ --install-data=python/data
+
+or, equivalently, ::
+
+ python setup.py install --home=~/python \
+ --install-purelib=lib \
+ --install-platlib='lib.$PLAT' \
+ --install-scripts=scripts \
+ --install-data=data
+
+``$PLAT`` is not (necessarily) an environment variable---it will be expanded by
+the Distutils as it parses your command line options, just as it does when
+parsing your configuration file(s).
+
+Obviously, specifying the entire installation scheme every time you install a
+new module distribution would be very tedious. Thus, you can put these options
+into your Distutils config file (see section :ref:`inst-config-files`)::
+
+ [install]
+ install-base=$HOME
+ install-purelib=python/lib
+ install-platlib=python/lib.$PLAT
+ install-scripts=python/scripts
+ install-data=python/data
+
+or, equivalently, ::
+
+ [install]
+ install-base=$HOME/python
+ install-purelib=lib
+ install-platlib=lib.$PLAT
+ install-scripts=scripts
+ install-data=data
+
+Note that these two are *not* equivalent if you supply a different installation
+base directory when you run the setup script. For example, ::
+
+ python setup.py install --install-base=/tmp
+
+would install pure modules to :file:`/tmp/python/lib` in the first case, and
+to :file:`/tmp/lib` in the second case. (For the second case, you probably
+want to supply an installation base of :file:`/tmp/python`.)
+
+You probably noticed the use of ``$HOME`` and ``$PLAT`` in the sample
+configuration file input. These are Distutils configuration variables, which
+bear a strong resemblance to environment variables. In fact, you can use
+environment variables in config files on platforms that have such a notion but
+the Distutils additionally define a few extra variables that may not be in your
+environment, such as ``$PLAT``. (And of course, on systems that don't have
+environment variables, such as Mac OS 9, the configuration variables supplied by
+the Distutils are the only ones you can use.) See section :ref:`inst-config-files`
+for details.
+
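+Distutils provides a small helper that performs this style of substitution,
+which you can try out interactively (an illustrative sketch)::
+
+   >>> from distutils.util import subst_vars
+   >>> subst_vars('$base/lib/python', {'base': '/home/me'})
+   '/home/me/lib/python'
+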
+.. XXX need some Windows examples---when would custom installation schemes be
+ needed on those platforms?
+
+
+.. XXX Move this to Doc/using
+
+.. _inst-search-path:
+
+Modifying Python's Search Path
+------------------------------
+
+When the Python interpreter executes an :keyword:`import` statement, it searches
+for both Python code and extension modules along a search path. A default value
+for the path is configured into the Python binary when the interpreter is built.
+You can determine the path by importing the :mod:`sys` module and printing the
+value of ``sys.path``. ::
+
+ $ python
+ Python 2.2 (#11, Oct 3 2002, 13:31:27)
+ [GCC 2.96 20000731 (Red Hat Linux 7.3 2.96-112)] on linux2
+ Type "help", "copyright", "credits" or "license" for more information.
+ >>> import sys
+ >>> sys.path
+ ['', '/usr/local/lib/python2.3', '/usr/local/lib/python2.3/plat-linux2',
+ '/usr/local/lib/python2.3/lib-tk', '/usr/local/lib/python2.3/lib-dynload',
+ '/usr/local/lib/python2.3/site-packages']
+ >>>
+
+The null string in ``sys.path`` represents the current working directory.
+
+The expected convention for locally installed packages is to put them in the
+:file:`{...}/site-packages/` directory, but you may want to install Python
+modules into some arbitrary directory. For example, your site may have a
+convention of keeping all software related to the web server under :file:`/www`.
+Add-on Python modules might then belong in :file:`/www/python`, and in order to
+import them, this directory must be added to ``sys.path``. There are several
+different ways to add the directory.
+
+The most convenient way is to add a path configuration file to a directory
+that's already on Python's path, usually to the :file:`.../site-packages/`
+directory. Path configuration files have an extension of :file:`.pth`, and each
+line must contain a single path that will be appended to ``sys.path``. (Because
+the new paths are appended to ``sys.path``, modules in the added directories
+will not override standard modules. This means you can't use this mechanism for
+installing fixed versions of standard modules.)
+
+Paths can be absolute or relative, in which case they're relative to the
+directory containing the :file:`.pth` file. See the documentation of
+the :mod:`site` module for more information.
+
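+For example, a path configuration file :file:`www.pth` placed in
+:file:`site-packages` might contain (a hypothetical example, matching the
+:file:`/www` convention above)::
+
+   /www/python
+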
+A slightly less convenient way is to edit the :file:`site.py` file in Python's
+standard library, and modify ``sys.path``. :file:`site.py` is automatically
+imported when the Python interpreter is executed, unless the :option:`-S` switch
+is supplied to suppress this behaviour. So you could simply edit
+:file:`site.py` and add two lines to it::
+
+ import sys
+ sys.path.append('/www/python/')
+
+However, if you reinstall the same major version of Python (perhaps when
+upgrading from 2.2 to 2.2.2, for example) :file:`site.py` will be overwritten by
+the stock version. You'd have to remember that it was modified and save a copy
+before doing the installation.
+
+There are two environment variables that can modify ``sys.path``.
+:envvar:`PYTHONHOME` sets an alternate value for the prefix of the Python
+installation. For example, if :envvar:`PYTHONHOME` is set to ``/www/python``,
+the search path will be set to ``['', '/www/python/lib/pythonX.Y/',
+'/www/python/lib/pythonX.Y/plat-linux2', ...]``.
+
+The :envvar:`PYTHONPATH` variable can be set to a list of paths that will be
+added to the beginning of ``sys.path``. For example, if :envvar:`PYTHONPATH` is
+set to ``/www/python:/opt/py``, the search path will begin with
+``['/www/python', '/opt/py']``. (Note that directories must exist in order to
+be added to ``sys.path``; the :mod:`site` module removes paths that don't
+exist.)
+
+Finally, ``sys.path`` is just a regular Python list, so any Python application
+can modify it by adding or removing entries.
+
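+For example (a throwaway sketch, reusing the hypothetical
+:file:`/www/python` directory)::
+
+   import sys
+   sys.path.insert(0, '/www/python')   # search this directory first
+   sys.path.remove('/www/python')      # stop searching it again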
+
+.. _inst-config-files:
+
+Distutils Configuration Files
+=============================
+
+As mentioned above, you can use Distutils configuration files to record personal
+or site preferences for any Distutils options. That is, any option to any
+command can be stored in one of two or three (depending on your platform)
+configuration files, which will be consulted before the command-line is parsed.
+This means that configuration files will override default values, and the
+command-line will in turn override configuration files. Furthermore, if
+multiple configuration files apply, values from "earlier" files are overridden
+by "later" files.
+
+
+.. _inst-config-filenames:
+
+Location and names of config files
+----------------------------------
+
+The names and locations of the configuration files vary slightly across
+platforms. On Unix and Mac OS X, the three configuration files (in the order
+they are processed) are:
+
++--------------+----------------------------------------------------------+-------+
+| Type of file | Location and filename | Notes |
++==============+==========================================================+=======+
+| system | :file:`{prefix}/lib/python{ver}/distutils/distutils.cfg` | \(1) |
++--------------+----------------------------------------------------------+-------+
+| personal | :file:`$HOME/.pydistutils.cfg` | \(2) |
++--------------+----------------------------------------------------------+-------+
+| local | :file:`setup.cfg` | \(3) |
++--------------+----------------------------------------------------------+-------+
+
+And on Windows, the configuration files are:
+
++--------------+-------------------------------------------------+-------+
+| Type of file | Location and filename | Notes |
++==============+=================================================+=======+
+| system | :file:`{prefix}\\Lib\\distutils\\distutils.cfg` | \(4) |
++--------------+-------------------------------------------------+-------+
+| personal | :file:`%HOME%\\pydistutils.cfg` | \(5) |
++--------------+-------------------------------------------------+-------+
+| local | :file:`setup.cfg` | \(3) |
++--------------+-------------------------------------------------+-------+
+
+On all platforms, the "personal" file can be temporarily disabled by
+passing the ``--no-user-cfg`` option.
+
+Notes:
+
+(1)
+ Strictly speaking, the system-wide configuration file lives in the directory
+ where the Distutils are installed; under Python 1.6 and later on Unix, this is
+ as shown. For Python 1.5.2, the Distutils will normally be installed to
+ :file:`{prefix}/lib/python1.5/site-packages/distutils`, so the system
+ configuration file should be put there under Python 1.5.2.
+
+(2)
+ On Unix, if the :envvar:`HOME` environment variable is not defined, the user's
+ home directory will be determined with the :func:`getpwuid` function from the
+ standard :mod:`pwd` module. This is done by the :func:`os.path.expanduser`
+ function used by Distutils.
+
+(3)
+ I.e., in the current directory (usually the location of the setup script).
+
+(4)
+ (See also note (1).) Under Python 1.6 and later, Python's default "installation
+ prefix" is :file:`C:\\Python`, so the system configuration file is normally
+ :file:`C:\\Python\\Lib\\distutils\\distutils.cfg`. Under Python 1.5.2, the
+ default prefix was :file:`C:\\Program Files\\Python`, and the Distutils were not
+ part of the standard library---so the system configuration file would be
+ :file:`C:\\Program Files\\Python\\distutils\\distutils.cfg` in a standard Python
+ 1.5.2 installation under Windows.
+
+(5)
+ On Windows, if the :envvar:`HOME` environment variable is not defined,
+ :envvar:`USERPROFILE` then :envvar:`HOMEDRIVE` and :envvar:`HOMEPATH` will
+ be tried. This is done by the :func:`os.path.expanduser` function used
+ by Distutils.
+
+
+.. _inst-config-syntax:
+
+Syntax of config files
+----------------------
+
+The Distutils configuration files all have the same syntax. The config files
+are grouped into sections. There is one section for each Distutils command,
+plus a ``global`` section for global options that affect every command. Each
+section consists of one option per line, specified as ``option=value``.
+
+For example, the following is a complete config file that just forces all
+commands to run quietly by default::
+
+ [global]
+ verbose=0
+
+If this is installed as the system config file, it will affect all processing of
+any Python module distribution by any user on the current system. If it is
+installed as your personal config file (on systems that support them), it will
+affect only module distributions processed by you. And if it is used as the
+:file:`setup.cfg` for a particular module distribution, it affects only that
+distribution.
+
+You could override the default "build base" directory and make the
+:command:`build\*` commands always forcibly rebuild all files with the
+following::
+
+ [build]
+ build-base=blib
+ force=1
+
+which corresponds to the command-line arguments ::
+
+ python setup.py build --build-base=blib --force
+
+except that including the :command:`build` command on the command-line means
+that command will be run. Including a particular command in config files has no
+such implication; it only means that if the command is run, the options in the
+config file will apply. (Or if other commands that derive values from it are
+run, they will use the values in the config file.)
+
+You can find out the complete list of options for any command using the
+:option:`--help` option, e.g.::
+
+ python setup.py build --help
+
+and you can find out the complete list of global options by using
+:option:`--help` without a command::
+
+ python setup.py --help
+
+See also the "Reference" section of the "Distributing Python Modules" manual.
+
+
+.. _inst-building-ext:
+
+Building Extensions: Tips and Tricks
+====================================
+
+Whenever possible, the Distutils try to use the configuration information made
+available by the Python interpreter used to run the :file:`setup.py` script.
+For example, the same compiler and linker flags used to compile Python will also
+be used for compiling extensions. Usually this will work well, but in
+complicated situations this might be inappropriate. This section discusses how
+to override the usual Distutils behaviour.
+
+
+.. _inst-tweak-flags:
+
+Tweaking compiler/linker flags
+------------------------------
+
+Compiling a Python extension written in C or C++ will sometimes require
+specifying custom flags for the compiler and linker in order to use a particular
+library or produce a special kind of object code. This is especially true if the
+extension hasn't been tested on your platform, or if you're trying to
+cross-compile Python.
+
+In the most general case, the extension author might have foreseen that
+compiling the extensions would be complicated, and provided a :file:`Setup` file
+for you to edit. This will likely only be done if the module distribution
+contains many separate extension modules, or if they often require elaborate
+sets of compiler flags in order to work.
+
+A :file:`Setup` file, if present, is parsed in order to get a list of extensions
+to build. Each line in a :file:`Setup` describes a single module. Lines have
+the following structure::
+
+ module ... [sourcefile ...] [cpparg ...] [library ...]
+
+
+Let's examine each of the fields in turn.
+
+* *module* is the name of the extension module to be built, and should be a
+ valid Python identifier. You can't just change this in order to rename a module
+ (edits to the source code would also be needed), so this should be left alone.
+
+* *sourcefile* is anything that's likely to be a source code file, at least
+ judging by the filename. Filenames ending in :file:`.c` are assumed to be
+ written in C, filenames ending in :file:`.C`, :file:`.cc`, and :file:`.c++` are
+ assumed to be C++, and filenames ending in :file:`.m` or :file:`.mm` are assumed
+ to be in Objective C.
+
+* *cpparg* is an argument for the C preprocessor, and is anything starting with
+ :option:`-I`, :option:`-D`, :option:`-U` or :option:`-C`.
+
+* *library* is anything ending in :file:`.a` or beginning with :option:`-l` or
+ :option:`-L`.
+
+If a module needs a special library on your platform, you can
+add it by editing the :file:`Setup` file and running ``python setup.py build``.
+For example, if the module defined by the line ::
+
+ foo foomodule.c
+
+must be linked with the math library :file:`libm.a` on your platform, simply add
+:option:`-lm` to the line::
+
+ foo foomodule.c -lm
+
+Arbitrary switches intended for the compiler or the linker can be supplied with
+the :option:`-Xcompiler` *arg* and :option:`-Xlinker` *arg* options::
+
+ foo foomodule.c -Xcompiler -o32 -Xlinker -shared -lm
+
+The next option after :option:`-Xcompiler` and :option:`-Xlinker` will be
+appended to the proper command line, so in the above example the compiler will
+be passed the :option:`-o32` option, and the linker will be passed
+:option:`-shared`. If a compiler option requires an argument, you'll have to
+supply multiple :option:`-Xcompiler` options; for example, to pass ``-x c++``
+the :file:`Setup` file would have to contain ``-Xcompiler -x -Xcompiler c++``.
+
+Compiler flags can also be supplied through setting the :envvar:`CFLAGS`
+environment variable. If set, the contents of :envvar:`CFLAGS` will be added to
+the compiler flags specified in the :file:`Setup` file.
+
+
+.. _inst-non-ms-compilers:
+
+Using non-Microsoft compilers on Windows
+----------------------------------------
+
+.. sectionauthor:: Rene Liebscher <R.Liebscher@gmx.de>
+
+
+
+Borland/CodeGear C++
+^^^^^^^^^^^^^^^^^^^^
+
+This subsection describes the necessary steps to use Distutils with the Borland
+C++ compiler version 5.5. First you have to know that Borland's object file
+format (OMF) is different from the format used by the Python version you can
+download from the Python or ActiveState Web site. (Python is built with
+Microsoft Visual C++, which uses COFF as the object file format.) For this
+reason you have to convert Python's library :file:`python25.lib` into the
+Borland format. You can do this as follows:
+
+.. Should we mention that users have to create cfg-files for the compiler?
+.. see also http://community.borland.com/article/0,1410,21205,00.html
+
+::
+
+ coff2omf python25.lib python25_bcpp.lib
+
+The :file:`coff2omf` program comes with the Borland compiler. The file
+:file:`python25.lib` is in the :file:`Libs` directory of your Python
+installation. If your extension uses other libraries (zlib, ...) you have to
+convert them too.
+
+The converted files have to reside in the same directories as the normal
+libraries.
+
+How does Distutils manage to use these libraries with their changed names? If
+the extension needs a library (e.g. :file:`foo`), Distutils first checks
+whether it can find a library with the suffix :file:`_bcpp` (e.g.
+:file:`foo_bcpp.lib`) and uses it if found. If no such special library is
+found, it falls back to the default name (:file:`foo.lib`). [#]_
+
+To let Distutils compile your extension with Borland C++ you now have to type::
+
+ python setup.py build --compiler=bcpp
+
+If you want to use the Borland C++ compiler as the default, you could specify
+this in your personal or system-wide configuration file for Distutils (see
+section :ref:`inst-config-files`.)
+
+
+.. seealso::
+
+ `C++Builder Compiler <http://www.codegear.com/downloads/free/cppbuilder>`_
+ Information about the free C++ compiler from Borland, including links to the
+ download pages.
+
+ `Creating Python Extensions Using Borland's Free Compiler <http://www.cyberus.ca/~g_will/pyExtenDL.shtml>`_
+ Document describing how to use Borland's free command-line C++ compiler to build
+ Python.
+
+
+GNU C / Cygwin / MinGW
+^^^^^^^^^^^^^^^^^^^^^^
+
+This section describes the necessary steps to use Distutils with the GNU C/C++
+compilers in their Cygwin and MinGW distributions. [#]_ For a Python interpreter
+that was built with Cygwin, everything should work without any of the
+following steps.
+
+Not all extensions can be built with MinGW or Cygwin, but many can. Extensions
+most likely to not work are those that use C++ or depend on Microsoft Visual C
+extensions.
+
+To let Distutils compile your extension with Cygwin you have to type::
+
+ python setup.py build --compiler=cygwin
+
+and for Cygwin in no-cygwin mode [#]_ or for MinGW type::
+
+ python setup.py build --compiler=mingw32
+
+If you want to use any of these options/compilers as default, you should
+consider writing it in your personal or system-wide configuration file for
+Distutils (see section :ref:`inst-config-files`.)
+
+Older Versions of Python and MinGW
+""""""""""""""""""""""""""""""""""
+
+The following instructions only apply if you're using a version of Python
+older than 2.4.1 together with a MinGW older than 3.0.0 (with
+binutils-2.13.90-20030111-1).
+
+These compilers require some special libraries. This task is more complex than
+for Borland's C++, because there is no program to convert the library. First
+you have to create a list of symbols which the Python DLL exports. (You can find
+a good program for this task at
+http://www.emmestech.com/software/pexports-0.43/download_pexports.html).
+
+.. I don't understand what the next line means. --amk
+.. (inclusive the references on data structures.)
+
+::
+
+ pexports python25.dll >python25.def
+
+The location of an installed :file:`python25.dll` will depend on the
+installation options and the version and language of Windows. In a "just for
+me" installation, it will appear in the root of the installation directory. In
+a shared installation, it will be located in the system directory.
+
+Then you can create an import library for gcc from this information. ::
+
+ /cygwin/bin/dlltool --dllname python25.dll --def python25.def --output-lib libpython25.a
+
+The resulting library has to be placed in the same directory as
+:file:`python25.lib`. (Should be the :file:`libs` directory under your Python
+installation directory.)
+
+If your extension uses other libraries (zlib,...) you might have to convert
+them too. The converted files have to reside in the same directories as the
+normal libraries do.
+
+
+.. seealso::
+
+ `Building Python modules on MS Windows platform with MinGW <http://www.zope.org/Members/als/tips/win32_mingw_modules>`_
+ Information about building the required libraries for the MinGW environment.
+
+
+.. rubric:: Footnotes
+
+.. [#] This also means you could replace all existing COFF-libraries with OMF-libraries
+ of the same name.
+
+.. [#] Check http://sources.redhat.com/cygwin/ and http://www.mingw.org/ for more
+ information.
+
+.. [#] Then you have no POSIX emulation available, but you also don't need
+ :file:`cygwin1.dll`.
diff --git a/Doc/extending/extending.rst b/Doc/extending/extending.rst
index c4ced1a..3261580 100644
--- a/Doc/extending/extending.rst
+++ b/Doc/extending/extending.rst
@@ -321,7 +321,7 @@ parameters to be passed in as a tuple acceptable for parsing via
The :const:`METH_KEYWORDS` bit may be set in the third field if keyword
arguments should be passed to the function. In this case, the C function should
-accept a third ``PyObject \*`` parameter which will be a dictionary of keywords.
+accept a third ``PyObject *`` parameter which will be a dictionary of keywords.
Use :c:func:`PyArg_ParseTupleAndKeywords` to parse the arguments to such a
function.
@@ -384,9 +384,6 @@ optionally followed by an import of the module::
imports it. */
PyImport_ImportModule("spam");
-An example may be found in the file :file:`Demo/embed/demo.c` in the Python
-source distribution.
-
.. note::
Removing entries from ``sys.modules`` or importing compiled modules into
diff --git a/Doc/extending/newtypes.rst b/Doc/extending/newtypes.rst
index 2ba01bc..76c55fc 100644
--- a/Doc/extending/newtypes.rst
+++ b/Doc/extending/newtypes.rst
@@ -26,11 +26,12 @@ The Basics
==========
The Python runtime sees all Python objects as variables of type
-:c:type:`PyObject\*`. A :c:type:`PyObject` is not a very magnificent object - it
-just contains the refcount and a pointer to the object's "type object". This is
-where the action is; the type object determines which (C) functions get called
-when, for instance, an attribute gets looked up on an object or it is multiplied
-by another object. These C functions are called "type methods".
+:c:type:`PyObject\*`, which serves as a "base type" for all Python objects.
+:c:type:`PyObject` itself only contains the refcount and a pointer to the
+object's "type object". This is where the action is; the type object determines
+which (C) functions get called when, for instance, an attribute gets looked
+up on an object or it is multiplied by another object. These C functions
+are called "type methods".
So, if you want to define a new object type, you need to create a new type
object.
@@ -50,15 +51,15 @@ The first bit that will be new is::
PyObject_HEAD
} noddy_NoddyObject;
-This is what a Noddy object will contain---in this case, nothing more than every
-Python object contains, namely a refcount and a pointer to a type object. These
-are the fields the ``PyObject_HEAD`` macro brings in. The reason for the macro
-is to standardize the layout and to enable special debugging fields in debug
-builds. Note that there is no semicolon after the ``PyObject_HEAD`` macro; one
-is included in the macro definition. Be wary of adding one by accident; it's
-easy to do from habit, and your compiler might not complain, but someone else's
-probably will! (On Windows, MSVC is known to call this an error and refuse to
-compile the code.)
+This is what a Noddy object will contain---in this case, nothing more than what
+every Python object contains---a refcount and a pointer to a type object.
+These are the fields the ``PyObject_HEAD`` macro brings in. The reason for the
+macro is to standardize the layout and to enable special debugging fields in
+debug builds. Note that there is no semicolon after the ``PyObject_HEAD``
+macro; one is included in the macro definition. Be wary of adding one by
+accident; it's easy to do from habit, and your compiler might not complain,
+but someone else's probably will! (On Windows, MSVC is known to call this an
+error and refuse to compile the code.)
For contrast, let's take a look at the corresponding definition for standard
Python floats::
@@ -224,7 +225,7 @@ doesn't do anything. It can't even be subclassed.
Adding data and methods to the Basic example
--------------------------------------------
-Let's expend the basic example to add some data and methods. Let's also make
+Let's extend the basic example to add some data and methods. Let's also make
the type usable as a base class. We'll create a new module, :mod:`noddy2` that
adds these capabilities:
@@ -288,18 +289,16 @@ strings, so we provide a new method::
self = (Noddy *)type->tp_alloc(type, 0);
if (self != NULL) {
self->first = PyString_FromString("");
- if (self->first == NULL)
- {
+ if (self->first == NULL) {
Py_DECREF(self);
return NULL;
- }
+ }
self->last = PyString_FromString("");
- if (self->last == NULL)
- {
+ if (self->last == NULL) {
Py_DECREF(self);
return NULL;
- }
+ }
self->number = 0;
}
@@ -327,8 +326,8 @@ any arguments passed when the type was called, and that returns the new object
created. New methods always accept positional and keyword arguments, but they
often ignore the arguments, leaving the argument handling to initializer
methods. Note that if the type supports subclassing, the type passed may not be
-the type being defined. The new method calls the tp_alloc slot to allocate
-memory. We don't fill the :attr:`tp_alloc` slot ourselves. Rather
+the type being defined. The new method calls the :attr:`tp_alloc` slot to
+allocate memory. We don't fill the :attr:`tp_alloc` slot ourselves. Rather
:c:func:`PyType_Ready` fills it for us by inheriting it from our base class,
which is :class:`object` by default. Most types use the default allocation.
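+
+By way of illustration only, a rough Python-level analogue of this division of
+labour between the new method and the initializer might look like (the class
+name here is just for the sketch)::
+
+   class Noddy:
+       def __new__(cls, *args, **kwargs):
+           # like tp_new: allocate the instance, largely ignoring the
+           # arguments and leaving them to the initializer
+           return super().__new__(cls)
+
+       def __init__(self, first="", last="", number=0):
+           # like tp_init: perform the actual initialization
+           self.first = first
+           self.last = last
+           self.number = number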
@@ -445,15 +444,6 @@ concatenation of the first and last names. ::
static PyObject *
Noddy_name(Noddy* self)
{
- static PyObject *format = NULL;
- PyObject *args, *result;
-
- if (format == NULL) {
- format = PyString_FromString("%s %s");
- if (format == NULL)
- return NULL;
- }
-
if (self->first == NULL) {
PyErr_SetString(PyExc_AttributeError, "first");
return NULL;
@@ -464,20 +454,13 @@ concatenation of the first and last names. ::
return NULL;
}
- args = Py_BuildValue("OO", self->first, self->last);
- if (args == NULL)
- return NULL;
-
- result = PyString_Format(format, args);
- Py_DECREF(args);
-
- return result;
+ return PyUnicode_FromFormat("%S %S", self->first, self->last);
}
The method is implemented as a C function that takes a :class:`Noddy` (or
:class:`Noddy` subclass) instance as the first argument. Methods always take an
instance as the first argument. Methods often take positional and keyword
-arguments as well, but in this cased we don't take any and don't need to accept
+arguments as well, but in this case we don't take any and don't need to accept
a positional argument tuple or keyword argument dictionary. This method is
equivalent to the Python method::
@@ -1124,9 +1107,6 @@ needed for methods inherited from a base type. One additional entry is needed
at the end; it is a sentinel that marks the end of the array. The
:attr:`ml_name` field of the sentinel must be *NULL*.
-XXX Need to refer to some unified discussion of the structure fields, shared
-with the next section.
-
The second table is used to define attributes which map directly to data stored
in the instance. A variety of primitive C types are supported, and access may
be read-only or read-write. The structures in the table are defined as::
@@ -1146,8 +1126,6 @@ type which will be able to extract a value from the instance structure. The
convert Python values to and from C values. The :attr:`flags` field is used to
store flags which control how the attribute can be accessed.
-XXX Need to move some of this to a shared section!
-
The following flag constants are defined in :file:`structmember.h`; they may be
combined using bitwise-OR.
@@ -1372,7 +1350,7 @@ Here is a desultory example of the implementation of the call function. ::
return result;
}
-XXX some fields need to be added here... ::
+::
/* Iterators */
getiterfunc tp_iter;
diff --git a/Doc/faq/design.rst b/Doc/faq/design.rst
index e45aaaa..5441fd4 100644
--- a/Doc/faq/design.rst
+++ b/Doc/faq/design.rst
@@ -526,14 +526,16 @@ far) under most circumstances, and the implementation is simpler.
Dictionaries work by computing a hash code for each key stored in the dictionary
using the :func:`hash` built-in function. The hash code varies widely depending
-on the key; for example, "Python" hashes to -539294296 while "python", a string
-that differs by a single bit, hashes to 1142331976. The hash code is then used
-to calculate a location in an internal array where the value will be stored.
-Assuming that you're storing keys that all have different hash values, this
-means that dictionaries take constant time -- O(1), in computer science notation
--- to retrieve a key. It also means that no sorted order of the keys is
-maintained, and traversing the array as the ``.keys()`` and ``.items()`` do will
-output the dictionary's content in some arbitrary jumbled order.
+on the key and a per-process seed; for example, "Python" could hash to
+-539294296 while "python", a string that differs by a single bit, could hash
+to 1142331976. The hash code is then used to calculate a location in an
+internal array where the value will be stored. Assuming that you're storing
+keys that all have different hash values, this means that dictionaries take
+constant time -- O(1), in computer science notation -- to retrieve a key. It
+also means that no sorted order of the keys is maintained, and traversing the
+array as the ``.keys()`` and ``.items()`` methods do will output the
+dictionary's content in some arbitrary jumbled order that can change with
+every invocation of a program.
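+
+A quick way to observe this (a sketch; the exact values depend on your
+interpreter build and its per-process hash seed)::
+
+   # With hash randomization enabled, this prints a different value in
+   # each interpreter process -- try running it twice.
+   print(hash("Python"))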
Why must dictionary keys be immutable?
@@ -645,7 +647,7 @@ construction of large programs.
Python 2.6 adds an :mod:`abc` module that lets you define Abstract Base Classes
(ABCs). You can then use :func:`isinstance` and :func:`issubclass` to check
whether an instance or a class implements a particular ABC. The
-:mod:`collections` module defines a set of useful ABCs such as
+:mod:`collections.abc` module defines a set of useful ABCs such as
:class:`Iterable`, :class:`Container`, and :class:`MutableMapping`.
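+
+For instance (a minimal sketch)::
+
+   from collections.abc import Iterable, MutableMapping
+
+   print(isinstance([1, 2, 3], Iterable))        # True: lists are iterable
+   print(isinstance({"a": 1}, MutableMapping))   # True: dicts are mutable mappings
+   print(issubclass(tuple, MutableMapping))      # False: tuples are immutable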
For Python, many of the advantages of interface specifications can be obtained
diff --git a/Doc/faq/extending.rst b/Doc/faq/extending.rst
index 7c684a0..fa245c7 100644
--- a/Doc/faq/extending.rst
+++ b/Doc/faq/extending.rst
@@ -445,34 +445,3 @@ In Python 2.2, you can inherit from built-in classes such as :class:`int`,
The Boost Python Library (BPL, http://www.boost.org/libs/python/doc/index.html)
provides a way of doing this from C++ (i.e. you can inherit from an extension
class written in C++ using the BPL).
-
-
-When importing module X, why do I get "undefined symbol: PyUnicodeUCS2*"?
--------------------------------------------------------------------------
-
-You are using a version of Python that uses a 4-byte representation for Unicode
-characters, but some C extension module you are importing was compiled using a
-Python that uses a 2-byte representation for Unicode characters (the default).
-
-If instead the name of the undefined symbol starts with ``PyUnicodeUCS4``, the
-problem is the reverse: Python was built using 2-byte Unicode characters, and
-the extension module was compiled using a Python with 4-byte Unicode characters.
-
-This can easily occur when using pre-built extension packages. RedHat Linux
-7.x, in particular, provided a "python2" binary that is compiled with 4-byte
-Unicode. This only causes the link failure if the extension uses any of the
-``PyUnicode_*()`` functions. It is also a problem if an extension uses any of
-the Unicode-related format specifiers for :c:func:`Py_BuildValue` (or similar) or
-parameter specifications for :c:func:`PyArg_ParseTuple`.
-
-You can check the size of the Unicode character a Python interpreter is using by
-checking the value of sys.maxunicode:
-
- >>> import sys
- >>> if sys.maxunicode > 65535:
- ... print('UCS4 build')
- ... else:
- ... print('UCS2 build')
-
-The only way to solve this problem is to use extension modules compiled with a
-Python binary built using the same size for Unicode characters.
diff --git a/Doc/glossary.rst b/Doc/glossary.rst
index 2f1277c..062918a 100644
--- a/Doc/glossary.rst
+++ b/Doc/glossary.rst
@@ -34,7 +34,7 @@ Glossary
subclasses, which are classes that don't inherit from a class but are
still recognized by :func:`isinstance` and :func:`issubclass`; see the
:mod:`abc` module documentation. Python comes with many built-in ABCs for
- data structures (in the :mod:`collections` module), numbers (in the
+ data structures (in the :mod:`collections.abc` module), numbers (in the
:mod:`numbers` module), streams (in the :mod:`io` module), import finders
and loaders (in the :mod:`importlib.abc` module). You can create your own
ABCs with the :mod:`abc` module.
@@ -434,8 +434,8 @@ Glossary
mapping
A container object that supports arbitrary key lookups and implements the
- methods specified in the :class:`~collections.Mapping` or
- :class:`~collections.MutableMapping`
+ methods specified in the :class:`~collections.abc.Mapping` or
+ :class:`~collections.abc.MutableMapping`
:ref:`abstract base classes <collections-abstract-base-classes>`. Examples
include :class:`dict`, :class:`collections.defaultdict`,
:class:`collections.OrderedDict` and :class:`collections.Counter`.
@@ -544,6 +544,24 @@ Glossary
for piece in food:
print(piece)
+ qualified name
+ A dotted name showing the "path" from a module's global scope to a
+ class, function or method defined in that module, as defined in
+ :pep:`3155`. For top-level functions and classes, the qualified name
+ is the same as the object's name::
+
+ >>> class C:
+ ... class D:
+ ... def meth(self):
+ ... pass
+ ...
+ >>> C.__qualname__
+ 'C'
+ >>> C.D.__qualname__
+ 'C.D'
+ >>> C.D.meth.__qualname__
+ 'C.D.meth'
+
reference count
The number of references to an object. When the reference count of an
object drops to zero, it is deallocated. Reference counting is
@@ -586,6 +604,14 @@ Glossary
an :term:`expression` or a one of several constructs with a keyword, such
as :keyword:`if`, :keyword:`while` or :keyword:`for`.
+ struct sequence
+ A tuple with named elements. Struct sequences expose an interface similar
+   to :term:`named tuple` in that elements can be accessed either by
+ index or as an attribute. However, they do not have any of the named tuple
+ methods like :meth:`~collections.somenamedtuple._make` or
+ :meth:`~collections.somenamedtuple._asdict`. Examples of struct sequences
+ include :data:`sys.float_info` and the return value of :func:`os.stat`.
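+
+   For example, elements of :data:`sys.float_info` can be read both ways (an
+   interactive sketch)::
+
+      >>> import sys
+      >>> sys.float_info[0] == sys.float_info.max
+      True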
+
triple-quoted string
A string which is bound by three instances of either a quotation mark
(") or an apostrophe ('). While they don't provide any functionality
diff --git a/Doc/howto/descriptor.rst b/Doc/howto/descriptor.rst
index 1616f67..0b513f9 100644
--- a/Doc/howto/descriptor.rst
+++ b/Doc/howto/descriptor.rst
@@ -36,9 +36,7 @@ continuing through the base classes of ``type(a)`` excluding metaclasses. If the
looked-up value is an object defining one of the descriptor methods, then Python
may override the default behavior and invoke the descriptor method instead.
Where this occurs in the precedence chain depends on which descriptor methods
-were defined. Note that descriptors are only invoked for new style objects or
-classes (a class is new style if it inherits from :class:`object` or
-:class:`type`).
+were defined.
Descriptors are a powerful, general purpose protocol. They are the mechanism
behind properties, methods, static methods, class methods, and :func:`super()`.
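+
+A minimal sketch of the protocol (the names here are illustrative)::
+
+   class Ten:
+       """A trivial non-data descriptor: it defines only __get__."""
+       def __get__(self, obj, objtype=None):
+           return 10
+
+   class A:
+       x = Ten()       # found on the class, so attribute lookup fires __get__
+
+   print(A().x)        # 10 -- A.__dict__['x'].__get__(instance, A) was called
+   print(A.x)          # 10 -- descriptors also work for class-level access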
@@ -89,8 +87,6 @@ of ``obj``. If ``d`` defines the method :meth:`__get__`, then ``d.__get__(obj)`
is invoked according to the precedence rules listed below.
The details of invocation depend on whether ``obj`` is an object or a class.
-Either way, descriptors only work for new style objects and classes. A class is
-new style if it is a subclass of :class:`object`.
For objects, the machinery is in :meth:`object.__getattribute__` which
transforms ``b.x`` into ``type(b).__dict__['x'].__get__(b, type(b))``. The
@@ -115,7 +111,6 @@ The important points to remember are:
* descriptors are invoked by the :meth:`__getattribute__` method
* overriding :meth:`__getattribute__` prevents automatic descriptor calls
-* :meth:`__getattribute__` is only available with new style classes and objects
* :meth:`object.__getattribute__` and :meth:`type.__getattribute__` make
different calls to :meth:`__get__`.
* data descriptors always override instance dictionaries.
@@ -128,10 +123,7 @@ and then returns ``A.__dict__['m'].__get__(obj, A)``. If not a descriptor,
``m`` is returned unchanged. If not in the dictionary, ``m`` reverts to a
search using :meth:`object.__getattribute__`.
-Note, in Python 2.2, ``super(B, obj).m()`` would only invoke :meth:`__get__` if
-``m`` was a data descriptor. In Python 2.3, non-data descriptors also get
-invoked unless an old-style class is involved. The implementation details are
-in :c:func:`super_getattro()` in
+The implementation details are in :c:func:`super_getattro()` in
`Objects/typeobject.c <http://svn.python.org/view/python/trunk/Objects/typeobject.c?view=markup>`_
and a pure Python equivalent can be found in `Guido's Tutorial`_.
diff --git a/Doc/howto/logging-cookbook.rst b/Doc/howto/logging-cookbook.rst
index aa4a7df..1f5bd37 100644
--- a/Doc/howto/logging-cookbook.rst
+++ b/Doc/howto/logging-cookbook.rst
@@ -1315,3 +1315,31 @@ This dictionary is passed to :func:`~logging.config.dictConfig` to put the confi
For more information about this configuration, you can see the `relevant
section <https://docs.djangoproject.com/en/1.3/topics/logging/#configuring-logging>`_
of the Django documentation.
+
+.. _cookbook-rotator-namer:
+
+Using a rotator and namer to customise log rotation processing
+--------------------------------------------------------------
+
+An example of how you can define a namer and rotator is given in the following
+snippet, which shows zlib-based compression of the log file::
+
+    import logging.handlers
+    import os
+    import zlib
+
+    def namer(name):
+        return name + ".gz"
+
+    def rotator(source, dest):
+        with open(source, "rb") as sf:
+            data = sf.read()
+        compressed = zlib.compress(data, 9)
+        with open(dest, "wb") as df:
+            df.write(compressed)
+        os.remove(source)
+
+    rh = logging.handlers.RotatingFileHandler(...)
+    rh.rotator = rotator
+    rh.namer = namer
+
+These are not "true" .gz files, as they are bare compressed data, with no
+"container" such as you’d find in an actual gzip file. This snippet is just
+for illustration purposes.
+
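+To read such a file back, :func:`zlib.decompress` (rather than the :mod:`gzip`
+module) is needed, precisely because there is no gzip container. A sketch,
+assuming a rotated file named :file:`app.log.1.gz`::
+
+    import zlib
+
+    with open("app.log.1.gz", "rb") as f:
+        text = zlib.decompress(f.read())
+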
diff --git a/Doc/howto/sockets.rst b/Doc/howto/sockets.rst
index a4ae9c0..d240008 100644
--- a/Doc/howto/sockets.rst
+++ b/Doc/howto/sockets.rst
@@ -25,8 +25,8 @@ It's not really a tutorial - you'll still have work to do in getting things
working. It doesn't cover the fine points (and there are a lot of them), but I
hope it will give you enough background to begin using them decently.
-I'm only going to talk about INET sockets, but they account for at least 99% of
-the sockets in use. And I'll only talk about STREAM sockets - unless you really
+I'm only going to talk about INET (i.e. IPv4) sockets, but they account for at
+least 99% of the sockets in use. And I'll only talk about STREAM (i.e. TCP)
+sockets - unless you really
know what you're doing (in which case this HOWTO isn't for you!), you'll get
better behavior and performance from a STREAM socket than anything else. I will
try to clear up the mystery of what a socket is, as well as some hints on how to
@@ -208,10 +208,10 @@ length message::
totalsent = totalsent + sent
def myreceive(self):
- msg = ''
+ msg = b''
while len(msg) < MSGLEN:
chunk = self.sock.recv(MSGLEN-len(msg))
- if chunk == '':
+ if chunk == b'':
raise RuntimeError("socket connection broken")
msg = msg + chunk
return msg
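+
+The switch from ``''`` to ``b''`` here matters: in Python 3, sockets send and
+receive :class:`bytes`, not :class:`str`, and the two cannot be mixed. A tiny
+illustration::
+
+   msg = b''
+   msg = msg + b'chunk'    # fine: bytes concatenated with bytes
+   # msg = msg + 'chunk'   # TypeError: str and bytes cannot be concatenated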
@@ -371,12 +371,6 @@ have created a new socket to ``connect`` to someone else, put it in the
potential_writers list. If it shows up in the writable list, you have a decent
chance that it has connected.
-One very nasty problem with ``select``: if somewhere in those input lists of
-sockets is one which has died a nasty death, the ``select`` will fail. You then
-need to loop through every single damn socket in all those lists and do a
-``select([sock],[],[],0)`` until you find the bad one. That timeout of 0 means
-it won't take long, but it's ugly.
-
Actually, ``select`` can be handy even with blocking sockets. It's one way of
determining whether you will block - the socket returns as readable when there's
something in the buffers. However, this still doesn't help with the problem of
@@ -386,26 +380,6 @@ determining whether the other end is done, or just busy with something else.
files. Don't try this on Windows. On Windows, ``select`` works with sockets
only. Also note that in C, many of the more advanced socket options are done
differently on Windows. In fact, on Windows I usually use threads (which work
-very, very well) with my sockets. Face it, if you want any kind of performance,
-your code will look very different on Windows than on Unix.
-
-
-Performance
------------
+very, very well) with my sockets.
-There's no question that the fastest sockets code uses non-blocking sockets and
-select to multiplex them. You can put together something that will saturate a
-LAN connection without putting any strain on the CPU.
-
-The trouble is that an app written this way can't do much of anything else -
-it needs to be ready to shuffle bytes around at all times. Assuming that your
-app is actually supposed to do something more than that, threading is the
-optimal solution, (and using non-blocking sockets will be faster than using
-blocking sockets).
-
-Finally, remember that even though blocking sockets are somewhat slower than
-non-blocking, in many cases they are the "right" solution. After all, if your
-app is driven by the data it receives over a socket, there's not much sense in
-complicating the logic just so your app can wait on ``select`` instead of
-``recv``.
diff --git a/Doc/howto/urllib2.rst b/Doc/howto/urllib2.rst
index 567c1b1..3ac8312 100644
--- a/Doc/howto/urllib2.rst
+++ b/Doc/howto/urllib2.rst
@@ -56,6 +56,13 @@ The simplest way to use urllib.request is as follows::
response = urllib.request.urlopen('http://python.org/')
html = response.read()
+If you wish to retrieve a resource via a URL and store it in a temporary
+location, you can do so via the :func:`urlretrieve` function::
+
+    import urllib.request
+    local_filename, headers = urllib.request.urlretrieve('http://python.org/')
+    html = open(local_filename).read()
+
Many uses of urllib will be that simple (note that instead of an 'http:' URL we
could have used an URL starting with 'ftp:', 'file:', etc.). However, it's the
purpose of this tutorial to explain the more complicated cases, concentrating on
diff --git a/Doc/includes/noddy2.c b/Doc/includes/noddy2.c
index 9b8eafb..9641558 100644
--- a/Doc/includes/noddy2.c
+++ b/Doc/includes/noddy2.c
@@ -24,18 +24,16 @@ Noddy_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
self = (Noddy *)type->tp_alloc(type, 0);
if (self != NULL) {
self->first = PyUnicode_FromString("");
- if (self->first == NULL)
- {
+ if (self->first == NULL) {
Py_DECREF(self);
return NULL;
- }
-
+ }
+
self->last = PyUnicode_FromString("");
- if (self->last == NULL)
- {
+ if (self->last == NULL) {
Py_DECREF(self);
return NULL;
- }
+ }
self->number = 0;
}
@@ -50,10 +48,10 @@ Noddy_init(Noddy *self, PyObject *args, PyObject *kwds)
static char *kwlist[] = {"first", "last", "number", NULL};
- if (! PyArg_ParseTupleAndKeywords(args, kwds, "|OOi", kwlist,
- &first, &last,
+ if (! PyArg_ParseTupleAndKeywords(args, kwds, "|OOi", kwlist,
+ &first, &last,
&self->number))
- return -1;
+ return -1;
if (first) {
tmp = self->first;
@@ -86,15 +84,6 @@ static PyMemberDef Noddy_members[] = {
static PyObject *
Noddy_name(Noddy* self)
{
- static PyObject *format = NULL;
- PyObject *args, *result;
-
- if (format == NULL) {
- format = PyUnicode_FromString("%s %s");
- if (format == NULL)
- return NULL;
- }
-
if (self->first == NULL) {
PyErr_SetString(PyExc_AttributeError, "first");
return NULL;
@@ -105,14 +94,7 @@ Noddy_name(Noddy* self)
return NULL;
}
- args = Py_BuildValue("OO", self->first, self->last);
- if (args == NULL)
- return NULL;
-
- result = PyUnicode_Format(format, args);
- Py_DECREF(args);
-
- return result;
+ return PyUnicode_FromFormat("%S %S", self->first, self->last);
}
static PyMethodDef Noddy_methods[] = {
@@ -145,12 +127,12 @@ static PyTypeObject NoddyType = {
Py_TPFLAGS_DEFAULT |
Py_TPFLAGS_BASETYPE, /* tp_flags */
"Noddy objects", /* tp_doc */
- 0, /* tp_traverse */
- 0, /* tp_clear */
- 0, /* tp_richcompare */
- 0, /* tp_weaklistoffset */
- 0, /* tp_iter */
- 0, /* tp_iternext */
+ 0, /* tp_traverse */
+ 0, /* tp_clear */
+ 0, /* tp_richcompare */
+ 0, /* tp_weaklistoffset */
+ 0, /* tp_iter */
+ 0, /* tp_iternext */
Noddy_methods, /* tp_methods */
Noddy_members, /* tp_members */
0, /* tp_getset */
@@ -173,7 +155,7 @@ static PyModuleDef noddy2module = {
};
PyMODINIT_FUNC
-PyInit_noddy2(void)
+PyInit_noddy2(void)
{
PyObject* m;
diff --git a/Doc/includes/noddy3.c b/Doc/includes/noddy3.c
index 89f3a77..8a5a753 100644
--- a/Doc/includes/noddy3.c
+++ b/Doc/includes/noddy3.c
@@ -24,18 +24,16 @@ Noddy_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
self = (Noddy *)type->tp_alloc(type, 0);
if (self != NULL) {
self->first = PyUnicode_FromString("");
- if (self->first == NULL)
- {
+ if (self->first == NULL) {
Py_DECREF(self);
return NULL;
- }
-
+ }
+
self->last = PyUnicode_FromString("");
- if (self->last == NULL)
- {
+ if (self->last == NULL) {
Py_DECREF(self);
return NULL;
- }
+ }
self->number = 0;
}
@@ -50,10 +48,10 @@ Noddy_init(Noddy *self, PyObject *args, PyObject *kwds)
static char *kwlist[] = {"first", "last", "number", NULL};
- if (! PyArg_ParseTupleAndKeywords(args, kwds, "|SSi", kwlist,
- &first, &last,
+ if (! PyArg_ParseTupleAndKeywords(args, kwds, "|SSi", kwlist,
+ &first, &last,
&self->number))
- return -1;
+ return -1;
if (first) {
tmp = self->first;
@@ -88,22 +86,22 @@ Noddy_getfirst(Noddy *self, void *closure)
static int
Noddy_setfirst(Noddy *self, PyObject *value, void *closure)
{
- if (value == NULL) {
- PyErr_SetString(PyExc_TypeError, "Cannot delete the first attribute");
- return -1;
- }
-
- if (! PyUnicode_Check(value)) {
- PyErr_SetString(PyExc_TypeError,
- "The first attribute value must be a string");
- return -1;
- }
-
- Py_DECREF(self->first);
- Py_INCREF(value);
- self->first = value;
-
- return 0;
+ if (value == NULL) {
+ PyErr_SetString(PyExc_TypeError, "Cannot delete the first attribute");
+ return -1;
+ }
+
+ if (! PyUnicode_Check(value)) {
+ PyErr_SetString(PyExc_TypeError,
+ "The first attribute value must be a string");
+ return -1;
+ }
+
+ Py_DECREF(self->first);
+ Py_INCREF(value);
+ self->first = value;
+
+ return 0;
}
static PyObject *
@@ -116,30 +114,30 @@ Noddy_getlast(Noddy *self, void *closure)
static int
Noddy_setlast(Noddy *self, PyObject *value, void *closure)
{
- if (value == NULL) {
- PyErr_SetString(PyExc_TypeError, "Cannot delete the last attribute");
- return -1;
- }
-
- if (! PyUnicode_Check(value)) {
- PyErr_SetString(PyExc_TypeError,
- "The last attribute value must be a string");
- return -1;
- }
-
- Py_DECREF(self->last);
- Py_INCREF(value);
- self->last = value;
-
- return 0;
+ if (value == NULL) {
+ PyErr_SetString(PyExc_TypeError, "Cannot delete the last attribute");
+ return -1;
+ }
+
+ if (! PyUnicode_Check(value)) {
+ PyErr_SetString(PyExc_TypeError,
+ "The last attribute value must be a string");
+ return -1;
+ }
+
+ Py_DECREF(self->last);
+ Py_INCREF(value);
+ self->last = value;
+
+ return 0;
}
static PyGetSetDef Noddy_getseters[] = {
- {"first",
+ {"first",
(getter)Noddy_getfirst, (setter)Noddy_setfirst,
"first name",
NULL},
- {"last",
+ {"last",
(getter)Noddy_getlast, (setter)Noddy_setlast,
"last name",
NULL},
@@ -149,23 +147,7 @@ static PyGetSetDef Noddy_getseters[] = {
static PyObject *
Noddy_name(Noddy* self)
{
- static PyObject *format = NULL;
- PyObject *args, *result;
-
- if (format == NULL) {
- format = PyUnicode_FromString("%s %s");
- if (format == NULL)
- return NULL;
- }
-
- args = Py_BuildValue("OO", self->first, self->last);
- if (args == NULL)
- return NULL;
-
- result = PyUnicode_Format(format, args);
- Py_DECREF(args);
-
- return result;
+ return PyUnicode_FromFormat("%S %S", self->first, self->last);
}
static PyMethodDef Noddy_methods[] = {
@@ -198,12 +180,12 @@ static PyTypeObject NoddyType = {
Py_TPFLAGS_DEFAULT |
Py_TPFLAGS_BASETYPE, /* tp_flags */
"Noddy objects", /* tp_doc */
- 0, /* tp_traverse */
- 0, /* tp_clear */
- 0, /* tp_richcompare */
- 0, /* tp_weaklistoffset */
- 0, /* tp_iter */
- 0, /* tp_iternext */
+ 0, /* tp_traverse */
+ 0, /* tp_clear */
+ 0, /* tp_richcompare */
+ 0, /* tp_weaklistoffset */
+ 0, /* tp_iter */
+ 0, /* tp_iternext */
Noddy_methods, /* tp_methods */
Noddy_members, /* tp_members */
Noddy_getseters, /* tp_getset */
@@ -226,7 +208,7 @@ static PyModuleDef noddy3module = {
};
PyMODINIT_FUNC
-PyInit_noddy3(void)
+PyInit_noddy3(void)
{
PyObject* m;
diff --git a/Doc/includes/noddy4.c b/Doc/includes/noddy4.c
index 6a96fac..eb9622a 100644
--- a/Doc/includes/noddy4.c
+++ b/Doc/includes/noddy4.c
@@ -27,7 +27,7 @@ Noddy_traverse(Noddy *self, visitproc visit, void *arg)
return 0;
}
-static int
+static int
Noddy_clear(Noddy *self)
{
PyObject *tmp;
@@ -58,18 +58,16 @@ Noddy_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
self = (Noddy *)type->tp_alloc(type, 0);
if (self != NULL) {
self->first = PyUnicode_FromString("");
- if (self->first == NULL)
- {
+ if (self->first == NULL) {
Py_DECREF(self);
return NULL;
- }
-
+ }
+
self->last = PyUnicode_FromString("");
- if (self->last == NULL)
- {
+ if (self->last == NULL) {
Py_DECREF(self);
return NULL;
- }
+ }
self->number = 0;
}
@@ -84,10 +82,10 @@ Noddy_init(Noddy *self, PyObject *args, PyObject *kwds)
static char *kwlist[] = {"first", "last", "number", NULL};
- if (! PyArg_ParseTupleAndKeywords(args, kwds, "|OOi", kwlist,
- &first, &last,
+ if (! PyArg_ParseTupleAndKeywords(args, kwds, "|OOi", kwlist,
+ &first, &last,
&self->number))
- return -1;
+ return -1;
if (first) {
tmp = self->first;
@@ -120,15 +118,6 @@ static PyMemberDef Noddy_members[] = {
static PyObject *
Noddy_name(Noddy* self)
{
- static PyObject *format = NULL;
- PyObject *args, *result;
-
- if (format == NULL) {
- format = PyUnicode_FromString("%s %s");
- if (format == NULL)
- return NULL;
- }
-
if (self->first == NULL) {
PyErr_SetString(PyExc_AttributeError, "first");
return NULL;
@@ -139,14 +128,7 @@ Noddy_name(Noddy* self)
return NULL;
}
- args = Py_BuildValue("OO", self->first, self->last);
- if (args == NULL)
- return NULL;
-
- result = PyUnicode_Format(format, args);
- Py_DECREF(args);
-
- return result;
+ return PyUnicode_FromFormat("%S %S", self->first, self->last);
}
static PyMethodDef Noddy_methods[] = {
@@ -182,10 +164,10 @@ static PyTypeObject NoddyType = {
"Noddy objects", /* tp_doc */
(traverseproc)Noddy_traverse, /* tp_traverse */
(inquiry)Noddy_clear, /* tp_clear */
- 0, /* tp_richcompare */
- 0, /* tp_weaklistoffset */
- 0, /* tp_iter */
- 0, /* tp_iternext */
+ 0, /* tp_richcompare */
+ 0, /* tp_weaklistoffset */
+ 0, /* tp_iter */
+ 0, /* tp_iternext */
Noddy_methods, /* tp_methods */
Noddy_members, /* tp_members */
0, /* tp_getset */
@@ -208,7 +190,7 @@ static PyModuleDef noddy4module = {
};
PyMODINIT_FUNC
-PyInit_noddy4(void)
+PyInit_noddy4(void)
{
PyObject* m;
diff --git a/Doc/install/index.rst b/Doc/install/index.rst
index b20f1fb..bb2e9c5 100644
--- a/Doc/install/index.rst
+++ b/Doc/install/index.rst
@@ -1,12 +1,10 @@
-.. highlightlang:: none
+.. _packaging-install-index:
-.. _install-index:
+******************************
+ Installing Python Projects
+******************************
-*****************************
- Installing Python Modules
-*****************************
-
-:Author: Greg Ward
+:Author: The Fellowship of the Packaging
:Release: |version|
:Date: |today|
@@ -16,1071 +14,43 @@
about Python and aren't about to learn the language just in order to
install and maintain it for their users, i.e. system administrators.
Thus, I have to be sure to explain the basics at some point:
- sys.path and PYTHONPATH at least. Should probably give pointers to
+ sys.path and PYTHONPATH at least. Should probably give pointers to
other docs on "import site", PYTHONSTARTUP, PYTHONHOME, etc.
Finally, it might be useful to include all the material from my "Care
- and Feeding of a Python Installation" talk in here somewhere. Yow!
+ and Feeding of a Python Installation" talk in here somewhere. Yow!
.. topic:: Abstract
- This document describes the Python Distribution Utilities ("Distutils") from the
- end-user's point-of-view, describing how to extend the capabilities of a
- standard Python installation by building and installing third-party Python
- modules and extensions.
-
-
-.. _inst-intro:
-
-Introduction
-============
-
-Although Python's extensive standard library covers many programming needs,
-there often comes a time when you need to add some new functionality to your
-Python installation in the form of third-party modules. This might be necessary
-to support your own programming, or to support an application that you want to
-use and that happens to be written in Python.
-
-In the past, there has been little support for adding third-party modules to an
-existing Python installation. With the introduction of the Python Distribution
-Utilities (Distutils for short) in Python 2.0, this changed.
-
-This document is aimed primarily at the people who need to install third-party
-Python modules: end-users and system administrators who just need to get some
-Python application running, and existing Python programmers who want to add some
-new goodies to their toolbox. You don't need to know Python to read this
-document; there will be some brief forays into using Python's interactive mode
-to explore your installation, but that's it. If you're looking for information
-on how to distribute your own Python modules so that others may use them, see
-the :ref:`distutils-index` manual.
-
-
-.. _inst-trivial-install:
-
-Best case: trivial installation
--------------------------------
-
-In the best case, someone will have prepared a special version of the module
-distribution you want to install that is targeted specifically at your platform
-and is installed just like any other software on your platform. For example,
-the module developer might make an executable installer available for Windows
-users, an RPM package for users of RPM-based Linux systems (Red Hat, SuSE,
-Mandrake, and many others), a Debian package for users of Debian-based Linux
-systems, and so forth.
-
-In that case, you would download the installer appropriate to your platform and
-do the obvious thing with it: run it if it's an executable installer, ``rpm
---install`` it if it's an RPM, etc. You don't need to run Python or a setup
-script, you don't need to compile anything---you might not even need to read any
-instructions (although it's always a good idea to do so anyway).
-
-Of course, things will not always be that easy. You might be interested in a
-module distribution that doesn't have an easy-to-use installer for your
-platform. In that case, you'll have to start with the source distribution
-released by the module's author/maintainer. Installing from a source
-distribution is not too hard, as long as the modules are packaged in the
-standard way. The bulk of this document is about building and installing
-modules from standard source distributions.
-
-
-.. _inst-new-standard:
-
-The new standard: Distutils
----------------------------
-
-If you download a module source distribution, you can tell pretty quickly if it
-was packaged and distributed in the standard way, i.e. using the Distutils.
-First, the distribution's name and version number will be featured prominently
-in the name of the downloaded archive, e.g. :file:`foo-1.0.tar.gz` or
-:file:`widget-0.9.7.zip`. Next, the archive will unpack into a similarly-named
-directory: :file:`foo-1.0` or :file:`widget-0.9.7`. Additionally, the
-distribution will contain a setup script :file:`setup.py`, and a file named
-:file:`README.txt` or possibly just :file:`README`, which should explain that
-building and installing the module distribution is a simple matter of running
-one command from a terminal::
-
- python setup.py install
-
-For Windows, this command should be run from a command prompt window
-(:menuselection:`Start --> Accessories`)::
-
- setup.py install
-
-If all these things are true, then you already know how to build and install the
-modules you've just downloaded: Run the command above. Unless you need to
-install things in a non-standard way or customize the build process, you don't
-really need this manual. Or rather, the above command is everything you need to
-get out of this manual.
-
-
-.. _inst-standard-install:
-
-Standard Build and Install
-==========================
-
-As described in section :ref:`inst-new-standard`, building and installing a module
-distribution using the Distutils is usually one simple command to run from a
-terminal::
-
- python setup.py install
-
-
-.. _inst-platform-variations:
-
-Platform variations
--------------------
-
-You should always run the setup command from the distribution root directory,
-i.e. the top-level subdirectory that the module source distribution unpacks
-into. For example, if you've just downloaded a module source distribution
-:file:`foo-1.0.tar.gz` onto a Unix system, the normal thing to do is::
-
- gunzip -c foo-1.0.tar.gz | tar xf - # unpacks into directory foo-1.0
- cd foo-1.0
- python setup.py install
-
-On Windows, you'd probably download :file:`foo-1.0.zip`. If you downloaded the
-archive file to :file:`C:\\Temp`, then it would unpack into
-:file:`C:\\Temp\\foo-1.0`; you can use either a archive manipulator with a
-graphical user interface (such as WinZip) or a command-line tool (such as
-:program:`unzip` or :program:`pkunzip`) to unpack the archive. Then, open a
-command prompt window and run::
-
- cd c:\Temp\foo-1.0
- python setup.py install
-
-
-.. _inst-splitting-up:
-
-Splitting the job up
---------------------
-
-Running ``setup.py install`` builds and installs all modules in one run. If you
-prefer to work incrementally---especially useful if you want to customize the
-build process, or if things are going wrong---you can use the setup script to do
-one thing at a time. This is particularly helpful when the build and install
-will be done by different users---for example, you might want to build a module
-distribution and hand it off to a system administrator for installation (or do
-it yourself, with super-user privileges).
-
-For example, you can build everything in one step, and then install everything
-in a second step, by invoking the setup script twice::
-
- python setup.py build
- python setup.py install
-
-If you do this, you will notice that running the :command:`install` command
-first runs the :command:`build` command, which---in this case---quickly notices
-that it has nothing to do, since everything in the :file:`build` directory is
-up-to-date.
-
-You may not need this ability to break things down often if all you do is
-install modules downloaded off the 'net, but it's very handy for more advanced
-tasks. If you get into distributing your own Python modules and extensions,
-you'll run lots of individual Distutils commands on their own.
-
-
-.. _inst-how-build-works:
-
-How building works
-------------------
-
-As implied above, the :command:`build` command is responsible for putting the
-files to install into a *build directory*. By default, this is :file:`build`
-under the distribution root; if you're excessively concerned with speed, or want
-to keep the source tree pristine, you can change the build directory with the
-:option:`--build-base` option. For example::
-
- python setup.py build --build-base=/tmp/pybuild/foo-1.0
-
-(Or you could do this permanently with a directive in your system or personal
-Distutils configuration file; see section :ref:`inst-config-files`.) Normally, this
-isn't necessary.
-
-The default layout for the build tree is as follows::
-
- --- build/ --- lib/
- or
- --- build/ --- lib.<plat>/
- temp.<plat>/
-
-where ``<plat>`` expands to a brief description of the current OS/hardware
-platform and Python version. The first form, with just a :file:`lib` directory,
-is used for "pure module distributions"---that is, module distributions that
-include only pure Python modules. If a module distribution contains any
-extensions (modules written in C/C++), then the second form, with two ``<plat>``
-directories, is used. In that case, the :file:`temp.{plat}` directory holds
-temporary files generated by the compile/link process that don't actually get
-installed. In either case, the :file:`lib` (or :file:`lib.{plat}`) directory
-contains all Python modules (pure Python and extensions) that will be installed.
-
-In the future, more directories will be added to handle Python scripts,
-documentation, binary executables, and whatever else is needed to handle the job
-of installing Python modules and applications.
-
-
-.. _inst-how-install-works:
-
-How installation works
-----------------------
-
-After the :command:`build` command runs (whether you run it explicitly, or the
-:command:`install` command does it for you), the work of the :command:`install`
-command is relatively simple: all it has to do is copy everything under
-:file:`build/lib` (or :file:`build/lib.{plat}`) to your chosen installation
-directory.
-
-If you don't choose an installation directory---i.e., if you just run ``setup.py
-install``\ ---then the :command:`install` command installs to the standard
-location for third-party Python modules. This location varies by platform and
-by how you built/installed Python itself. On Unix (and Mac OS X, which is also
-Unix-based), it also depends on whether the module distribution being installed
-is pure Python or contains extensions ("non-pure"):
-
-+-----------------+-----------------------------------------------------+--------------------------------------------------+-------+
-| Platform | Standard installation location | Default value | Notes |
-+=================+=====================================================+==================================================+=======+
-| Unix (pure) | :file:`{prefix}/lib/python{X.Y}/site-packages` | :file:`/usr/local/lib/python{X.Y}/site-packages` | \(1) |
-+-----------------+-----------------------------------------------------+--------------------------------------------------+-------+
-| Unix (non-pure) | :file:`{exec-prefix}/lib/python{X.Y}/site-packages` | :file:`/usr/local/lib/python{X.Y}/site-packages` | \(1) |
-+-----------------+-----------------------------------------------------+--------------------------------------------------+-------+
-| Windows | :file:`{prefix}\\Lib\\site-packages` | :file:`C:\\Python{XY}\\Lib\\site-packages` | \(2) |
-+-----------------+-----------------------------------------------------+--------------------------------------------------+-------+
-
-Notes:
-
-(1)
- Most Linux distributions include Python as a standard part of the system, so
- :file:`{prefix}` and :file:`{exec-prefix}` are usually both :file:`/usr` on
- Linux. If you build Python yourself on Linux (or any Unix-like system), the
- default :file:`{prefix}` and :file:`{exec-prefix}` are :file:`/usr/local`.
-
-(2)
- The default installation directory on Windows was :file:`C:\\Program
- Files\\Python` under Python 1.6a1, 1.5.2, and earlier.
-
-:file:`{prefix}` and :file:`{exec-prefix}` stand for the directories that Python
-is installed to, and where it finds its libraries at run-time. They are always
-the same under Windows, and very often the same under Unix and Mac OS X. You
-can find out what your Python installation uses for :file:`{prefix}` and
-:file:`{exec-prefix}` by running Python in interactive mode and typing a few
-simple commands. Under Unix, just type ``python`` at the shell prompt. Under
-Windows, choose :menuselection:`Start --> Programs --> Python X.Y -->
-Python (command line)`. Once the interpreter is started, you type Python code
-at the prompt. For example, on my Linux system, I type the three Python
-statements shown below, and get the output as shown, to find out my
-:file:`{prefix}` and :file:`{exec-prefix}`::
-
- Python 2.4 (#26, Aug 7 2004, 17:19:02)
- Type "help", "copyright", "credits" or "license" for more information.
- >>> import sys
- >>> sys.prefix
- '/usr'
- >>> sys.exec_prefix
- '/usr'
-
-A few other placeholders are used in this document: :file:`{X.Y}` stands for the
-version of Python, for example ``3.2``; :file:`{abiflags}` will be replaced by
-the value of :data:`sys.abiflags` or the empty string for platforms which don't
-define ABI flags; :file:`{distname}` will be replaced by the name of the module
-distribution being installed. Dots and capitalization are important in the
-paths; for example, a value that uses ``python3.2`` on UNIX will typically use
-``Python32`` on Windows.
-
-If you don't want to install modules to the standard location, or if you don't
-have permission to write there, then you need to read about alternate
-installations in section :ref:`inst-alt-install`. If you want to customize your
-installation directories more heavily, see section :ref:`inst-custom-install` on
-custom installations.
-
-
-.. _inst-alt-install:
-
-Alternate Installation
-======================
-
-Often, it is necessary or desirable to install modules to a location other than
-the standard location for third-party Python modules. For example, on a Unix
-system you might not have permission to write to the standard third-party module
-directory. Or you might wish to try out a module before making it a standard
-part of your local Python installation. This is especially true when upgrading
-a distribution already present: you want to make sure your existing base of
-scripts still works with the new version before actually upgrading.
-
-The Distutils :command:`install` command is designed to make installing module
-distributions to an alternate location simple and painless. The basic idea is
-that you supply a base directory for the installation, and the
-:command:`install` command picks a set of directories (called an *installation
-scheme*) under this base directory in which to install files. The details
-differ across platforms, so read whichever of the following sections applies to
-you.
-
-Note that the various alternate installation schemes are mutually exclusive: you
-can pass ``--user``, or ``--home``, or ``--prefix`` and ``--exec-prefix``, or
-``--install-base`` and ``--install-platbase``, but you can't mix from these
-groups.
-
-
-.. _inst-alt-install-user:
-
-Alternate installation: the user scheme
----------------------------------------
-
-This scheme is designed to be the most convenient solution for users that don't
-have write permission to the global site-packages directory or don't want to
-install into it. It is enabled with a simple option::
-
- python setup.py install --user
-
-Files will be installed into subdirectories of :data:`site.USER_BASE` (written
-as :file:`{userbase}` hereafter). This scheme installs pure Python modules and
-extension modules in the same location (also known as :data:`site.USER_SITE`).
-Here are the values for UNIX, including Mac OS X:
-
-=============== ===========================================================
-Type of file Installation directory
-=============== ===========================================================
-modules :file:`{userbase}/lib/python{X.Y}/site-packages`
-scripts :file:`{userbase}/bin`
-data :file:`{userbase}`
-C headers :file:`{userbase}/include/python{X.Y}{abiflags}/{distname}`
-=============== ===========================================================
-
-And here are the values used on Windows:
-
-=============== ===========================================================
-Type of file Installation directory
-=============== ===========================================================
-modules :file:`{userbase}\\Python{XY}\\site-packages`
-scripts :file:`{userbase}\\Scripts`
-data :file:`{userbase}`
-C headers :file:`{userbase}\\Python{XY}\\Include\\{distname}`
-=============== ===========================================================
-
-The advantage of using this scheme compared to the other ones described below is
-that the user site-packages directory is under normal conditions always included
-in :data:`sys.path` (see :mod:`site` for more information), which means that
-there is no additional step to perform after running the :file:`setup.py` script
-to finalize the installation.
-
-The :command:`build_ext` command also has a ``--user`` option to add
-:file:`{userbase}/include` to the compiler search path for header files and
-:file:`{userbase}/lib` to the compiler search path for libraries as well as to
-the runtime search path for shared C libraries (rpath).
-
-
-.. _inst-alt-install-home:
-
-Alternate installation: the home scheme
----------------------------------------
-
-The idea behind the "home scheme" is that you build and maintain a personal
-stash of Python modules. This scheme's name is derived from the idea of a
-"home" directory on Unix, since it's not unusual for a Unix user to make their
-home directory have a layout similar to :file:`/usr/` or :file:`/usr/local/`.
-This scheme can be used by anyone, regardless of the operating system they
-are installing for.
-
-Installing a new module distribution is as simple as ::
-
- python setup.py install --home=<dir>
-
-where you can supply any directory you like for the :option:`--home` option. On
-Unix, lazy typists can just type a tilde (``~``); the :command:`install` command
-will expand this to your home directory::
-
- python setup.py install --home=~
-
-To make Python find the distributions installed with this scheme, you may have
-to :ref:`modify Python's search path <inst-search-path>` or edit
-:mod:`sitecustomize` (see :mod:`site`) to call :func:`site.addsitedir` or edit
-:data:`sys.path`.
-
-The :option:`--home` option defines the installation base directory. Files are
-installed to the following directories under the installation base as follows:
-
-=============== ===========================================================
-Type of file Installation directory
-=============== ===========================================================
-modules :file:`{home}/lib/python`
-scripts :file:`{home}/bin`
-data :file:`{home}`
-C headers :file:`{home}/include/python/{distname}`
-=============== ===========================================================
-
-(Mentally replace slashes with backslashes if you're on Windows.)
-
-
-.. _inst-alt-install-prefix-unix:
-
-Alternate installation: Unix (the prefix scheme)
-------------------------------------------------
-
-The "prefix scheme" is useful when you wish to use one Python installation to
-perform the build/install (i.e., to run the setup script), but install modules
-into the third-party module directory of a different Python installation (or
-something that looks like a different Python installation). If this sounds a
-trifle unusual, it is---that's why the user and home schemes come before. However,
-there are at least two known cases where the prefix scheme will be useful.
-
-First, consider that many Linux distributions put Python in :file:`/usr`, rather
-than the more traditional :file:`/usr/local`. This is entirely appropriate,
-since in those cases Python is part of "the system" rather than a local add-on.
-However, if you are installing Python modules from source, you probably want
-them to go in :file:`/usr/local/lib/python2.{X}` rather than
-:file:`/usr/lib/python2.{X}`. This can be done with ::
-
- /usr/bin/python setup.py install --prefix=/usr/local
-
-Another possibility is a network filesystem where the name used to write to a
-remote directory is different from the name used to read it: for example, the
-Python interpreter accessed as :file:`/usr/local/bin/python` might search for
-modules in :file:`/usr/local/lib/python2.{X}`, but those modules would have to
-be installed to, say, :file:`/mnt/{@server}/export/lib/python2.{X}`. This could
-be done with ::
-
- /usr/local/bin/python setup.py install --prefix=/mnt/@server/export
-
-In either case, the :option:`--prefix` option defines the installation base, and
-the :option:`--exec-prefix` option defines the platform-specific installation
-base, which is used for platform-specific files. (Currently, this just means
-non-pure module distributions, but could be expanded to C libraries, binary
-executables, etc.) If :option:`--exec-prefix` is not supplied, it defaults to
-:option:`--prefix`. Files are installed as follows:
-
-================= ==========================================================
-Type of file Installation directory
-================= ==========================================================
-Python modules :file:`{prefix}/lib/python{X.Y}/site-packages`
-extension modules :file:`{exec-prefix}/lib/python{X.Y}/site-packages`
-scripts :file:`{prefix}/bin`
-data :file:`{prefix}`
-C headers :file:`{prefix}/include/python{X.Y}{abiflags}/{distname}`
-================= ==========================================================
-
-There is no requirement that :option:`--prefix` or :option:`--exec-prefix`
-actually point to an alternate Python installation; if the directories listed
-above do not already exist, they are created at installation time.
-
-Incidentally, the real reason the prefix scheme is important is simply that a
-standard Unix installation uses the prefix scheme, but with :option:`--prefix`
-and :option:`--exec-prefix` supplied by Python itself as ``sys.prefix`` and
-``sys.exec_prefix``. Thus, you might think you'll never use the prefix scheme,
-but every time you run ``python setup.py install`` without any other options,
-you're using it.
-
-Note that installing extensions to an alternate Python installation has no
-effect on how those extensions are built: in particular, the Python header files
-(:file:`Python.h` and friends) installed with the Python interpreter used to run
-the setup script will be used in compiling extensions. It is your
-responsibility to ensure that the interpreter used to run extensions installed
-in this way is compatible with the interpreter used to build them. The best way
-to do this is to ensure that the two interpreters are the same version of Python
-(possibly different builds, or possibly copies of the same build). (Of course,
-if your :option:`--prefix` and :option:`--exec-prefix` don't even point to an
-alternate Python installation, this is immaterial.)
-
-
-.. _inst-alt-install-prefix-windows:
-
-Alternate installation: Windows (the prefix scheme)
----------------------------------------------------
-
-Windows has no concept of a user's home directory, and since the standard Python
-installation under Windows is simpler than under Unix, the :option:`--prefix`
-option has traditionally been used to install additional packages in separate
-locations on Windows. ::
-
- python setup.py install --prefix="\Temp\Python"
-
-to install modules to the :file:`\\Temp\\Python` directory on the current drive.
-
-The installation base is defined by the :option:`--prefix` option; the
-:option:`--exec-prefix` option is not supported under Windows, which means that
-pure Python modules and extension modules are installed into the same location.
-Files are installed as follows:
-
-=============== ==========================================================
-Type of file Installation directory
-=============== ==========================================================
-modules :file:`{prefix}\\Lib\\site-packages`
-scripts :file:`{prefix}\\Scripts`
-data :file:`{prefix}`
-C headers :file:`{prefix}\\Include\\{distname}`
-=============== ==========================================================
-
-
-.. _inst-custom-install:
-
-Custom Installation
-===================
-
-Sometimes, the alternate installation schemes described in section
-:ref:`inst-alt-install` just don't do what you want. You might want to tweak just
-one or two directories while keeping everything under the same base directory,
-or you might want to completely redefine the installation scheme. In either
-case, you're creating a *custom installation scheme*.
-
-To create a custom installation scheme, you start with one of the alternate
-schemes and override some of the installation directories used for the various
-types of files, using these options:
-
-====================== =======================
-Type of file Override option
-====================== =======================
-Python modules ``--install-purelib``
-extension modules ``--install-platlib``
-all modules ``--install-lib``
-scripts ``--install-scripts``
-data ``--install-data``
-C headers ``--install-headers``
-====================== =======================
-
-These override options can be relative, absolute,
-or explicitly defined in terms of one of the installation base directories.
-(There are two installation base directories, and they are normally the same---
-they only differ when you use the Unix "prefix scheme" and supply different
-``--prefix`` and ``--exec-prefix`` options; using ``--install-lib`` will
-override values computed or given for ``--install-purelib`` and
-``--install-platlib``, and is recommended for schemes that don't make a
-difference between Python and extension modules.)
-
-For example, say you're installing a module distribution to your home directory
-under Unix---but you want scripts to go in :file:`~/scripts` rather than
-:file:`~/bin`. As you might expect, you can override this directory with the
-:option:`--install-scripts` option; in this case, it makes most sense to supply
-a relative path, which will be interpreted relative to the installation base
-directory (your home directory, in this case)::
-
- python setup.py install --home=~ --install-scripts=scripts
-
-Another Unix example: suppose your Python installation was built and installed
-with a prefix of :file:`/usr/local/python`, so under a standard installation
-scripts will wind up in :file:`/usr/local/python/bin`. If you want them in
-:file:`/usr/local/bin` instead, you would supply this absolute directory for the
-:option:`--install-scripts` option::
-
- python setup.py install --install-scripts=/usr/local/bin
-
-(This performs an installation using the "prefix scheme," where the prefix is
-whatever your Python interpreter was installed with--- :file:`/usr/local/python`
-in this case.)
-
-If you maintain Python on Windows, you might want third-party modules to live in
-a subdirectory of :file:`{prefix}`, rather than right in :file:`{prefix}`
-itself. This is almost as easy as customizing the script installation directory
----you just have to remember that there are two types of modules to worry about,
-Python and extension modules, which can conveniently be both controlled by one
-option::
-
- python setup.py install --install-lib=Site
-
-The specified installation directory is relative to :file:`{prefix}`. Of
-course, you also have to ensure that this directory is in Python's module
-search path, such as by putting a :file:`.pth` file in a site directory (see
-:mod:`site`). See section :ref:`inst-search-path` to find out how to modify
-Python's search path.
-
-If you want to define an entire installation scheme, you just have to supply all
-of the installation directory options. The recommended way to do this is to
-supply relative paths; for example, if you want to maintain all Python
-module-related files under :file:`python` in your home directory, and you want a
-separate directory for each platform that you use your home directory from, you
-might define the following installation scheme::
+ This document describes Packaging from the end-user's point of view: it
+ explains how to extend the functionality of a standard Python installation by
+ building and installing third-party Python modules and applications.
- python setup.py install --home=~ \
- --install-purelib=python/lib \
- --install-platlib=python/lib.$PLAT \
- --install-scripts=python/scripts
- --install-data=python/data
-or, equivalently, ::
+This guide is split into a simple overview followed by a longer presentation of
+the :program:`pysetup` script, the Python package management tool used to
+build, distribute, search for, install, remove and list Python distributions.
- python setup.py install --home=~/python \
- --install-purelib=lib \
- --install-platlib='lib.$PLAT' \
- --install-scripts=scripts
- --install-data=data
+.. TODO integrate install and pysetup instead of duplicating
-``$PLAT`` is not (necessarily) an environment variable---it will be expanded by
-the Distutils as it parses your command line options, just as it does when
-parsing your configuration file(s).
+.. toctree::
+ :maxdepth: 2
+ :numbered:
-Obviously, specifying the entire installation scheme every time you install a
-new module distribution would be very tedious. Thus, you can put these options
-into your Distutils config file (see section :ref:`inst-config-files`)::
-
- [install]
- install-base=$HOME
- install-purelib=python/lib
- install-platlib=python/lib.$PLAT
- install-scripts=python/scripts
- install-data=python/data
-
-or, equivalently, ::
-
- [install]
- install-base=$HOME/python
- install-purelib=lib
- install-platlib=lib.$PLAT
- install-scripts=scripts
- install-data=data
-
-Note that these two are *not* equivalent if you supply a different installation
-base directory when you run the setup script. For example, ::
-
- python setup.py install --install-base=/tmp
-
-would install pure modules to :file:`/tmp/python/lib` in the first case, and
-to :file:`/tmp/lib` in the second case. (For the second case, you probably
-want to supply an installation base of :file:`/tmp/python`.)
-
-You probably noticed the use of ``$HOME`` and ``$PLAT`` in the sample
-configuration file input. These are Distutils configuration variables, which
-bear a strong resemblance to environment variables. In fact, you can use
-environment variables in config files on platforms that have such a notion but
-the Distutils additionally define a few extra variables that may not be in your
-environment, such as ``$PLAT``. (And of course, on systems that don't have
-environment variables, such as Mac OS 9, the configuration variables supplied by
-the Distutils are the only ones you can use.) See section :ref:`inst-config-files`
-for details.
-
-.. XXX need some Windows examples---when would custom installation schemes be
- needed on those platforms?
-
-
-.. XXX Move this to Doc/using
-
-.. _inst-search-path:
-
-Modifying Python's Search Path
-------------------------------
-
-When the Python interpreter executes an :keyword:`import` statement, it searches
-for both Python code and extension modules along a search path. A default value
-for the path is configured into the Python binary when the interpreter is built.
-You can determine the path by importing the :mod:`sys` module and printing the
-value of ``sys.path``. ::
-
- $ python
- Python 2.2 (#11, Oct 3 2002, 13:31:27)
- [GCC 2.96 20000731 (Red Hat Linux 7.3 2.96-112)] on linux2
- Type "help", "copyright", "credits" or "license" for more information.
- >>> import sys
- >>> sys.path
- ['', '/usr/local/lib/python2.3', '/usr/local/lib/python2.3/plat-linux2',
- '/usr/local/lib/python2.3/lib-tk', '/usr/local/lib/python2.3/lib-dynload',
- '/usr/local/lib/python2.3/site-packages']
- >>>
-
-The null string in ``sys.path`` represents the current working directory.
-
-The expected convention for locally installed packages is to put them in the
-:file:`{...}/site-packages/` directory, but you may want to install Python
-modules into some arbitrary directory. For example, your site may have a
-convention of keeping all software related to the web server under :file:`/www`.
-Add-on Python modules might then belong in :file:`/www/python`, and in order to
-import them, this directory must be added to ``sys.path``. There are several
-different ways to add the directory.
-
-The most convenient way is to add a path configuration file to a directory
-that's already on Python's path, usually to the :file:`.../site-packages/`
-directory. Path configuration files have an extension of :file:`.pth`, and each
-line must contain a single path that will be appended to ``sys.path``. (Because
-the new paths are appended to ``sys.path``, modules in the added directories
-will not override standard modules. This means you can't use this mechanism for
-installing fixed versions of standard modules.)
-
-Paths can be absolute or relative, in which case they're relative to the
-directory containing the :file:`.pth` file. See the documentation of
-the :mod:`site` module for more information.
-
-A slightly less convenient way is to edit the :file:`site.py` file in Python's
-standard library, and modify ``sys.path``. :file:`site.py` is automatically
-imported when the Python interpreter is executed, unless the :option:`-S` switch
-is supplied to suppress this behaviour. So you could simply edit
-:file:`site.py` and add two lines to it::
-
- import sys
- sys.path.append('/www/python/')
-
-However, if you reinstall the same major version of Python (perhaps when
-upgrading from 2.2 to 2.2.2, for example) :file:`site.py` will be overwritten by
-the stock version. You'd have to remember that it was modified and save a copy
-before doing the installation.
-
-There are two environment variables that can modify ``sys.path``.
-:envvar:`PYTHONHOME` sets an alternate value for the prefix of the Python
-installation. For example, if :envvar:`PYTHONHOME` is set to ``/www/python``,
-the search path will be set to ``['', '/www/python/lib/pythonX.Y/',
-'/www/python/lib/pythonX.Y/plat-linux2', ...]``.
-
-The :envvar:`PYTHONPATH` variable can be set to a list of paths that will be
-added to the beginning of ``sys.path``. For example, if :envvar:`PYTHONPATH` is
-set to ``/www/python:/opt/py``, the search path will begin with
-``['/www/python', '/opt/py']``. (Note that directories must exist in order to
-be added to ``sys.path``; the :mod:`site` module removes paths that don't
-exist.)
-
-Finally, ``sys.path`` is just a regular Python list, so any Python application
-can modify it by adding or removing entries.
-
-
-.. _inst-config-files:
-
-Distutils Configuration Files
-=============================
-
-As mentioned above, you can use Distutils configuration files to record personal
-or site preferences for any Distutils options. That is, any option to any
-command can be stored in one of two or three (depending on your platform)
-configuration files, which will be consulted before the command-line is parsed.
-This means that configuration files will override default values, and the
-command-line will in turn override configuration files. Furthermore, if
-multiple configuration files apply, values from "earlier" files are overridden
-by "later" files.
-
-
-.. _inst-config-filenames:
-
-Location and names of config files
-----------------------------------
-
-The names and locations of the configuration files vary slightly across
-platforms. On Unix and Mac OS X, the three configuration files (in the order
-they are processed) are:
-
-+--------------+----------------------------------------------------------+-------+
-| Type of file | Location and filename | Notes |
-+==============+==========================================================+=======+
-| system | :file:`{prefix}/lib/python{ver}/distutils/distutils.cfg` | \(1) |
-+--------------+----------------------------------------------------------+-------+
-| personal | :file:`$HOME/.pydistutils.cfg` | \(2) |
-+--------------+----------------------------------------------------------+-------+
-| local | :file:`setup.cfg` | \(3) |
-+--------------+----------------------------------------------------------+-------+
-
-And on Windows, the configuration files are:
-
-+--------------+-------------------------------------------------+-------+
-| Type of file | Location and filename | Notes |
-+==============+=================================================+=======+
-| system | :file:`{prefix}\\Lib\\distutils\\distutils.cfg` | \(4) |
-+--------------+-------------------------------------------------+-------+
-| personal | :file:`%HOME%\\pydistutils.cfg` | \(5) |
-+--------------+-------------------------------------------------+-------+
-| local | :file:`setup.cfg` | \(3) |
-+--------------+-------------------------------------------------+-------+
-
-On all platforms, the "personal" file can be temporarily disabled by
-passing the `--no-user-cfg` option.
-
-Notes:
-
-(1)
- Strictly speaking, the system-wide configuration file lives in the directory
- where the Distutils are installed; under Python 1.6 and later on Unix, this is
- as shown. For Python 1.5.2, the Distutils will normally be installed to
- :file:`{prefix}/lib/python1.5/site-packages/distutils`, so the system
- configuration file should be put there under Python 1.5.2.
-
-(2)
- On Unix, if the :envvar:`HOME` environment variable is not defined, the user's
- home directory will be determined with the :func:`getpwuid` function from the
- standard :mod:`pwd` module. This is done by the :func:`os.path.expanduser`
- function used by Distutils.
-
-(3)
- I.e., in the current directory (usually the location of the setup script).
-
-(4)
- (See also note (1).) Under Python 1.6 and later, Python's default "installation
- prefix" is :file:`C:\\Python`, so the system configuration file is normally
- :file:`C:\\Python\\Lib\\distutils\\distutils.cfg`. Under Python 1.5.2, the
- default prefix was :file:`C:\\Program Files\\Python`, and the Distutils were not
- part of the standard library---so the system configuration file would be
- :file:`C:\\Program Files\\Python\\distutils\\distutils.cfg` in a standard Python
- 1.5.2 installation under Windows.
-
-(5)
- On Windows, if the :envvar:`HOME` environment variable is not defined,
- :envvar:`USERPROFILE` then :envvar:`HOMEDRIVE` and :envvar:`HOMEPATH` will
- be tried. This is done by the :func:`os.path.expanduser` function used
- by Distutils.
-
-
-.. _inst-config-syntax:
-
-Syntax of config files
-----------------------
-
-The Distutils configuration files all have the same syntax. The config files
-are grouped into sections. There is one section for each Distutils command,
-plus a ``global`` section for global options that affect every command. Each
-section consists of one option per line, specified as ``option=value``.
-
-For example, the following is a complete config file that just forces all
-commands to run quietly by default::
-
- [global]
- verbose=0
-
-If this is installed as the system config file, it will affect all processing of
-any Python module distribution by any user on the current system. If it is
-installed as your personal config file (on systems that support them), it will
-affect only module distributions processed by you. And if it is used as the
-:file:`setup.cfg` for a particular module distribution, it affects only that
-distribution.
-
-You could override the default "build base" directory and make the
-:command:`build\*` commands always forcibly rebuild all files with the
-following::
-
- [build]
- build-base=blib
- force=1
-
-which corresponds to the command-line arguments ::
-
- python setup.py build --build-base=blib --force
-
-except that including the :command:`build` command on the command-line means
-that command will be run. Including a particular command in config files has no
-such implication; it only means that if the command is run, the options in the
-config file will apply. (Or if other commands that derive values from it are
-run, they will use the values in the config file.)
-
-You can find out the complete list of options for any command using the
-:option:`--help` option, e.g.::
-
- python setup.py build --help
-
-and you can find out the complete list of global options by using
-:option:`--help` without a command::
-
- python setup.py --help
-
-See also the "Reference" section of the "Distributing Python Modules" manual.
-
-
-.. _inst-building-ext:
-
-Building Extensions: Tips and Tricks
-====================================
-
-Whenever possible, the Distutils try to use the configuration information made
-available by the Python interpreter used to run the :file:`setup.py` script.
-For example, the same compiler and linker flags used to compile Python will also
-be used for compiling extensions. Usually this will work well, but in
-complicated situations this might be inappropriate. This section discusses how
-to override the usual Distutils behaviour.
-
-
-.. _inst-tweak-flags:
-
-Tweaking compiler/linker flags
-------------------------------
-
-Compiling a Python extension written in C or C++ will sometimes require
-specifying custom flags for the compiler and linker in order to use a particular
-library or produce a special kind of object code. This is especially true if the
-extension hasn't been tested on your platform, or if you're trying to
-cross-compile Python.
-
-In the most general case, the extension author might have foreseen that
-compiling the extensions would be complicated, and provided a :file:`Setup` file
-for you to edit. This will likely only be done if the module distribution
-contains many separate extension modules, or if they often require elaborate
-sets of compiler flags in order to work.
-
-A :file:`Setup` file, if present, is parsed in order to get a list of extensions
-to build. Each line in a :file:`Setup` describes a single module. Lines have
-the following structure::
-
- module ... [sourcefile ...] [cpparg ...] [library ...]
-
-
-Let's examine each of the fields in turn.
-
-* *module* is the name of the extension module to be built, and should be a
- valid Python identifier. You can't just change this in order to rename a module
- (edits to the source code would also be needed), so this should be left alone.
-
-* *sourcefile* is anything that's likely to be a source code file, at least
- judging by the filename. Filenames ending in :file:`.c` are assumed to be
- written in C, filenames ending in :file:`.C`, :file:`.cc`, and :file:`.c++` are
- assumed to be C++, and filenames ending in :file:`.m` or :file:`.mm` are assumed
- to be in Objective C.
-
-* *cpparg* is an argument for the C preprocessor, and is anything starting with
- :option:`-I`, :option:`-D`, :option:`-U` or :option:`-C`.
-
-* *library* is anything ending in :file:`.a` or beginning with :option:`-l` or
- :option:`-L`.
-
-If a particular platform requires a special library on your platform, you can
-add it by editing the :file:`Setup` file and running ``python setup.py build``.
-For example, if the module defined by the line ::
-
- foo foomodule.c
-
-must be linked with the math library :file:`libm.a` on your platform, simply add
-:option:`-lm` to the line::
-
- foo foomodule.c -lm
-
-Arbitrary switches intended for the compiler or the linker can be supplied with
-the :option:`-Xcompiler` *arg* and :option:`-Xlinker` *arg* options::
-
- foo foomodule.c -Xcompiler -o32 -Xlinker -shared -lm
-
-The next option after :option:`-Xcompiler` and :option:`-Xlinker` will be
-appended to the proper command line, so in the above example the compiler will
-be passed the :option:`-o32` option, and the linker will be passed
-:option:`-shared`. If a compiler option requires an argument, you'll have to
-supply multiple :option:`-Xcompiler` options; for example, to pass ``-x c++``
-the :file:`Setup` file would have to contain ``-Xcompiler -x -Xcompiler c++``.
-
-Compiler flags can also be supplied through setting the :envvar:`CFLAGS`
-environment variable. If set, the contents of :envvar:`CFLAGS` will be added to
-the compiler flags specified in the :file:`Setup` file.
-
-
-.. _inst-non-ms-compilers:
-
-Using non-Microsoft compilers on Windows
-----------------------------------------
-
-.. sectionauthor:: Rene Liebscher <R.Liebscher@gmx.de>
-
-
-
-Borland/CodeGear C++
-^^^^^^^^^^^^^^^^^^^^
-
-This subsection describes the necessary steps to use Distutils with the Borland
-C++ compiler version 5.5. First you have to know that Borland's object file
-format (OMF) is different from the format used by the Python version you can
-download from the Python or ActiveState Web site. (Python is built with
-Microsoft Visual C++, which uses COFF as the object file format.) For this
-reason you have to convert Python's library :file:`python25.lib` into the
-Borland format. You can do this as follows:
-
-.. Should we mention that users have to create cfg-files for the compiler?
-.. see also http://community.borland.com/article/0,1410,21205,00.html
-
-::
-
- coff2omf python25.lib python25_bcpp.lib
-
-The :file:`coff2omf` program comes with the Borland compiler. The file
-:file:`python25.lib` is in the :file:`Libs` directory of your Python
-installation. If your extension uses other libraries (zlib, ...) you have to
-convert them too.
-
-The converted files have to reside in the same directories as the normal
-libraries.
-
-How does Distutils manage to use these libraries with their changed names? If
-the extension needs a library (eg. :file:`foo`) Distutils checks first if it
-finds a library with suffix :file:`_bcpp` (eg. :file:`foo_bcpp.lib`) and then
-uses this library. In the case it doesn't find such a special library it uses
-the default name (:file:`foo.lib`.) [#]_
-
-To let Distutils compile your extension with Borland C++ you now have to type::
-
- python setup.py build --compiler=bcpp
-
-If you want to use the Borland C++ compiler as the default, you could specify
-this in your personal or system-wide configuration file for Distutils (see
-section :ref:`inst-config-files`.)
-
-
-.. seealso::
-
- `C++Builder Compiler <http://www.codegear.com/downloads/free/cppbuilder>`_
- Information about the free C++ compiler from Borland, including links to the
- download pages.
-
- `Creating Python Extensions Using Borland's Free Compiler <http://www.cyberus.ca/~g_will/pyExtenDL.shtml>`_
- Document describing how to use Borland's free command-line C++ compiler to build
- Python.
-
-
-GNU C / Cygwin / MinGW
-^^^^^^^^^^^^^^^^^^^^^^
-
-This section describes the necessary steps to use Distutils with the GNU C/C++
-compilers in their Cygwin and MinGW distributions. [#]_ For a Python interpreter
-that was built with Cygwin, everything should work without any of these
-following steps.
-
-Not all extensions can be built with MinGW or Cygwin, but many can. Extensions
-most likely to not work are those that use C++ or depend on Microsoft Visual C
-extensions.
-
-To let Distutils compile your extension with Cygwin you have to type::
-
- python setup.py build --compiler=cygwin
-
-and for Cygwin in no-cygwin mode [#]_ or for MinGW type::
-
- python setup.py build --compiler=mingw32
-
-If you want to use any of these options/compilers as default, you should
-consider writing it in your personal or system-wide configuration file for
-Distutils (see section :ref:`inst-config-files`.)
-
-Older Versions of Python and MinGW
-""""""""""""""""""""""""""""""""""
-The following instructions only apply if you're using a version of Python
-inferior to 2.4.1 with a MinGW inferior to 3.0.0 (with
-binutils-2.13.90-20030111-1).
-
-These compilers require some special libraries. This task is more complex than
-for Borland's C++, because there is no program to convert the library. First
-you have to create a list of symbols which the Python DLL exports. (You can find
-a good program for this task at
-http://www.emmestech.com/software/pexports-0.43/download_pexports.html).
-
-.. I don't understand what the next line means. --amk
-.. (inclusive the references on data structures.)
-
-::
-
- pexports python25.dll >python25.def
-
-The location of an installed :file:`python25.dll` will depend on the
-installation options and the version and language of Windows. In a "just for
-me" installation, it will appear in the root of the installation directory. In
-a shared installation, it will be located in the system directory.
-
-Then you can create from these information an import library for gcc. ::
-
- /cygwin/bin/dlltool --dllname python25.dll --def python25.def --output-lib libpython25.a
-
-The resulting library has to be placed in the same directory as
-:file:`python25.lib`. (Should be the :file:`libs` directory under your Python
-installation directory.)
-
-If your extension uses other libraries (zlib,...) you might have to convert
-them too. The converted files have to reside in the same directories as the
-normal libraries do.
+ install
+ pysetup
+ pysetup-config
+ pysetup-servers
.. seealso::
- `Building Python modules on MS Windows platform with MinGW <http://www.zope.org/Members/als/tips/win32_mingw_modules>`_
- Information about building the required libraries for the MinGW environment.
-
-
-.. rubric:: Footnotes
-
-.. [#] This also means you could replace all existing COFF-libraries with OMF-libraries
- of the same name.
-
-.. [#] Check http://sources.redhat.com/cygwin/ and http://www.mingw.org/ for more
- information
+ :ref:`packaging-index`
+ The manual for developers of Python projects who want to package and
+ distribute them. This describes how to use :mod:`packaging` to make
+ projects easily found and added to an existing Python installation.
-.. [#] Then you have no POSIX emulation available, but you also don't need
- :file:`cygwin1.dll`.
+ :mod:`packaging`
+ A library reference for developers of packaging tools wanting to use
+ standalone building blocks like :mod:`~packaging.version` or
+ :mod:`~packaging.metadata`, or extend Packaging itself.
diff --git a/Doc/install/install.rst b/Doc/install/install.rst
new file mode 100644
index 0000000..b3e655b
--- /dev/null
+++ b/Doc/install/install.rst
@@ -0,0 +1,1119 @@
+.. highlightlang:: none
+
+====================================
+Installing Python projects: overview
+====================================
+
+.. _packaging-install-intro:
+
+Introduction
+============
+
+Although Python's extensive standard library covers many programming needs,
+there often comes a time when you need to add new functionality to your Python
+installation in the form of third-party modules. This might be necessary to
+support your own programming, or to support an application that you want to use
+and that happens to be written in Python.
+
+In the past, there was little support for adding third-party modules to an
+existing Python installation. With the introduction of the Python Distribution
+Utilities (Distutils for short) in Python 2.0, this changed. However, not all
+problems were solved; end-users had to rely on ``easy_install`` or
+``pip`` to download third-party modules from PyPI, uninstall distributions or do
+other maintenance operations. Packaging is a more complete replacement for
+Distutils, in the standard library, with a backport named Distutils2 available
+for older Python versions.
+
+This document is aimed primarily at people who need to install third-party
+Python modules: end-users and system administrators who just need to get some
+Python application running, and existing Python programmers who want to add
+new goodies to their toolbox. You don't need to know Python to read this
+document; there will be some brief forays into using Python's interactive mode
+to explore your installation, but that's it. If you're looking for information
+on how to distribute your own Python modules so that others may use them, see
+the :ref:`packaging-index` manual.
+
+
+.. _packaging-trivial-install:
+
+Best case: trivial installation
+-------------------------------
+
+In the best case, someone will have prepared a special version of the module
+distribution you want to install that is targeted specifically at your platform
+and can be installed just like any other software on your platform. For example,
+the module's developer might make an executable installer available for Windows
+users, an RPM package for users of RPM-based Linux systems (Red Hat, SuSE,
+Mandrake, and many others), a Debian package for users of Debian and derivative
+systems, and so forth.
+
+In that case, you would use the standard system tools to download and install
+the specific installer for your platform and its dependencies.
+
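+For instance, on a Debian-based system the whole procedure might look like
+this (a sketch only ---``python-foo`` is a hypothetical package name, and
+actual names vary by distribution)::
+
+   $ apt-get install python-foo
+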
+Of course, things will not always be that easy. You might be interested in a
+module whose distribution doesn't have an easy-to-use installer for your
+platform. In that case, you'll have to start with the source distribution
+released by the module's author/maintainer. Installing from a source
+distribution is not too hard, as long as the modules are packaged in the
+standard way. The bulk of this document addresses the building and installing
+of modules from standard source distributions.
+
+
+.. _packaging-distutils:
+
+The Python standard: Distutils
+------------------------------
+
+If you download a source distribution of a module, it will be obvious whether
+it was packaged and distributed using Distutils. First, the distribution's name
+and version number will be featured prominently in the name of the downloaded
+archive, e.g. :file:`foo-1.0.tar.gz` or :file:`widget-0.9.7.zip`. Next, the
+archive will unpack into a similarly-named directory: :file:`foo-1.0` or
+:file:`widget-0.9.7`. Additionally, the distribution may contain a
+:file:`setup.cfg` file and a file named :file:`README.txt` ---or possibly just
+:file:`README`--- explaining that building and installing the module
+distribution is a simple matter of issuing the following command at your shell's
+prompt::
+
+ python setup.py install
+
+Third-party projects have extended Distutils to work around its limitations or
+add functionality. After some years of near-inactivity in Distutils, a new
+maintainer has started to standardize good ideas in PEPs and implement them in a
+new, improved version of Distutils, called Distutils2 or Packaging.
+
+
+.. _packaging-new-standard:
+
+The new standard: Packaging
+---------------------------
+
+The rules described in the first paragraph above apply to Packaging-based
+projects too: a source distribution will have a name like
+:file:`widget-0.9.7.zip`. One of the main differences with Distutils is that
+distributions no longer have a :file:`setup.py` script; it used to cause a
+number of issues. Now there is a unique script installed with Python itself::
+
+ pysetup install widget-0.9.7.zip
+
+Running this command is enough to build and install projects (Python modules or
+packages, scripts or whole applications), without even having to unpack the
+archive. It is also compatible with Distutils-based distributions.
+
+Unless you have to perform non-standard installations or customize the build
+process, you can stop reading this manual ---the above command is all you
+need.
+
+With :program:`pysetup`, you won't even have to manually download a distribution
+before installing it; see :ref:`packaging-pysetup`.
+
+
+.. _packaging-standard-install:
+
+Standard build and install
+==========================
+
+As described in section :ref:`packaging-new-standard`, building and installing
+a module distribution using Packaging usually comes down to one simple
+command::
+
+ pysetup run install_dist
+
+This command should be run in a terminal. On Windows, the terminal is called a
+command prompt and can be found in :menuselection:`Start --> Accessories`;
+PowerShell is a popular alternative.
+
+
+.. _packaging-platform-variations:
+
+Platform variations
+-------------------
+
+The setup command is meant to be run from the root directory of the source
+distribution, i.e. the top-level subdirectory that the module source
+distribution unpacks into. For example, if you've just downloaded a module
+source distribution :file:`foo-1.0.tar.gz` onto a Unix system, the normal
+steps to follow are these::
+
+ gunzip -c foo-1.0.tar.gz | tar xf - # unpacks into directory foo-1.0
+ cd foo-1.0
+ pysetup run install_dist
+
+On Windows, you'd probably download :file:`foo-1.0.zip`. If you downloaded the
+archive file to :file:`C:\\Temp`, then it would unpack into
+:file:`C:\\Temp\\foo-1.0`. To actually unpack the archive, you can use either
+an archive manipulator with a graphical user interface (such as WinZip or 7-Zip)
+or a command-line tool (such as :program:`unzip`, :program:`pkunzip` or, again,
+:program:`7z`). Then, open a command prompt window and run::
+
+ cd c:\Temp\foo-1.0
+ pysetup run install_dist
+
+
+.. _packaging-splitting-up:
+
+Splitting the job up
+--------------------
+
+Running ``pysetup run install_dist`` builds and installs all modules in one
+go. If you prefer to work incrementally ---especially useful if you want to
+customize the build process, or if things are going wrong--- you can run the
+commands one at a time. This is particularly valuable when the build and
+install steps are performed by different users. For example, you might want
+to build a module distribution and hand it off to a system administrator for
+installation (or do it yourself, but with super-user or admin privileges).
+
+For example, to build everything in one step and then install everything in a
+second step, you invoke two distinct Packaging commands::
+
+ pysetup run build
+ pysetup run install_dist
+
+If you do this, you will notice that invoking the :command:`install_dist` command
+first runs the :command:`build` command, which ---in this case--- quickly
+notices it can spare itself the work, since everything in the :file:`build`
+directory is up-to-date.
+
+You can often ignore the ability to divide the process into steps if all you
+do is install modules downloaded from the Internet, but it's very handy for
+more advanced tasks. If you find yourself distributing your own Python modules
+and extensions, though, you'll most likely run many individual Packaging
+commands.
+
+
+.. _packaging-how-build-works:
+
+How building works
+------------------
+
+As implied above, the :command:`build` command is responsible for collecting
+and placing the files to be installed into a *build directory*. By default,
+this is :file:`build`, under the distribution root. If you're excessively
+concerned with speed, or want to keep the source tree pristine, you can specify
+a different build directory with the :option:`--build-base` option. For example::
+
+ pysetup run build --build-base /tmp/pybuild/foo-1.0
+
+(Or you could do this permanently with a directive in your system or personal
+Packaging configuration file; see section :ref:`packaging-config-files`.)
+In the usual case, however, all this is unnecessary.
+
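+For instance, to make the build directory permanent, the directive might look
+like this (a sketch, assuming the ``[build]`` section and ``build-base``
+option carry over from the old Distutils configuration files)::
+
+   [build]
+   build-base = /tmp/pybuild
+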
+The build tree's default layout looks like so::
+
+ --- build/ --- lib/
+ or
+ --- build/ --- lib.<plat>/
+ temp.<plat>/
+
+where ``<plat>`` expands to a brief description of the current OS/hardware
+platform and Python version. The first form, with just a :file:`lib` directory,
+is used for pure module distributions (module distributions that
+include only pure Python modules). If a module distribution contains any
+extensions (modules written in C/C++), then the second form, with two ``<plat>``
+directories, is used. In that case, the :file:`temp.{plat}` directory holds
+temporary files generated during the compile/link process which are not intended
+to be installed. In either case, the :file:`lib` (or :file:`lib.{plat}`) directory
+contains all Python modules (pure Python and extensions) to be installed.
+
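+If you are curious what ``<plat>`` expands to on your machine, the
+:mod:`sysconfig` module can show you the platform part (the output below is
+just an example from one Linux system)::
+
+   $ python -c "import sysconfig; print(sysconfig.get_platform())"
+   linux-x86_64
+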
+In the future, more directories will be added to handle Python scripts,
+documentation, binary executables, and whatever else is required to install
+Python modules and applications.
+
+
+.. _packaging-how-install-works:
+
+How installation works
+----------------------
+
+After the :command:`build` command is run (whether explicitly or by the
+:command:`install_dist` command on your behalf), the work of the :command:`install_dist`
+command is relatively simple: all it has to do is copy the contents of
+:file:`build/lib` (or :file:`build/lib.{plat}`) to the installation directory
+of your choice.
+
+If you don't choose an installation directory ---i.e., if you just run
+``pysetup run install_dist``\ --- then the :command:`install_dist` command
+installs to the standard location for third-party Python modules. This location
+varies by platform and depending on how you built/installed Python itself. On
+Unix (and Mac OS X, which is also Unix-based), it also depends on whether the
+module distribution being installed is pure Python or contains extensions
+("non-pure"):
+
++-----------------+-----------------------------------------------------+--------------------------------------------------+-------+
+| Platform | Standard installation location | Default value | Notes |
++=================+=====================================================+==================================================+=======+
+| Unix (pure) | :file:`{prefix}/lib/python{X.Y}/site-packages` | :file:`/usr/local/lib/python{X.Y}/site-packages` | \(1) |
++-----------------+-----------------------------------------------------+--------------------------------------------------+-------+
+| Unix (non-pure) | :file:`{exec-prefix}/lib/python{X.Y}/site-packages` | :file:`/usr/local/lib/python{X.Y}/site-packages` | \(1) |
++-----------------+-----------------------------------------------------+--------------------------------------------------+-------+
+| Windows | :file:`{prefix}\\Lib\\site-packages` | :file:`C:\\Python{XY}\\Lib\\site-packages` | \(2) |
++-----------------+-----------------------------------------------------+--------------------------------------------------+-------+
+
+Notes:
+
+(1)
+ Most Linux distributions include Python as a standard part of the system, so
+ :file:`{prefix}` and :file:`{exec-prefix}` are usually both :file:`/usr` on
+ Linux. If you build Python yourself on Linux (or any Unix-like system), the
+ default :file:`{prefix}` and :file:`{exec-prefix}` are :file:`/usr/local`.
+
+(2)
+ The default installation directory on Windows was :file:`C:\\Program
+ Files\\Python` under Python 1.6a1, 1.5.2, and earlier.
+
+:file:`{prefix}` and :file:`{exec-prefix}` stand for the directories that Python
+is installed to, and where it finds its libraries at run-time. They are always
+the same under Windows, and very often the same under Unix and Mac OS X. You
+can find out what your Python installation uses for :file:`{prefix}` and
+:file:`{exec-prefix}` by running Python in interactive mode and typing a few
+simple commands.
+
+.. TODO link to Doc/using instead of duplicating
+
+To start the interactive Python interpreter, you need to follow a slightly
+different recipe for each platform. Under Unix, just type :command:`python` at
+the shell prompt. Under Windows (assuming the Python executable is on your
+:envvar:`PATH`, which is the usual case), you can choose :menuselection:`Start --> Run`,
+type ``python`` and press ``enter``. Alternatively, you can simply execute
+:command:`python` at a command prompt (:menuselection:`Start --> Accessories`)
+or in PowerShell.
+
+Once the interpreter is started, you type Python code at the prompt. For
+example, on my Linux system, I type the three Python statements shown below,
+and get the output as shown, to find out my :file:`{prefix}` and :file:`{exec-prefix}`::
+
+ Python 3.3 (r32:88445, Apr 2 2011, 10:43:54)
+ Type "help", "copyright", "credits" or "license" for more information.
+ >>> import sys
+ >>> sys.prefix
+ '/usr'
+ >>> sys.exec_prefix
+ '/usr'
+
+A few other placeholders are used in this document: :file:`{X.Y}` stands for the
+version of Python, for example ``3.2``; :file:`{abiflags}` will be replaced by
+the value of :data:`sys.abiflags` or the empty string for platforms which don't
+define ABI flags; :file:`{distname}` will be replaced by the name of the module
+distribution being installed. Dots and capitalization are important in the
+paths; for example, a value that uses ``python3.2`` on UNIX will typically use
+``Python32`` on Windows.
+
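+You can check these values for your own interpreter with a one-liner (the
+output is just an example; ``sys.abiflags`` only exists on POSIX builds, hence
+the :func:`getattr` guard)::
+
+   $ python -c "import sys; print(sys.version_info[:2], getattr(sys, 'abiflags', ''))"
+   (3, 2) m
+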
+If you don't want to install modules to the standard location, or if you don't
+have permission to write there, then you need to read about alternate
+installations in section :ref:`packaging-alt-install`. If you want to customize your
+installation directories more heavily, see section :ref:`packaging-custom-install`.
+
+
+.. _packaging-alt-install:
+
+Alternate installation
+======================
+
+Often, it is necessary or desirable to install modules to a location other than
+the standard location for third-party Python modules. For example, on a Unix
+system you might not have permission to write to the standard third-party module
+directory. Or you might wish to try out a module before making it a standard
+part of your local Python installation. This is especially true when upgrading
+a distribution already present: you want to make sure your existing base of
+scripts still works with the new version before actually upgrading.
+
+The Packaging :command:`install_dist` command is designed to make installing module
+distributions to an alternate location simple and painless. The basic idea is
+that you supply a base directory for the installation, and the
+:command:`install_dist` command picks a set of directories (called an *installation
+scheme*) under this base directory in which to install files. The details
+differ across platforms, so read whichever of the following sections applies to
+you.
+
+Note that the various alternate installation schemes are mutually exclusive: you
+can pass ``--user``, or ``--home``, or ``--prefix`` and ``--exec-prefix``, or
+``--install-base`` and ``--install-platbase``, but you can't mix options from
+these groups.
+
+
+.. _packaging-alt-install-user:
+
+Alternate installation: the user scheme
+---------------------------------------
+
+This scheme is designed to be the most convenient solution for users who don't
+have write permission to the global site-packages directory or don't want to
+install into it. It is enabled with a simple option::
+
+ pysetup run install_dist --user
+
+Files will be installed into subdirectories of :data:`site.USER_BASE` (written
+as :file:`{userbase}` hereafter). This scheme installs pure Python modules and
+extension modules in the same location (also known as :data:`site.USER_SITE`).
+Here are the values for UNIX, including non-framework builds on Mac OS X:
+
+=============== ===========================================================
+Type of file Installation directory
+=============== ===========================================================
+modules :file:`{userbase}/lib/python{X.Y}/site-packages`
+scripts :file:`{userbase}/bin`
+data :file:`{userbase}`
+C headers :file:`{userbase}/include/python{X.Y}`
+=============== ===========================================================
+
+Framework builds on Mac OS X use these paths:
+
+=============== ===========================================================
+Type of file Installation directory
+=============== ===========================================================
+modules :file:`{userbase}/lib/python/site-packages`
+scripts :file:`{userbase}/bin`
+data :file:`{userbase}`
+C headers :file:`{userbase}/include/python`
+=============== ===========================================================
+
+And here are the values used on Windows:
+
+=============== ===========================================================
+Type of file Installation directory
+=============== ===========================================================
+modules :file:`{userbase}\\Python{XY}\\site-packages`
+scripts :file:`{userbase}\\Scripts`
+data :file:`{userbase}`
+C headers :file:`{userbase}\\Python{XY}\\Include`
+=============== ===========================================================
+
+The advantage of using this scheme compared to the other ones described below is
+that the user site-packages directory is, under normal conditions, always included
+in :data:`sys.path` (see :mod:`site` for more information), which means that
+there is no additional step to perform after running ``pysetup`` to finalize the
+installation.
+
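+You can ask :mod:`site` for the exact directories your interpreter uses (the
+paths shown are just an example from a Linux system)::
+
+   $ python -c "import site; print(site.USER_BASE); print(site.USER_SITE)"
+   /home/user/.local
+   /home/user/.local/lib/python3.2/site-packages
+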
+The :command:`build_ext` command also has a ``--user`` option to add
+:file:`{userbase}/include` to the compiler search path for header files and
+:file:`{userbase}/lib` to the compiler search path for libraries as well as to
+the runtime search path for shared C libraries (rpath).
+
+
+.. _packaging-alt-install-home:
+
+Alternate installation: the home scheme
+---------------------------------------
+
+The idea behind the "home scheme" is that you build and maintain a personal
+stash of Python modules. This scheme's name is derived from the concept of a
+"home" directory on Unix, since it's not unusual for a Unix user to make their
+home directory have a layout similar to :file:`/usr/` or :file:`/usr/local/`.
+In spite of its name's origin, this scheme can be used by anyone, regardless
+of the operating system.
+
+Installing a new module distribution in this way is as simple as ::
+
+ pysetup run install_dist --home <dir>
+
+where you can supply any directory you like for the :option:`--home` option. On
+Unix, lazy typists can just type a tilde (``~``); the :command:`install_dist` command
+will expand this to your home directory::
+
+ pysetup run install_dist --home ~
+
+To make Python find the distributions installed with this scheme, you may have
+to :ref:`modify Python's search path <packaging-search-path>`, or edit
+:mod:`sitecustomize` (see :mod:`site`) to call :func:`site.addsitedir`, or
+edit :data:`sys.path` directly.
+
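+For example, a minimal :mod:`sitecustomize` module could look like this (a
+sketch only ---the directory is hypothetical and should match your ``--home``
+layout)::
+
+   # sitecustomize.py ---must live somewhere on the default sys.path
+   import site
+
+   # Register the directory and process any .pth files found in it.
+   site.addsitedir('/home/user/lib/python')
+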
+The :option:`--home` option defines the base directory for the installation.
+Under it, files are installed to the following directories:
+
+=============== ===========================================================
+Type of file Installation directory
+=============== ===========================================================
+modules :file:`{home}/lib/python`
+scripts :file:`{home}/bin`
+data :file:`{home}`
+C headers :file:`{home}/include/python`
+=============== ===========================================================
+
+(Mentally replace slashes with backslashes if you're on Windows.)
+
+
+.. _packaging-alt-install-prefix-unix:
+
+Alternate installation: Unix (the prefix scheme)
+------------------------------------------------
+
+The "prefix scheme" is useful when you wish to use one Python installation to
+run the build command, but install modules into the third-party module directory
+of a different Python installation (or something that looks like a different
+Python installation). If this sounds a trifle unusual, it is ---that's why the
+user and home schemes were described first. However, there are at least two
+known cases
+where the prefix scheme will be useful.
+
+First, consider that many Linux distributions put Python in :file:`/usr`, rather
+than the more traditional :file:`/usr/local`. This is entirely appropriate,
+since in those cases Python is part of "the system" rather than a local add-on.
+However, if you are installing Python modules from source, you probably want
+them to go in :file:`/usr/local/lib/python{X.Y}` rather than
+:file:`/usr/lib/python{X.Y}`. This can be done with ::
+
+ pysetup run install_dist --prefix /usr/local
+
+Another possibility is a network filesystem where the name used to write to a
+remote directory is different from the name used to read it: for example, the
+Python interpreter accessed as :file:`/usr/local/bin/python` might search for
+modules in :file:`/usr/local/lib/python{X.Y}`, but those modules would have to
+be installed to, say, :file:`/mnt/{@server}/export/lib/python{X.Y}`. This could
+be done with ::
+
+ pysetup run install_dist --prefix=/mnt/@server/export
+
+In either case, the :option:`--prefix` option defines the installation base, and
+the :option:`--exec-prefix` option defines the platform-specific installation
+base, which is used for platform-specific files. (Currently, this just means
+non-pure module distributions, but could be expanded to C libraries, binary
+executables, etc.) If :option:`--exec-prefix` is not supplied, it defaults to
+:option:`--prefix`. Files are installed as follows:
+
+================= ==========================================================
+Type of file Installation directory
+================= ==========================================================
+Python modules :file:`{prefix}/lib/python{X.Y}/site-packages`
+extension modules :file:`{exec-prefix}/lib/python{X.Y}/site-packages`
+scripts :file:`{prefix}/bin`
+data :file:`{prefix}`
+C headers :file:`{prefix}/include/python{X.Y}{abiflags}`
+================= ==========================================================
+
+.. XXX misses an entry for platinclude
+
+There is no requirement that :option:`--prefix` or :option:`--exec-prefix`
+actually point to an alternate Python installation; if the directories listed
+above do not already exist, they are created at installation time.
+
+Incidentally, the real reason the prefix scheme is important is simply that a
+standard Unix installation uses the prefix scheme, but with :option:`--prefix`
+and :option:`--exec-prefix` supplied by Python itself as ``sys.prefix`` and
+``sys.exec_prefix``. Thus, you might think you'll never use the prefix scheme,
+but every time you run ``pysetup run install_dist`` without any other
+options, you're using it.
+
+Note that installing extensions to an alternate Python installation doesn't have
+anything to do with how those extensions are built: in particular, extensions
+will be compiled using the Python header files (:file:`Python.h` and friends)
+installed with the Python interpreter used to run the build command. It is
+therefore your responsibility to ensure compatibility between the interpreter
+intended to run extensions installed in this way and the interpreter used to
+build these same extensions. To avoid problems, it is best to make sure that
+the two interpreters are the same version of Python (possibly different builds,
+or possibly copies of the same build). (Of course, if your :option:`--prefix`
+and :option:`--exec-prefix` don't even point to an alternate Python installation,
+this is immaterial.)
+
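+A quick way to check that two interpreters match is to compare the versions
+they report (the paths here are hypothetical)::
+
+   $ /usr/local/bin/python3 -c "import sys; print(sys.version_info[:3])"
+   (3, 2, 1)
+   $ /mnt/server/export/bin/python3 -c "import sys; print(sys.version_info[:3])"
+   (3, 2, 1)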
+
+.. _packaging-alt-install-prefix-windows:
+
+Alternate installation: Windows (the prefix scheme)
+---------------------------------------------------
+
+Windows has a different and vaguer notion of home directories than Unix, and
+since its standard Python installation is simpler, the :option:`--prefix` option
+has traditionally been used to install additional packages to arbitrary
+locations. For example, running ::
+
+ pysetup run install_dist --prefix "\Temp\Python"
+
+installs modules to the :file:`\\Temp\\Python` directory on the current drive.
+
+The installation base is defined by the :option:`--prefix` option; the
+:option:`--exec-prefix` option is not supported under Windows, which means that
+pure Python modules and extension modules are installed into the same location.
+Files are installed as follows:
+
+=============== ==========================================================
+Type of file Installation directory
+=============== ==========================================================
+modules :file:`{prefix}\\Lib\\site-packages`
+scripts :file:`{prefix}\\Scripts`
+data :file:`{prefix}`
+C headers :file:`{prefix}\\Include`
+=============== ==========================================================
+
+
+.. _packaging-custom-install:
+
+Custom installation
+===================
+
+Sometimes, the alternate installation schemes described in section
+:ref:`packaging-alt-install` just don't do what you want. You might want to tweak
+just one or two directories while keeping everything under the same base
+directory, or you might want to completely redefine the installation scheme.
+In either case, you're creating a *custom installation scheme*.
+
+To create a custom installation scheme, you start with one of the alternate
+schemes and override some of the installation directories used for the various
+types of files, using these options:
+
+====================== =======================
+Type of file Override option
+====================== =======================
+Python modules ``--install-purelib``
+extension modules ``--install-platlib``
+all modules ``--install-lib``
+scripts ``--install-scripts``
+data ``--install-data``
+C headers ``--install-headers``
+====================== =======================
+
+These override options can be relative, absolute,
+or explicitly defined in terms of one of the installation base directories.
+(There are two installation base directories, and they are normally the same
+---they only differ when you use the Unix "prefix scheme" and supply different
+``--prefix`` and ``--exec-prefix`` options; using ``--install-lib`` will
+override values computed or given for ``--install-purelib`` and
+``--install-platlib``, and is recommended for schemes that don't make a
+difference between Python and extension modules.)
+
+For example, say you're installing a module distribution to your home directory
+under Unix, but you want scripts to go in :file:`~/scripts` rather than
+:file:`~/bin`. As you might expect, you can override this directory with the
+:option:`--install-scripts` option and, in this case, it makes most sense to supply
+a relative path, which will be interpreted relative to the installation base
+directory (in our example, your home directory)::
+
+ pysetup run install_dist --home ~ --install-scripts scripts
+
+Another Unix example: suppose your Python installation was built and installed
+with a prefix of :file:`/usr/local/python`. Thus, in a standard installation,
+scripts will wind up in :file:`/usr/local/python/bin`. If you want them in
+:file:`/usr/local/bin` instead, you would supply this absolute directory for
+the :option:`--install-scripts` option::
+
+ pysetup run install_dist --install-scripts /usr/local/bin
+
+This command performs an installation using the "prefix scheme", where the
+prefix is whatever your Python interpreter was installed with ---in this case,
+:file:`/usr/local/python`.
+
+If you maintain Python on Windows, you might want third-party modules to live in
+a subdirectory of :file:`{prefix}`, rather than right in :file:`{prefix}`
+itself. This is almost as easy as customizing the script installation directory
+---you just have to remember that there are two types of modules to worry about,
+Python and extension modules, which can conveniently be both controlled by one
+option::
+
+ pysetup run install_dist --install-lib Site
+
+.. XXX Nothing is installed right under prefix in windows, is it??
+
+The specified installation directory is relative to :file:`{prefix}`. Of
+course, you also have to ensure that this directory is in Python's module
+search path, such as by putting a :file:`.pth` file in a site directory (see
+:mod:`site`). See section :ref:`packaging-search-path` to find out how to modify
+Python's search path.
+
+If you want to define an entire installation scheme, you just have to supply all
+of the installation directory options. Using relative paths is recommended here.
+For example, if you want to maintain all Python module-related files under
+:file:`python` in your home directory, and you want a separate directory for
+each platform that you use your home directory from, you might define the
+following installation scheme::
+
+ pysetup run install_dist --home ~ \
+ --install-purelib python/lib \
+ --install-platlib python/'lib.$PLAT' \
+ --install-scripts python/scripts \
+ --install-data python/data
+
+or, equivalently, ::
+
+ pysetup run install_dist --home ~/python \
+ --install-purelib lib \
+ --install-platlib 'lib.$PLAT' \
+ --install-scripts scripts \
+ --install-data data
+
+``$PLAT`` doesn't need to be defined as an environment variable ---it will also
+be expanded by Packaging as it parses your command line options, just as it
+does when parsing your configuration file(s). (More on that later.)
+
+Obviously, specifying the entire installation scheme every time you install a
+new module distribution would be very tedious. To spare you all that work, you
+can store it in a Packaging configuration file instead (see section
+:ref:`packaging-config-files`), like so::
+
+ [install_dist]
+ install-base = $HOME
+ install-purelib = python/lib
+ install-platlib = python/lib.$PLAT
+ install-scripts = python/scripts
+ install-data = python/data
+
+or, equivalently, ::
+
+ [install_dist]
+ install-base = $HOME/python
+ install-purelib = lib
+ install-platlib = lib.$PLAT
+ install-scripts = scripts
+ install-data = data
+
+Note that these two are *not* equivalent if you override their installation
+base directory when running the setup script. For example, ::
+
+ pysetup run install_dist --install-base /tmp
+
+would install pure modules to :file:`/tmp/python/lib` in the first case, and
+to :file:`/tmp/lib` in the second case. (For the second case, you'd probably
+want to supply an installation base of :file:`/tmp/python`.)
+
+You may have noticed the use of ``$HOME`` and ``$PLAT`` in the sample
+configuration file. These are Packaging configuration variables, which
+bear a strong resemblance to environment variables. In fact, you can use
+environment variables in configuration files on platforms that have such a notion, but
+Packaging additionally defines a few extra variables that may not be in your
+environment, such as ``$PLAT``. Of course, on systems that don't have
+environment variables, such as Mac OS 9, the configuration variables supplied by
+Packaging are the only ones you can use. See section :ref:`packaging-config-files`
+for details.
+
+.. XXX which vars win out eventually in case of clash env or Packaging?
+
+.. XXX need some Windows examples---when would custom installation schemes be
+ needed on those platforms?
+
+
+.. XXX Move this section to Doc/using
+
+.. _packaging-search-path:
+
+Modifying Python's search path
+------------------------------
+
+When the Python interpreter executes an :keyword:`import` statement, it searches
+for both Python code and extension modules along a search path. A default value
+for this path is configured into the Python binary when the interpreter is built.
+You can obtain the search path by importing the :mod:`sys` module and printing
+the value of ``sys.path``. ::
+
+ $ python
+ Python 2.2 (#11, Oct 3 2002, 13:31:27)
+ [GCC 2.96 20000731 (Red Hat Linux 7.3 2.96-112)] on linux2
+ Type "help", "copyright", "credits" or "license" for more information.
+ >>> import sys
+ >>> sys.path
+ ['', '/usr/local/lib/python2.3', '/usr/local/lib/python2.3/plat-linux2',
+ '/usr/local/lib/python2.3/lib-tk', '/usr/local/lib/python2.3/lib-dynload',
+ '/usr/local/lib/python2.3/site-packages']
+ >>>
+
+The null string in ``sys.path`` represents the current working directory.
+
+The expected convention for locally installed packages is to put them in the
+:file:`{...}/site-packages/` directory, but you may want to choose a different
+location for some reason. For example, your site may keep all web
+server-related software under :file:`/www` by convention. Add-on Python modules
+might then belong in :file:`/www/python`, and in order to import them, this
+directory would have to be added to ``sys.path``. There are several ways to
+solve this problem.
+
+The most convenient way is to add a path configuration file to a directory
+that's already on Python's path, usually to the :file:`.../site-packages/`
+directory. Path configuration files have an extension of :file:`.pth`, and each
+line must contain a single path that will be appended to ``sys.path``. (Because
+the new paths are appended to ``sys.path``, modules in the added directories
+will not override standard modules. This means you can't use this mechanism for
+installing fixed versions of standard modules.)
+
+Paths can be absolute or relative, in which case they're relative to the
+directory containing the :file:`.pth` file. See the documentation of
+the :mod:`site` module for more information.
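+
+For example, a hypothetical :file:`www.pth` file placed in the
+:file:`.../site-packages/` directory and containing the single line::
+
+ /www/python
+
+would add :file:`/www/python` to ``sys.path`` at every interpreter startup.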
+
+A slightly less convenient way is to edit the :file:`site.py` file in Python's
+standard library, and modify ``sys.path``. :file:`site.py` is automatically
+imported when the Python interpreter is executed, unless the :option:`-S` switch
+is supplied to suppress this behaviour. So you could simply edit
+:file:`site.py` and add two lines to it::
+
+ import sys
+ sys.path.append('/www/python/')
+
+However, if you reinstall the same feature release of Python (perhaps when
+upgrading from 3.3 to 3.3.1, for example), :file:`site.py` will be overwritten by
+the stock version. You'd have to remember that it was modified and save a copy
+before doing the installation.
+
+Alternatively, there are two environment variables that can modify ``sys.path``.
+:envvar:`PYTHONHOME` sets an alternate value for the prefix of the Python
+installation. For example, if :envvar:`PYTHONHOME` is set to ``/www/python``,
+the search path will be set to ``['', '/www/python/lib/pythonX.Y/',
+'/www/python/lib/pythonX.Y/plat-linux2', ...]``.
+
+The :envvar:`PYTHONPATH` variable can be set to a list of paths that will be
+added to the beginning of ``sys.path``. For example, if :envvar:`PYTHONPATH` is
+set to ``/www/python:/opt/py``, the search path will begin with
+``['/www/python', '/opt/py']``. (Note that directories must exist in order to
+be added to ``sys.path``; the :mod:`site` module removes non-existent paths.)
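+
+For example, in a Unix shell (a sketch; it assumes :file:`/www/python`
+exists, since non-existent directories are pruned)::
+
+ $ PYTHONPATH=/www/python python -c "import sys; print(sys.path[:2])"
+ ['', '/www/python']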
+
+Finally, ``sys.path`` is just a regular Python list, so any Python application
+can modify it by adding or removing entries.
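+
+For example, a minimal sketch (the directory name is illustrative)::
+
+ import sys
+ sys.path.insert(0, '/www/python')  # search this directory first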
+
+
+.. _packaging-config-files:
+
+Configuration files for Packaging
+=================================
+
+As mentioned above, you can use configuration files to store personal or site
+preferences for any option supported by any Packaging command. Three
+configuration files are consulted, with platform-dependent names and
+locations. These files will be read before parsing the command line, so they
+take precedence over default values. In turn, the command line will override
+configuration files. Lastly, if there are multiple configuration files, values
+from files read earlier will be overridden by values from files read later.
+
+
+.. _packaging-config-filenames:
+
+Location and names of configuration files
+-----------------------------------------
+
+The name and location of the configuration files vary slightly across
+platforms. On Unix and Mac OS X, these are the three configuration files listed
+in the order they are processed:
+
++--------------+----------------------------------------------------------+-------+
+| Type of file | Location and filename | Notes |
++==============+==========================================================+=======+
+| system | :file:`{prefix}/lib/python{ver}/packaging/packaging.cfg` | \(1) |
++--------------+----------------------------------------------------------+-------+
+| personal | :file:`$HOME/.pydistutils.cfg` | \(2) |
++--------------+----------------------------------------------------------+-------+
+| local | :file:`setup.cfg` | \(3) |
++--------------+----------------------------------------------------------+-------+
+
+Similarly, the configuration files on Windows, also listed in the order they
+are processed, are these:
+
++--------------+-------------------------------------------------+-------+
+| Type of file | Location and filename | Notes |
++==============+=================================================+=======+
+| system | :file:`{prefix}\\Lib\\packaging\\packaging.cfg` | \(4) |
++--------------+-------------------------------------------------+-------+
+| personal | :file:`%HOME%\\pydistutils.cfg` | \(5) |
++--------------+-------------------------------------------------+-------+
+| local | :file:`setup.cfg` | \(3) |
++--------------+-------------------------------------------------+-------+
+
+On all platforms, the *personal* file can be temporarily disabled by
+means of the `--no-user-cfg` option.
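+
+For example (a sketch; global options are given before the action)::
+
+ $ pysetup --no-user-cfg run build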
+
+Notes:
+
+(1)
+ Strictly speaking, the system-wide configuration file lives in the directory
+ where Packaging is installed.
+
+(2)
+ On Unix, if the :envvar:`HOME` environment variable is not defined, the
+ user's home directory will be determined with the :func:`getpwuid` function
+ from the standard :mod:`pwd` module. Packaging uses the
+ :func:`os.path.expanduser` function to do this.
+
+(3)
+ I.e., in the current directory (usually the location of the setup script).
+
+(4)
+ (See also note (1).) Python's default installation prefix is
+ :file:`C:\\Python`, so the system configuration file is normally
+ :file:`C:\\Python\\Lib\\packaging\\packaging.cfg`.
+
+(5)
+ On Windows, if the :envvar:`HOME` environment variable is not defined,
+ :envvar:`USERPROFILE` then :envvar:`HOMEDRIVE` and :envvar:`HOMEPATH` will
+ be tried. Packaging uses the :func:`os.path.expanduser` function to do this.
+
+
+.. _packaging-config-syntax:
+
+Syntax of configuration files
+-----------------------------
+
+All Packaging configuration files share the same syntax. Options defined in
+them are grouped into sections, and each Packaging command gets its own section.
+Additionally, there's a ``global`` section for options that affect every command.
+Sections consist of one or more lines, each containing a single option
+specified as ``option = value``.
+
+.. XXX use dry-run in the next example or use a pysetup option as example
+
+For example, here's a complete configuration file that forces all commands to
+run quietly by default::
+
+ [global]
+ verbose = 0
+
+If this were the system configuration file, it would affect all processing of
+any Python module distribution by any user on the current system. If it were
+installed as your personal configuration file, it would affect only module
+distributions processed by you. Lastly, if it were used as the
+:file:`setup.cfg` for a particular module distribution, it would affect that
+distribution only.
+
+If you wanted to, you could override the default "build base" directory and
+make the :command:`build\*` commands always forcibly rebuild all files with
+the following::
+
+ [build]
+ build-base = blib
+ force = 1
+
+which corresponds to the command-line arguments::
+
+ pysetup run build --build-base blib --force
+
+except that including the :command:`build` command on the command-line means
+that command will be run. Including a particular command in configuration files
+has no such implication; it only means that if the command is run, the options
+for it in the configuration file will apply. (This is also true if you run
+other commands that derive values from it.)
+
+You can find out the complete list of options for any command using the
+:option:`--help` option, e.g.::
+
+ pysetup run build --help
+
+and you can find out the complete list of global options by using
+:option:`--help` without a command::
+
+ pysetup run --help
+
+See also the "Reference" section of the "Distributing Python Modules" manual.
+
+.. XXX no links to the relevant section exist.
+
+
+.. _packaging-building-ext:
+
+Building extensions: tips and tricks
+====================================
+
+Whenever possible, Packaging tries to use the configuration information made
+available by the Python interpreter used to run `pysetup`.
+For example, the same compiler and linker flags used to compile Python will also
+be used for compiling extensions. Usually this will work well, but in
+complicated situations this might be inappropriate. This section discusses how
+to override the usual Packaging behaviour.
+
+
+.. _packaging-tweak-flags:
+
+Tweaking compiler/linker flags
+------------------------------
+
+Compiling a Python extension written in C or C++ will sometimes require
+specifying custom flags for the compiler and linker in order to use a particular
+library or produce a special kind of object code. This is especially true if the
+extension hasn't been tested on your platform, or if you're trying to
+cross-compile Python.
+
+.. TODO update to new setup.cfg
+
+In the most general case, the extension author might have foreseen that
+compiling the extensions would be complicated, and provided a :file:`Setup` file
+for you to edit. This will likely only be done if the module distribution
+contains many separate extension modules, or if they often require elaborate
+sets of compiler flags in order to work.
+
+A :file:`Setup` file, if present, is parsed in order to get a list of extensions
+to build. Each line in a :file:`Setup` describes a single module. Lines have
+the following structure::
+
+ module ... [sourcefile ...] [cpparg ...] [library ...]
+
+
+Let's examine each of the fields in turn.
+
+* *module* is the name of the extension module to be built, and should be a
+ valid Python identifier. You can't just change this in order to rename a module
+ (edits to the source code would also be needed), so this should be left alone.
+
+* *sourcefile* is anything that's likely to be a source code file, at least
+ judging by the filename. Filenames ending in :file:`.c` are assumed to be
+ written in C, filenames ending in :file:`.C`, :file:`.cc`, and :file:`.c++` are
+ assumed to be C++, and filenames ending in :file:`.m` or :file:`.mm` are assumed
+ to be in Objective C.
+
+* *cpparg* is an argument for the C preprocessor, and is anything starting with
+ :option:`-I`, :option:`-D`, :option:`-U` or :option:`-C`.
+
+* *library* is anything ending in :file:`.a` or beginning with :option:`-l` or
+ :option:`-L`.
+
+If the module requires a special library on your platform, you can add it by
+editing the :file:`Setup` file and running ``pysetup run build``.
+For example, if the module defined by the line ::
+
+ foo foomodule.c
+
+must be linked with the math library :file:`libm.a` on your platform, simply add
+:option:`-lm` to the line::
+
+ foo foomodule.c -lm
+
+Arbitrary switches intended for the compiler or the linker can be supplied with
+the :option:`-Xcompiler` *arg* and :option:`-Xlinker` *arg* options::
+
+ foo foomodule.c -Xcompiler -o32 -Xlinker -shared -lm
+
+The next option after :option:`-Xcompiler` and :option:`-Xlinker` will be
+appended to the proper command line, so in the above example the compiler will
+be passed the :option:`-o32` option, and the linker will be passed
+:option:`-shared`. If a compiler option requires an argument, you'll have to
+supply multiple :option:`-Xcompiler` options; for example, to pass ``-x c++``
+the :file:`Setup` file would have to contain ``-Xcompiler -x -Xcompiler c++``.
+
+Compiler flags can also be supplied through setting the :envvar:`CFLAGS`
+environment variable. If set, the contents of :envvar:`CFLAGS` will be added to
+the compiler flags specified in the :file:`Setup` file.
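+
+For example, in a Unix shell (a sketch; the flags themselves depend on your
+compiler)::
+
+ $ CFLAGS='-g -O0' pysetup run build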
+
+
+.. _packaging-non-ms-compilers:
+
+Using non-Microsoft compilers on Windows
+----------------------------------------
+
+.. sectionauthor:: Rene Liebscher <R.Liebscher@gmx.de>
+
+
+
+Borland/CodeGear C++
+^^^^^^^^^^^^^^^^^^^^
+
+This subsection describes the necessary steps to use Packaging with the Borland
+C++ compiler version 5.5. First you have to know that Borland's object file
+format (OMF) is different from the format used by the Python version you can
+download from the Python or ActiveState Web site. (Python is built with
+Microsoft Visual C++, which uses COFF as the object file format.) For this
+reason, you have to convert Python's library :file:`python25.lib` into the
+Borland format. You can do this as follows:
+
+.. Should we mention that users have to create cfg-files for the compiler?
+.. see also http://community.borland.com/article/0,1410,21205,00.html
+
+::
+
+ coff2omf python25.lib python25_bcpp.lib
+
+The :file:`coff2omf` program comes with the Borland compiler. The file
+:file:`python25.lib` is in the :file:`Libs` directory of your Python
+installation. If your extension uses other libraries (zlib, ...) you have to
+convert them too.
+
+The converted files have to reside in the same directories as the normal
+libraries.
+
+How does Packaging manage to use these libraries with their changed names? If
+the extension needs a library (e.g. :file:`foo`), Packaging first checks for a
+library with the suffix :file:`_bcpp` (e.g. :file:`foo_bcpp.lib`) and uses it
+if found. If it doesn't find such a special library, it uses the default name
+(:file:`foo.lib`). [#]_
+
+To let Packaging compile your extension with Borland C++, you now have to
+type::
+
+ pysetup run build --compiler bcpp
+
+If you want to use the Borland C++ compiler as the default, you could specify
+this in your personal or system-wide configuration file for Packaging (see
+section :ref:`packaging-config-files`).
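+
+For example, a personal configuration file selecting this compiler by default
+might contain (a sketch, using the :command:`build` options described above)::
+
+ [build]
+ compiler = bcpp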
+
+
+.. seealso::
+
+ `C++Builder Compiler <http://www.codegear.com/downloads/free/cppbuilder>`_
+ Information about the free C++ compiler from Borland, including links to the
+ download pages.
+
+ `Creating Python Extensions Using Borland's Free Compiler <http://www.cyberus.ca/~g_will/pyExtenDL.shtml>`_
+ Document describing how to use Borland's free command-line C++ compiler to build
+ Python.
+
+
+GNU C / Cygwin / MinGW
+^^^^^^^^^^^^^^^^^^^^^^
+
+This section describes the necessary steps to use Packaging with the GNU C/C++
+compilers in their Cygwin and MinGW distributions. [#]_ For a Python interpreter
+that was built with Cygwin, everything should work without any of the
+following steps.
+
+Not all extensions can be built with MinGW or Cygwin, but many can. Extensions
+most likely to not work are those that use C++ or depend on Microsoft Visual C
+extensions.
+
+To let Packaging compile your extension with Cygwin, you have to type::
+
+ pysetup run build --compiler=cygwin
+
+and for Cygwin in no-cygwin mode [#]_ or for MinGW, type::
+
+ pysetup run build --compiler=mingw32
+
+If you want to use any of these options/compilers by default, you should
+consider writing it in your personal or system-wide configuration file for
+Packaging (see section :ref:`packaging-config-files`).
+
+Older Versions of Python and MinGW
+""""""""""""""""""""""""""""""""""
+The following instructions only apply if you're using a version of Python
+older than 2.4.1 together with a MinGW older than 3.0.0 (with
+:file:`binutils-2.13.90-20030111-1`).
+
+These compilers require some special libraries. This task is more complex than
+for Borland's C++, because there is no program to convert the library. First
+you have to create a list of symbols which the Python DLL exports. (You can find
+a good program for this task at
+http://www.emmestech.com/software/pexports-0.43/download_pexports.html).
+
+.. I don't understand what the next line means. --amk
+ (inclusive the references on data structures.)
+
+::
+
+ pexports python25.dll > python25.def
+
+The location of an installed :file:`python25.dll` will depend on the
+installation options and the version and language of Windows. In a "just for
+me" installation, it will appear in the root of the installation directory. In
+a shared installation, it will be located in the system directory.
+
+Then you can create an import library for gcc from this information. ::
+
+ /cygwin/bin/dlltool --dllname python25.dll --def python25.def --output-lib libpython25.a
+
+The resulting library has to be placed in the same directory as
+:file:`python25.lib`. (This should be the :file:`libs` directory under your
+Python installation directory.)
+
+If your extension uses other libraries (zlib, ...), you might have to convert
+them too. The converted files have to reside in the same directories as the
+normal libraries do.
+
+
+.. seealso::
+
+ `Building Python modules on MS Windows platform with MinGW <http://www.zope.org/Members/als/tips/win32_mingw_modules>`_
+ Information about building the required libraries for the MinGW
+ environment.
+
+
+.. rubric:: Footnotes
+
+.. [#] This also means you could replace all existing COFF-libraries with
+ OMF-libraries of the same name.
+
+.. [#] Check http://sources.redhat.com/cygwin/ and http://www.mingw.org/ for
+ more information.
+
+.. [#] Then you have no POSIX emulation available, but you also don't need
+ :file:`cygwin1.dll`.
diff --git a/Doc/install/pysetup-config.rst b/Doc/install/pysetup-config.rst
new file mode 100644
index 0000000..a473bfe
--- /dev/null
+++ b/Doc/install/pysetup-config.rst
@@ -0,0 +1,44 @@
+.. _packaging-pysetup-config:
+
+=====================
+Pysetup Configuration
+=====================
+
+Pysetup supports two configuration files: :file:`.pypirc` and :file:`packaging.cfg`.
+
+.. FIXME integrate with configfile instead of duplicating
+
+Configuring indexes
+-------------------
+
+You can configure additional indexes in :file:`.pypirc` to be used for index-related
+operations. By default, all configured index-servers and package-servers will be used
+in an additive fashion. To limit operations to specific indexes, use the
+:option:`--index` and :option:`--package-server` options::
+
+ $ pysetup install --index pypi --package-server django some.project
+
+Adding indexes to :file:`.pypirc`::
+
+ [packaging]
+ index-servers =
+ pypi
+ other
+
+ package-servers =
+ django
+
+ [pypi]
+ repository: <repository-url>
+ username: <username>
+ password: <password>
+
+ [other]
+ repository: <repository-url>
+ username: <username>
+ password: <password>
+
+ [django]
+ repository: <repository-url>
+ username: <username>
+ password: <password>
diff --git a/Doc/install/pysetup-servers.rst b/Doc/install/pysetup-servers.rst
new file mode 100644
index 0000000..c6106de
--- /dev/null
+++ b/Doc/install/pysetup-servers.rst
@@ -0,0 +1,61 @@
+.. _packaging-pysetup-servers:
+
+===============
+Package Servers
+===============
+
+Pysetup supports installing Python packages from *Package Servers* in addition
+to PyPI indexes and mirrors.
+
+Package Servers are simple directory listings of Python distributions. Directories
+can be served via HTTP or a local file system. This is useful when you want to
+dump source distributions in a directory and not worry about the full index structure.
+
+Serving distributions from Apache
+---------------------------------
+::
+
+ $ mkdir -p /var/www/html/python/distributions
+ $ cp *.tar.gz /var/www/html/python/distributions/
+
+ <VirtualHost python.example.org:80>
+ ServerAdmin webmaster@domain.com
+ DocumentRoot "/var/www/html/python"
+ ServerName python.example.org
+ ErrorLog logs/python.example.org-error.log
+ CustomLog logs/python.example.org-access.log common
+ Options Indexes FollowSymLinks MultiViews
+ DirectoryIndex index.html index.htm
+
+ <Directory "/var/www/html/python/distributions">
+ Options Indexes FollowSymLinks MultiViews
+ Order allow,deny
+ Allow from all
+ </Directory>
+ </VirtualHost>
+
+Add the Apache based distribution server to :file:`.pypirc`::
+
+ [packaging]
+ package-servers =
+ apache
+
+ [apache]
+ repository: http://python.example.org/distributions/
+
+
+Serving distributions from a file system
+----------------------------------------
+::
+
+ $ mkdir -p /data/python/distributions
+ $ cp *.tar.gz /data/python/distributions/
+
+Add the directory to :file:`.pypirc`::
+
+ [packaging]
+ package-servers =
+ local
+
+ [local]
+ repository: file:///data/python/distributions/
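+
+Distributions can then be installed from that server (a sketch; the
+``--package-server`` option is described in :ref:`packaging-pysetup-config`)::
+
+ $ pysetup install --package-server local some.project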
diff --git a/Doc/install/pysetup.rst b/Doc/install/pysetup.rst
new file mode 100644
index 0000000..d472c24
--- /dev/null
+++ b/Doc/install/pysetup.rst
@@ -0,0 +1,164 @@
+.. _packaging-pysetup:
+
+================
+Pysetup Tutorial
+================
+
+Getting started
+---------------
+
+Pysetup is a simple script that supports the following features:
+
+- install, remove, list, and verify Python packages;
+- search for available packages on PyPI or any *Simple Index*;
+- verify installed packages (md5sum, installed files, version).
+
+
+Finding out what's installed
+----------------------------
+
+Pysetup makes it easy to find out what Python packages are installed::
+
+ $ pysetup list virtualenv
+ 'virtualenv' 1.6 at '/opt/python3.3/lib/python3.3/site-packages/virtualenv-1.6-py3.3.egg-info'
+
+ $ pysetup list
+ 'pyverify' 0.8.1 at '/opt/python3.3/lib/python3.3/site-packages/pyverify-0.8.1.dist-info'
+ 'virtualenv' 1.6 at '/opt/python3.3/lib/python3.3/site-packages/virtualenv-1.6-py3.3.egg-info'
+ ...
+
+
+Installing a distribution
+-------------------------
+
+Pysetup can install a Python project from the following sources:
+
+- PyPI and Simple Indexes;
+- source directories containing a valid :file:`setup.py` or :file:`setup.cfg`;
+- distribution source archives (:file:`project-1.0.tar.gz`, :file:`project-1.0.zip`);
+- HTTP (http://host/packages/project-1.0.tar.gz).
+
+
+Installing from PyPI and Simple Indexes::
+
+ $ pysetup install project
+ $ pysetup install project==1.0
+
+Installing from a distribution source archive::
+
+ $ pysetup install project-1.0.tar.gz
+
+Installing from a source directory containing a valid :file:`setup.py` or
+:file:`setup.cfg`::
+
+ $ cd path/to/source/directory
+ $ pysetup install
+
+ $ pysetup install path/to/source/directory
+
+Installing from HTTP::
+
+ $ pysetup install http://host/packages/project-1.0.tar.gz
+
+
+Retrieving metadata
+-------------------
+
+You can gather metadata from two sources: a project's source directory or an
+installed distribution. The `pysetup metadata` command can retrieve one or
+more metadata fields, each specified with the `-f` option followed by the
+field name. ::
+
+ $ pysetup metadata virtualenv -f version -f name
+ Version:
+ 1.6
+ Name:
+ virtualenv
+
+ $ pysetup metadata virtualenv
+ Metadata-Version:
+ 1.0
+ Name:
+ virtualenv
+ Version:
+ 1.6
+ Platform:
+ UNKNOWN
+ Summary:
+ Virtual Python Environment builder
+ ...
+
+.. seealso::
+
+ There are three metadata versions: 1.0 (:PEP:`241`), 1.1 (:PEP:`314`), and
+ 1.2 (:PEP:`345`). These PEPs describe the field names, their semantics and
+ their usage.
+
+
+Removing a distribution
+-----------------------
+
+You can remove one or more installed distributions using the `pysetup remove`
+command::
+
+ $ pysetup remove virtualenv
+ removing 'virtualenv':
+ /opt/python3.3/lib/python3.3/site-packages/virtualenv-1.6-py3.3.egg-info/dependency_links.txt
+ /opt/python3.3/lib/python3.3/site-packages/virtualenv-1.6-py3.3.egg-info/entry_points.txt
+ /opt/python3.3/lib/python3.3/site-packages/virtualenv-1.6-py3.3.egg-info/not-zip-safe
+ /opt/python3.3/lib/python3.3/site-packages/virtualenv-1.6-py3.3.egg-info/PKG-INFO
+ /opt/python3.3/lib/python3.3/site-packages/virtualenv-1.6-py3.3.egg-info/SOURCES.txt
+ /opt/python3.3/lib/python3.3/site-packages/virtualenv-1.6-py3.3.egg-info/top_level.txt
+ Proceed (y/n)? y
+ success: removed 6 files and 1 dirs
+
+The optional `-y` argument auto-confirms the removal, skipping the confirmation prompt::
+
+ $ pysetup remove virtualenv -y
+
+
+Getting help
+------------
+
+All pysetup actions accept the `-h` and `--help` options, which print the
+command's help string to stdout. ::
+
+ $ pysetup remove -h
+ Usage: pysetup remove dist [-y]
+ or: pysetup remove --help
+
+ Uninstall a Python package.
+
+ positional arguments:
+ dist installed distribution name
+
+ optional arguments:
+ -y auto confirm package removal
+
+Getting a list of all pysetup actions and global options::
+
+ $ pysetup --help
+ Usage: pysetup [options] action [action_options]
+
+ Actions:
+ run: Run one or several commands
+ metadata: Display the metadata of a project
+ install: Install a project
+ remove: Remove a project
+ search: Search for a project in the indexes
+ list: List installed projects
+ graph: Display a graph
+ create: Create a project
+ generate-setup: Generate a backward-compatible setup.py
+
+ To get more help on an action, use:
+
+ pysetup action --help
+
+ Global options:
+ --verbose (-v) run verbosely (default)
+ --quiet (-q) run quietly (turns verbosity off)
+ --dry-run (-n) don't actually do anything
+ --help (-h) show detailed help message
+ --no-user-cfg ignore pydistutils.cfg in your home directory
+ --version Display the version
diff --git a/Doc/library/_thread.rst b/Doc/library/_thread.rst
index 369e9cd..e7e7504 100644
--- a/Doc/library/_thread.rst
+++ b/Doc/library/_thread.rst
@@ -35,6 +35,9 @@ It defines the following constants and functions:
Raised on thread-specific errors.
+ .. versionchanged:: 3.3
+ This is now a synonym of the built-in :exc:`RuntimeError`.
+
.. data:: LockType
diff --git a/Doc/library/abc.rst b/Doc/library/abc.rst
index 1048b24..6f42222 100644
--- a/Doc/library/abc.rst
+++ b/Doc/library/abc.rst
@@ -18,7 +18,7 @@ regarding a type hierarchy for numbers based on ABCs.)
The :mod:`collections` module has some concrete classes that derive from
ABCs; these can, of course, be further derived. In addition the
-:mod:`collections` module has some ABCs that can be used to test whether
+:mod:`collections.abc` submodule has some ABCs that can be used to test whether
a class or instance provides a particular interface, for example, is it
hashable or a mapping.
@@ -55,6 +55,9 @@ This module provides the following class:
assert issubclass(tuple, MyABC)
assert isinstance((), MyABC)
+ .. versionchanged:: 3.3
+ Returns the registered subclass, to allow usage as a class decorator.
+
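+ Since :meth:`register` now returns the registered class, it can be used as
+ a class decorator (a minimal sketch)::
+
+ @MyABC.register
+ class OtherClass:
+     ...
+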
You can also override this method in an abstract base class:
.. method:: __subclasshook__(subclass)
@@ -124,19 +127,18 @@ This module provides the following class:
available as a method of ``Foo``, so it is provided separately.
-It also provides the following decorators:
+The :mod:`abc` module also provides the following decorators:
.. decorator:: abstractmethod(function)
A decorator indicating abstract methods.
- Using this decorator requires that the class's metaclass is :class:`ABCMeta` or
- is derived from it.
- A class that has a metaclass derived from :class:`ABCMeta`
- cannot be instantiated unless all of its abstract methods and
- properties are overridden.
- The abstract methods can be called using any of the normal 'super' call
- mechanisms.
+ Using this decorator requires that the class's metaclass is :class:`ABCMeta`
+ or is derived from it. A class that has a metaclass derived from
+ :class:`ABCMeta` cannot be instantiated unless all of its abstract methods
+ and properties are overridden. The abstract methods can be called using any
+ of the normal 'super' call mechanisms. :func:`abstractmethod` may be used
+ to declare abstract methods for properties and descriptors.
Dynamically adding abstract methods to a class, or attempting to modify the
abstraction status of a method or class once it is created, are not
@@ -144,12 +146,52 @@ It also provides the following decorators:
regular inheritance; "virtual subclasses" registered with the ABC's
:meth:`register` method are not affected.
- Usage::
+ When :func:`abstractmethod` is applied in combination with other method
+ descriptors, it should be applied as the innermost decorator, as shown in
+ the following usage examples::
class C(metaclass=ABCMeta):
@abstractmethod
def my_abstract_method(self, ...):
...
+ @classmethod
+ @abstractmethod
+ def my_abstract_classmethod(cls, ...):
+ ...
+ @staticmethod
+ @abstractmethod
+ def my_abstract_staticmethod(...):
+ ...
+
+ @property
+ @abstractmethod
+ def my_abstract_property(self):
+ ...
+ @my_abstract_property.setter
+ @abstractmethod
+ def my_abstract_property(self, val):
+ ...
+
+ @abstractmethod
+ def _get_x(self):
+ ...
+ @abstractmethod
+ def _set_x(self, val):
+ ...
+ x = property(_get_x, _set_x)
+
+ In order to correctly interoperate with the abstract base class machinery,
+ the descriptor must identify itself as abstract using
+ :attr:`__isabstractmethod__`. In general, this attribute should be ``True``
+ if any of the methods used to compose the descriptor are abstract. For
+ example, Python's built-in property does the equivalent of::
+
+ class Descriptor:
+ ...
+ @property
+ def __isabstractmethod__(self):
+ return any(getattr(f, '__isabstractmethod__', False) for
+ f in (self._fget, self._fset, self._fdel))
.. note::
@@ -174,6 +216,8 @@ It also provides the following decorators:
...
.. versionadded:: 3.2
+ .. deprecated:: 3.3
+ Use :class:`classmethod` with :func:`abstractmethod` instead
.. decorator:: abstractstaticmethod(function)
@@ -189,18 +233,19 @@ It also provides the following decorators:
...
.. versionadded:: 3.2
+ .. deprecated:: 3.3
+ Use :class:`staticmethod` with :func:`abstractmethod` instead
-.. function:: abstractproperty(fget=None, fset=None, fdel=None, doc=None)
+.. decorator:: abstractproperty(fget=None, fset=None, fdel=None, doc=None)
A subclass of the built-in :func:`property`, indicating an abstract property.
- Using this function requires that the class's metaclass is :class:`ABCMeta` or
- is derived from it.
- A class that has a metaclass derived from :class:`ABCMeta` cannot be
- instantiated unless all of its abstract methods and properties are overridden.
- The abstract properties can be called using any of the normal
- 'super' call mechanisms.
+ Using this function requires that the class's metaclass is :class:`ABCMeta`
+ or is derived from it. A class that has a metaclass derived from
+ :class:`ABCMeta` cannot be instantiated unless all of its abstract methods
+ and properties are overridden. The abstract properties can be called using
+ any of the normal 'super' call mechanisms.
Usage::
@@ -217,6 +262,9 @@ It also provides the following decorators:
def setx(self, value): ...
x = abstractproperty(getx, setx)
+ .. deprecated:: 3.3
+ Use :class:`property` with :func:`abstractmethod` instead
+
.. rubric:: Footnotes
diff --git a/Doc/library/archiving.rst b/Doc/library/archiving.rst
index 75d137c..c928494 100644
--- a/Doc/library/archiving.rst
+++ b/Doc/library/archiving.rst
@@ -5,8 +5,9 @@ Data Compression and Archiving
******************************
The modules described in this chapter support data compression with the zlib,
-gzip, and bzip2 algorithms, and the creation of ZIP- and tar-format archives.
-See also :ref:`archiving-operations` provided by the :mod:`shutil` module.
+gzip, bzip2 and lzma algorithms, and the creation of ZIP- and tar-format
+archives. See also :ref:`archiving-operations` provided by the :mod:`shutil`
+module.
.. toctree::
@@ -14,5 +15,6 @@ See also :ref:`archiving-operations` provided by the :mod:`shutil` module.
zlib.rst
gzip.rst
bz2.rst
+ lzma.rst
zipfile.rst
tarfile.rst
diff --git a/Doc/library/argparse.rst b/Doc/library/argparse.rst
index 79a98cb..a3a67b5 100644
--- a/Doc/library/argparse.rst
+++ b/Doc/library/argparse.rst
@@ -362,16 +362,16 @@ formatter_class
^^^^^^^^^^^^^^^
:class:`ArgumentParser` objects allow the help formatting to be customized by
-specifying an alternate formatting class. Currently, there are three such
+specifying an alternate formatting class. Currently, there are four such
classes:
.. class:: RawDescriptionHelpFormatter
RawTextHelpFormatter
ArgumentDefaultsHelpFormatter
+ MetavarTypeHelpFormatter
-The first two allow more control over how textual descriptions are displayed,
-while the last automatically adds information about argument default values.
-
+:class:`RawDescriptionHelpFormatter` and :class:`RawTextHelpFormatter` give
+more control over how textual descriptions are displayed.
By default, :class:`ArgumentParser` objects line-wrap the description_ and
epilog_ texts in command-line help messages::
@@ -424,8 +424,8 @@ should not be line-wrapped::
:class:`RawTextHelpFormatter` maintains whitespace for all sorts of help text,
including argument descriptions.
-The other formatter class available, :class:`ArgumentDefaultsHelpFormatter`,
-will add information about the default value of each of the arguments::
+:class:`ArgumentDefaultsHelpFormatter` automatically adds information about
+default values to each of the argument help messages::
>>> parser = argparse.ArgumentParser(
... prog='PROG',
@@ -442,6 +442,25 @@ will add information about the default value of each of the arguments::
-h, --help show this help message and exit
--foo FOO FOO! (default: 42)
+:class:`MetavarTypeHelpFormatter` uses the name of the type_ argument for each
+argument as the display name for its values (rather than using the dest_
+as the regular formatter does)::
+
+ >>> parser = argparse.ArgumentParser(
+ ... prog='PROG',
+ ... formatter_class=argparse.MetavarTypeHelpFormatter)
+ >>> parser.add_argument('--foo', type=int)
+ >>> parser.add_argument('bar', type=float)
+ >>> parser.print_help()
+ usage: PROG [-h] [--foo int] float
+
+ positional arguments:
+ float
+
+ optional arguments:
+ -h, --help show this help message and exit
+ --foo int
+
conflict_handler
^^^^^^^^^^^^^^^^
diff --git a/Doc/library/array.rst b/Doc/library/array.rst
index d563cce..3e275a2 100644
--- a/Doc/library/array.rst
+++ b/Doc/library/array.rst
@@ -14,36 +14,44 @@ them is constrained. The type is specified at object creation time by using a
:dfn:`type code`, which is a single character. The following type codes are
defined:
-+-----------+----------------+-------------------+-----------------------+
-| Type code | C Type | Python Type | Minimum size in bytes |
-+===========+================+===================+=======================+
-| ``'b'`` | signed char | int | 1 |
-+-----------+----------------+-------------------+-----------------------+
-| ``'B'`` | unsigned char | int | 1 |
-+-----------+----------------+-------------------+-----------------------+
-| ``'u'`` | Py_UNICODE | Unicode character | 2 (see note) |
-+-----------+----------------+-------------------+-----------------------+
-| ``'h'`` | signed short | int | 2 |
-+-----------+----------------+-------------------+-----------------------+
-| ``'H'`` | unsigned short | int | 2 |
-+-----------+----------------+-------------------+-----------------------+
-| ``'i'`` | signed int | int | 2 |
-+-----------+----------------+-------------------+-----------------------+
-| ``'I'`` | unsigned int | int | 2 |
-+-----------+----------------+-------------------+-----------------------+
-| ``'l'`` | signed long | int | 4 |
-+-----------+----------------+-------------------+-----------------------+
-| ``'L'`` | unsigned long | int | 4 |
-+-----------+----------------+-------------------+-----------------------+
-| ``'f'`` | float | float | 4 |
-+-----------+----------------+-------------------+-----------------------+
-| ``'d'`` | double | float | 8 |
-+-----------+----------------+-------------------+-----------------------+
-
-.. note::
-
- The ``'u'`` typecode corresponds to Python's unicode character. On narrow
- Unicode builds this is 2-bytes, on wide builds this is 4-bytes.
++-----------+--------------------+-------------------+-----------------------+-------+
+| Type code | C Type | Python Type | Minimum size in bytes | Notes |
++===========+====================+===================+=======================+=======+
+| ``'b'`` | signed char | int | 1 | |
++-----------+--------------------+-------------------+-----------------------+-------+
+| ``'B'`` | unsigned char | int | 1 | |
++-----------+--------------------+-------------------+-----------------------+-------+
+| ``'u'`` | Py_UCS4 | Unicode character | 4 | |
++-----------+--------------------+-------------------+-----------------------+-------+
+| ``'h'`` | signed short | int | 2 | |
++-----------+--------------------+-------------------+-----------------------+-------+
+| ``'H'`` | unsigned short | int | 2 | |
++-----------+--------------------+-------------------+-----------------------+-------+
+| ``'i'`` | signed int | int | 2 | |
++-----------+--------------------+-------------------+-----------------------+-------+
+| ``'I'`` | unsigned int | int | 2 | |
++-----------+--------------------+-------------------+-----------------------+-------+
+| ``'l'`` | signed long | int | 4 | |
++-----------+--------------------+-------------------+-----------------------+-------+
+| ``'L'`` | unsigned long | int | 4 | |
++-----------+--------------------+-------------------+-----------------------+-------+
+| ``'q'`` | signed long long | int | 8 | \(1) |
++-----------+--------------------+-------------------+-----------------------+-------+
+| ``'Q'`` | unsigned long long | int | 8 | \(1) |
++-----------+--------------------+-------------------+-----------------------+-------+
+| ``'f'`` | float | float | 4 | |
++-----------+--------------------+-------------------+-----------------------+-------+
+| ``'d'`` | double | float | 8 | |
++-----------+--------------------+-------------------+-----------------------+-------+
+
+Notes:
+
+(1)
+ The ``'q'`` and ``'Q'`` type codes are available only if
+ the platform C compiler used to build Python supports C :c:type:`long long`,
+ or, on Windows, :c:type:`__int64`.
+
+ .. versionadded:: 3.3
The actual representation of values is determined by the machine architecture
(strictly speaking, by the C implementation). The actual size can be accessed
diff --git a/Doc/library/ast.rst b/Doc/library/ast.rst
index e2c0b6d..16de3ca 100644
--- a/Doc/library/ast.rst
+++ b/Doc/library/ast.rst
@@ -96,9 +96,6 @@ Node classes
Abstract Grammar
----------------
-The module defines a string constant ``__version__`` which is the decimal
-Subversion revision number of the file shown below.
-
The abstract grammar is currently defined as follows:
.. literalinclude:: ../../Parser/Python.asdl
diff --git a/Doc/library/asyncore.rst b/Doc/library/asyncore.rst
index 619b7bb..8750659 100644
--- a/Doc/library/asyncore.rst
+++ b/Doc/library/asyncore.rst
@@ -184,12 +184,15 @@ any that have been added to the map during asynchronous service) is closed.
Most of these are nearly identical to their socket partners.
- .. method:: create_socket(family, type)
+ .. method:: create_socket(family=socket.AF_INET, type=socket.SOCK_STREAM)
This is identical to the creation of a normal socket, and will use the
same options for creation. Refer to the :mod:`socket` documentation for
information on creating sockets.
+ .. versionchanged:: 3.3
+ *family* and *type* arguments can be omitted.
+
.. method:: connect(address)
@@ -280,7 +283,7 @@ implement its socket handling::
def __init__(self, host, path):
asyncore.dispatcher.__init__(self)
- self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
+ self.create_socket()
self.connect( (host, 80) )
self.buffer = bytes('GET %s HTTP/1.0\r\nHost: %s\r\n\r\n' %
(path, host), 'ascii')
@@ -327,7 +330,7 @@ connections and dispatches the incoming connections to a handler::
def __init__(self, host, port):
asyncore.dispatcher.__init__(self)
- self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
+ self.create_socket()
self.set_reuse_addr()
self.bind((host, port))
self.listen(5)
diff --git a/Doc/library/atexit.rst b/Doc/library/atexit.rst
index 7c76bab..3d5c014 100644
--- a/Doc/library/atexit.rst
+++ b/Doc/library/atexit.rst
@@ -67,8 +67,9 @@ automatically when the program terminates without relying on the application
making an explicit call into this module at termination. ::
try:
- _count = int(open("/tmp/counter").read())
- except IOError:
+ with open("/tmp/counter") as infile:
+ _count = int(infile.read())
+ except FileNotFoundError:
_count = 0
def incrcounter(n):
@@ -76,7 +77,8 @@ making an explicit call into this module at termination. ::
_count = _count + n
def savecounter():
- open("/tmp/counter", "w").write("%d" % _count)
+ with open("/tmp/counter", "w") as outfile:
+ outfile.write("%d" % _count)
import atexit
atexit.register(savecounter)
diff --git a/Doc/library/base64.rst b/Doc/library/base64.rst
index 06f3ab1..afbedce 100644
--- a/Doc/library/base64.rst
+++ b/Doc/library/base64.rst
@@ -18,9 +18,14 @@ POST request. The encoding algorithm is not the same as the
There are two interfaces provided by this module. The modern interface
supports encoding and decoding ASCII byte string objects using all three
-alphabets. The legacy interface provides for encoding and decoding to and from
-file-like objects as well as byte strings, but only using the Base64 standard
-alphabet.
+alphabets. Additionally, the decoding functions of the modern interface also
+accept Unicode strings containing only ASCII characters. The legacy interface
+provides for encoding and decoding to and from file-like objects as well as
+byte strings, but only using the Base64 standard alphabet.
+
+.. versionchanged:: 3.3
+ ASCII-only Unicode strings are now accepted by the decoding functions of
+ the modern interface.
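+
+For example, both of these calls are now accepted (a minimal sketch)::
+
+ >>> import base64
+ >>> base64.b64decode(b'aGVsbG8=')
+ b'hello'
+ >>> base64.b64decode('aGVsbG8=')
+ b'hello'
+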
The modern interface provides:
diff --git a/Doc/library/binascii.rst b/Doc/library/binascii.rst
index 2aa3702..baf430d 100644
--- a/Doc/library/binascii.rst
+++ b/Doc/library/binascii.rst
@@ -20,8 +20,13 @@ higher-level modules.
.. note::
- Encoding and decoding functions do not accept Unicode strings. Only bytestring
- and bytearray objects can be processed.
+ ``a2b_*`` functions accept Unicode strings containing only ASCII characters.
+ Other functions only accept bytes and bytes-compatible objects (such as
+ bytearray objects and other objects implementing the buffer API).
+
+ .. versionchanged:: 3.3
+ ASCII-only unicode strings are now accepted by the ``a2b_*`` functions.
+
The :mod:`binascii` module defines the following functions:
diff --git a/Doc/library/bz2.rst b/Doc/library/bz2.rst
index d13f6e0..9577f31 100644
--- a/Doc/library/bz2.rst
+++ b/Doc/library/bz2.rst
@@ -1,198 +1,169 @@
-:mod:`bz2` --- Compression compatible with :program:`bzip2`
-===========================================================
+:mod:`bz2` --- Support for :program:`bzip2` compression
+=======================================================
.. module:: bz2
- :synopsis: Interface to compression and decompression routines
- compatible with bzip2.
+ :synopsis: Interfaces for bzip2 compression and decompression.
.. moduleauthor:: Gustavo Niemeyer <niemeyer@conectiva.com>
+.. moduleauthor:: Nadeem Vawda <nadeem.vawda@gmail.com>
.. sectionauthor:: Gustavo Niemeyer <niemeyer@conectiva.com>
+.. sectionauthor:: Nadeem Vawda <nadeem.vawda@gmail.com>
-This module provides a comprehensive interface for the bz2 compression library.
-It implements a complete file interface, one-shot (de)compression functions, and
-types for sequential (de)compression.
+This module provides a comprehensive interface for compressing and
+decompressing data using the bzip2 compression algorithm.
-Here is a summary of the features offered by the bz2 module:
+The :mod:`bz2` module contains:
-* :class:`BZ2File` class implements a complete file interface, including
- :meth:`~BZ2File.readline`, :meth:`~BZ2File.readlines`,
- :meth:`~BZ2File.writelines`, :meth:`~BZ2File.seek`, etc;
+* The :class:`BZ2File` class for reading and writing compressed files.
+* The :class:`BZ2Compressor` and :class:`BZ2Decompressor` classes for
+ incremental (de)compression.
+* The :func:`compress` and :func:`decompress` functions for one-shot
+ (de)compression.
-* :class:`BZ2File` class implements emulated :meth:`~BZ2File.seek` support;
-
-* :class:`BZ2File` class implements universal newline support;
-
-* :class:`BZ2File` class offers an optimized line iteration using a readahead
- algorithm;
-
-* Sequential (de)compression supported by :class:`BZ2Compressor` and
- :class:`BZ2Decompressor` classes;
-
-* One-shot (de)compression supported by :func:`compress` and :func:`decompress`
- functions;
-
-* Thread safety uses individual locking mechanism.
+All of the classes in this module may safely be accessed from multiple threads.
(De)compression of files
------------------------
-Handling of compressed files is offered by the :class:`BZ2File` class.
-
-
-.. class:: BZ2File(filename, mode='r', buffering=0, compresslevel=9)
-
- Open a bz2 file. Mode can be either ``'r'`` or ``'w'``, for reading (default)
- or writing. When opened for writing, the file will be created if it doesn't
- exist, and truncated otherwise. If *buffering* is given, ``0`` means
- unbuffered, and larger numbers specify the buffer size; the default is
- ``0``. If *compresslevel* is given, it must be a number between ``1`` and
- ``9``; the default is ``9``. Add a ``'U'`` to mode to open the file for input
- with universal newline support. Any line ending in the input file will be
- seen as a ``'\n'`` in Python. Also, a file so opened gains the attribute
- :attr:`newlines`; the value for this attribute is one of ``None`` (no newline
- read yet), ``'\r'``, ``'\n'``, ``'\r\n'`` or a tuple containing all the
- newline types seen. Universal newlines are available only when
- reading. Instances support iteration in the same way as normal :class:`file`
- instances.
-
- :class:`BZ2File` supports the :keyword:`with` statement.
-
- .. versionchanged:: 3.1
- Support for the :keyword:`with` statement was added.
-
-
- .. note::
+.. class:: BZ2File(filename=None, mode='r', buffering=None, compresslevel=9, \*, fileobj=None)
- This class does not support input files containing multiple streams (such
- as those produced by the :program:`pbzip2` tool). When reading such an
- input file, only the first stream will be accessible. If you require
- support for multi-stream files, consider using the third-party
- :mod:`bz2file` module (available from
- `PyPI <http://pypi.python.org/pypi/bz2file>`_). This module provides a
- backport of Python 3.3's :class:`BZ2File` class, which does support
- multi-stream files.
+ Open a bzip2-compressed file.
+ The :class:`BZ2File` can wrap an existing :term:`file object` (given by
+ *fileobj*), or operate directly on a named file (named by *filename*).
+ Exactly one of these two parameters should be provided.
- .. method:: close()
+ The *mode* argument can be either ``'r'`` for reading (default), ``'w'`` for
+ overwriting, or ``'a'`` for appending. If *fileobj* is provided, a mode of
+ ``'w'`` does not truncate the file, and is instead equivalent to ``'a'``.
- Close the file. Sets data attribute :attr:`closed` to true. A closed file
- cannot be used for further I/O operations. :meth:`close` may be called
- more than once without error.
+ The *buffering* argument is ignored. Its use is deprecated.
+ If *mode* is ``'w'`` or ``'a'``, *compresslevel* can be a number between
+ ``1`` and ``9`` specifying the level of compression: ``1`` produces the
+ least compression, and ``9`` (default) produces the most compression.
- .. method:: read([size])
+ If *mode* is ``'r'``, the input file may be the concatenation of multiple
+ compressed streams.
- Read at most *size* uncompressed bytes, returned as a byte string. If the
- *size* argument is negative or omitted, read until EOF is reached.
+ :class:`BZ2File` provides all of the members specified by the
+ :class:`io.BufferedIOBase`, except for :meth:`detach` and :meth:`truncate`.
+ Iteration and the :keyword:`with` statement are supported.
+ :class:`BZ2File` also provides the following method:
- .. method:: readline([size])
+ .. method:: peek([n])
- Return the next line from the file, as a byte string, retaining newline.
- A non-negative *size* argument limits the maximum number of bytes to
- return (an incomplete line may be returned then). Return an empty byte
- string at EOF.
+ Return buffered data without advancing the file position. At least one
+ byte of data will be returned (unless at EOF). The exact number of bytes
+ returned is unspecified.
+ .. versionadded:: 3.3
- .. method:: readlines([size])
+ .. versionchanged:: 3.1
+ Support for the :keyword:`with` statement was added.
- Return a list of lines read. The optional *size* argument, if given, is an
- approximate bound on the total number of bytes in the lines returned.
+ .. versionchanged:: 3.3
+ The :meth:`fileno`, :meth:`readable`, :meth:`seekable`, :meth:`writable`,
+ :meth:`read1` and :meth:`readinto` methods were added.
+ .. versionchanged:: 3.3
+ The *fileobj* argument to the constructor was added.
- .. method:: seek(offset[, whence])
+ .. versionchanged:: 3.3
+ The ``'a'`` (append) mode was added, along with support for reading
+ multi-stream files.
- Move to new file position. Argument *offset* is a byte count. Optional
- argument *whence* defaults to ``os.SEEK_SET`` or ``0`` (offset from start
- of file; offset should be ``>= 0``); other values are ``os.SEEK_CUR`` or
- ``1`` (move relative to current position; offset can be positive or
- negative), and ``os.SEEK_END`` or ``2`` (move relative to end of file;
- offset is usually negative, although many platforms allow seeking beyond
- the end of a file).
- Note that seeking of bz2 files is emulated, and depending on the
- parameters the operation may be extremely slow.
+Incremental (de)compression
+---------------------------
+.. class:: BZ2Compressor(compresslevel=9)
- .. method:: tell()
+ Create a new compressor object. This object may be used to compress data
+ incrementally. For one-shot compression, use the :func:`compress` function
+ instead.
- Return the current file position, an integer.
+ *compresslevel*, if given, must be a number between ``1`` and ``9``. The
+ default is ``9``.
+ .. method:: compress(data)
- .. method:: write(data)
+ Provide data to the compressor object. Returns a chunk of compressed data
+ if possible, or an empty byte string otherwise.
- Write the byte string *data* to file. Note that due to buffering,
- :meth:`close` may be needed before the file on disk reflects the data
- written.
+ When you have finished providing data to the compressor, call the
+ :meth:`flush` method to finish the compression process.
- .. method:: writelines(sequence_of_byte_strings)
+ .. method:: flush()
- Write the sequence of byte strings to the file. Note that newlines are not
- added. The sequence can be any iterable object producing byte strings.
- This is equivalent to calling write() for each byte string.
+ Finish the compression process. Returns the compressed data left in
+ internal buffers.
+ The compressor object may not be used after this method has been called.
-Sequential (de)compression
---------------------------
-Sequential compression and decompression is done using the classes
-:class:`BZ2Compressor` and :class:`BZ2Decompressor`.
+.. class:: BZ2Decompressor()
+ Create a new decompressor object. This object may be used to decompress data
+ incrementally. For one-shot compression, use the :func:`decompress` function
+ instead.
-.. class:: BZ2Compressor(compresslevel=9)
+ .. note::
+ This class does not transparently handle inputs containing multiple
+ compressed streams, unlike :func:`decompress` and :class:`BZ2File`. If
+ you need to decompress a multi-stream input with :class:`BZ2Decompressor`,
+ you must use a new decompressor for each stream.
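+
+ For example, a minimal sketch for input containing two streams (``data``
+ is an assumed bytes object holding both streams)::
+
+ import bz2
+ d1 = bz2.BZ2Decompressor()
+ out = d1.decompress(data)        # decompresses the first stream
+ if d1.eof and d1.unused_data:    # a second stream follows
+     d2 = bz2.BZ2Decompressor()
+     out += d2.decompress(d1.unused_data)
+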
- Create a new compressor object. This object may be used to compress data
- sequentially. If you want to compress data in one shot, use the
- :func:`compress` function instead. The *compresslevel* parameter, if given,
- must be a number between ``1`` and ``9``; the default is ``9``.
+ .. method:: decompress(data)
- .. method:: compress(data)
+ Provide data to the decompressor object. Returns a chunk of decompressed
+ data if possible, or an empty byte string otherwise.
- Provide more data to the compressor object. It will return chunks of
- compressed data whenever possible. When you've finished providing data to
- compress, call the :meth:`flush` method to finish the compression process,
- and return what is left in internal buffers.
+ Attempting to decompress data after the end of the current stream is
+ reached raises an :exc:`EOFError`. If any data is found after the end of
+ the stream, it is ignored and saved in the :attr:`unused_data` attribute.
- .. method:: flush()
+ .. attribute:: eof
- Finish the compression process and return what is left in internal
- buffers. You must not use the compressor object after calling this method.
+ True if the end-of-stream marker has been reached.
+ .. versionadded:: 3.3
-.. class:: BZ2Decompressor()
- Create a new decompressor object. This object may be used to decompress data
- sequentially. If you want to decompress data in one shot, use the
- :func:`decompress` function instead.
+ .. attribute:: unused_data
- .. method:: decompress(data)
+ Data found after the end of the compressed stream.
- Provide more data to the decompressor object. It will return chunks of
- decompressed data whenever possible. If you try to decompress data after
- the end of stream is found, :exc:`EOFError` will be raised. If any data
- was found after the end of stream, it'll be ignored and saved in
- :attr:`unused_data` attribute.
+ If this attribute is accessed before the end of the stream has been
+ reached, its value will be ``b''``.
One-shot (de)compression
------------------------
-One-shot compression and decompression is provided through the :func:`compress`
-and :func:`decompress` functions.
+.. function:: compress(data, compresslevel=9)
+ Compress *data*.
-.. function:: compress(data, compresslevel=9)
+ *compresslevel*, if given, must be a number between ``1`` and ``9``. The
+ default is ``9``.
- Compress *data* in one shot. If you want to compress data sequentially, use
- an instance of :class:`BZ2Compressor` instead. The *compresslevel* parameter,
- if given, must be a number between ``1`` and ``9``; the default is ``9``.
+ For incremental compression, use a :class:`BZ2Compressor` instead.
.. function:: decompress(data)
- Decompress *data* in one shot. If you want to decompress data sequentially,
- use an instance of :class:`BZ2Decompressor` instead.
+ Decompress *data*.
+
+ If *data* is the concatenation of multiple compressed streams, decompress
+ all of the streams.
+
+ For incremental decompression, use a :class:`BZ2Decompressor` instead.
+
+ .. versionchanged:: 3.3
+ Support for multi-stream inputs was added.
diff --git a/Doc/library/chunk.rst b/Doc/library/chunk.rst
index d3558a4..c1ba497 100644
--- a/Doc/library/chunk.rst
+++ b/Doc/library/chunk.rst
@@ -84,8 +84,9 @@ instance will fail with a :exc:`EOFError` exception.
Close and skip to the end of the chunk. This does not close the
underlying file.
- The remaining methods will raise :exc:`IOError` if called after the
- :meth:`close` method has been called.
+ The remaining methods will raise :exc:`OSError` if called after the
+ :meth:`close` method has been called. Before Python 3.3, they raised
+ :exc:`IOError`, which is now an alias of :exc:`OSError`.
.. method:: isatty()
diff --git a/Doc/library/cmd.rst b/Doc/library/cmd.rst
index fd7f453..0c43bb8 100644
--- a/Doc/library/cmd.rst
+++ b/Doc/library/cmd.rst
@@ -285,8 +285,8 @@ immediate playback::
def do_playback(self, arg):
'Playback commands from a file: PLAYBACK rose.cmd'
self.close()
- cmds = open(arg).read().splitlines()
- self.cmdqueue.extend(cmds)
+ with open(arg) as f:
+ self.cmdqueue.extend(f.read().splitlines())
def precmd(self, line):
line = line.lower()
if self.file and 'playback' not in line:
diff --git a/Doc/library/codecs.rst b/Doc/library/codecs.rst
index 7747794..a9fae95 100644
--- a/Doc/library/codecs.rst
+++ b/Doc/library/codecs.rst
@@ -458,7 +458,8 @@ define in order to be compatible with the Python codec registry.
.. method:: reset()
- Reset the encoder to the initial state.
+ Reset the encoder to the initial state. The output is discarded: call
+ ``.encode('', final=True)`` to reset the encoder and to get the output.
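+
+ For example, a minimal sketch::
+
+ import codecs
+ enc = codecs.getincrementalencoder('utf-8')()
+ enc.encode('spam')                # incremental encoders may buffer output
+ out = enc.encode('', final=True)  # flush pending output and reset
+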
.. method:: IncrementalEncoder.getstate()
@@ -786,11 +787,9 @@ methods and attributes from the underlying stream.
Encodings and Unicode
---------------------
-Strings are stored internally as sequences of codepoints (to be precise
-as :c:type:`Py_UNICODE` arrays). Depending on the way Python is compiled (either
-via ``--without-wide-unicode`` or ``--with-wide-unicode``, with the
-former being the default) :c:type:`Py_UNICODE` is either a 16-bit or 32-bit data
-type. Once a string object is used outside of CPU and memory, CPU endianness
+Strings are stored internally as sequences of codepoints in range ``0 - 10FFFF``
+(see :pep:`393` for more details about the implementation).
+Once a string object is used outside of CPU and memory, CPU endianness
and how these arrays are stored as bytes become an issue. Transforming a
string object into a sequence of bytes is called encoding and recreating the
string object from the sequence of bytes is known as decoding. There are many
@@ -901,6 +900,15 @@ is meant to be exhaustive. Notice that spelling alternatives that only differ in
case or use a hyphen instead of an underscore are also valid aliases; therefore,
e.g. ``'utf-8'`` is a valid alias for the ``'utf_8'`` codec.
+.. impl-detail::
+
+ Some common encodings can bypass the codecs lookup machinery to
+ improve performance. These optimization opportunities are only
+ recognized by CPython for a limited set of aliases: utf-8, utf8,
+ latin-1, latin1, iso-8859-1, mbcs (Windows only), ascii, utf-16,
+ and utf-32. Using alternative spellings for these encodings may
+ result in slower execution.
+
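A small illustration (an editorial sketch, not part of the patch): alternative
spellings resolve to the same codec, but only the spellings listed above can
bypass the registry lookup on CPython::

    import codecs

    # Both spellings name the same codec object...
    assert codecs.lookup('UTF_8').name == codecs.lookup('utf-8').name == 'utf-8'

    # ...but only a recognized spelling such as 'utf-8' can take
    # CPython's fast path when encoding.
    b = 'text'.encode('utf-8')
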
Many of the character sets support the same languages. They vary in individual
characters (e.g. whether the EURO SIGN is supported or not), and in the
assignment of characters to code positions. For the European languages in
@@ -1003,6 +1011,11 @@ particular, the following variants typically exist:
+-----------------+--------------------------------+--------------------------------+
| cp1258 | windows-1258 | Vietnamese |
+-----------------+--------------------------------+--------------------------------+
+| cp65001 | | Windows only: Windows UTF-8 |
+| | | (``CP_UTF8``) |
+| | | |
+| | | .. versionadded:: 3.3 |
++-----------------+--------------------------------+--------------------------------+
| euc_jp | eucjp, ujis, u-jis | Japanese |
+-----------------+--------------------------------+--------------------------------+
| euc_jis_2004 | jisx0213, eucjis2004 | Japanese |
@@ -1160,6 +1173,8 @@ particular, the following variants typically exist:
| unicode_internal | | Return the internal |
| | | representation of the |
| | | operand |
+| | | |
+| | | .. deprecated:: 3.3 |
+--------------------+---------+---------------------------+
The following codecs provide bytes-to-bytes mappings.
@@ -1272,12 +1287,13 @@ functions can be used directly if desired.
.. module:: encodings.mbcs
:synopsis: Windows ANSI codepage
-Encode operand according to the ANSI codepage (CP_ACP). This codec only
-supports ``'strict'`` and ``'replace'`` error handlers to encode, and
-``'strict'`` and ``'ignore'`` error handlers to decode.
+Encode operand according to the ANSI codepage (CP_ACP).
Availability: Windows only.
+.. versionchanged:: 3.3
+ Support any error handler.
+
.. versionchanged:: 3.2
Before 3.2, the *errors* argument was ignored; ``'replace'`` was always used
to encode, and ``'ignore'`` to decode.
diff --git a/Doc/library/collections.abc.rst b/Doc/library/collections.abc.rst
new file mode 100644
index 0000000..9873489
--- /dev/null
+++ b/Doc/library/collections.abc.rst
@@ -0,0 +1,182 @@
+:mod:`collections.abc` --- Abstract Base Classes for Containers
+===============================================================
+
+.. module:: collections.abc
+ :synopsis: Abstract base classes for containers
+.. moduleauthor:: Raymond Hettinger <python at rcn.com>
+.. sectionauthor:: Raymond Hettinger <python at rcn.com>
+
+.. versionadded:: 3.3
+ Formerly, this module was part of the :mod:`collections` module.
+
+.. testsetup:: *
+
+ from collections import *
+ import itertools
+ __name__ = '<doctest>'
+
+**Source code:** :source:`Lib/collections/abc.py`
+
+--------------
+
+This module provides :term:`abstract base classes <abstract base class>` that
+can be used to test whether a class provides a particular interface; for
+example, whether it is hashable or whether it is a mapping.
+
+
+.. _collections-abstract-base-classes:
+
+Collections Abstract Base Classes
+---------------------------------
+
+The collections module offers the following :term:`ABCs <abstract base class>`:
+
+========================= ===================== ====================== ====================================================
+ABC Inherits from Abstract Methods Mixin Methods
+========================= ===================== ====================== ====================================================
+:class:`Container` ``__contains__``
+:class:`Hashable` ``__hash__``
+:class:`Iterable` ``__iter__``
+:class:`Iterator` :class:`Iterable` ``__next__`` ``__iter__``
+:class:`Sized` ``__len__``
+:class:`Callable` ``__call__``
+
+:class:`Sequence` :class:`Sized`, ``__getitem__`` ``__contains__``, ``__iter__``, ``__reversed__``,
+ :class:`Iterable`, ``index``, and ``count``
+ :class:`Container`
+
+:class:`MutableSequence` :class:`Sequence` ``__setitem__``, Inherited :class:`Sequence` methods and
+ ``__delitem__``, ``append``, ``reverse``, ``extend``, ``pop``,
+ ``insert`` ``remove``, ``clear``, and ``__iadd__``
+
+:class:`Set` :class:`Sized`, ``__le__``, ``__lt__``, ``__eq__``, ``__ne__``,
+ :class:`Iterable`, ``__gt__``, ``__ge__``, ``__and__``, ``__or__``,
+ :class:`Container` ``__sub__``, ``__xor__``, and ``isdisjoint``
+
+:class:`MutableSet` :class:`Set` ``add``, Inherited :class:`Set` methods and
+ ``discard`` ``clear``, ``pop``, ``remove``, ``__ior__``,
+ ``__iand__``, ``__ixor__``, and ``__isub__``
+
+:class:`Mapping` :class:`Sized`, ``__getitem__`` ``__contains__``, ``keys``, ``items``, ``values``,
+ :class:`Iterable`, ``get``, ``__eq__``, and ``__ne__``
+ :class:`Container`
+
+:class:`MutableMapping` :class:`Mapping` ``__setitem__``, Inherited :class:`Mapping` methods and
+ ``__delitem__`` ``pop``, ``popitem``, ``clear``, ``update``,
+ and ``setdefault``
+
+
+:class:`MappingView` :class:`Sized` ``__len__``
+:class:`ItemsView` :class:`MappingView`, ``__contains__``,
+ :class:`Set` ``__iter__``
+:class:`KeysView` :class:`MappingView`, ``__contains__``,
+ :class:`Set` ``__iter__``
+:class:`ValuesView` :class:`MappingView` ``__contains__``, ``__iter__``
+========================= ===================== ====================== ====================================================
+
+
+.. class:: Container
+ Hashable
+ Sized
+ Callable
+
+ ABCs for classes that provide respectively the methods :meth:`__contains__`,
+ :meth:`__hash__`, :meth:`__len__`, and :meth:`__call__`.
+
+.. class:: Iterable
+
+ ABC for classes that provide the :meth:`__iter__` method.
+ See also the definition of :term:`iterable`.
+
+.. class:: Iterator
+
+ ABC for classes that provide the :meth:`__iter__` and :meth:`__next__` methods.
+ See also the definition of :term:`iterator`.
+
+.. class:: Sequence
+ MutableSequence
+
+ ABCs for read-only and mutable :term:`sequences <sequence>`.
+
+.. class:: Set
+ MutableSet
+
+ ABCs for read-only and mutable sets.
+
+.. class:: Mapping
+ MutableMapping
+
+ ABCs for read-only and mutable :term:`mappings <mapping>`.
+
+.. class:: MappingView
+ ItemsView
+ KeysView
+ ValuesView
+
+ ABCs for mapping, items, keys, and values :term:`views <view>`.
+
+
+These ABCs allow us to ask classes or instances if they provide
+particular functionality, for example::
+
+ size = None
+ if isinstance(myvar, collections.Sized):
+ size = len(myvar)
+
+Several of the ABCs are also useful as mixins that make it easier to develop
+classes supporting container APIs. For example, to write a class supporting
+the full :class:`Set` API, it is only necessary to supply the three underlying
+abstract methods: :meth:`__contains__`, :meth:`__iter__`, and :meth:`__len__`.
+The ABC supplies the remaining methods such as :meth:`__and__` and
+:meth:`isdisjoint` ::
+
+ class ListBasedSet(collections.Set):
+ ''' Alternate set implementation favoring space over speed
+ and not requiring the set elements to be hashable. '''
+ def __init__(self, iterable):
+ self.elements = lst = []
+ for value in iterable:
+ if value not in lst:
+ lst.append(value)
+ def __iter__(self):
+ return iter(self.elements)
+ def __contains__(self, value):
+ return value in self.elements
+ def __len__(self):
+ return len(self.elements)
+
+ s1 = ListBasedSet('abcdef')
+ s2 = ListBasedSet('defghi')
+ overlap = s1 & s2 # The __and__() method is supported automatically
+
+Notes on using :class:`Set` and :class:`MutableSet` as a mixin:
+
+(1)
+ Since some set operations create new sets, the default mixin methods need
+ a way to create new instances from an iterable. The class constructor is
+ assumed to have a signature in the form ``ClassName(iterable)``.
+ That assumption is factored-out to an internal classmethod called
+ :meth:`_from_iterable` which calls ``cls(iterable)`` to produce a new set.
+ If the :class:`Set` mixin is being used in a class with a different
+ constructor signature, you will need to override :meth:`_from_iterable`
+ with a classmethod that can construct new instances from
+ an iterable argument.
+
+(2)
+ To override the comparisons (presumably for speed, as the
+ semantics are fixed), redefine :meth:`__le__` and
+ then the other operations will automatically follow suit.
+
+(3)
+ The :class:`Set` mixin provides a :meth:`_hash` method to compute a hash value
+ for the set; however, :meth:`__hash__` is not defined because not all sets
+ are hashable or immutable. To add set hashability using mixins,
+ inherit from both :class:`Set` and :class:`Hashable`, then define
+ ``__hash__ = Set._hash`` (see the sketch after these notes).
+
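A sketch of note (3), with a hypothetical class name; it assumes only the ABCs
documented on this page (also re-exported from :mod:`collections` for backward
compatibility)::

    from collections.abc import Set, Hashable

    class FrozenListSet(Set, Hashable):
        'Immutable, hashable set built on a tuple of unique elements.'
        def __init__(self, iterable=()):
            items = []
            for value in iterable:
                if value not in items:
                    items.append(value)
            self._items = tuple(items)
        def __iter__(self):
            return iter(self._items)
        def __contains__(self, value):
            return value in self._items
        def __len__(self):
            return len(self._items)
        __hash__ = Set._hash       # order-independent hash borrowed from Set

    assert hash(FrozenListSet('abc')) == hash(FrozenListSet('cba'))
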
+.. seealso::
+
+ * `OrderedSet recipe <http://code.activestate.com/recipes/576694/>`_ for an
+ example built on :class:`MutableSet`.
+
+ * For more about ABCs, see the :mod:`abc` module and :pep:`3119`.
diff --git a/Doc/library/collections.rst b/Doc/library/collections.rst
index e90b25e..5298dbc 100644
--- a/Doc/library/collections.rst
+++ b/Doc/library/collections.rst
@@ -12,7 +12,7 @@
import itertools
__name__ = '<doctest>'
-**Source code:** :source:`Lib/collections.py` and :source:`Lib/_abcoll.py`
+**Source code:** :source:`Lib/collections/__init__.py`
--------------
@@ -23,6 +23,7 @@ Python's general purpose built-in containers, :class:`dict`, :class:`list`,
===================== ====================================================================
:func:`namedtuple` factory function for creating tuple subclasses with named fields
:class:`deque` list-like container with fast appends and pops on either end
+:class:`ChainMap` dict-like class for creating a single view of multiple mappings
:class:`Counter` dict subclass for counting hashable objects
:class:`OrderedDict` dict subclass that remembers the order entries were added
:class:`defaultdict` dict subclass that calls a factory function to supply missing values
@@ -31,10 +32,124 @@ Python's general purpose built-in containers, :class:`dict`, :class:`list`,
:class:`UserString` wrapper around string objects for easier string subclassing
===================== ====================================================================
-In addition to the concrete container classes, the collections module provides
-:ref:`abstract base classes <collections-abstract-base-classes>` that can be
-used to test whether a class provides a particular interface, for example,
-whether it is hashable or a mapping.
+.. versionchanged:: 3.3
+ Moved :ref:`collections-abstract-base-classes` to the :mod:`collections.abc` module.
+ For backwards compatibility, they continue to be visible in this module
+ as well.
+
+
+:class:`ChainMap` objects
+-------------------------
+
+A :class:`ChainMap` class is provided for quickly linking a number of mappings
+so they can be treated as a single unit. It is often much faster than creating
+a new dictionary and running multiple :meth:`~dict.update` calls.
+
+The class can be used to simulate nested scopes and is useful in templating.
+
+.. class:: ChainMap(*maps)
+
+ A :class:`ChainMap` groups multiple dicts or other mappings together to
+ create a single, updateable view. If no *maps* are specified, a single empty
+ dictionary is provided so that a new chain always has at least one mapping.
+
+ The underlying mappings are stored in a list. That list is public and can
+ be accessed or updated using the *maps* attribute. There is no other state.
+
+ Lookups search the underlying mappings successively until a key is found. In
+ contrast, writes, updates, and deletions only operate on the first mapping.
+
+ A :class:`ChainMap` incorporates the underlying mappings by reference. So, if
+ one of the underlying mappings gets updated, those changes will be reflected
+ in :class:`ChainMap`.
+
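A brief sketch of the lookup and write behaviour just described (variable names
are illustrative only)::

    from collections import ChainMap

    base = {'x': 1}
    cm = ChainMap({}, base)

    cm['x']        # lookup searches into the second mapping -> 1
    cm['x'] = 99   # writes touch only the first mapping
    base['x']      # -> 1, unchanged
    cm.maps        # -> [{'x': 99}, {'x': 1}]
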
+ All of the usual dictionary methods are supported. In addition, there is a
+ *maps* attribute, a method for creating new subcontexts, and a property for
+ accessing all but the first mapping:
+
+ .. attribute:: maps
+
+ A user updateable list of mappings. The list is ordered from
+ first-searched to last-searched. It is the only stored state and can
+ be modified to change which mappings are searched. The list should
+ always contain at least one mapping.
+
+ .. method:: new_child()
+
+ Returns a new :class:`ChainMap` containing a new :class:`dict` followed by
+ all of the maps in the current instance. A call to ``d.new_child()`` is
+ equivalent to: ``ChainMap({}, *d.maps)``. This method is used for
+ creating subcontexts that can be updated without altering values in any
+ of the parent mappings.
+
+ .. attribute:: parents
+
+ Returns a new :class:`ChainMap` containing all of the maps in the current
+ instance except the first one. This is useful for skipping the first map
+ in the search. The use-cases are similar to those for the
+ :keyword:`nonlocal` keyword used in :term:`nested scopes <nested scope>`.
+ The use-cases also parallel those for the builtin :func:`super` function.
+ A reference to ``d.parents`` is equivalent to: ``ChainMap(*d.maps[1:])``.
+
+ .. versionadded:: 3.3
+
+ Example of simulating Python's internal lookup chain::
+
+ import builtins
+ pylookup = ChainMap(locals(), globals(), vars(builtins))
+
+ Example of letting user specified values take precedence over environment
+ variables which in turn take precedence over default values::
+
+ import os, argparse
+ defaults = {'color': 'red', 'user': 'guest'}
+ parser = argparse.ArgumentParser()
+ parser.add_argument('-u', '--user')
+ parser.add_argument('-c', '--color')
+ user_specified = vars(parser.parse_args())
+ combined = ChainMap(user_specified, os.environ, defaults)
+
+ Example patterns for using the :class:`ChainMap` class to simulate nested
+ contexts::
+
+ c = ChainMap() # Create root context
+ d = c.new_child() # Create nested child context
+ e = c.new_child() # Child of c, independent from d
+ e.maps[0] # Current context dictionary -- like Python's locals()
+ e.maps[-1] # Root context -- like Python's globals()
+ e.parents # Enclosing context chain -- like Python's nonlocals
+
+ d['x'] # Get first key in the chain of contexts
+ d['x'] = 1 # Set value in current context
+ del d['x'] # Delete from current context
+ list(d) # All nested values
+ k in d # Check all nested values
+ len(d) # Number of nested values
+ d.items() # All nested items
+ dict(d) # Flatten into a regular dictionary
+
+ .. seealso::
+
+ * The `MultiContext class
+ <http://svn.enthought.com/svn/enthought/CodeTools/trunk/enthought/contexts/multi_context.py>`_
+ in the Enthought `CodeTools package
+ <https://github.com/enthought/codetools>`_ has options to support
+ writing to any mapping in the chain.
+
+ * Django's `Context class
+ <http://code.djangoproject.com/browser/django/trunk/django/template/context.py>`_
+ for templating is a read-only chain of mappings. It also features
+ pushing and popping of contexts similar to the
+ :meth:`~collections.ChainMap.new_child` method and the
+ :attr:`~collections.ChainMap.parents` property.
+
+ * The `Nested Contexts recipe
+ <http://code.activestate.com/recipes/577434/>`_ has options to control
+ whether writes and other mutations apply only to the first mapping or to
+ any mapping in the chain.
+
+ * A `greatly simplified read-only version of Chainmap
+ <http://code.activestate.com/recipes/305268/>`_.
:class:`Counter` objects
@@ -149,7 +264,7 @@ Common patterns for working with :class:`Counter` objects::
c.items() # convert to a list of (elem, cnt) pairs
Counter(dict(list_of_pairs)) # convert from a list of (elem, cnt) pairs
c.most_common()[:-n:-1] # n least common elements
- c += Counter() # remove zero and negative counts
+ +c # remove zero and negative counts
Several mathematical operations are provided for combining :class:`Counter`
objects to produce multisets (counters that have counts greater than zero).
@@ -169,6 +284,18 @@ counts, but the output will exclude results with counts of zero or less.
>>> c | d # union: max(c[x], d[x])
Counter({'a': 3, 'b': 2})
+Unary addition and subtraction are shortcuts for adding an empty counter
+or subtracting from an empty counter.
+
+ >>> c = Counter(a=2, b=-4)
+ >>> +c
+ Counter({'a': 2})
+ >>> -c
+ Counter({'b': 4})
+
+.. versionadded:: 3.3
+ Added support for unary plus, unary minus, and in-place multiset operations.
+
.. note::
Counters were primarily designed to work with positive integers to represent
@@ -398,7 +525,8 @@ in Unix::
def tail(filename, n=10):
'Return the last n lines of a file'
- return deque(open(filename), n)
+ with open(filename) as f:
+ return deque(f, n)
Another approach to using deques is to maintain a sequence of recently
added elements by appending to the right and popping to the left::
@@ -550,7 +678,7 @@ Setting the :attr:`default_factory` to :class:`set` makes the
... d[k].add(v)
...
>>> list(d.items())
- [('blue', set([2, 4])), ('red', set([1, 3]))]
+ [('blue', {2, 4}), ('red', {1, 3})]
:func:`namedtuple` Factory Function for Tuples with Named Fields
@@ -583,7 +711,9 @@ they add the ability to access fields by name instead of position index.
converted to ``['abc', '_1', 'ghi', '_3']``, eliminating the keyword
``def`` and the duplicate fieldname ``abc``.
- If *verbose* is true, the class definition is printed just before being built.
+ If *verbose* is true, the class definition is printed after it is
+ built. This option is outdated; instead, it is simpler to print the
+ :attr:`_source` attribute.
Named tuple instances do not have per-instance dictionaries, so they are
lightweight and require no more memory than regular tuples.
@@ -597,53 +727,6 @@ they add the ability to access fields by name instead of position index.
>>> # Basic example
>>> Point = namedtuple('Point', ['x', 'y'])
- >>> p = Point(x=10, y=11)
-
- >>> # Example using the verbose option to print the class definition
- >>> Point = namedtuple('Point', 'x y', verbose=True)
- class Point(tuple):
- 'Point(x, y)'
- <BLANKLINE>
- __slots__ = ()
- <BLANKLINE>
- _fields = ('x', 'y')
- <BLANKLINE>
- def __new__(_cls, x, y):
- 'Create a new instance of Point(x, y)'
- return _tuple.__new__(_cls, (x, y))
- <BLANKLINE>
- @classmethod
- def _make(cls, iterable, new=tuple.__new__, len=len):
- 'Make a new Point object from a sequence or iterable'
- result = new(cls, iterable)
- if len(result) != 2:
- raise TypeError('Expected 2 arguments, got %d' % len(result))
- return result
- <BLANKLINE>
- def __repr__(self):
- 'Return a nicely formatted representation string'
- return self.__class__.__name__ + '(x=%r, y=%r)' % self
- <BLANKLINE>
- def _asdict(self):
- 'Return a new OrderedDict which maps field names to their values'
- return OrderedDict(zip(self._fields, self))
- <BLANKLINE>
- __dict__ = property(_asdict)
- <BLANKLINE>
- def _replace(_self, **kwds):
- 'Return a new Point object replacing specified fields with new values'
- result = _self._make(map(kwds.pop, ('x', 'y'), _self))
- if kwds:
- raise ValueError('Got unexpected field names: %r' % list(kwds.keys()))
- return result
- <BLANKLINE>
- def __getnewargs__(self):
- 'Return self as a plain tuple. Used by copy and pickle.'
- return tuple(self)
- <BLANKLINE>
- x = _property(_itemgetter(0), doc='Alias for field number 0')
- y = _property(_itemgetter(1), doc='Alias for field number 1')
-
>>> p = Point(11, y=22) # instantiate with positional or keyword arguments
>>> p[0] + p[1] # indexable like the plain tuple (11, 22)
33
@@ -672,7 +755,7 @@ by the :mod:`csv` or :mod:`sqlite3` modules::
print(emp.name, emp.title)
In addition to the methods inherited from tuples, named tuples support
-three additional methods and one attribute. To prevent conflicts with
+three additional methods and two attributes. To prevent conflicts with
field names, the method and attribute names start with an underscore.
.. classmethod:: somenamedtuple._make(iterable)
@@ -710,6 +793,15 @@ field names, the method and attribute names start with an underscore.
>>> for partnum, record in inventory.items():
... inventory[partnum] = record._replace(price=newprices[partnum], timestamp=time.now())
+.. attribute:: somenamedtuple._source
+
+ A string with the pure Python source code used to create the named
+ tuple class. The source makes the named tuple self-documenting.
+ It can be printed, executed using :func:`exec`, or saved to a file
+ and imported.
+
+ .. versionadded:: 3.3
+
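For instance (a hedged sketch; the target file name is hypothetical), the
generated source can be saved and later imported::

    from collections import namedtuple

    Point = namedtuple('Point', ['x', 'y'])
    with open('point.py', 'w') as f:
        f.write(Point._source)   # Point is now importable from point.py
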
.. attribute:: somenamedtuple._fields
Tuple of strings listing the field names. Useful for introspection
@@ -758,7 +850,6 @@ a fixed-width print format:
The subclass shown above sets ``__slots__`` to an empty tuple. This helps
keep memory requirements low by preventing the creation of instance dictionaries.
-
Subclassing is not useful for adding new, stored fields. Instead, simply
create a new named tuple type from the :attr:`_fields` attribute:
@@ -770,6 +861,7 @@ customize a prototype instance:
>>> Account = namedtuple('Account', 'owner balance transaction_count')
>>> default_account = Account('<owner name>', 0.0, 0)
>>> johns_account = default_account._replace(owner='John')
+ >>> janes_account = default_account._replace(owner='Jane')
Enumerated constants can be implemented with named tuples, but it is simpler
and more efficient to use a simple class declaration:
@@ -988,161 +1080,3 @@ attribute.
be an instance of :class:`bytes`, :class:`str`, :class:`UserString` (or a
subclass) or an arbitrary sequence which can be converted into a string using
the built-in :func:`str` function.
-
-
-.. _collections-abstract-base-classes:
-
-ABCs - abstract base classes
-----------------------------
-
-The collections module offers the following :term:`ABCs <abstract base class>`:
-
-========================= ===================== ====================== ====================================================
-ABC Inherits from Abstract Methods Mixin Methods
-========================= ===================== ====================== ====================================================
-:class:`Container` ``__contains__``
-:class:`Hashable` ``__hash__``
-:class:`Iterable` ``__iter__``
-:class:`Iterator` :class:`Iterable` ``__next__`` ``__iter__``
-:class:`Sized` ``__len__``
-:class:`Callable` ``__call__``
-
-:class:`Sequence` :class:`Sized`, ``__getitem__`` ``__contains__``, ``__iter__``, ``__reversed__``,
- :class:`Iterable`, ``index``, and ``count``
- :class:`Container`
-
-:class:`MutableSequence` :class:`Sequence` ``__setitem__``, Inherited :class:`Sequence` methods and
- ``__delitem__``, ``append``, ``reverse``, ``extend``, ``pop``,
- ``insert`` ``remove``, and ``__iadd__``
-
-:class:`Set` :class:`Sized`, ``__le__``, ``__lt__``, ``__eq__``, ``__ne__``,
- :class:`Iterable`, ``__gt__``, ``__ge__``, ``__and__``, ``__or__``,
- :class:`Container` ``__sub__``, ``__xor__``, and ``isdisjoint``
-
-:class:`MutableSet` :class:`Set` ``add``, Inherited :class:`Set` methods and
- ``discard`` ``clear``, ``pop``, ``remove``, ``__ior__``,
- ``__iand__``, ``__ixor__``, and ``__isub__``
-
-:class:`Mapping` :class:`Sized`, ``__getitem__`` ``__contains__``, ``keys``, ``items``, ``values``,
- :class:`Iterable`, ``get``, ``__eq__``, and ``__ne__``
- :class:`Container`
-
-:class:`MutableMapping` :class:`Mapping` ``__setitem__``, Inherited :class:`Mapping` methods and
- ``__delitem__`` ``pop``, ``popitem``, ``clear``, ``update``,
- and ``setdefault``
-
-
-:class:`MappingView` :class:`Sized` ``__len__``
-:class:`ItemsView` :class:`MappingView`, ``__contains__``,
- :class:`Set` ``__iter__``
-:class:`KeysView` :class:`MappingView`, ``__contains__``,
- :class:`Set` ``__iter__``
-:class:`ValuesView` :class:`MappingView` ``__contains__``, ``__iter__``
-========================= ===================== ====================== ====================================================
-
-
-.. class:: Container
- Hashable
- Sized
- Callable
-
- ABCs for classes that provide respectively the methods :meth:`__contains__`,
- :meth:`__hash__`, :meth:`__len__`, and :meth:`__call__`.
-
-.. class:: Iterable
-
- ABC for classes that provide the :meth:`__iter__` method.
- See also the definition of :term:`iterable`.
-
-.. class:: Iterator
-
- ABC for classes that provide the :meth:`__iter__` and :meth:`next` methods.
- See also the definition of :term:`iterator`.
-
-.. class:: Sequence
- MutableSequence
-
- ABCs for read-only and mutable :term:`sequences <sequence>`.
-
-.. class:: Set
- MutableSet
-
- ABCs for read-only and mutable sets.
-
-.. class:: Mapping
- MutableMapping
-
- ABCs for read-only and mutable :term:`mappings <mapping>`.
-
-.. class:: MappingView
- ItemsView
- KeysView
- ValuesView
-
- ABCs for mapping, items, keys, and values :term:`views <view>`.
-
-
-These ABCs allow us to ask classes or instances if they provide
-particular functionality, for example::
-
- size = None
- if isinstance(myvar, collections.Sized):
- size = len(myvar)
-
-Several of the ABCs are also useful as mixins that make it easier to develop
-classes supporting container APIs. For example, to write a class supporting
-the full :class:`Set` API, it only necessary to supply the three underlying
-abstract methods: :meth:`__contains__`, :meth:`__iter__`, and :meth:`__len__`.
-The ABC supplies the remaining methods such as :meth:`__and__` and
-:meth:`isdisjoint` ::
-
- class ListBasedSet(collections.Set):
- ''' Alternate set implementation favoring space over speed
- and not requiring the set elements to be hashable. '''
- def __init__(self, iterable):
- self.elements = lst = []
- for value in iterable:
- if value not in lst:
- lst.append(value)
- def __iter__(self):
- return iter(self.elements)
- def __contains__(self, value):
- return value in self.elements
- def __len__(self):
- return len(self.elements)
-
- s1 = ListBasedSet('abcdef')
- s2 = ListBasedSet('defghi')
- overlap = s1 & s2 # The __and__() method is supported automatically
-
-Notes on using :class:`Set` and :class:`MutableSet` as a mixin:
-
-(1)
- Since some set operations create new sets, the default mixin methods need
- a way to create new instances from an iterable. The class constructor is
- assumed to have a signature in the form ``ClassName(iterable)``.
- That assumption is factored-out to an internal classmethod called
- :meth:`_from_iterable` which calls ``cls(iterable)`` to produce a new set.
- If the :class:`Set` mixin is being used in a class with a different
- constructor signature, you will need to override :meth:`_from_iterable`
- with a classmethod that can construct new instances from
- an iterable argument.
-
-(2)
- To override the comparisons (presumably for speed, as the
- semantics are fixed), redefine :meth:`__le__` and
- then the other operations will automatically follow suit.
-
-(3)
- The :class:`Set` mixin provides a :meth:`_hash` method to compute a hash value
- for the set; however, :meth:`__hash__` is not defined because not all sets
- are hashable or immutable. To add set hashabilty using mixins,
- inherit from both :meth:`Set` and :meth:`Hashable`, then define
- ``__hash__ = Set._hash``.
-
-.. seealso::
-
- * `OrderedSet recipe <http://code.activestate.com/recipes/576694/>`_ for an
- example built on :class:`MutableSet`.
-
- * For more about ABCs, see the :mod:`abc` module and :pep:`3119`.
diff --git a/Doc/library/concurrent.futures.rst b/Doc/library/concurrent.futures.rst
index 29ffc0d..a88f10c 100644
--- a/Doc/library/concurrent.futures.rst
+++ b/Doc/library/concurrent.futures.rst
@@ -169,6 +169,12 @@ to a :class:`ProcessPoolExecutor` will result in deadlock.
of at most *max_workers* processes. If *max_workers* is ``None`` or not
given, it will default to the number of processors on the machine.
+ .. versionchanged:: 3.3
+ When one of the worker processes terminates abruptly, a
+ :exc:`BrokenProcessPool` error is now raised. Previously, behaviour
+ was undefined but operations on the executor or its futures would often
+ freeze or deadlock.
+
.. _processpoolexecutor-example:
@@ -369,3 +375,16 @@ Module Functions
:pep:`3148` -- futures - execute computations asynchronously
The proposal which described this feature for inclusion in the Python
standard library.
+
+
+Exception classes
+-----------------
+
+.. exception:: BrokenProcessPool
+
+ Derived from :exc:`RuntimeError`, this exception class is raised when
+ one of the workers of a :class:`ProcessPoolExecutor` has terminated
+ in a non-clean fashion (for example, if it was killed from the outside).
+
+ .. versionadded:: 3.3
+
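A POSIX-only editorial sketch of how the exception surfaces; the worker
deliberately kills itself mid-task::

    import os
    import signal
    from concurrent.futures import ProcessPoolExecutor
    from concurrent.futures.process import BrokenProcessPool

    def die():
        # Simulate a worker process terminating abruptly.
        os.kill(os.getpid(), signal.SIGKILL)

    with ProcessPoolExecutor(max_workers=1) as executor:
        try:
            executor.submit(die).result()
        except BrokenProcessPool:
            print('pool is broken; further submissions will fail')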
diff --git a/Doc/library/copyreg.rst b/Doc/library/copyreg.rst
index a2d316e..41061e5 100644
--- a/Doc/library/copyreg.rst
+++ b/Doc/library/copyreg.rst
@@ -32,6 +32,8 @@ Such constructors may be factory functions or class instances.
returned by *function* at pickling time. :exc:`TypeError` will be raised if
*object* is a class or *constructor* is not callable.
- See the :mod:`pickle` module for more details on the interface expected of
- *function* and *constructor*.
-
+ See the :mod:`pickle` module for more details on the interface
+ expected of *function* and *constructor*. Note that the
+ :attr:`~pickle.Pickler.dispatch_table` attribute of a pickler
+ object or subclass of :class:`pickle.Pickler` can also be used for
+ declaring reduction functions.
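
A small sketch of the registration described above (the class and helper names
are illustrative)::

    import copyreg
    import pickle

    class Token:
        def __init__(self, value):
            self.value = value

    def pickle_token(token):
        # Return (constructor, args), the form pickle expects.
        return Token, (token.value,)

    copyreg.pickle(Token, pickle_token)
    restored = pickle.loads(pickle.dumps(Token(42)))
    assert restored.value == 42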
diff --git a/Doc/library/crypt.rst b/Doc/library/crypt.rst
index 0be571e..1ba2ed3 100644
--- a/Doc/library/crypt.rst
+++ b/Doc/library/crypt.rst
@@ -15,9 +15,9 @@
This module implements an interface to the :manpage:`crypt(3)` routine, which is
a one-way hash function based upon a modified DES algorithm; see the Unix man
-page for further details. Possible uses include allowing Python scripts to
-accept typed passwords from the user, or attempting to crack Unix passwords with
-a dictionary.
+page for further details. Possible uses include storing hashed passwords
+so you can check passwords without storing the actual password, or attempting
+to crack Unix passwords with a dictionary.
.. index:: single: crypt(3)
@@ -26,15 +26,74 @@ the :manpage:`crypt(3)` routine in the running system. Therefore, any
extensions available on the current implementation will also be available on
this module.
+Hashing Methods
+---------------
-.. function:: crypt(word, salt)
+.. versionadded:: 3.3
+
+The :mod:`crypt` module defines the list of hashing methods (not all methods
+are available on all platforms):
+
+.. data:: METHOD_SHA512
+
+ A Modular Crypt Format method with 16 character salt and 86 character
+ hash. This is the strongest method.
+
+.. data:: METHOD_SHA256
+
+ Another Modular Crypt Format method with 16 character salt and 43
+ character hash.
+
+.. data:: METHOD_MD5
+
+ Another Modular Crypt Format method with 8 character salt and 22
+ character hash.
+
+.. data:: METHOD_CRYPT
+
+ The traditional method with a 2 character salt and 13 characters of
+ hash. This is the weakest method.
+
+
+Module Attributes
+-----------------
+
+.. versionadded:: 3.3
+
+.. attribute:: methods
+
+ A list of available password hashing algorithms, as
+ ``crypt.METHOD_*`` objects. This list is sorted from strongest to
+ weakest, and is guaranteed to have at least ``crypt.METHOD_CRYPT``.
+
+
+Module Functions
+----------------
+
+The :mod:`crypt` module defines the following functions:
+
+.. function:: crypt(word, salt=None)
*word* will usually be a user's password as typed at a prompt or in a graphical
- interface. *salt* is usually a random two-character string which will be used
- to perturb the DES algorithm in one of 4096 ways. The characters in *salt* must
- be in the set ``[./a-zA-Z0-9]``. Returns the hashed password as a string, which
- will be composed of characters from the same alphabet as the salt (the first two
- characters represent the salt itself).
+ interface. The optional *salt* is either a string as returned from
+ :func:`mksalt`, one of the ``crypt.METHOD_*`` values (though not all
+ may be available on all platforms), or a full encrypted password
+ including salt, as returned by this function. If *salt* is not
+ provided, the strongest method will be used (as returned by
+ :attr:`methods`).
+
+ Checking a password is usually done by passing the plain-text password
+ as *word* and the full results of a previous :func:`crypt` call,
+ which should be the same as the results of this call.
+
+ *salt* (either a random 2 or 16 character string, possibly prefixed with
+ ``$digit$`` to indicate the method) is used to perturb the
+ encryption algorithm. The characters in *salt* must be in the set
+ ``[./a-zA-Z0-9]``, with the exception of Modular Crypt Format which
+ prefixes a ``$digit$``.
+
+ Returns the hashed password as a string, which will be composed of
+ characters from the same alphabet as the salt.
.. index:: single: crypt(3)
@@ -42,18 +101,48 @@ this module.
different sizes in the *salt*, it is recommended to use the full crypted
password as salt when checking for a password.
+ .. versionchanged:: 3.3
+ Accept ``crypt.METHOD_*`` values in addition to strings for *salt*.
+
+
+.. function:: mksalt(method=None)
+
+ Return a randomly generated salt of the specified method. If no
+ *method* is given, the strongest method available as returned by
+ :attr:`methods` is used.
+
+ The return value is a string either of 2 characters in length for
+ ``crypt.METHOD_CRYPT``, or 19 characters starting with ``$digit$`` and
+ 16 random characters from the set ``[./a-zA-Z0-9]``, suitable for
+ passing as the *salt* argument to :func:`crypt`.
+
+ .. versionadded:: 3.3
+
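For example (a sketch; :data:`METHOD_SHA512` may be missing on some platforms)::

    import crypt

    salt = crypt.mksalt(crypt.METHOD_SHA512)   # e.g. '$6$' plus 16 salt characters
    hashed = crypt.crypt('letmein', salt)
    assert crypt.crypt('letmein', hashed) == hashed
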
+Examples
+--------
+
A simple example illustrating typical use::
- import crypt, getpass, pwd
+ import pwd
+ import crypt
+ import getpass
def login():
- username = input('Python login:')
+ username = input('Python login: ')
cryptedpasswd = pwd.getpwnam(username)[1]
if cryptedpasswd:
if cryptedpasswd == 'x' or cryptedpasswd == '*':
- raise "Sorry, currently no support for shadow passwords"
+ raise ValueError('no support for shadow passwords')
cleartext = getpass.getpass()
return crypt.crypt(cleartext, cryptedpasswd) == cryptedpasswd
else:
- return 1
+ return True
+
+To generate a hash of a password using the strongest available method and
+check it against the original::
+
+ import crypt
+ hashed = crypt.crypt(plaintext)
+ if hashed != crypt.crypt(plaintext, hashed):
+ raise ValueError("hashed version doesn't validate against original")
diff --git a/Doc/library/csv.rst b/Doc/library/csv.rst
index edbe726..ec0dfcc 100644
--- a/Doc/library/csv.rst
+++ b/Doc/library/csv.rst
@@ -11,15 +11,15 @@
pair: data; tabular
The so-called CSV (Comma Separated Values) format is the most common import and
-export format for spreadsheets and databases. There is no "CSV standard", so
-the format is operationally defined by the many applications which read and
-write it. The lack of a standard means that subtle differences often exist in
-the data produced and consumed by different applications. These differences can
-make it annoying to process CSV files from multiple sources. Still, while the
-delimiters and quoting characters vary, the overall format is similar enough
-that it is possible to write a single module which can efficiently manipulate
-such data, hiding the details of reading and writing the data from the
-programmer.
+export format for spreadsheets and databases. CSV format was used for many
+years prior to attempts to describe the format in a standardized way in
+:rfc:`4180`. The lack of a well-defined standard means that subtle differences
+often exist in the data produced and consumed by different applications. These
+differences can make it annoying to process CSV files from multiple sources.
+Still, while the delimiters and quoting characters vary, the overall format is
+similar enough that it is possible to write a single module which can
+efficiently manipulate such data, hiding the details of reading and writing the
+data from the programmer.
The :mod:`csv` module implements classes to read and write tabular data in CSV
format. It allows programmers to say, "write this data in the format preferred
diff --git a/Doc/library/ctypes.rst b/Doc/library/ctypes.rst
index df39c28..8a2b196 100644
--- a/Doc/library/ctypes.rst
+++ b/Doc/library/ctypes.rst
@@ -39,9 +39,14 @@ loads libraries which export functions using the standard ``cdecl`` calling
convention, while *windll* libraries call functions using the ``stdcall``
calling convention. *oledll* also uses the ``stdcall`` calling convention, and
assumes the functions return a Windows :c:type:`HRESULT` error code. The error
-code is used to automatically raise a :class:`WindowsError` exception when the
+code is used to automatically raise an :class:`OSError` exception when the
function call fails.
+.. versionchanged:: 3.3
+ Windows errors used to raise :exc:`WindowsError`, which is now an alias
+ of :exc:`OSError`.
+
+
Here are some examples for Windows. Note that ``msvcrt`` is the MS standard C
library containing most standard C functions, and uses the cdecl calling
convention::
@@ -189,7 +194,7 @@ argument values::
>>> windll.kernel32.GetModuleHandleA(32) # doctest: +WINDOWS
Traceback (most recent call last):
File "<stdin>", line 1, in ?
- WindowsError: exception: access violation reading 0x00000020
+ OSError: exception: access violation reading 0x00000020
>>>
There are, however, enough ways to crash Python with :mod:`ctypes`, so you
@@ -491,7 +496,7 @@ useful to check for error return values and automatically raise an exception::
Traceback (most recent call last):
File "<stdin>", line 1, in ?
File "<stdin>", line 3, in ValidHandle
- WindowsError: [Errno 126] The specified module could not be found.
+ OSError: [Errno 126] The specified module could not be found.
>>>
``WinError`` is a function which will call Windows ``FormatMessage()`` api to
@@ -921,21 +926,21 @@ Callback functions
:mod:`ctypes` allows you to create C callable function pointers from Python callables.
These are sometimes called *callback functions*.
-First, you must create a class for the callback function, the class knows the
+First, you must create a class for the callback function. The class knows the
calling convention, the return type, and the number and types of arguments this
function will receive.
-The CFUNCTYPE factory function creates types for callback functions using the
-normal cdecl calling convention, and, on Windows, the WINFUNCTYPE factory
-function creates types for callback functions using the stdcall calling
-convention.
+The :func:`CFUNCTYPE` factory function creates types for callback functions
+using the ``cdecl`` calling convention. On Windows, the :func:`WINFUNCTYPE`
+factory function creates types for callback functions using the ``stdcall``
+calling convention.
Both of these factory functions are called with the result type as first
argument, and the callback functions expected argument types as the remaining
arguments.
I will present an example here which uses the standard C library's
-:c:func:`qsort` function, this is used to sort items with the help of a callback
+:c:func:`qsort` function, which is used to sort items with the help of a callback
function. :c:func:`qsort` will be used to sort an array of integers::
>>> IntArray5 = c_int * 5
@@ -948,7 +953,7 @@ function. :c:func:`qsort` will be used to sort an array of integers::
items in the data array, the size of one item, and a pointer to the comparison
function, the callback. The callback will then be called with two pointers to
items, and it must return a negative integer if the first item is smaller than
-the second, a zero if they are equal, and a positive integer else.
+the second, a zero if they are equal, and a positive integer otherwise.
So our callback function receives pointers to integers, and must return an
integer. First we create the ``type`` for the callback function::
@@ -956,36 +961,8 @@ integer. First we create the ``type`` for the callback function::
>>> CMPFUNC = CFUNCTYPE(c_int, POINTER(c_int), POINTER(c_int))
>>>
-For the first implementation of the callback function, we simply print the
-arguments we get, and return 0 (incremental development ;-)::
-
- >>> def py_cmp_func(a, b):
- ... print("py_cmp_func", a, b)
- ... return 0
- ...
- >>>
-
-Create the C callable callback::
-
- >>> cmp_func = CMPFUNC(py_cmp_func)
- >>>
-
-And we're ready to go::
-
- >>> qsort(ia, len(ia), sizeof(c_int), cmp_func) # doctest: +WINDOWS
- py_cmp_func <ctypes.LP_c_long object at 0x00...> <ctypes.LP_c_long object at 0x00...>
- py_cmp_func <ctypes.LP_c_long object at 0x00...> <ctypes.LP_c_long object at 0x00...>
- py_cmp_func <ctypes.LP_c_long object at 0x00...> <ctypes.LP_c_long object at 0x00...>
- py_cmp_func <ctypes.LP_c_long object at 0x00...> <ctypes.LP_c_long object at 0x00...>
- py_cmp_func <ctypes.LP_c_long object at 0x00...> <ctypes.LP_c_long object at 0x00...>
- py_cmp_func <ctypes.LP_c_long object at 0x00...> <ctypes.LP_c_long object at 0x00...>
- py_cmp_func <ctypes.LP_c_long object at 0x00...> <ctypes.LP_c_long object at 0x00...>
- py_cmp_func <ctypes.LP_c_long object at 0x00...> <ctypes.LP_c_long object at 0x00...>
- py_cmp_func <ctypes.LP_c_long object at 0x00...> <ctypes.LP_c_long object at 0x00...>
- py_cmp_func <ctypes.LP_c_long object at 0x00...> <ctypes.LP_c_long object at 0x00...>
- >>>
-
-We know how to access the contents of a pointer, so lets redefine our callback::
+To get started, here is a simple callback that shows the values it gets
+passed::
>>> def py_cmp_func(a, b):
... print("py_cmp_func", a[0], b[0])
@@ -994,23 +971,7 @@ We know how to access the contents of a pointer, so lets redefine our callback::
>>> cmp_func = CMPFUNC(py_cmp_func)
>>>
-Here is what we get on Windows::
-
- >>> qsort(ia, len(ia), sizeof(c_int), cmp_func) # doctest: +WINDOWS
- py_cmp_func 7 1
- py_cmp_func 33 1
- py_cmp_func 99 1
- py_cmp_func 5 1
- py_cmp_func 7 5
- py_cmp_func 33 5
- py_cmp_func 99 5
- py_cmp_func 7 99
- py_cmp_func 33 99
- py_cmp_func 7 33
- >>>
-
-It is funny to see that on linux the sort function seems to work much more
-efficiently, it is doing less comparisons::
+The result::
>>> qsort(ia, len(ia), sizeof(c_int), cmp_func) # doctest: +LINUX
py_cmp_func 5 1
@@ -1020,32 +981,13 @@ efficiently, it is doing less comparisons::
py_cmp_func 1 7
>>>
-Ah, we're nearly done! The last step is to actually compare the two items and
-return a useful result::
+Now we can actually compare the two items and return a useful result::
>>> def py_cmp_func(a, b):
... print("py_cmp_func", a[0], b[0])
... return a[0] - b[0]
...
>>>
-
-Final run on Windows::
-
- >>> qsort(ia, len(ia), sizeof(c_int), CMPFUNC(py_cmp_func)) # doctest: +WINDOWS
- py_cmp_func 33 7
- py_cmp_func 99 33
- py_cmp_func 5 99
- py_cmp_func 1 99
- py_cmp_func 33 7
- py_cmp_func 1 33
- py_cmp_func 5 33
- py_cmp_func 5 7
- py_cmp_func 1 7
- py_cmp_func 5 1
- >>>
-
-and on Linux::
-
>>> qsort(ia, len(ia), sizeof(c_int), CMPFUNC(py_cmp_func)) # doctest: +LINUX
py_cmp_func 5 1
py_cmp_func 33 99
@@ -1054,9 +996,6 @@ and on Linux::
py_cmp_func 5 7
>>>
-It is quite interesting to see that the Windows :func:`qsort` function needs
-more comparisons than the linux version!
-
As we can easily check, our array is sorted now::
>>> for i in ia: print(i, end=" ")
@@ -1066,9 +1005,9 @@ As we can easily check, our array is sorted now::
**Important note for callback functions:**
-Make sure you keep references to CFUNCTYPE objects as long as they are used from
-C code. :mod:`ctypes` doesn't, and if you don't, they may be garbage collected,
-crashing your program when a callback is made.
+Make sure you keep references to :func:`CFUNCTYPE` objects as long as they are
+used from C code. :mod:`ctypes` doesn't, and if you don't, they may be garbage
+collected, crashing your program when a callback is made.
.. _ctypes-accessing-values-exported-from-dlls:
@@ -1345,7 +1284,10 @@ way is to instantiate one of the following classes:
assumed to return the windows specific :class:`HRESULT` code. :class:`HRESULT`
values contain information specifying whether the function call failed or
succeeded, together with additional error code. If the return value signals a
- failure, an :class:`WindowsError` is automatically raised.
+ failure, an :class:`OSError` is automatically raised.
+
+ .. versionchanged:: 3.3
+ :exc:`WindowsError` used to be raised.
.. class:: WinDLL(name, mode=DEFAULT_MODE, handle=None, use_errno=False, use_last_error=False)
@@ -1962,11 +1904,14 @@ Utility functions
.. function:: WinError(code=None, descr=None)
Windows only: this function is probably the worst-named thing in ctypes. It
- creates an instance of WindowsError. If *code* is not specified,
+ creates an instance of OSError. If *code* is not specified,
``GetLastError`` is called to determine the error code. If *descr* is not
specified, :func:`FormatError` is called to get a textual description of the
error.
+ .. versionchanged:: 3.3
+ An instance of :exc:`WindowsError` used to be created.
+
.. function:: wstring_at(address, size=-1)
diff --git a/Doc/library/curses.rst b/Doc/library/curses.rst
index f31b9c5..ff3a793 100644
--- a/Doc/library/curses.rst
+++ b/Doc/library/curses.rst
@@ -598,6 +598,17 @@ The module :mod:`curses` defines the following functions:
Only one *ch* can be pushed before :meth:`getch` is called.
+.. function:: unget_wch(ch)
+
+ Push *ch* so the next :meth:`get_wch` will return it.
+
+ .. note::
+
+ Only one *ch* can be pushed before :meth:`get_wch` is called.
+
+ .. versionadded:: 3.3
+
+
.. function:: ungetmouse(id, x, y, z, bstate)
Push a :const:`KEY_MOUSE` event onto the input queue, associating the given
@@ -642,7 +653,7 @@ Window Objects
--------------
Window objects, as returned by :func:`initscr` and :func:`newwin` above, have
-the following methods:
+the following methods and attributes:
.. method:: window.addch([y, x,] ch[, attr])
@@ -823,6 +834,16 @@ the following methods:
event.
+.. attribute:: window.encoding
+
+ Encoding used to encode method arguments (Unicode strings and characters).
+ The encoding attribute is inherited from the parent window when a subwindow
+ is created, for example with :meth:`window.subwin`. By default, the locale
+ encoding is used (see :func:`locale.getpreferredencoding`).
+
+ .. versionadded:: 3.3
+
+
.. method:: window.erase()
Clear the window.
@@ -846,6 +867,14 @@ the following methods:
until a key is pressed.
+.. method:: window.get_wch([y, x])
+
+ Get a wide character. Like :meth:`getch`, but the integer returned is the
+ Unicode code point for the key pressed, so it can be passed to :func:`chr`.
+
+ .. versionadded:: 3.3
+
+
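A tentative sketch of the round trip between :func:`unget_wch` and
:meth:`get_wch`, run under :func:`curses.wrapper`; the integer/string handling
hedges between the possible return types::

    import curses

    def main(stdscr):
        curses.unget_wch('é')      # push a wide character back
        key = stdscr.get_wch()     # retrieve it again
        if isinstance(key, int):   # function keys arrive as integers
            key = chr(key)
        stdscr.addstr(0, 0, key)
        stdscr.get_wch()           # wait for a real keypress before exiting

    curses.wrapper(main)
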
.. method:: window.getkey([y, x])
Get a character, returning a string instead of an integer, as :meth:`getch`
diff --git a/Doc/library/datatypes.rst b/Doc/library/datatypes.rst
index 6b4a71a..8e33c1f 100644
--- a/Doc/library/datatypes.rst
+++ b/Doc/library/datatypes.rst
@@ -21,6 +21,7 @@ The following modules are documented in this chapter:
datetime.rst
calendar.rst
collections.rst
+ collections.abc.rst
heapq.rst
bisect.rst
array.rst
diff --git a/Doc/library/datetime.rst b/Doc/library/datetime.rst
index 1f4cfba..401a647 100644
--- a/Doc/library/datetime.rst
+++ b/Doc/library/datetime.rst
@@ -395,12 +395,19 @@ Other constructors, all class methods:
.. classmethod:: date.fromtimestamp(timestamp)
Return the local date corresponding to the POSIX timestamp, such as is returned
- by :func:`time.time`. This may raise :exc:`ValueError`, if the timestamp is out
- of the range of values supported by the platform C :c:func:`localtime` function.
+ by :func:`time.time`. This may raise :exc:`OverflowError`, if the timestamp is out
+ of the range of values supported by the platform C :c:func:`localtime` function,
+ and :exc:`OSError` on :c:func:`localtime` failure.
It's common for this to be restricted to years from 1970 through 2038. Note
that on non-POSIX systems that include leap seconds in their notion of a
timestamp, leap seconds are ignored by :meth:`fromtimestamp`.
+ .. versionchanged:: 3.3
+ Raise :exc:`OverflowError` instead of :exc:`ValueError` if the timestamp
+ is out of the range of values supported by the platform C
+ :c:func:`localtime` function. Raise :exc:`OSError` instead of
+ :exc:`ValueError` on :c:func:`localtime` failure.
+
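To illustrate the changed behaviour (an editorial sketch; the oversized
timestamp is arbitrary)::

    from datetime import date

    try:
        d = date.fromtimestamp(2 ** 62)   # far beyond the C localtime() range
    except OverflowError:                 # would have been ValueError before 3.3
        d = None
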
.. classmethod:: date.fromordinal(ordinal)
@@ -704,23 +711,55 @@ Other constructors, all class methods:
equivalent to
``tz.fromutc(datetime.utcfromtimestamp(timestamp).replace(tzinfo=tz))``.
- :meth:`fromtimestamp` may raise :exc:`ValueError`, if the timestamp is out of
+ :meth:`fromtimestamp` may raise :exc:`OverflowError`, if the timestamp is out of
the range of values supported by the platform C :c:func:`localtime` or
- :c:func:`gmtime` functions. It's common for this to be restricted to years in
+ :c:func:`gmtime` functions, and :exc:`OSError` on :c:func:`localtime` or
+ :c:func:`gmtime` failure.
+ It's common for this to be restricted to years in
1970 through 2038. Note that on non-POSIX systems that include leap seconds in
their notion of a timestamp, leap seconds are ignored by :meth:`fromtimestamp`,
and then it's possible to have two timestamps differing by a second that yield
identical :class:`.datetime` objects. See also :meth:`utcfromtimestamp`.
+ .. versionchanged:: 3.3
+ Raise :exc:`OverflowError` instead of :exc:`ValueError` if the timestamp
+ is out of the range of values supported by the platform C
+ :c:func:`localtime` or :c:func:`gmtime` functions. Raise :exc:`OSError`
+ instead of :exc:`ValueError` on :c:func:`localtime` or :c:func:`gmtime`
+ failure.
+
.. classmethod:: datetime.utcfromtimestamp(timestamp)
Return the UTC :class:`.datetime` corresponding to the POSIX timestamp, with
- :attr:`tzinfo` ``None``. This may raise :exc:`ValueError`, if the timestamp is
- out of the range of values supported by the platform C :c:func:`gmtime` function.
+ :attr:`tzinfo` ``None``. This may raise :exc:`OverflowError`, if the timestamp is
+ out of the range of values supported by the platform C :c:func:`gmtime` function,
+ and :exc:`OSError` on :c:func:`gmtime` failure.
It's common for this to be restricted to years in 1970 through 2038. See also
:meth:`fromtimestamp`.
+ On POSIX compliant platforms, ``utcfromtimestamp(timestamp)``
+ is equivalent to the following expression::
+
+ datetime(1970, 1, 1) + timedelta(seconds=timestamp)
+
+ There is no method to obtain the timestamp from a :class:`datetime`
+ instance, but the POSIX timestamp corresponding to a :class:`datetime`
+ instance ``dt`` can be easily calculated as follows. For a naive
+ ``dt``::
+
+ timestamp = (dt - datetime(1970, 1, 1)) / timedelta(seconds=1)
+
+ And for an aware ``dt``::
+
+ timestamp = (dt - datetime(1970, 1, 1, tzinfo=timezone.utc)) / timedelta(seconds=1)
+
+ .. versionchanged:: 3.3
+ Raise :exc:`OverflowError` instead of :exc:`ValueError` if the timestamp
+ is out of the range of values supported by the platform C
+ :c:func:`gmtime` function. Raise :exc:`OSError` instead of
+ :exc:`ValueError` on :c:func:`gmtime` failure.
+
.. classmethod:: datetime.fromordinal(ordinal)
@@ -1564,11 +1603,12 @@ only EST (fixed offset -5 hours), or only EDT (fixed offset -4 hours)).
:class:`timezone` Objects
--------------------------
-A :class:`timezone` object represents a timezone that is defined by a
-fixed offset from UTC. Note that objects of this class cannot be used
-to represent timezone information in the locations where different
-offsets are used in different days of the year or where historical
-changes have been made to civil time.
+The :class:`timezone` class is a subclass of :class:`tzinfo`, each
+instance of which represents a timezone defined by a fixed offset from
+UTC. Note that objects of this class cannot be used to represent
+timezone information in the locations where different offsets are used
+in different days of the year or where historical changes have been
+made to civil time.
.. class:: timezone(offset[, name])
@@ -1737,8 +1777,7 @@ format codes.
| | decimal number [00,99]. | |
+-----------+--------------------------------+-------+
| ``%Y`` | Year with century as a decimal | \(5) |
-| | number [0001,9999] (strptime), | |
-| | [1000,9999] (strftime). | |
+| | number [0001,9999]. | |
+-----------+--------------------------------+-------+
| ``%z`` | UTC offset in the form +HHMM | \(6) |
| | or -HHMM (empty string if the | |
@@ -1772,10 +1811,7 @@ Notes:
calculations when the day of the week and the year are specified.
(5)
- For technical reasons, :meth:`strftime` method does not support
- dates before year 1000: ``t.strftime(format)`` will raise a
- :exc:`ValueError` when ``t.year < 1000`` even if ``format`` does
- not contain ``%Y`` directive. The :meth:`strptime` method can
+ The :meth:`strptime` method can
parse years in the full [1, 9999] range, but years < 1000 must be
zero-filled to 4-digit width.
@@ -1783,6 +1819,10 @@ Notes:
In previous versions, :meth:`strftime` method was restricted to
years >= 1900.
+ .. versionchanged:: 3.3
+ In version 3.2, the :meth:`strftime` method was restricted to
+ years >= 1000.
+
(6)
For example, if :meth:`utcoffset` returns ``timedelta(hours=-3, minutes=-30)``,
``%z`` is replaced with the string ``'-0330'``.
diff --git a/Doc/library/debug.rst b/Doc/library/debug.rst
index b2ee4fa..c69fb1c 100644
--- a/Doc/library/debug.rst
+++ b/Doc/library/debug.rst
@@ -10,7 +10,8 @@ allowing you to identify bottlenecks in your programs.
.. toctree::
bdb.rst
+ faulthandler.rst
pdb.rst
profile.rst
timeit.rst
- trace.rst \ No newline at end of file
+ trace.rst
diff --git a/Doc/library/depgraph-output.png b/Doc/library/depgraph-output.png
new file mode 100644
index 0000000..960bb1b
--- /dev/null
+++ b/Doc/library/depgraph-output.png
Binary files differ
diff --git a/Doc/library/difflib.rst b/Doc/library/difflib.rst
index bdc37b3..836e240 100644
--- a/Doc/library/difflib.rst
+++ b/Doc/library/difflib.rst
@@ -752,8 +752,8 @@ It is also contained in the Python source distribution, as
# we're passing these as arguments to the diff function
fromdate = time.ctime(os.stat(fromfile).st_mtime)
todate = time.ctime(os.stat(tofile).st_mtime)
- fromlines = open(fromfile, 'U').readlines()
- tolines = open(tofile, 'U').readlines()
+ with open(fromfile) as fromf, open(tofile) as tof:
+ fromlines, tolines = list(fromf), list(tof)
if options.u:
diff = difflib.unified_diff(fromlines, tolines, fromfile, tofile,
diff --git a/Doc/library/dis.rst b/Doc/library/dis.rst
index 79cc583..5ba66cb 100644
--- a/Doc/library/dis.rst
+++ b/Doc/library/dis.rst
@@ -171,11 +171,6 @@ The Python compiler currently generates the following bytecode instructions.
**General instructions**
-.. opcode:: STOP_CODE
-
- Indicates end-of-code to the compiler, not used by the interpreter.
-
-
.. opcode:: NOP
Do nothing code. Used as a placeholder by the bytecode optimizer.
@@ -436,6 +431,13 @@ the stack so that it is available for further iterations of the loop.
Pops ``TOS`` and yields it from a :term:`generator`.
+.. opcode:: YIELD_FROM
+
+ Pops ``TOS`` and delegates to it as a subiterator from a :term:`generator`.
+
+ .. versionadded:: 3.3
+
+
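A quick way to observe the opcode (a sketch using :func:`dis.dis`; the
``yield from`` syntax itself requires 3.3)::

    import dis

    def delegate(sub):
        yield from sub    # compiles to a YIELD_FROM instruction

    dis.dis(delegate)     # the listing includes YIELD_FROM
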
.. opcode:: IMPORT_STAR
Loads all symbols not starting with ``'_'`` directly from the module TOS to the
diff --git a/Doc/library/distutils.rst b/Doc/library/distutils.rst
index 238b79d..53a69ae 100644
--- a/Doc/library/distutils.rst
+++ b/Doc/library/distutils.rst
@@ -12,18 +12,26 @@ additional modules into a Python installation. The new modules may be either
100%-pure Python, or may be extension modules written in C, or may be
collections of Python packages which include modules coded in both Python and C.
-This package is discussed in two separate chapters:
+.. deprecated:: 3.3
+ :mod:`packaging` replaces Distutils. See :ref:`packaging-index` and
+ :ref:`packaging-install-index`.
+User documentation and API reference are provided in another document:
+
.. seealso::
:ref:`distutils-index`
The manual for developers and packagers of Python modules. This describes
how to prepare :mod:`distutils`\ -based packages so that they may be
- easily installed into an existing Python installation.
+ easily installed into an existing Python installation. It also contains
+ instructions for end-users wanting to install a distutils-based package,
+ :ref:`install-index`.
+
+
+.. trick to silence a Sphinx warning
- :ref:`install-index`
- An "administrators" manual which includes information on installing
- modules into an existing Python installation. You do not need to be a
- Python programmer to read this manual.
+.. toctree::
+ :hidden:
+ ../distutils/index
diff --git a/Doc/library/email.generator.rst b/Doc/library/email.generator.rst
index 85b32fe..847d7e4 100644
--- a/Doc/library/email.generator.rst
+++ b/Doc/library/email.generator.rst
@@ -32,7 +32,8 @@ Here are the public methods of the :class:`Generator` class, imported from the
:mod:`email.generator` module:
-.. class:: Generator(outfp, mangle_from_=True, maxheaderlen=78)
+.. class:: Generator(outfp, mangle_from_=True, maxheaderlen=78, *, \
+ policy=policy.default)
The constructor for the :class:`Generator` class takes a :term:`file-like object`
called *outfp* for an argument. *outfp* must support the :meth:`write` method
@@ -53,10 +54,16 @@ Here are the public methods of the :class:`Generator` class, imported from the
:class:`~email.header.Header` class. Set to zero to disable header wrapping.
The default is 78, as recommended (but not required) by :rfc:`2822`.
+ The *policy* keyword specifies a :mod:`~email.policy` object that controls a
+ number of aspects of the generator's operation. The default policy
+ maintains backward compatibility.
+
+ .. versionchanged:: 3.3 Added the *policy* keyword.
+
The other public :class:`Generator` methods are:
- .. method:: flatten(msg, unixfrom=False, linesep='\\n')
+ .. method:: flatten(msg, unixfrom=False, linesep=None)
Print the textual representation of the message object structure rooted at
*msg* to the output file specified when the :class:`Generator` instance
@@ -72,12 +79,13 @@ Here are the public methods of the :class:`Generator` class, imported from the
Note that for subparts, no envelope header is ever printed.
Optional *linesep* specifies the line separator character used to
- terminate lines in the output. It defaults to ``\n`` because that is
- the most useful value for Python application code (other library packages
- expect ``\n`` separated lines). ``linesep=\r\n`` can be used to
- generate output with RFC-compliant line separators.
+ terminate lines in the output. If specified, it overrides the value
+ specified by the ``Generator``\'s ``policy``.
- Messages parsed with a Bytes parser that have a
+ Because strings cannot represent non-ASCII bytes, ``Generator`` ignores
+ the value of the :attr:`~email.policy.Policy.must_be_7bit`
+ :mod:`~email.policy` setting and operates as if it were set to ``True``.
+ This means that messages parsed with a Bytes parser that have a
:mailheader:`Content-Transfer-Encoding` of 8bit will be converted to
use a 7bit Content-Transfer-Encoding. Non-ASCII bytes in the headers
will be :rfc:`2047` encoded with a charset of `unknown-8bit`.
@@ -103,7 +111,8 @@ As a convenience, see the :class:`~email.message.Message` methods
formatted string representation of a message object. For more detail, see
:mod:`email.message`.
-.. class:: BytesGenerator(outfp, mangle_from_=True, maxheaderlen=78)
+.. class:: BytesGenerator(outfp, mangle_from_=True, maxheaderlen=78, *, \
+ policy=policy.default)
The constructor for the :class:`BytesGenerator` class takes a binary
:term:`file-like object` called *outfp* for an argument. *outfp* must
@@ -125,19 +134,31 @@ formatted string representation of a message object. For more detail, see
wrapping. The default is 78, as recommended (but not required) by
:rfc:`2822`.
+ The *policy* keyword specifies a :mod:`~email.policy` object that controls a
+ number of aspects of the generator's operation. The default policy
+ maintains backward compatibility.
+
+ .. versionchanged:: 3.3 Added the *policy* keyword.
+
The other public :class:`BytesGenerator` methods are:
- .. method:: flatten(msg, unixfrom=False, linesep='\n')
+ .. method:: flatten(msg, unixfrom=False, linesep=None)
Print the textual representation of the message object structure rooted
at *msg* to the output file specified when the :class:`BytesGenerator`
instance was created. Subparts are visited depth-first and the resulting
- text will be properly MIME encoded. If the input that created the *msg*
- contained bytes with the high bit set and those bytes have not been
- modified, they will be copied faithfully to the output, even if doing so
- is not strictly RFC compliant. (To produce strictly RFC compliant
- output, use the :class:`Generator` class.)
+ text will be properly MIME encoded. If the :mod:`~email.policy` option
+ :attr:`~email.policy.Policy.must_be_7bit` is ``False`` (the default),
+ then any bytes with the high bit set in the original parsed message that
+ have not been modified will be copied faithfully to the output. If
+ ``must_be_7bit`` is true, the bytes will be converted as needed using an
+ ASCII content-transfer-encoding. In particular, RFC-invalid non-ASCII
+ bytes in headers will be encoded using the MIME ``unknown-8bit``
+ character set, thus rendering them RFC-compliant.
+
+ .. XXX: There should be a complementary option that just does the RFC
+ compliance transformation but leaves CTE 8bit parts alone.
Messages parsed with a Bytes parser that have a
:mailheader:`Content-Transfer-Encoding` of 8bit will be reconstructed
@@ -152,10 +173,8 @@ formatted string representation of a message object. For more detail, see
Note that for subparts, no envelope header is ever printed.
Optional *linesep* specifies the line separator character used to
- terminate lines in the output. It defaults to ``\n`` because that is
- the most useful value for Python application code (other library packages
- expect ``\n`` separated lines). ``linesep=\r\n`` can be used to
- generate output with RFC-compliant line separators.
+ terminate lines in the output. If specified, it overrides the value
+ specified by the ``Generator``\ 's ``policy``.
.. method:: clone(fp)
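Putting the above together, a minimal flattening sketch (the ``\r\n`` separator is illustrative, not required)::

   >>> import io
   >>> from email.message import Message
   >>> from email.generator import BytesGenerator
   >>> msg = Message()
   >>> msg['Subject'] = 'Hello'
   >>> msg.set_payload('Body text\n')
   >>> buf = io.BytesIO()
   >>> BytesGenerator(buf).flatten(msg, linesep='\r\n')
   >>> buf.getvalue().startswith(b'Subject: Hello\r\n')
   True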
diff --git a/Doc/library/email.parser.rst b/Doc/library/email.parser.rst
index 384c5c9..a0303a4 100644
--- a/Doc/library/email.parser.rst
+++ b/Doc/library/email.parser.rst
@@ -58,12 +58,18 @@ list of defects that it can find.
Here is the API for the :class:`FeedParser`:
-.. class:: FeedParser(_factory=email.message.Message)
+.. class:: FeedParser(_factory=email.message.Message, *, policy=policy.default)
Create a :class:`FeedParser` instance. Optional *_factory* is a no-argument
callable that will be called whenever a new message object is needed. It
defaults to the :class:`email.message.Message` class.
+ The *policy* keyword specifies a :mod:`~email.policy` object that controls a
+ number of aspects of the parser's operation. The default policy maintains
+ backward compatibility.
+
+ .. versionchanged:: 3.3 Added the *policy* keyword.
+
.. method:: feed(data)
Feed the :class:`FeedParser` some more data. *data* should be a string
@@ -94,15 +100,17 @@ Parser class API
The :class:`Parser` class, imported from the :mod:`email.parser` module,
provides an API that can be used to parse a message when the complete contents
of the message are available in a string or file. The :mod:`email.parser`
-module also provides a second class, called :class:`HeaderParser` which can be
-used if you're only interested in the headers of the message.
-:class:`HeaderParser` can be much faster in these situations, since it does not
-attempt to parse the message body, instead setting the payload to the raw body
-as a string. :class:`HeaderParser` has the same API as the :class:`Parser`
-class.
+module also provides header-only parsers, called :class:`HeaderParser` and
+:class:`BytesHeaderParser`, which can be used if you're only interested in the
+headers of the message. :class:`HeaderParser` and :class:`BytesHeaderParser`
+can be much faster in these situations, since they do not attempt to parse the
+message body, instead setting the payload to the raw body as a string. They
+have the same API as the :class:`Parser` and :class:`BytesParser` classes.
+.. versionadded:: 3.3 BytesHeaderParser
-.. class:: Parser(_class=email.message.Message, strict=None)
+
+.. class:: Parser(_class=email.message.Message, *, policy=policy.default)
The constructor for the :class:`Parser` class takes an optional argument
*_class*. This must be a callable factory (such as a function or a class), and
@@ -110,13 +118,13 @@ class.
:class:`~email.message.Message` (see :mod:`email.message`). The factory will
be called without arguments.
- The optional *strict* flag is ignored.
+ The *policy* keyword specifies a :mod:`~email.policy` object that controls a
+ number of aspects of the parser's operation. The default policy maintains
+ backward compatibility.
- .. deprecated:: 2.4
- Because the :class:`Parser` class is a backward compatible API wrapper
- around the new-in-Python 2.4 :class:`FeedParser`, *all* parsing is
- effectively non-strict. You should simply stop passing a *strict* flag to
- the :class:`Parser` constructor.
+ .. versionchanged:: 3.3
+ Removed the *strict* argument that was deprecated in 2.4. Added the
+ *policy* keyword.
The other public :class:`Parser` methods are:
@@ -147,12 +155,18 @@ class.
Optional *headersonly* is as with the :meth:`parse` method.
-.. class:: BytesParser(_class=email.message.Message, strict=None)
+.. class:: BytesParser(_class=email.message.Message, *, policy=policy.default)
This class is exactly parallel to :class:`Parser`, but handles bytes input.
The *_class* and *policy* arguments are interpreted in the same way as for
- the :class:`Parser` constructor. *strict* is supported only to make porting
- code easier; it is deprecated.
+ the :class:`Parser` constructor.
+
+ The *policy* keyword specifies a :mod:`~email.policy` object that
+ controls a number of aspects of the parser's operation. The default
+ policy maintains backward compatibility.
+
+ .. versionchanged:: 3.3
+ Removed the *strict* argument. Added the *policy* keyword.
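For instance, parsing a raw byte string with the :meth:`parsebytes` convenience method might look like this (a minimal sketch)::

   >>> from email.parser import BytesParser
   >>> raw = b'Subject: greetings\r\n\r\nHello, world.\r\n'
   >>> msg = BytesParser().parsebytes(raw)
   >>> msg['Subject']
   'greetings'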
.. method:: parse(fp, headersonly=False)
@@ -190,34 +204,48 @@ in the top-level :mod:`email` package namespace.
.. currentmodule:: email
-.. function:: message_from_string(s, _class=email.message.Message, strict=None)
+.. function:: message_from_string(s, _class=email.message.Message, *, \
+ policy=policy.default)
Return a message object structure from a string. This is exactly equivalent to
- ``Parser().parsestr(s)``. Optional *_class* and *strict* are interpreted as
+ ``Parser().parsestr(s)``. *_class* and *policy* are interpreted as
with the :class:`Parser` class constructor.
-.. function:: message_from_bytes(s, _class=email.message.Message, strict=None)
+ .. versionchanged:: 3.3
+ Removed the *strict* argument. Added the *policy* keyword.
+
+.. function:: message_from_bytes(s, _class=email.message.Message, *, \
+ policy=policy.default)
Return a message object structure from a byte string. This is exactly
equivalent to ``BytesParser().parsebytes(s)``. *_class* and
*policy* are interpreted as with the :class:`Parser` class constructor.
.. versionadded:: 3.2
+ .. versionchanged:: 3.3
+ Removed the *strict* argument. Added the *policy* keyword.
-.. function:: message_from_file(fp, _class=email.message.Message, strict=None)
+.. function:: message_from_file(fp, _class=email.message.Message, *, \
+ policy=policy.default)
Return a message object structure tree from an open :term:`file object`.
- This is exactly equivalent to ``Parser().parse(fp)``. Optional *_class*
- and *strict* are interpreted as with the :class:`Parser` class constructor.
+ This is exactly equivalent to ``Parser().parse(fp)``. *_class*
+ and *policy* are interpreted as with the :class:`Parser` class constructor.
+
+ .. versionchanged:: 3.3
+ Removed the *strict* argument. Added the *policy* keyword.
-.. function:: message_from_binary_file(fp, _class=email.message.Message, strict=None)
+.. function:: message_from_binary_file(fp, _class=email.message.Message, *, \
+ policy=policy.default)
Return a message object structure tree from an open binary :term:`file
object`. This is exactly equivalent to ``BytesParser().parse(fp)``.
- Optional *_class* and *strict* are interpreted as with the :class:`Parser`
+ *_class* and *policy* are interpreted as with the :class:`Parser`
class constructor.
.. versionadded:: 3.2
+ .. versionchanged:: 3.3
+ Removed the *strict* argument. Added the *policy* keyword.
Here's an example of how you might use this at an interactive Python prompt::
diff --git a/Doc/library/email.policy.rst b/Doc/library/email.policy.rst
new file mode 100644
index 0000000..d9a292c
--- /dev/null
+++ b/Doc/library/email.policy.rst
@@ -0,0 +1,184 @@
+:mod:`email`: Policy Objects
+----------------------------
+
+.. module:: email.policy
+ :synopsis: Controlling the parsing and generating of messages
+
+.. versionadded:: 3.3
+
+
+The :mod:`email` package's prime focus is the handling of email messages as
+described by the various email and MIME RFCs. However, the general format of
+email messages (a block of header fields each consisting of a name followed by
+a colon followed by a value, the whole block followed by a blank line and an
+arbitrary 'body'), is a format that has found utility outside of the realm of
+email. Some of these uses conform fairly closely to the main RFCs, some do
+not. And even when working with email, there are times when it is desirable to
+break strict compliance with the RFCs.
+
+Policy objects give the email package the flexibility to handle all these
+disparate use cases.
+
+A :class:`Policy` object encapsulates a set of attributes and methods that
+control the behavior of various components of the email package during use.
+:class:`Policy` instances can be passed to various classes and methods in the
+email package to alter the default behavior. The settable values and their
+defaults are described below. The :mod:`policy` module also provides some
+pre-created :class:`Policy` instances. In addition to a :const:`default`
+instance, there are instances tailored for certain applications. For example
+there is an :const:`SMTP` :class:`Policy` with defaults appropriate for
+generating output to be sent to an SMTP server. These are listed in
+`Policy Instances`_ below.
+
+In general an application will only need to deal with setting the policy at the
+input and output boundaries. Once parsed, a message is represented by a
+:class:`~email.message.Message` object, which is designed to be independent of
+the format that the message has "on the wire" when it is received, transmitted,
+or displayed. Thus, a :class:`Policy` can be specified when parsing a message
+to create a :class:`~email.message.Message`, and again when turning the
+:class:`~email.message.Message` into some other representation. While often a
+program will use the same :class:`Policy` for both input and output, the two
+can be different.
+
+As an example, the following code could be used to read an email message from a
+file on disk and pass it to the system ``sendmail`` program on a Unix system::
+
+ >>> from email import message_from_binary_file
+ >>> from email.generator import BytesGenerator
+ >>> import email.policy
+ >>> from subprocess import Popen, PIPE
+ >>> with open('mymsg.txt', 'rb') as f:
+ ... msg = message_from_binary_file(f, policy=email.policy.mbox)
+ >>> p = Popen(['sendmail', msg['To'][0].address], stdin=PIPE)
+ >>> g = BytesGenerator(p.stdin, policy=email.policy.SMTP)
+ >>> g.flatten(msg)
+ >>> p.stdin.close()
+ >>> rc = p.wait()
+
+.. XXX email.policy.mbox/MBOX does not exist yet
+
+Some email package methods accept a *policy* keyword argument, allowing the
+policy to be overridden for that method. For example, the following code uses
+the :meth:`~email.message.Message.as_string` method of the *msg* object from the
+previous example and rewrites it to a file using the native line separators for
+the platform on which it is running::
+
+ >>> import os
+ >>> mypolicy = email.policy.Policy(linesep=os.linesep)
+ >>> with open('converted.txt', 'w', newline='') as f:
+ ... f.write(msg.as_string(policy=mypolicy))
+
+Policy instances are immutable, but they can be cloned, accepting the same
+keyword arguments as the class constructor and returning a new :class:`Policy`
+instance that is a copy of the original but with the specified attribute
+values changed. For example, the following creates an SMTP policy that will
+raise any defects detected as errors::
+
+ >>> strict_SMTP = email.policy.SMTP.clone(raise_on_defect=True)
+
+Policy objects can also be combined using the addition operator, producing a
+policy object whose settings are a combination of the non-default values of the
+summed objects::
+
+ >>> strict_SMTP = email.policy.SMTP + email.policy.strict
+
+This operation is not commutative; that is, the order in which the objects are
+added matters. To illustrate::
+
+ >>> Policy = email.policy.Policy
+ >>> apolicy = Policy(max_line_length=100) + Policy(max_line_length=80)
+ >>> apolicy.max_line_length
+ 80
+ >>> apolicy = Policy(max_line_length=80) + Policy(max_line_length=100)
+ >>> apolicy.max_line_length
+ 100
+
+
+.. class:: Policy(**kw)
+
+ The valid constructor keyword arguments are any of the attributes listed
+ below.
+
+ .. attribute:: max_line_length
+
+ The maximum length of any line in the serialized output, not counting the
+ end of line character(s). Default is 78, per :rfc:`5322`. A value of
+ ``0`` or :const:`None` indicates that no line wrapping should be
+ done at all.
+
+ .. attribute:: linesep
+
+ The string to be used to terminate lines in serialized output. The
+ default is ``\n`` because that's the internal end-of-line discipline used
+ by Python, though ``\r\n`` is required by the RFCs. See `Policy
+ Instances`_ for policies that use an RFC conformant linesep. Setting it
+ to :attr:`os.linesep` may also be useful.
+
+ .. attribute:: must_be_7bit
+
+ If ``True``, data output by a bytes generator is limited to ASCII
+ characters. If :const:`False` (the default), then bytes with the high
+ bit set are preserved and/or allowed in certain contexts (for example,
+ where possible a content transfer encoding of ``8bit`` will be used).
+ String generators act as if ``must_be_7bit`` is ``True`` regardless of
+ the policy in effect, since a string cannot represent non-ASCII bytes.
+
+ .. attribute:: raise_on_defect
+
+ If :const:`True`, any defects encountered will be raised as errors. If
+ :const:`False` (the default), defects will be passed to the
+ :meth:`register_defect` method.
+
+ :class:`Policy` objects also have the following methods:
+
+ .. method:: handle_defect(obj, defect)
+
+ *obj* is the object on which to register the defect. *defect* should be
+ an instance of a subclass of :class:`~email.errors.Defect`.
+ If :attr:`raise_on_defect`
+ is ``True`` the defect is raised as an exception. Otherwise *obj* and
+ *defect* are passed to :meth:`register_defect`. This method is intended
+ to be called by parsers when they encounter defects, and will not be
+ called by code that uses the email library unless that code is
+ implementing an alternate parser.
+
+ .. method:: register_defect(obj, defect)
+
+ *obj* is the object on which to register the defect. *defect* should be
+ an instance of a subclass of :class:`~email.errors.Defect`. This method
+ public API so that custom ``Policy`` subclasses can implement alternate
+ handling of defects. The default implementation calls the ``append``
+ method of the ``defects`` attribute of *obj*.
+
+ .. method:: clone(**kw)
+
+ Return a new :class:`Policy` instance whose attributes have the same
+ values as the current instance, except where those attributes are
+ given new values by the keyword arguments.
+
+
+Policy Instances
+^^^^^^^^^^^^^^^^
+
+The following instances of :class:`Policy` provide defaults suitable for
+specific common application domains.
+
+.. data:: default
+
+ An instance of :class:`Policy` with all defaults unchanged.
+
+.. data:: SMTP
+
+ Output serialized from a message will conform to the email and SMTP
+ RFCs. The only changed attribute is :attr:`linesep`, which is set to
+ ``\r\n``.
+
+.. data:: HTTP
+
+ Suitable for use when serializing headers for use in HTTP traffic.
+ :attr:`linesep` is set to ``\r\n``, and :attr:`max_line_length` is set to
+ :const:`None` (unlimited).
+
+.. data:: strict
+
+ :attr:`raise_on_defect` is set to :const:`True`.
diff --git a/Doc/library/email.rst b/Doc/library/email.rst
index 4530b95..fc206f4 100644
--- a/Doc/library/email.rst
+++ b/Doc/library/email.rst
@@ -51,6 +51,7 @@ Contents of the :mod:`email` package documentation:
email.message.rst
email.parser.rst
email.generator.rst
+ email.policy.rst
email.mime.rst
email.header.rst
email.charset.rst
diff --git a/Doc/library/email.util.rst b/Doc/library/email.util.rst
index f7b777a..2f9ef89 100644
--- a/Doc/library/email.util.rst
+++ b/Doc/library/email.util.rst
@@ -29,13 +29,20 @@ There are several useful utilities provided in the :mod:`email.utils` module:
fails, in which case a 2-tuple of ``('', '')`` is returned.
-.. function:: formataddr(pair)
+.. function:: formataddr(pair, charset='utf-8')
The inverse of :meth:`parseaddr`, this takes a 2-tuple of the form ``(realname,
email_address)`` and returns the string value suitable for a :mailheader:`To` or
:mailheader:`Cc` header. If the first element of *pair* is false, then the
second element is returned unmodified.
+ Optional *charset* is the character set that will be used in the :rfc:`2047`
+ encoding of the ``realname`` if the ``realname`` contains non-ASCII
+ characters. Can be an instance of :class:`str` or a
+ :class:`~email.charset.Charset`. Defaults to ``utf-8``.
+
+ .. versionchanged:: 3.3 Added the *charset* option.
+
.. function:: getaddresses(fieldvalues)
@@ -74,6 +81,20 @@ There are several useful utilities provided in the :mod:`email.utils` module:
indexes 6, 7, and 8 of the result tuple are not usable.
+.. function:: parsedate_to_datetime(date)
+
+ The inverse of :func:`format_datetime`. Performs the same function as
+ :func:`parsedate`, but on success returns a :class:`~datetime.datetime`. If
+ the input date has a timezone of ``-0000``, the ``datetime`` will be a naive
+ ``datetime``, and if the date conforms to the RFCs it will represent a
+ time in UTC but with no indication of the actual source timezone of the
+ message the date comes from. If the input date has any other valid timezone
+ offset, the ``datetime`` will be an aware ``datetime`` whose
+ :class:`~datetime.tzinfo` is the corresponding :class:`~datetime.timezone`.
+
+ .. versionadded:: 3.3
+
+
.. function:: mktime_tz(tuple)
Turn a 10-tuple as returned by :func:`parsedate_tz` into a UTC timestamp. It
@@ -105,6 +126,20 @@ There are several useful utilities provided in the :mod:`email.utils` module:
``False``. The default is ``False``.
+.. function:: format_datetime(dt, usegmt=False)
+
+ Like ``formatdate``, but the input is a :class:`~datetime.datetime` instance. If it is
+ a naive datetime, it is assumed to be "UTC with no information about the
+ source timezone", and the conventional ``-0000`` is used for the timezone.
+ If it is an aware ``datetime``, then the numeric timezone offset is used.
+ If it is an aware ``datetime`` with a UTC offset of zero, then *usegmt* may be set to
+ ``True``, in which case the string ``GMT`` is used instead of the numeric
+ timezone offset. This provides a way to generate standards conformant HTTP
+ date headers.
+
+ .. versionadded:: 3.3
+
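The two functions are inverses of each other; for example (a sketch, with the output one would expect from the behavior described above)::

   >>> from datetime import datetime, timezone
   >>> from email.utils import format_datetime, parsedate_to_datetime
   >>> s = format_datetime(datetime(2011, 12, 1, 12, 0, tzinfo=timezone.utc),
   ...                     usegmt=True)
   >>> s
   'Thu, 01 Dec 2011 12:00:00 GMT'
   >>> parsedate_to_datetime(s)
   datetime.datetime(2011, 12, 1, 12, 0, tzinfo=datetime.timezone.utc)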
+
.. function:: make_msgid(idstring=None, domain=None)
Returns a string suitable for an :rfc:`2822`\ -compliant
diff --git a/Doc/library/exceptions.rst b/Doc/library/exceptions.rst
index ca3ad3e..8c5a960 100644
--- a/Doc/library/exceptions.rst
+++ b/Doc/library/exceptions.rst
@@ -34,6 +34,28 @@ programmers are encouraged to at least derive new exceptions from the
defining exceptions is available in the Python Tutorial under
:ref:`tut-userexceptions`.
+When raising (or re-raising) an exception in an :keyword:`except` clause
+:attr:`__context__` is automatically set to the last exception caught; if the
+new exception is not handled the traceback that is eventually displayed will
+include the originating exception(s) and the final exception.
+
+This implicit exception chain can be made explicit by using :keyword:`from`
+with :keyword:`raise`. The single argument to :keyword:`from` must be an
+exception or :const:`None`, and it will be set as :attr:`__cause__` on the
+raised exception. If :attr:`__cause__` is an exception it will be displayed
+instead of :attr:`__context__`; if :attr:`__cause__` is None,
+:attr:`__context__` will not be displayed by the default exception handling
+code. (Note: the default value for :attr:`__context__` is :const:`None`,
+while the default value for :attr:`__cause__` is :const:`Ellipsis`.)
+
+In either case, the default exception handling code will not display
+any of the remaining links in the :attr:`__context__` chain if
+:attr:`__cause__` has been set.
+
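A short interactive sketch of explicit chaining with :keyword:`from` (traceback output abbreviated to the relevant lines)::

   >>> try:
   ...     {}['missing']
   ... except KeyError as exc:
   ...     raise RuntimeError('lookup failed') from exc
   ...
   Traceback (most recent call last):
     File "<stdin>", line 2, in <module>
   KeyError: 'missing'

   The above exception was the direct cause of the following exception:

   Traceback (most recent call last):
     File "<stdin>", line 4, in <module>
   RuntimeError: lookup failed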
+
+Base classes
+------------
+
The following exceptions are used mostly as base classes for other exceptions.
.. exception:: BaseException
@@ -90,27 +112,8 @@ The following exceptions are used mostly as base classes for other exceptions.
can be raised directly by :func:`codecs.lookup`.
-.. exception:: EnvironmentError
-
- The base class for exceptions that can occur outside the Python system:
- :exc:`IOError`, :exc:`OSError`. When exceptions of this type are created with a
- 2-tuple, the first item is available on the instance's :attr:`errno` attribute
- (it is assumed to be an error number), and the second item is available on the
- :attr:`strerror` attribute (it is usually the associated error message). The
- tuple itself is also available on the :attr:`args` attribute.
-
- When an :exc:`EnvironmentError` exception is instantiated with a 3-tuple, the
- first two items are available as above, while the third item is available on the
- :attr:`filename` attribute. However, for backwards compatibility, the
- :attr:`args` attribute contains only a 2-tuple of the first two constructor
- arguments.
-
- The :attr:`filename` attribute is ``None`` when this exception is created with
- other than 3 arguments. The :attr:`errno` and :attr:`strerror` attributes are
- also ``None`` when the instance was created with other than 2 or 3 arguments.
- In this last case, :attr:`args` contains the verbatim constructor arguments as a
- tuple.
-
+Concrete exceptions
+-------------------
The following exceptions are the exceptions that are usually raised.
@@ -151,16 +154,6 @@ The following exceptions are the exceptions that are usually raised.
it is technically not an error.
-.. exception:: IOError
-
- Raised when an I/O operation (such as the built-in :func:`print` or
- :func:`open` functions or a method of a :term:`file object`) fails for an
- I/O-related reason, e.g., "file not found" or "disk full".
-
- This class is derived from :exc:`EnvironmentError`. See the discussion above
- for more information on exception instance attributes.
-
-
.. exception:: ImportError
Raised when an :keyword:`import` statement fails to find the module definition
@@ -221,17 +214,30 @@ The following exceptions are the exceptions that are usually raised.
.. index:: module: errno
- This exception is derived from :exc:`EnvironmentError`. It is raised when a
- function returns a system-related error (not for illegal argument types or
- other incidental errors). The :attr:`errno` attribute is a numeric error
- code from :c:data:`errno`, and the :attr:`strerror` attribute is the
- corresponding string, as would be printed by the C function :c:func:`perror`.
- See the module :mod:`errno`, which contains names for the error codes defined
- by the underlying operating system.
+ This exception is raised when a system function returns a system-related
+ error, including I/O failures such as "file not found" or "disk full"
+ (not for illegal argument types or other incidental errors). Often a
+ subclass of :exc:`OSError` will actually be raised as described in
+ `OS exceptions`_ below. The :attr:`errno` attribute is a numeric error
+ code from the C variable :c:data:`errno`.
+
+ Under Windows, the :attr:`winerror` attribute gives you the native
+ Windows error code. The :attr:`errno` attribute is then an approximate
+ translation, in POSIX terms, of that native error code.
+
+ Under all platforms, the :attr:`strerror` attribute is the corresponding
+ error message as provided by the operating system (as formatted by the C
+ functions :c:func:`perror` under POSIX, and :c:func:`FormatMessage`
+ under Windows).
+
+ For exceptions that involve a file system path (such as :func:`open` or
+ :func:`os.unlink`), the exception instance will contain an additional
+ attribute, :attr:`filename`, which is the file name passed to the function.
- For exceptions that involve a file system path (such as :func:`chdir` or
- :func:`unlink`), the exception instance will contain a third attribute,
- :attr:`filename`, which is the file name passed to the function.
+ .. versionchanged:: 3.3
+ :exc:`EnvironmentError`, :exc:`IOError`, :exc:`WindowsError`,
+ :exc:`VMSError`, :exc:`socket.error`, :exc:`select.error` and
+ :exc:`mmap.error` have been merged into :exc:`OSError`.
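With the merge, a single handler covers cases that previously required catching several exception names; a minimal sketch::

   >>> import errno
   >>> try:
   ...     open('no-such-file')
   ... except OSError as exc:
   ...     print(exc.errno == errno.ENOENT, exc.filename)
   ...
   True no-such-file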
.. exception:: OverflowError
@@ -262,8 +268,20 @@ The following exceptions are the exceptions that are usually raised.
.. exception:: StopIteration
Raised by built-in function :func:`next` and an :term:`iterator`\'s
- :meth:`__next__` method to signal that there are no further values.
+ :meth:`__next__` method to signal that there are no further items to be
+ produced by the iterator.
+ The exception object has a single attribute :attr:`value`, which is
+ given as an argument when constructing the exception, and defaults
+ to :const:`None`.
+
+ When a generator function returns, a new :exc:`StopIteration` instance is
+ raised, and the value returned by the function is used as the
+ :attr:`value` parameter to the constructor of the exception.
+
+ .. versionchanged:: 3.3
+ Added ``value`` attribute and the ability for generator functions to
+ use it to return a value.
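For example, the value returned by a generator surfaces on the exception's :attr:`value` attribute (a sketch)::

   >>> def gen():
   ...     yield 1
   ...     return 'done'
   ...
   >>> g = gen()
   >>> next(g)
   1
   >>> try:
   ...     next(g)
   ... except StopIteration as exc:
   ...     exc.value
   ...
   'done'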
.. exception:: SyntaxError
@@ -372,27 +390,142 @@ The following exceptions are the exceptions that are usually raised.
more precise exception such as :exc:`IndexError`.
-.. exception:: VMSError
+.. exception:: ZeroDivisionError
+
+ Raised when the second argument of a division or modulo operation is zero. The
+ associated value is a string indicating the type of the operands and the
+ operation.
- Only available on VMS. Raised when a VMS-specific error occurs.
+The following exceptions are kept for compatibility with previous versions;
+starting from Python 3.3, they are aliases of :exc:`OSError`.
+
+.. exception:: EnvironmentError
+
+.. exception:: IOError
+
+.. exception:: VMSError
+
+ Only available on VMS.
.. exception:: WindowsError
- Raised when a Windows-specific error occurs or when the error number does not
- correspond to an :c:data:`errno` value. The :attr:`winerror` and
- :attr:`strerror` values are created from the return values of the
- :c:func:`GetLastError` and :c:func:`FormatMessage` functions from the Windows
- Platform API. The :attr:`errno` value maps the :attr:`winerror` value to
- corresponding ``errno.h`` values. This is a subclass of :exc:`OSError`.
+ Only available on Windows.
-.. exception:: ZeroDivisionError
+OS exceptions
+^^^^^^^^^^^^^
+
+The following exceptions are subclasses of :exc:`OSError`; they are raised
+depending on the system error code.
+
+.. exception:: BlockingIOError
+
+ Raised when an operation would block on an object (e.g. socket) set
+ for non-blocking operation.
+ Corresponds to :c:data:`errno` ``EAGAIN``, ``EALREADY``,
+ ``EWOULDBLOCK`` and ``EINPROGRESS``.
+
+ In addition to those of :exc:`OSError`, :exc:`BlockingIOError` can have
+ one more attribute:
+
+ .. attribute:: characters_written
+
+ An integer containing the number of characters written to the stream
+ before it blocked. This attribute is available when using the
+ buffered I/O classes from the :mod:`io` module.
+
+.. exception:: ChildProcessError
+
+ Raised when an operation on a child process failed.
+ Corresponds to :c:data:`errno` ``ECHILD``.
+
+.. exception:: ConnectionError
+
+ A base class for connection-related issues. Subclasses are
+ :exc:`BrokenPipeError`, :exc:`ConnectionAbortedError`,
+ :exc:`ConnectionRefusedError` and :exc:`ConnectionResetError`.
+
+ .. exception:: BrokenPipeError
+
+ A subclass of :exc:`ConnectionError`, raised when trying to write on a
+ pipe while the other end has been closed, or trying to write on a socket
+ which has been shutdown for writing.
+ Corresponds to :c:data:`errno` ``EPIPE`` and ``ESHUTDOWN``.
+
+ .. exception:: ConnectionAbortedError
+
+ A subclass of :exc:`ConnectionError`, raised when a connection attempt
+ is aborted by the peer.
+ Corresponds to :c:data:`errno` ``ECONNABORTED``.
+
+ .. exception:: ConnectionRefusedError
+
+ A subclass of :exc:`ConnectionError`, raised when a connection attempt
+ is refused by the peer.
+ Corresponds to :c:data:`errno` ``ECONNREFUSED``.
+
+ .. exception:: ConnectionResetError
+
+ A subclass of :exc:`ConnectionError`, raised when a connection is
+ reset by the peer.
+ Corresponds to :c:data:`errno` ``ECONNRESET``.
+
+.. exception:: FileExistsError
+
+ Raised when trying to create a file or directory which already exists.
+ Corresponds to :c:data:`errno` ``EEXIST``.
+
+.. exception:: FileNotFoundError
+
+ Raised when a file or directory is requested but doesn't exist.
+ Corresponds to :c:data:`errno` ``ENOENT``.
+
+.. exception:: InterruptedError
+
+ Raised when a system call is interrupted by an incoming signal.
+ Corresponds to :c:data:`errno` ``EINTR``.
+
+.. exception:: IsADirectoryError
+
+ Raised when a file operation (such as :func:`os.remove`) is requested
+ on a directory.
+ Corresponds to :c:data:`errno` ``EISDIR``.
+
+.. exception:: NotADirectoryError
+
+ Raised when a directory operation (such as :func:`os.listdir`) is requested
+ on something which is not a directory.
+ Corresponds to :c:data:`errno` ``ENOTDIR``.
+
+.. exception:: PermissionError
+
+ Raised when trying to run an operation without adequate access
+ rights, for example filesystem permissions.
+ Corresponds to :c:data:`errno` ``EACCES`` and ``EPERM``.
+
+.. exception:: ProcessLookupError
+
+ Raised when a given process doesn't exist.
+ Corresponds to :c:data:`errno` ``ESRCH``.
+
+.. exception:: TimeoutError
+
+ Raised when a system function timed out at the system level.
+ Corresponds to :c:data:`errno` ``ETIMEDOUT``.
+
+.. versionadded:: 3.3
+ All the above :exc:`OSError` subclasses were added.
+
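Catching one of these subclasses is often clearer than inspecting ``errno`` by hand; a minimal sketch::

   >>> try:
   ...     open('missing.txt')
   ... except FileNotFoundError as exc:
   ...     print('not there:', exc.filename)
   ...
   not there: missing.txt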
+
+.. seealso::
+
+ :pep:`3151` - Reworking the OS and IO exception hierarchy
+ PEP written and implemented by Antoine Pitrou.
- Raised when the second argument of a division or modulo operation is zero. The
- associated value is a string indicating the type of the operands and the
- operation.
+Warnings
+--------
The following exceptions are used as warning categories; see the :mod:`warnings`
module for more information.
diff --git a/Doc/library/faulthandler.rst b/Doc/library/faulthandler.rst
new file mode 100644
index 0000000..c9b9546
--- /dev/null
+++ b/Doc/library/faulthandler.rst
@@ -0,0 +1,136 @@
+:mod:`faulthandler` --- Dump the Python traceback
+=================================================
+
+.. module:: faulthandler
+ :synopsis: Dump the Python traceback.
+
+This module contains functions to dump Python tracebacks explicitly, on a fault,
+after a timeout, or on a user signal. Call :func:`faulthandler.enable` to
+install fault handlers for the :const:`SIGSEGV`, :const:`SIGFPE`,
+:const:`SIGABRT`, :const:`SIGBUS`, and :const:`SIGILL` signals. You can also
+enable them at startup by setting the :envvar:`PYTHONFAULTHANDLER` environment
+variable or by using the :option:`-X` ``faulthandler`` command line option.
+
+The fault handler is compatible with system fault handlers like Apport or the
+Windows fault handler. The module uses an alternative stack for signal handlers
+if the :c:func:`sigaltstack` function is available. This allows it to dump the
+traceback even on a stack overflow.
+
+The fault handler is called on catastrophic cases and therefore can only use
+signal-safe functions (e.g. it cannot allocate memory on the heap). Because of
+this limitation traceback dumping is minimal compared to normal Python
+tracebacks:
+
+* Only ASCII is supported. The ``backslashreplace`` error handler is used on
+ encoding.
+* Each string is limited to 100 characters.
+* Only the filename, the function name and the line number are
+ displayed (no source code).
+* It is limited to 100 frames and 100 threads.
+
+By default, the Python traceback is written to :data:`sys.stderr`. To see
+tracebacks, applications must be run in the terminal. A log file can
+alternatively be passed to :func:`faulthandler.enable`.
+
+The module is implemented in C, so tracebacks can be dumped on a crash or when
+Python is deadlocked.
+
+.. versionadded:: 3.3
+
+
+Dump the traceback
+------------------
+
+.. function:: dump_traceback(file=sys.stderr, all_threads=True)
+
+ Dump the tracebacks of all threads into *file*. If *all_threads* is
+ ``False``, dump only the current thread.
+
+
+Fault handler state
+-------------------
+
+.. function:: enable(file=sys.stderr, all_threads=True)
+
+ Enable the fault handler: install handlers for the :const:`SIGSEGV`,
+ :const:`SIGFPE`, :const:`SIGABRT`, :const:`SIGBUS` and :const:`SIGILL`
+ signals to dump the Python traceback. If *all_threads* is ``True``,
+ produce tracebacks for every running thread. Otherwise, dump only the current
+ thread.
+
+.. function:: disable()
+
+ Disable the fault handler: uninstall the signal handlers installed by
+ :func:`enable`.
+
+.. function:: is_enabled()
+
+ Check if the fault handler is enabled.
+
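A minimal interactive check of the state functions (assuming the handler was not already enabled via :envvar:`PYTHONFAULTHANDLER` or ``-X faulthandler``)::

   >>> import faulthandler
   >>> faulthandler.is_enabled()
   False
   >>> faulthandler.enable()
   >>> faulthandler.is_enabled()
   True
   >>> faulthandler.disable()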
+
+Dump the tracebacks after a timeout
+-----------------------------------
+
+.. function:: dump_tracebacks_later(timeout, repeat=False, file=sys.stderr, exit=False)
+
+ Dump the tracebacks of all threads, after a timeout of *timeout* seconds, or
+ every *timeout* seconds if *repeat* is ``True``. If *exit* is ``True``, call
+ :c:func:`_exit` with status=1 after dumping the tracebacks. (Note
+ :c:func:`_exit` exits the process immediately, which means it doesn't do any
+ cleanup like flushing file buffers.) If the function is called twice, the new
+ call replaces previous parameters and resets the timeout. The timer has a
+ sub-second resolution.
+
+ This function is implemented using a watchdog thread and therefore is not
+ available if Python is compiled with threads disabled.
+
+.. function:: cancel_dump_tracebacks_later()
+
+ Cancel the last call to :func:`dump_tracebacks_later`.
+
+
+Dump the traceback on a user signal
+-----------------------------------
+
+.. function:: register(signum, file=sys.stderr, all_threads=True, chain=False)
+
+ Register a user signal: install a handler for the *signum* signal to dump
+ the traceback of all threads, or of the current thread if *all_threads* is
+ ``False``, into *file*. Call the previous handler if *chain* is ``True``.
+
+ Not available on Windows.
+
+.. function:: unregister(signum)
+
+ Unregister a user signal: uninstall the handler of the *signum* signal
+ installed by :func:`register`. Return ``True`` if the signal was registered,
+ ``False`` otherwise.
+
+ Not available on Windows.
+
+
+File descriptor issue
+---------------------
+
+:func:`enable`, :func:`dump_tracebacks_later` and :func:`register` keep the
+file descriptor of their *file* argument. If the file is closed and its file
+descriptor is reused by a new file, or if :func:`os.dup2` is used to replace
+the file descriptor, the traceback will be written into a different file. Call
+these functions again each time that the file is replaced.
+
+
+Example
+-------
+
+Example of a segmentation fault on Linux: ::
+
+ $ python -q -X faulthandler
+ >>> import ctypes
+ >>> ctypes.string_at(0)
+ Fatal Python error: Segmentation fault
+
+ Current thread 0x00007fb899f39700:
+ File "/home/python/cpython/Lib/ctypes/__init__.py", line 486 in string_at
+ File "<stdin>", line 1 in <module>
+ Segmentation fault
+
diff --git a/Doc/library/fcntl.rst b/Doc/library/fcntl.rst
index 6192400..9a9cdc1 100644
--- a/Doc/library/fcntl.rst
+++ b/Doc/library/fcntl.rst
@@ -19,6 +19,11 @@ argument. This can be an integer file descriptor, such as returned by
``sys.stdin.fileno()``, or a :class:`io.IOBase` object, such as ``sys.stdin``
itself, which provides a :meth:`fileno` that returns a genuine file descriptor.
+.. versionchanged:: 3.3
+ Operations in this module used to raise a :exc:`IOError` where they now
+ raise a :exc:`OSError`.
+
+
The module defines the following functions:
@@ -40,7 +45,7 @@ The module defines the following functions:
larger than 1024 bytes, this is most likely to result in a segmentation
violation or a more subtle data corruption.
- If the :c:func:`fcntl` fails, an :exc:`IOError` is raised.
+ If the :c:func:`fcntl` fails, an :exc:`OSError` is raised.
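For example, a non-blocking :func:`flock` attempt (described further below) can now be guarded with a single ``except OSError`` clause; a Unix-only sketch with an illustrative lock-file path::

   import errno
   import fcntl

   with open('/tmp/app.lock', 'w') as f:
       try:
           fcntl.flock(f, fcntl.LOCK_EX | fcntl.LOCK_NB)
       except OSError as exc:
           if exc.errno in (errno.EACCES, errno.EAGAIN):
               print('lock is held by another process')
           else:
               raise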
.. function:: ioctl(fd, op[, arg[, mutate_flag]])
@@ -107,7 +112,7 @@ The module defines the following functions:
When *operation* is :const:`LOCK_SH` or :const:`LOCK_EX`, it can also be
bitwise ORed with :const:`LOCK_NB` to avoid blocking on lock acquisition.
If :const:`LOCK_NB` is used and the lock cannot be acquired, an
- :exc:`IOError` will be raised and the exception will have an *errno*
+ :exc:`OSError` will be raised and the exception will have an *errno*
attribute set to :const:`EACCES` or :const:`EAGAIN` (depending on the
operating system; for portability, check for both values). On at least some
systems, :const:`LOCK_EX` can only be used if the file descriptor refers to a
diff --git a/Doc/library/fileinput.rst b/Doc/library/fileinput.rst
index ac44311..f8ec436 100644
--- a/Doc/library/fileinput.rst
+++ b/Doc/library/fileinput.rst
@@ -28,7 +28,10 @@ as the first argument to :func:`.input`. A single file name is also allowed.
All files are opened in text mode by default, but you can override this by
specifying the *mode* parameter in the call to :func:`.input` or
:class:`FileInput`. If an I/O error occurs during opening or reading a file,
-:exc:`IOError` is raised.
+:exc:`OSError` is raised.
+
+.. versionchanged:: 3.3
+ :exc:`IOError` used to be raised; it is now an alias of :exc:`OSError`.
If ``sys.stdin`` is used more than once, the second and further use will return
no lines, except perhaps for interactive use, or if it has been explicitly reset
@@ -168,10 +171,6 @@ and the backup file remains around; by default, the extension is ``'.bak'`` and
it is deleted when the output file is closed. In-place filtering is disabled
when standard input is read.
-.. note::
-
- The current implementation does not work for MS-DOS 8+3 filesystems.
-
The two following opening hooks are provided by this module:
diff --git a/Doc/library/ftplib.rst b/Doc/library/ftplib.rst
index 5bbef4f..3cc295a 100644
--- a/Doc/library/ftplib.rst
+++ b/Doc/library/ftplib.rst
@@ -40,7 +40,7 @@ Here's a sample session using the :mod:`ftplib` module::
The module defines the following items:
-.. class:: FTP(host='', user='', passwd='', acct=''[, timeout])
+.. class:: FTP(host='', user='', passwd='', acct='', timeout=None, source_address=None)
Return a new instance of the :class:`FTP` class. When *host* is given, the
method call ``connect(host)`` is made. When *user* is given, additionally
@@ -48,7 +48,8 @@ The module defines the following items:
*acct* default to the empty string when not given). The optional *timeout*
parameter specifies a timeout in seconds for blocking operations like the
connection attempt (if it is not specified, the global default timeout setting
- will be used).
+ will be used). *source_address* is a 2-tuple ``(host, port)`` for the socket
+ to bind to as its source address before connecting.
The :class:`FTP` class supports the :keyword:`with` statement. Here is a sample
of how to use it:
@@ -68,8 +69,11 @@ The module defines the following items:
.. versionchanged:: 3.2
Support for the :keyword:`with` statement was added.
+ .. versionchanged:: 3.3
+ *source_address* parameter was added.
-.. class:: FTP_TLS(host='', user='', passwd='', acct='', [keyfile[, certfile[, context[, timeout]]]])
+
+.. class:: FTP_TLS(host='', user='', passwd='', acct='', keyfile=None, certfile=None, context=None, timeout=None, source_address=None)
A :class:`FTP` subclass which adds TLS support to FTP as described in
:rfc:`4217`.
@@ -80,10 +84,15 @@ The module defines the following items:
private key and certificate chain file name for the SSL connection.
*context* parameter is a :class:`ssl.SSLContext` object which allows
bundling SSL configuration options, certificates and private keys into a
- single (potentially long-lived) structure.
+ single (potentially long-lived) structure. *source_address* is a 2-tuple
+ ``(host, port)`` for the socket to bind to as its source address before
+ connecting.
.. versionadded:: 3.2
+ .. versionchanged:: 3.3
+ *source_address* parameter was added.
+
Here's a sample session using the :class:`FTP_TLS` class:
>>> from ftplib import FTP_TLS
@@ -135,8 +144,7 @@ The module defines the following items:
The set of all exceptions (as a tuple) that methods of :class:`FTP`
instances may raise as a result of problems with the FTP connection (as
opposed to programming errors made by the caller). This set includes the
- four exceptions listed above as well as :exc:`socket.error` and
- :exc:`IOError`.
+ four exceptions listed above as well as :exc:`OSError`.
.. seealso::
@@ -174,7 +182,7 @@ followed by ``lines`` for the text version or ``binary`` for the binary version.
debugging output, logging each line sent and received on the control connection.
-.. method:: FTP.connect(host='', port=0[, timeout])
+.. method:: FTP.connect(host='', port=0, timeout=None, source_address=None)
Connect to the given host and port. The default port number is ``21``, as
specified by the FTP protocol specification. It is rarely needed to specify a
@@ -182,10 +190,14 @@ followed by ``lines`` for the text version or ``binary`` for the binary version.
instance; it should not be called at all if a host was given when the instance
was created. All other methods can only be used after a connection has been
made.
-
The optional *timeout* parameter specifies a timeout in seconds for the
connection attempt. If no *timeout* is passed, the global default timeout
setting will be used.
+ *source_address* is a 2-tuple ``(host, port)`` for the socket to bind to as
+ its source address before connecting.
+
+ .. versionchanged:: 3.3
+ *source_address* parameter was added.
.. method:: FTP.getwelcome()
@@ -241,13 +253,12 @@ followed by ``lines`` for the text version or ``binary`` for the binary version.
Retrieve a file or directory listing in ASCII transfer mode. *cmd* should be
an appropriate ``RETR`` command (see :meth:`retrbinary`) or a command such as
- ``LIST``, ``NLST`` or ``MLSD`` (usually just the string ``'LIST'``).
+ ``LIST`` or ``NLST`` (usually just the string ``'LIST'``).
``LIST`` retrieves a list of files and information about those files.
- ``NLST`` retrieves a list of file names. On some servers, ``MLSD`` retrieves
- a machine readable list of files and information about those files. The
- *callback* function is called for each line with a string argument containing
- the line with the trailing CRLF stripped. The default *callback* prints the
- line to ``sys.stdout``.
+ ``NLST`` retrieves a list of file names.
+ The *callback* function is called for each line with a string argument
+ containing the line with the trailing CRLF stripped. The default *callback*
+ prints the line to ``sys.stdout``.
.. method:: FTP.set_pasv(boolean)
@@ -307,6 +318,20 @@ followed by ``lines`` for the text version or ``binary`` for the binary version.
in :meth:`transfercmd`.
+.. method:: FTP.mlsd(path="", facts=[])
+
+ List a directory in a standardized format by using the MLSD command
+ (:rfc:`3659`). If *path* is omitted the current directory is assumed.
+ *facts* is a list of strings representing the type of information desired
+ (e.g. ``["type", "size", "perm"]``). Return a generator object yielding a
+ tuple of two elements for every file found in path. The first element is
+ the file name, the second is a dictionary containing facts about the file.
+ The content of this dictionary may be limited by the *facts* argument, but
+ the server is not guaranteed to return all requested facts.
+
+ .. versionadded:: 3.3
+
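A sketch of consuming the generator (``ftp.example.org`` is a placeholder host)::

   >>> from ftplib import FTP
   >>> ftp = FTP('ftp.example.org')   # placeholder host
   >>> ftp.login()                    # anonymous login; server response omitted
   >>> for name, facts in ftp.mlsd(facts=['type', 'size']):
   ...     print(name, facts.get('type'), facts.get('size'))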
+
.. method:: FTP.nlst(argument[, ...])
Return a list of file names as returned by the ``NLST`` command. The
@@ -314,6 +339,8 @@ followed by ``lines`` for the text version or ``binary`` for the binary version.
directory). Multiple arguments can be used to pass non-standard options to
the ``NLST`` command.
+ .. deprecated:: 3.3 use :meth:`mlsd` instead.
+
.. method:: FTP.dir(argument[, ...])
@@ -324,6 +351,8 @@ followed by ``lines`` for the text version or ``binary`` for the binary version.
as a *callback* function as for :meth:`retrlines`; the default prints to
``sys.stdout``. This method returns ``None``.
+ .. deprecated:: 3.3 use :meth:`mlsd` instead.
+
.. method:: FTP.rename(fromname, toname)
@@ -396,6 +425,14 @@ FTP_TLS Objects
Set up a secure control connection by using TLS or SSL, depending on what is specified in the :attr:`ssl_version` attribute.
+.. method:: FTP_TLS.ccc()
+
+ Revert control channel back to plaintext. This can be useful to take
+ advantage of firewalls that know how to handle NAT with non-secure FTP
+ without opening fixed ports.
+
+ .. versionadded:: 3.3
+
.. method:: FTP_TLS.prot_p()
Set up secure data connection.
diff --git a/Doc/library/functions.rst b/Doc/library/functions.rst
index 3fcd694..2674ef9 100644
--- a/Doc/library/functions.rst
+++ b/Doc/library/functions.rst
@@ -152,10 +152,6 @@ are always available. They are listed here in alphabetical order.
1,114,111 (0x10FFFF in base 16). :exc:`ValueError` will be raised if *i* is
outside that range.
- Note that on narrow Unicode builds, the result is a string of
- length two for *i* greater than 65,535 (0xFFFF in hexadecimal).
-
-
.. function:: classmethod(function)
@@ -787,10 +783,10 @@ are always available. They are listed here in alphabetical order.
:meth:`__index__` method that returns an integer.
-.. function:: open(file, mode='r', buffering=-1, encoding=None, errors=None, newline=None, closefd=True)
+.. function:: open(file, mode='r', buffering=-1, encoding=None, errors=None, newline=None, closefd=True, opener=None)
Open *file* and return a corresponding stream. If the file cannot be opened,
- an :exc:`IOError` is raised.
+ an :exc:`OSError` is raised.
*file* is either a string or bytes object giving the pathname (absolute or
relative to the current working directory) of the file to be opened or
@@ -894,6 +890,15 @@ are always available. They are listed here in alphabetical order.
closed. If a filename is given *closefd* has no effect and must be ``True``
(the default).
+ A custom opener can be used by passing a callable as *opener*. The underlying
+ file descriptor for the file object is then obtained by calling *opener* with
+ (*file*, *flags*). *opener* must return an open file descriptor (passing
+ :mod:`os.open` as *opener* results in functionality similar to passing
+ ``None``).
+
+ .. versionchanged:: 3.3
+ The *opener* parameter was added.
+
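For instance, an *opener* that logs each open and then defers to :func:`os.open` behaves like the default opener (a minimal sketch)::

   >>> import os
   >>> def log_opener(file, flags):
   ...     print('opening', file)        # illustrative side effect
   ...     return os.open(file, flags)
   ...
   >>> with open('example.txt', 'w', opener=log_opener) as f:
   ...     _ = f.write('hello\n')
   ...
   opening example.txt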
The type of file object returned by the :func:`open` function depends on the
mode. When :func:`open` is used to open a file in a text mode (``'w'``,
``'r'``, ``'wt'``, ``'rt'``, etc.), it returns a subclass of
@@ -919,6 +924,9 @@ are always available. They are listed here in alphabetical order.
(where :func:`open` is declared), :mod:`os`, :mod:`os.path`, :mod:`tempfile`,
and :mod:`shutil`.
+ .. versionchanged:: 3.3
+ :exc:`IOError` used to be raised, it is now an alias of :exc:`OSError`.
+
.. XXX works for bytes too, but should it?
.. function:: ord(c)
@@ -928,9 +936,6 @@ are always available. They are listed here in alphabetical order.
point of that character. For example, ``ord('a')`` returns the integer ``97``
and ``ord('\u2020')`` returns ``8224``. This is the inverse of :func:`chr`.
- On wide Unicode builds, if the argument length is not one, a
- :exc:`TypeError` will be raised. On narrow Unicode builds, strings
- of length two are accepted when they form a UTF-16 surrogate pair.
.. function:: pow(x, y[, z])
@@ -948,7 +953,7 @@ are always available. They are listed here in alphabetical order.
must be of integer types, and *y* must be non-negative.
-.. function:: print([object, ...], *, sep=' ', end='\\n', file=sys.stdout)
+.. function:: print([object, ...], *, sep=' ', end='\\n', file=sys.stdout, flush=False)
Print *object*\(s) to the stream *file*, separated by *sep* and followed by
*end*. *sep*, *end* and *file*, if present, must be given as keyword
@@ -961,9 +966,12 @@ are always available. They are listed here in alphabetical order.
*end*.
The *file* argument must be an object with a ``write(string)`` method; if it
- is not present or ``None``, :data:`sys.stdout` will be used. Output buffering
- is determined by *file*. Use ``file.flush()`` to ensure, for instance,
- immediate appearance on a screen.
+ is not present or ``None``, :data:`sys.stdout` will be used. Whether output
+ is buffered is usually determined by *file*, but if the *flush* keyword
+ argument is true, the stream is forcibly flushed.
+
+ .. versionchanged:: 3.3
+ Added the *flush* keyword argument.
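A typical use is progress output that must appear before a long-running step completes (a sketch)::

   import time

   for step in range(3):
       print('.', end='', flush=True)   # shown immediately, not when the loop ends
       time.sleep(1)
   print(' done')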
.. function:: property(fget=None, fset=None, fdel=None, doc=None)
@@ -1046,7 +1054,9 @@ are always available. They are listed here in alphabetical order.
...]``. If *step* is positive, the last element is the largest ``start + i *
step`` less than *stop*; if *step* is negative, the last element is the
smallest ``start + i * step`` greater than *stop*. *step* must not be zero
- (or else :exc:`ValueError` is raised). Example:
+ (or else :exc:`ValueError` is raised). Range objects have read-only data
+ attributes :attr:`start`, :attr:`stop` and :attr:`step` which return the
+ argument values (or their default). Example:
>>> list(range(10))
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
@@ -1083,6 +1093,13 @@ are always available. They are listed here in alphabetical order.
>>> r[-1]
18
+ Testing range objects for equality with ``==`` and ``!=`` compares
+ them as sequences. That is, two range objects are considered equal if
+ they represent the same sequence of values. (Note that two range
+ objects that compare equal might have different :attr:`start`,
+ :attr:`stop` and :attr:`step` attributes, for example ``range(0) ==
+ range(2, 1, 3)`` or ``range(0, 3, 2) == range(0, 4, 2)``.)
+
Ranges containing absolute values larger than :data:`sys.maxsize` are permitted
but some features (such as :func:`len`) will raise :exc:`OverflowError`.
@@ -1092,6 +1109,14 @@ are always available. They are listed here in alphabetical order.
Test integers for membership in constant time instead of iterating
through all items.
+ .. versionchanged:: 3.3
+ Define '==' and '!=' to compare range objects based on the
+ sequence of values they define (instead of comparing based on
+ object identity).
+
+ .. versionadded:: 3.3
+ The :attr:`start`, :attr:`stop` and :attr:`step` attributes.
+
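For illustration::

   >>> r = range(0, 20, 2)
   >>> r.start, r.stop, r.step
   (0, 20, 2)
   >>> range(0) == range(2, 1, 3)
   True
   >>> range(0, 3, 2) == range(0, 4, 2)
   True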
.. function:: repr(object)
diff --git a/Doc/library/functools.rst b/Doc/library/functools.rst
index 04743d3..05f97b1 100644
--- a/Doc/library/functools.rst
+++ b/Doc/library/functools.rst
@@ -40,7 +40,7 @@ The :mod:`functools` module defines the following functions:
.. versionadded:: 3.2
-.. decorator:: lru_cache(maxsize=100)
+.. decorator:: lru_cache(maxsize=100, typed=False)
Decorator to wrap a function with a memoizing callable that saves up to the
*maxsize* most recent calls. It can save time when an expensive or I/O bound
@@ -52,6 +52,10 @@ The :mod:`functools` module defines the following functions:
If *maxsize* is set to None, the LRU feature is disabled and the cache
can grow without bound.
+ If *typed* is set to True, function arguments of different types will be
+ cached separately. For example, ``f(3)`` and ``f(3.0)`` will be treated
+ as distinct calls with distinct results.
+
To help measure the effectiveness of the cache and tune the *maxsize*
parameter, the wrapped function is instrumented with a :func:`cache_info`
function that returns a :term:`named tuple` showing *hits*, *misses*,
@@ -67,8 +71,8 @@ The :mod:`functools` module defines the following functions:
An `LRU (least recently used) cache
<http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used>`_ works
- best when more recent calls are the best predictors of upcoming calls (for
- example, the most popular articles on a news server tend to change daily).
+ best when the most recent calls are the best predictors of upcoming calls (for
+ example, the most popular articles on a news server tend to change each day).
The cache's size limit assures that the cache does not grow without bound on
long-running processes such as web servers.
@@ -111,6 +115,9 @@ The :mod:`functools` module defines the following functions:
.. versionadded:: 3.2
+ .. versionchanged:: 3.3
+ Added the *typed* option.
+
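A sketch of the *typed* behavior::

   >>> from functools import lru_cache
   >>> @lru_cache(maxsize=32, typed=True)
   ... def square(x):
   ...     return x * x
   ...
   >>> square(3)
   9
   >>> square(3.0)
   9.0
   >>> square.cache_info()
   CacheInfo(hits=0, misses=2, maxsize=32, currsize=2)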
.. decorator:: total_ordering
Given a class defining one or more rich comparison ordering methods, this
diff --git a/Doc/library/gettext.rst b/Doc/library/gettext.rst
index 0fa022c..825311b 100644
--- a/Doc/library/gettext.rst
+++ b/Doc/library/gettext.rst
@@ -185,10 +185,13 @@ class can also install themselves in the built-in namespace as the function
translation object from the cache; the actual instance data is still shared with
the cache.
- If no :file:`.mo` file is found, this function raises :exc:`IOError` if
+ If no :file:`.mo` file is found, this function raises :exc:`OSError` if
*fallback* is false (which is the default), and returns a
:class:`NullTranslations` instance if *fallback* is true.
+ .. versionchanged:: 3.3
+ :exc:`IOError` used to be raised instead of :exc:`OSError`.
+
.. function:: install(domain, localedir=None, codeset=None, names=None)
@@ -342,7 +345,7 @@ The entire set of key/value pairs are placed into a dictionary and set as the
If the :file:`.mo` file's magic number is invalid, or if other problems occur
while reading the file, instantiating a :class:`GNUTranslations` class can raise
-:exc:`IOError`.
+:exc:`OSError`.
The following methods are overridden from the base class implementation:
diff --git a/Doc/library/gzip.rst b/Doc/library/gzip.rst
index 9422ea9..9e57990 100644
--- a/Doc/library/gzip.rst
+++ b/Doc/library/gzip.rst
@@ -71,7 +71,7 @@ The module defines the following items:
:class:`GzipFile` supports the :class:`io.BufferedIOBase` interface,
including iteration and the :keyword:`with` statement. Only the
- :meth:`read1` and :meth:`truncate` methods aren't implemented.
+ :meth:`truncate` method isn't implemented.
:class:`GzipFile` also provides the following method:
@@ -93,6 +93,9 @@ The module defines the following items:
.. versionchanged:: 3.2
Support for unseekable files was added.
+ .. versionchanged:: 3.3
+ The :meth:`io.BufferedIOBase.read1` method is now implemented.
+
.. function:: open(filename, mode='rb', compresslevel=9)
diff --git a/Doc/library/http.client.rst b/Doc/library/http.client.rst
index 52fbe57..f0da8ee 100644
--- a/Doc/library/http.client.rst
+++ b/Doc/library/http.client.rst
@@ -502,6 +502,12 @@ statement.
Reads and returns the response body, or up to the next *amt* bytes.
+.. method:: HTTPResponse.readinto(b)
+
+ Reads up to the next ``len(b)`` bytes of the response body into the buffer
+ *b*. Returns the number of bytes read.
+
+ .. versionadded:: 3.3
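+
+ A minimal usage sketch (the host and path are placeholders)::
+
+    import http.client
+
+    conn = http.client.HTTPConnection('example.com')
+    conn.request('GET', '/')
+    resp = conn.getresponse()
+    buf = bytearray(4096)
+    n = resp.readinto(buf)   # n bytes of the body now sit in buf[:n]
+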
.. method:: HTTPResponse.getheader(name, default=None)
diff --git a/Doc/library/http.cookiejar.rst b/Doc/library/http.cookiejar.rst
index 9771496..1fe775f 100644
--- a/Doc/library/http.cookiejar.rst
+++ b/Doc/library/http.cookiejar.rst
@@ -40,7 +40,11 @@ The module defines the following exception:
.. exception:: LoadError
Instances of :class:`FileCookieJar` raise this exception on failure to load
- cookies from a file. :exc:`LoadError` is a subclass of :exc:`IOError`.
+ cookies from a file. :exc:`LoadError` is a subclass of :exc:`OSError`.
+
+ .. versionchanged:: 3.3
+ LoadError was made a subclass of :exc:`OSError` instead of
+ :exc:`IOError`.
The following classes are provided:
@@ -257,9 +261,12 @@ contained :class:`Cookie` objects.
Arguments are as for :meth:`save`.
The named file must be in the format understood by the class, or
- :exc:`LoadError` will be raised. Also, :exc:`IOError` may be raised, for
+ :exc:`LoadError` will be raised. Also, :exc:`OSError` may be raised, for
example if the file does not exist.
+ .. versionchanged:: 3.3
+ :exc:`IOError` used to be raised; it is now an alias of :exc:`OSError`.
+
.. method:: FileCookieJar.revert(filename=None, ignore_discard=False, ignore_expires=False)
diff --git a/Doc/library/http.server.rst b/Doc/library/http.server.rst
index e3a3a10..d9aaa72 100644
--- a/Doc/library/http.server.rst
+++ b/Doc/library/http.server.rst
@@ -179,19 +179,29 @@ of which this module provides three different variants:
.. method:: send_response(code, message=None)
- Sends a response header and logs the accepted request. The HTTP response
- line is sent, followed by *Server* and *Date* headers. The values for
- these two headers are picked up from the :meth:`version_string` and
- :meth:`date_time_string` methods, respectively.
+ Adds a response header to the headers buffer and logs the accepted
+ request. The HTTP response line is written to the internal buffer,
+ followed by *Server* and *Date* headers. The values for these two headers
+ are picked up from the :meth:`version_string` and
+ :meth:`date_time_string` methods, respectively. If the server does not
+ intend to send any other headers using the :meth:`send_header` method,
+ then :meth:`send_response` should be followed by a :meth:`end_headers`
+ call.
+
+ .. versionchanged:: 3.3
+ Headers are stored to an internal buffer and :meth:`end_headers`
+ needs to be called explicitly.
+
.. method:: send_header(keyword, value)
- Stores the HTTP header to an internal buffer which will be written to the
- output stream when :meth:`end_headers` method is invoked.
- *keyword* should specify the header keyword, with *value*
- specifying its value.
+ Adds the HTTP header to an internal buffer which will be written to the
+ output stream when either :meth:`end_headers` or :meth:`flush_headers` is
+ invoked. *keyword* should specify the header keyword, with *value*
+ specifying its value. Note that, after all the :meth:`send_header` calls
+ are done, :meth:`end_headers` must be called to complete the operation.
- .. versionchanged:: 3.2 Storing the headers in an internal buffer
+ .. versionchanged:: 3.2 Headers are stored in an internal buffer.
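+
+ A minimal handler sketch showing the required call order (the handler
+ class name is illustrative)::
+
+    from http.server import BaseHTTPRequestHandler
+
+    class EchoHandler(BaseHTTPRequestHandler):
+        def do_GET(self):
+            self.send_response(200)
+            self.send_header('Content-Type', 'text/plain')
+            self.end_headers()          # writes the buffered headers
+            self.wfile.write(b'hello')
+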
.. method:: send_response_only(code, message=None)
@@ -205,10 +215,19 @@ of which this module provides three different variants:
.. method:: end_headers()
- Write the buffered HTTP headers to the output stream and send a blank
- line, indicating the end of the HTTP headers in the response.
+ Adds a blank line (indicating the end of the HTTP headers in the response)
+ to the headers buffer and calls :meth:`flush_headers()`.
+
+ .. versionchanged:: 3.2
+ The buffered headers are written to the output stream.
+
+ .. method:: flush_headers()
+
+ Sends the buffered headers to the output stream and flushes the internal
+ headers buffer.
- .. versionchanged:: 3.2 Writing the buffered headers to the output stream.
+ .. versionadded:: 3.3
.. method:: log_request(code='-', size='-')
@@ -299,7 +318,7 @@ of which this module provides three different variants:
response if the :func:`listdir` fails.
If the request was mapped to a file, it is opened and the contents are
- returned. Any :exc:`IOError` exception in opening the requested file is
+ returned. Any :exc:`OSError` exception in opening the requested file is
mapped to a ``404``, ``'File not found'`` error. Otherwise, the content
type is guessed by calling the :meth:`guess_type` method, which in turn
uses the *extensions_map* variable.
diff --git a/Doc/library/imaplib.rst b/Doc/library/imaplib.rst
index 3f45c95..038355c 100644
--- a/Doc/library/imaplib.rst
+++ b/Doc/library/imaplib.rst
@@ -64,14 +64,21 @@ Three exceptions are defined as attributes of the :class:`IMAP4` class:
There's also a subclass for secure connections:
-.. class:: IMAP4_SSL(host='', port=IMAP4_SSL_PORT, keyfile=None, certfile=None)
+.. class:: IMAP4_SSL(host='', port=IMAP4_SSL_PORT, keyfile=None, certfile=None, ssl_context=None)
This is a subclass derived from :class:`IMAP4` that connects over an SSL
encrypted socket (to use this class you need a socket module that was compiled
with SSL support). If *host* is not specified, ``''`` (the local host) is used.
If *port* is omitted, the standard IMAP4-over-SSL port (993) is used. *keyfile*
and *certfile* are also optional - they can contain a PEM formatted private key
- and certificate chain file for the SSL connection.
+ and certificate chain file for the SSL connection. The *ssl_context*
+ parameter is a :class:`ssl.SSLContext` object which allows bundling SSL
+ configuration options, certificates and private keys into a single
+ (potentially long-lived) structure. Note that the *keyfile*/*certfile*
+ parameters are mutually exclusive with *ssl_context*; a :class:`ValueError`
+ is raised if *keyfile*/*certfile* is provided along with *ssl_context*.
+
+ .. versionchanged:: 3.3
+ *ssl_context* parameter added.
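+
+ A minimal sketch of passing a context (the host name and CA bundle path
+ are placeholders)::
+
+    import imaplib, ssl
+
+    ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
+    ctx.load_verify_locations('/path/to/ca.pem')  # hypothetical CA bundle
+    ctx.verify_mode = ssl.CERT_REQUIRED
+    server = imaplib.IMAP4_SSL('imap.example.com', ssl_context=ctx)
+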
The second subclass allows for connections created by a child process:
diff --git a/Doc/library/importlib.rst b/Doc/library/importlib.rst
index c9f742a..e5cc27f 100644
--- a/Doc/library/importlib.rst
+++ b/Doc/library/importlib.rst
@@ -86,6 +86,17 @@ Functions
that was imported (e.g. ``pkg.mod``), while :func:`__import__` returns the
top-level package or module (e.g. ``pkg``).
+.. function:: invalidate_caches()
+
+ Invalidate the internal caches of the finders stored at
+ :data:`sys.path_importer_cache`. If a finder implements
+ :meth:`abc.Finder.invalidate_caches()` then it will be called to perform the
+ invalidation. This function may be needed if some modules are installed
+ while your program is running and you expect the program to notice the
+ changes.
+
+ .. versionadded:: 3.3
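+
+ A short sketch (the module name is hypothetical)::
+
+    import importlib
+
+    # ... a new module is installed while the program is running ...
+    importlib.invalidate_caches()
+    mod = importlib.import_module('freshly_installed')
+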
+
:mod:`importlib.abc` -- Abstract base classes related to import
---------------------------------------------------------------
@@ -111,6 +122,12 @@ are also provided to help in implementing the core ABCs.
be the value of :attr:`__path__` from the parent package. If a loader
cannot be found, ``None`` is returned.
+ .. method:: invalidate_caches()
+
+ An optional method which, when called, should invalidate any internal
+ cache used by the finder. Used by :func:`invalidate_caches()` when
+ invalidating the caches of all cached finders.
+
.. class:: Loader
@@ -239,11 +256,30 @@ are also provided to help in implementing the core ABCs.
optimization to speed up loading by removing the parsing step of Python's
compiler, and so no bytecode-specific API is exposed.
+ .. method:: path_stats(self, path)
+
+ Optional abstract method which returns a :class:`dict` containing
+ metadata about the specified path. Supported dictionary keys are:
+
+ - ``'mtime'`` (mandatory): an integer or floating-point number
+ representing the modification time of the source code;
+ - ``'size'`` (optional): the size in bytes of the source code.
+
+ Any other keys in the dictionary are ignored, to allow for future
+ extensions.
+
+ .. versionadded:: 3.3
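+
+ For example, a file-based loader might implement it along these lines
+ (a sketch assuming :func:`os.stat` semantics)::
+
+    import os
+
+    def path_stats(self, path):
+        st = os.stat(path)
+        return {'mtime': st.st_mtime, 'size': st.st_size}
+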
+
.. method:: path_mtime(self, path)
Optional abstract method which returns the modification time for the
specified path.
+ .. deprecated:: 3.3
+ This method is deprecated in favour of :meth:`path_stats`. You don't
+ have to implement it, but it is still available for compatibility
+ purposes.
+
.. method:: set_data(self, path, data)
Optional abstract method which writes the specified bytes to a file
@@ -441,7 +477,9 @@ find and load modules.
This class does not perfectly mirror the semantics of :keyword:`import` in
terms of :data:`sys.path`. No implicit path hooks are assumed for
- simplification of the class and its semantics.
+ simplification of the class and its semantics. This implies that when
+ ``None`` is found in :data:`sys.path_importer_cache`, it is simply
+ ignored instead of implying a default finder.
Only class methods are defined by this class to alleviate the need for
instantiation.
@@ -451,7 +489,7 @@ find and load modules.
Class method that attempts to find a :term:`loader` for the module
specified by *fullname* on :data:`sys.path` or, if defined, on
*path*. For each path entry that is searched,
- :data:`sys.path_importer_cache` is checked. If an non-false object is
+ :data:`sys.path_importer_cache` is checked. If a non-false object is
found then it is used as the :term:`finder` to look for the module
being searched for. If no entry is found in
:data:`sys.path_importer_cache`, then :data:`sys.path_hooks` is
@@ -464,7 +502,7 @@ find and load modules.
---------------------------------------------------
.. module:: importlib.util
- :synopsis: Importers and path hooks
+ :synopsis: Utility code for importers
This module contains the various objects that help in the construction of
an :term:`importer`.
@@ -500,7 +538,7 @@ an :term:`importer`.
to set the :attr:`__loader__`
attribute on loaded modules. If the attribute is already set the decorator
does nothing. It is assumed that the first positional argument to the
- wrapped method is what :attr:`__loader__` should be set to.
+ wrapped method (i.e. ``self``) is what :attr:`__loader__` should be set to.
.. decorator:: set_package
@@ -511,8 +549,8 @@ an :term:`importer`.
set on and not the module found in :data:`sys.modules`.
Reliance on this decorator is discouraged when it is possible to set
- :attr:`__package__` before the execution of the code is possible. By
- setting it before the code for the module is executed it allows the
- attribute to be used at the global level of the module during
+ :attr:`__package__` before importing. By
+ setting it beforehand, the code for the module is executed with the
+ attribute set, and thus it can be used by global-level code during
initialization.
diff --git a/Doc/library/inspect.rst b/Doc/library/inspect.rst
index d127ce8..ac6ae99 100644
--- a/Doc/library/inspect.rst
+++ b/Doc/library/inspect.rst
@@ -355,17 +355,25 @@ Retrieving source code
argument may be a module, class, method, function, traceback, frame, or code
object. The source code is returned as a list of the lines corresponding to the
object and the line number indicates where in the original source file the first
- line of code was found. An :exc:`IOError` is raised if the source code cannot
+ line of code was found. An :exc:`OSError` is raised if the source code cannot
be retrieved.
+ .. versionchanged:: 3.3
+ :exc:`OSError` is raised instead of :exc:`IOError`, now an alias of the
+ former.
+
.. function:: getsource(object)
Return the text of the source code for an object. The argument may be a module,
class, method, function, traceback, frame, or code object. The source code is
- returned as a single string. An :exc:`IOError` is raised if the source code
+ returned as a single string. An :exc:`OSError` is raised if the source code
cannot be retrieved.
+ .. versionchanged:: 3.3
+ :exc:`OSError` is raised instead of :exc:`IOError`, now an alias of the
+ former.
+
.. function:: cleandoc(doc)
diff --git a/Doc/library/io.rst b/Doc/library/io.rst
index becc4a2..4d564bb 100644
--- a/Doc/library/io.rst
+++ b/Doc/library/io.rst
@@ -33,6 +33,10 @@ giving a :class:`str` object to the ``write()`` method of a binary stream
will raise a ``TypeError``. So will giving a :class:`bytes` object to the
``write()`` method of a text stream.
+.. versionchanged:: 3.3
+ Operations defined in this module used to raise :exc:`IOError`, which is
+ now an alias of :exc:`OSError`.
+
Text I/O
^^^^^^^^
@@ -109,21 +113,13 @@ High-level Module Interface
.. exception:: BlockingIOError
- Error raised when blocking would occur on a non-blocking stream. It inherits
- :exc:`IOError`.
-
- In addition to those of :exc:`IOError`, :exc:`BlockingIOError` has one
- attribute:
-
- .. attribute:: characters_written
-
- An integer containing the number of characters written to the stream
- before it blocked.
+ This is a compatibility alias for the built-in :exc:`BlockingIOError`
+ exception.
.. exception:: UnsupportedOperation
- An exception inheriting :exc:`IOError` and :exc:`ValueError` that is raised
+ An exception inheriting :exc:`OSError` and :exc:`ValueError` that is raised
when an unsupported operation is called on a stream.
@@ -202,8 +198,8 @@ I/O Base Classes
Even though :class:`IOBase` does not declare :meth:`read`, :meth:`readinto`,
or :meth:`write` because their signatures will vary, implementations and
clients should consider those methods part of the interface. Also,
- implementations may raise a :exc:`IOError` when operations they do not
- support are called.
+ implementations may raise a :exc:`ValueError` (or :exc:`UnsupportedOperation`)
+ when operations they do not support are called.
The basic type used for binary data read from or written to a file is
:class:`bytes`. :class:`bytearray`\s are accepted too, and in some cases
@@ -211,7 +207,7 @@ I/O Base Classes
:class:`str` data.
Note that calling any method (even inquiries) on a closed stream is
- undefined. Implementations may raise :exc:`IOError` in this case.
+ undefined. Implementations may raise :exc:`ValueError` in this case.
IOBase (and its subclasses) support the iterator protocol, meaning that an
:class:`IOBase` object can be iterated over yielding the lines in a stream.
@@ -244,7 +240,7 @@ I/O Base Classes
.. method:: fileno()
Return the underlying file descriptor (an integer) of the stream if it
- exists. An :exc:`IOError` is raised if the IO object does not use a file
+ exists. An :exc:`OSError` is raised if the IO object does not use a file
descriptor.
.. method:: flush()
@@ -260,7 +256,7 @@ I/O Base Classes
.. method:: readable()
Return ``True`` if the stream can be read from. If ``False``, :meth:`read`
- will raise :exc:`IOError`.
+ will raise :exc:`OSError`.
.. method:: readline(limit=-1)
@@ -298,7 +294,7 @@ I/O Base Classes
.. method:: seekable()
Return ``True`` if the stream supports random access. If ``False``,
- :meth:`seek`, :meth:`tell` and :meth:`truncate` will raise :exc:`IOError`.
+ :meth:`seek`, :meth:`tell` and :meth:`truncate` will raise :exc:`OSError`.
.. method:: tell()
@@ -316,7 +312,7 @@ I/O Base Classes
.. method:: writable()
Return ``True`` if the stream supports writing. If ``False``,
- :meth:`write` and :meth:`truncate` will raise :exc:`IOError`.
+ :meth:`write` and :meth:`truncate` will raise :exc:`OSError`.
.. method:: writelines(lines)
@@ -450,7 +446,7 @@ I/O Base Classes
Write the given bytes or bytearray object, *b* and return the number
of bytes written (never less than ``len(b)``, since if the write fails
- an :exc:`IOError` will be raised). Depending on the actual
+ an :exc:`OSError` will be raised). Depending on the actual
implementation, these bytes may be readily written to the underlying
stream, or held in a buffer for performance and latency reasons.
@@ -462,7 +458,7 @@ I/O Base Classes
Raw File I/O
^^^^^^^^^^^^
-.. class:: FileIO(name, mode='r', closefd=True)
+.. class:: FileIO(name, mode='r', closefd=True, opener=None)
:class:`FileIO` represents an OS-level file containing bytes data.
It implements the :class:`RawIOBase` interface (and therefore the
@@ -475,14 +471,27 @@ Raw File I/O
* an integer representing the number of an existing OS-level file descriptor
to which the resulting :class:`FileIO` object will give access.
- The *mode* can be ``'r'``, ``'w'`` or ``'a'`` for reading (default), writing,
- or appending. The file will be created if it doesn't exist when opened for
- writing or appending; it will be truncated when opened for writing. Add a
+ The *mode* can be ``'r'``, ``'w'``, ``'x'`` or ``'a'`` for reading
+ (default), writing, exclusive creation or appending. The file will be
+ created if it doesn't exist when opened for writing or appending; it will be
+ truncated when opened for writing. :exc:`FileExistsError` will be raised if
+ it already exists when opened for creating. Opening a file for creating
+ implies writing, so this mode behaves in a similar way to ``'w'``. Add a
``'+'`` to the mode to allow simultaneous reading and writing.
The :meth:`read` (when called with a positive argument), :meth:`readinto`
and :meth:`write` methods on this class will only make one system call.
+ A custom opener can be used by passing a callable as *opener*. The underlying
+ file descriptor for the file object is then obtained by calling *opener* with
+ (*name*, *flags*). *opener* must return an open file descriptor (passing
+ :mod:`os.open` as *opener* results in functionality similar to passing
+ ``None``).
+
+ .. versionchanged:: 3.3
+ The *opener* parameter was added.
+ The ``'x'`` mode was added.
+
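+ A minimal *opener* sketch (``os.O_NOFOLLOW`` is Unix-only)::
+
+    import io, os
+
+    def no_follow(name, flags):
+        # refuse to follow symbolic links when opening
+        return os.open(name, flags | os.O_NOFOLLOW)
+
+    f = io.FileIO('spam.bin', 'w', opener=no_follow)
+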
In addition to the attributes and methods from :class:`IOBase` and
:class:`RawIOBase`, :class:`FileIO` provides the following data
attributes and methods:
@@ -736,7 +745,8 @@ Text I/O
written.
-.. class:: TextIOWrapper(buffer, encoding=None, errors=None, newline=None, line_buffering=False)
+.. class:: TextIOWrapper(buffer, encoding=None, errors=None, newline=None, \
+ line_buffering=False, write_through=False)
A buffered text stream over a :class:`BufferedIOBase` binary stream.
It inherits :class:`TextIOBase`.
@@ -767,6 +777,13 @@ Text I/O
If *line_buffering* is ``True``, :meth:`flush` is implied when a call to
write contains a newline character.
+ If *write_through* is ``True``, calls to :meth:`write` are guaranteed
+ not to be buffered: any data written on the :class:`TextIOWrapper`
+ object is immediately passed to its underlying binary *buffer*.
+
+ .. versionchanged:: 3.3
+ The *write_through* argument has been added.
+
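+ For example (a sketch)::
+
+    import io
+
+    raw = io.BytesIO()
+    text = io.TextIOWrapper(raw, encoding='utf-8', write_through=True)
+    text.write('abc')                 # reaches raw immediately
+    assert raw.getvalue() == b'abc'
+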
:class:`TextIOWrapper` provides one attribute in addition to those of
:class:`TextIOBase` and its parents:
diff --git a/Doc/library/itertools.rst b/Doc/library/itertools.rst
index d1d1188..994a25a 100644
--- a/Doc/library/itertools.rst
+++ b/Doc/library/itertools.rst
@@ -46,7 +46,7 @@ Iterator Arguments Results
==================== ============================ ================================================= =============================================================
Iterator Arguments Results Example
==================== ============================ ================================================= =============================================================
-:func:`accumulate` p p0, p0+p1, p0+p1+p2, ... ``accumulate([1,2,3,4,5]) --> 1 3 6 10 15``
+:func:`accumulate` p [,func] p0, p0+p1, p0+p1+p2, ... ``accumulate([1,2,3,4,5]) --> 1 3 6 10 15``
:func:`chain` p, q, ... p0, p1, ... plast, q0, q1, ... ``chain('ABC', 'DEF') --> A B C D E F``
:func:`compress` data, selectors (d[0] if s[0]), (d[1] if s[1]), ... ``compress('ABCDEF', [1,0,1,0,1,1]) --> A C E F``
:func:`dropwhile` pred, seq seq[n], seq[n+1], starting when pred fails ``dropwhile(lambda x: x<5, [1,4,6,4,1]) --> 6 4 1``
@@ -84,23 +84,61 @@ The following module functions all construct and return iterators. Some provide
streams of infinite length, so they should only be accessed by functions or
loops that truncate the stream.
-.. function:: accumulate(iterable)
+.. function:: accumulate(iterable[, func])
Make an iterator that returns accumulated sums. Elements may be any addable
- type including :class:`Decimal` or :class:`Fraction`. Equivalent to::
+ type including :class:`Decimal` or :class:`Fraction`. If the optional
+ *func* argument is supplied, it should be a function of two arguments
+ and it will be used instead of addition.
- def accumulate(iterable):
+ Equivalent to::
+
+ def accumulate(iterable, func=operator.add):
'Return running totals'
# accumulate([1,2,3,4,5]) --> 1 3 6 10 15
+ # accumulate([1,2,3,4,5], operator.mul) --> 1 2 6 24 120
it = iter(iterable)
total = next(it)
yield total
for element in it:
- total = total + element
+ total = func(total, element)
yield total
+ There are a number of uses for the *func* argument. It can be set to
+ :func:`min` for a running minimum, :func:`max` for a running maximum, or
+ :func:`operator.mul` for a running product. Amortization tables can be
+ built by accumulating interest and applying payments. First-order
+ `recurrence relations <http://en.wikipedia.org/wiki/Recurrence_relation>`_
+ can be modeled by supplying the initial value in the iterable and using only
+ the accumulated total in the *func* argument::
+
+ >>> data = [3, 4, 6, 2, 1, 9, 0, 7, 5, 8]
+ >>> list(accumulate(data, operator.mul)) # running product
+ [3, 12, 72, 144, 144, 1296, 0, 0, 0, 0]
+ >>> list(accumulate(data, max)) # running maximum
+ [3, 4, 6, 6, 6, 9, 9, 9, 9, 9]
+
+ # Amortize a 5% loan of 1000 with 4 annual payments of 90
+ >>> cashflows = [1000, -90, -90, -90, -90]
+ >>> list(accumulate(cashflows, lambda bal, pmt: bal*1.05 + pmt))
+ [1000, 960.0, 918.0, 873.9000000000001, 827.5950000000001]
+
+ # Chaotic recurrence relation http://en.wikipedia.org/wiki/Logistic_map
+ >>> logistic_map = lambda x, _: r * x * (1 - x)
+ >>> r = 3.8
+ >>> x0 = 0.4
+ >>> inputs = repeat(x0, 36) # only the initial value is used
+ >>> [format(x, '.2f') for x in accumulate(inputs, logistic_map)]
+ ['0.40', '0.91', '0.30', '0.81', '0.60', '0.92', '0.29', '0.79', '0.63',
+ '0.88', '0.39', '0.90', '0.33', '0.84', '0.52', '0.95', '0.18', '0.57',
+ '0.93', '0.25', '0.71', '0.79', '0.63', '0.88', '0.39', '0.91', '0.32',
+ '0.83', '0.54', '0.95', '0.20', '0.60', '0.91', '0.30', '0.80', '0.60']
+
.. versionadded:: 3.2
+ .. versionchanged:: 3.3
+ Added the optional *func* parameter.
+
.. function:: chain(*iterables)
Make an iterator that returns elements from the first iterable until it is
diff --git a/Doc/library/logging.handlers.rst b/Doc/library/logging.handlers.rst
index c4dd438..51e476b 100644
--- a/Doc/library/logging.handlers.rst
+++ b/Doc/library/logging.handlers.rst
@@ -164,6 +164,87 @@ this value.
changed. If it has, the existing stream is flushed and closed and the
file opened again, before outputting the record to the file.
+.. _base-rotating-handler:
+
+BaseRotatingHandler
+^^^^^^^^^^^^^^^^^^^
+
+The :class:`BaseRotatingHandler` class, located in the :mod:`logging.handlers`
+module, is the base class for the rotating file handlers,
+:class:`RotatingFileHandler` and :class:`TimedRotatingFileHandler`. You should
+not need to instantiate this class, but it has attributes and methods you may
+need to override.
+
+.. class:: BaseRotatingHandler(filename, mode, encoding=None, delay=False)
+
+ The parameters are as for :class:`FileHandler`. The attributes are:
+
+ .. attribute:: namer
+
+ If this attribute is set to a callable, the :meth:`rotation_filename`
+ method delegates to this callable. The parameters passed to the callable
+ are those passed to :meth:`rotation_filename`.
+
+ .. note:: The namer function is called quite a few times during rollover,
+ so it should be as simple and as fast as possible. It should also
+ return the same output every time for a given input, otherwise the
+ rollover behaviour may not work as expected.
+
+ .. versionadded:: 3.3
+
+
+ .. attribute:: BaseRotatingHandler.rotator
+
+ If this attribute is set to a callable, the :meth:`rotate` method
+ delegates to this callable. The parameters passed to the callable are
+ those passed to :meth:`rotate`.
+
+ .. versionadded:: 3.3
+
+ .. method:: BaseRotatingHandler.rotation_filename(default_name)
+
+ Modify the filename of a log file when rotating.
+
+ This is provided so that a custom filename can be provided.
+
+ The default implementation calls the 'namer' attribute of the handler,
+ if it's callable, passing the default name to it. If the attribute isn't
+ callable (the default is ``None``), the name is returned unchanged.
+
+ :param default_name: The default name for the log file.
+
+ .. versionadded:: 3.3
+
+
+ .. method:: BaseRotatingHandler.rotate(source, dest)
+
+ When rotating, rotate the current log.
+
+ The default implementation calls the 'rotator' attribute of the handler,
+ if it's callable, passing the source and dest arguments to it. If the
+ attribute isn't callable (the default is ``None``), the source is simply
+ renamed to the destination.
+
+ :param source: The source filename. This is normally the base
+ filename, e.g. 'test.log'
+ :param dest: The destination filename. This is normally
+ what the source is rotated to, e.g. 'test.log.1'.
+
+ .. versionadded:: 3.3
+
+The reason the attributes exist is to save you having to subclass - you can use
+the same callables for instances of :class:`RotatingFileHandler` and
+:class:`TimedRotatingFileHandler`. If either the namer or rotator callable
+raises an exception, this will be handled in the same way as any other
+exception during an :meth:`emit` call, i.e. via the :meth:`handleError` method
+of the handler.
+
+If you need to make more significant changes to rotation processing, you can
+override the methods.
+
+For an example, see :ref:`cookbook-rotator-namer`.
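+
+A minimal sketch of a compressing rotator (the names are illustrative)::
+
+    import gzip, os
+    import logging.handlers
+
+    def namer(default_name):
+        return default_name + '.gz'
+
+    def rotator(source, dest):
+        # compress the rotated file, then remove the original
+        with open(source, 'rb') as sf, gzip.open(dest, 'wb') as df:
+            df.write(sf.read())
+        os.remove(source)
+
+    handler = logging.handlers.RotatingFileHandler(
+        'test.log', maxBytes=10000, backupCount=5)
+    handler.namer = namer
+    handler.rotator = rotator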
+
+
.. _rotating-file-handler:
RotatingFileHandler
@@ -452,6 +533,15 @@ supports sending logging messages to a remote or local Unix syslog.
behaviour) but can be set to ``False`` on a ``SysLogHandler`` instance
in order for that instance to *not* append the NUL terminator.
+ .. versionchanged:: 3.3
+ (See: :issue:`12419`.) In earlier versions, there was no facility for
+ an "ident" or "tag" prefix to identify the source of the message. This
+ can now be specified using a class-level attribute, defaulting to
+ ``""`` to preserve existing behaviour, but which can be overridden on
+ a ``SysLogHandler`` instance in order for that instance to prepend
+ the ident to every message handled. Note that the provided ident must
+ be text, not bytes, and is prepended to the message exactly as is.
+
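+ A short sketch of overriding the ident on one instance (the address is
+ platform-dependent)::
+
+    import logging.handlers
+
+    h = logging.handlers.SysLogHandler(address='/dev/log')
+    h.ident = 'myapp: '   # prepended verbatim to every message
+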
.. method:: encodePriority(facility, priority)
Encodes the facility and priority into an integer. You can pass in strings
@@ -690,7 +780,7 @@ should, then :meth:`flush` is expected to do the needful.
.. method:: close()
- Calls :meth:`flush`, sets the target to :const:`None` and clears the
+ Calls :meth:`flush`, sets the target to ``None`` and clears the
buffer.
@@ -859,6 +949,15 @@ possible, while any potentially slow operations (such as sending an email via
Note that if you don't call this before your application exits, there
may be some records still left on the queue, which won't be processed.
+ .. method:: enqueue_sentinel()
+
+ Writes a sentinel to the queue to tell the listener to quit. This
+ implementation uses ``put_nowait()``. You may want to override this
+ method if you want to use timeouts or work with custom queue
+ implementations.
+
+ .. versionadded:: 3.3
+
.. seealso::
diff --git a/Doc/library/logging.rst b/Doc/library/logging.rst
index 1005b79..c2c6a6f 100644
--- a/Doc/library/logging.rst
+++ b/Doc/library/logging.rst
@@ -200,6 +200,9 @@ instantiated directly, but always through the module-level function
Logs a message with level :const:`WARNING` on this logger. The arguments are
interpreted as for :meth:`debug`.
+ .. note:: There is an obsolete method ``warn`` which is functionally
+ identical to ``warning``. As ``warn`` is deprecated, please do not use
+ it; use ``warning`` instead.
.. method:: Logger.error(msg, *args, **kwargs)
@@ -479,6 +482,19 @@ The useful mapping keys in a :class:`LogRecord` are given in the section on
want all logging times to be shown in GMT, set the ``converter``
attribute in the ``Formatter`` class.
+ .. versionchanged:: 3.3
+ Previously, the default ISO 8601 format was hard-coded as in this
+ example: ``2010-09-06 22:38:15,292`` where the part before the comma is
+ handled by a strftime format string (``'%Y-%m-%d %H:%M:%S'``), and the
+ part after the comma is a millisecond value. Because strftime does not
+ have a format placeholder for milliseconds, the millisecond value is
+ appended using another format string, ``'%s,%03d'``, and both of these
+ format strings have been hard-coded into this method. With the change,
+ these strings are defined as class-level attributes which can be
+ overridden at the instance level when desired. The names of the
+ attributes are ``default_time_format`` (for the strftime format string)
+ and ``default_msec_format`` (for appending the millisecond value).
+
.. method:: formatException(exc_info)
Formats the specified exception information (a standard exception tuple as
@@ -885,8 +901,12 @@ functions.
.. function:: warning(msg, *args, **kwargs)
- Logs a message with level :const:`WARNING` on the root logger. The arguments are
- interpreted as for :func:`debug`.
+ Logs a message with level :const:`WARNING` on the root logger. The arguments
+ are interpreted as for :func:`debug`.
+
+ .. note:: There is an obsolete function ``warn`` which is functionally
+ identical to ``warning``. As ``warn`` is deprecated, please do not use
+ it; use ``warning`` instead.
.. function:: error(msg, *args, **kwargs)
@@ -1011,12 +1031,27 @@ functions.
| ``stream`` | Use the specified stream to initialize the |
| | StreamHandler. Note that this argument is |
| | incompatible with 'filename' - if both are |
- | | present, 'stream' is ignored. |
+ | | present, a ``ValueError`` is raised. |
+ +--------------+---------------------------------------------+
+ | ``handlers`` | If specified, this should be an iterable of |
+ | | already created handlers to add to the root |
+ | | logger. Any handlers which don't already |
+ | | have a formatter set will be assigned the |
+ | | default formatter created in this function. |
+ | | Note that this argument is incompatible |
+ | | with 'filename' or 'stream' - if both are |
+ | | present, a ``ValueError`` is raised. |
+--------------+---------------------------------------------+
.. versionchanged:: 3.2
The ``style`` argument was added.
+ .. versionchanged:: 3.3
+ The ``handlers`` argument was added. Additional checks were added to
+ catch situations where incompatible arguments are specified (e.g.
+ ``handlers`` together with ``stream`` or ``filename``, or ``stream``
+ together with ``filename``).
+
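+ For example, a short sketch of the new ``handlers`` argument::
+
+    import logging
+
+    handler = logging.StreamHandler()
+    logging.basicConfig(level=logging.INFO, handlers=[handler])
+    logging.info('configured via an explicit handler list')
+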
.. function:: shutdown()
diff --git a/Doc/library/lzma.rst b/Doc/library/lzma.rst
new file mode 100644
index 0000000..cae05b6
--- /dev/null
+++ b/Doc/library/lzma.rst
@@ -0,0 +1,349 @@
+:mod:`lzma` --- Compression using the LZMA algorithm
+====================================================
+
+.. module:: lzma
+ :synopsis: A Python wrapper for the liblzma compression library.
+.. moduleauthor:: Nadeem Vawda <nadeem.vawda@gmail.com>
+.. sectionauthor:: Nadeem Vawda <nadeem.vawda@gmail.com>
+
+.. versionadded:: 3.3
+
+
+This module provides classes and convenience functions for compressing and
+decompressing data using the LZMA compression algorithm. Also included is a file
+interface supporting the ``.xz`` and legacy ``.lzma`` file formats used by the
+:program:`xz` utility, as well as raw compressed streams.
+
+The interface provided by this module is very similar to that of the :mod:`bz2`
+module. However, note that :class:`LZMAFile` is *not* thread-safe, unlike
+:class:`bz2.BZ2File`, so if you need to use a single :class:`LZMAFile` instance
+from multiple threads, it is necessary to protect it with a lock.
+
+
+.. exception:: LZMAError
+
+ This exception is raised when an error occurs during compression or
+ decompression, or while initializing the compressor/decompressor state.
+
+
+Reading and writing compressed files
+------------------------------------
+
+.. class:: LZMAFile(filename=None, mode="r", \*, fileobj=None, format=None, check=-1, preset=None, filters=None)
+
+ Open an LZMA-compressed file.
+
+ An :class:`LZMAFile` can wrap an existing :term:`file object` (given by
+ *fileobj*), or operate directly on a named file (named by *filename*).
+ Exactly one of these two parameters should be provided. If *fileobj* is
+ provided, it is not closed when the :class:`LZMAFile` is closed.
+
+ The *mode* argument can be either ``"r"`` for reading (default), ``"w"`` for
+ overwriting, or ``"a"`` for appending. If *fileobj* is provided, a mode of
+ ``"w"`` does not truncate the file, and is instead equivalent to ``"a"``.
+
+ When opening a file for reading, the input file may be the concatenation of
+ multiple separate compressed streams. These are transparently decoded as a
+ single logical stream.
+
+ When opening a file for reading, the *format* and *filters* arguments have
+ the same meanings as for :class:`LZMADecompressor`. In this case, the *check*
+ and *preset* arguments should not be used.
+
+ When opening a file for writing, the *format*, *check*, *preset* and
+ *filters* arguments have the same meanings as for :class:`LZMACompressor`.
+
+ :class:`LZMAFile` supports all the members specified by
+ :class:`io.BufferedIOBase`, except for :meth:`detach` and :meth:`truncate`.
+ Iteration and the :keyword:`with` statement are supported.
+
+ The following method is also provided:
+
+ .. method:: peek(size=-1)
+
+ Return buffered data without advancing the file position. At least one
+ byte of data will be returned, unless EOF has been reached. The exact
+ number of bytes returned is unspecified (the *size* argument is ignored).
+
+
+Compressing and decompressing data in memory
+--------------------------------------------
+
+.. class:: LZMACompressor(format=FORMAT_XZ, check=-1, preset=None, filters=None)
+
+ Create a compressor object, which can be used to compress data incrementally.
+
+ For a more convenient way of compressing a single chunk of data, see
+ :func:`compress`.
+
+ The *format* argument specifies what container format should be used.
+ Possible values are:
+
+ * :const:`FORMAT_XZ`: The ``.xz`` container format.
+ This is the default format.
+
+ * :const:`FORMAT_ALONE`: The legacy ``.lzma`` container format.
+ This format is more limited than ``.xz`` -- it does not support integrity
+ checks or multiple filters.
+
+ * :const:`FORMAT_RAW`: A raw data stream, not using any container format.
+ This format specifier does not support integrity checks, and requires that
+ you always specify a custom filter chain (for both compression and
+ decompression). Additionally, data compressed in this manner cannot be
+ decompressed using :const:`FORMAT_AUTO` (see :class:`LZMADecompressor`).
+
+ The *check* argument specifies the type of integrity check to include in the
+ compressed data. This check is used when decompressing, to ensure that the
+ data has not been corrupted. Possible values are:
+
+ * :const:`CHECK_NONE`: No integrity check.
+ This is the default (and the only acceptable value) for
+ :const:`FORMAT_ALONE` and :const:`FORMAT_RAW`.
+
+ * :const:`CHECK_CRC32`: 32-bit Cyclic Redundancy Check.
+
+ * :const:`CHECK_CRC64`: 64-bit Cyclic Redundancy Check.
+ This is the default for :const:`FORMAT_XZ`.
+
+ * :const:`CHECK_SHA256`: 256-bit Secure Hash Algorithm.
+
+ If the specified check is not supported, an :class:`LZMAError` is raised.
+
+ The compression settings can be specified either as a preset compression
+ level (with the *preset* argument), or in detail as a custom filter chain
+ (with the *filters* argument).
+
+ The *preset* argument (if provided) should be an integer between ``0`` and
+ ``9`` (inclusive), optionally OR-ed with the constant
+ :const:`PRESET_EXTREME`. If neither *preset* nor *filters* are given, the
+ default behavior is to use :const:`PRESET_DEFAULT` (preset level ``6``).
+ Higher presets produce smaller output, but make the compression process
+ slower.
+
+ .. note::
+
+ In addition to being more CPU-intensive, compression with higher presets
+ also requires much more memory (and produces output that needs more memory
+ to decompress). With preset ``9`` for example, the overhead for an
+ :class:`LZMACompressor` object can be as high as 800MiB. For this reason,
+ it is generally best to stick with the default preset.
+
+ The *filters* argument (if provided) should be a filter chain specifier.
+ See :ref:`filter-chain-specs` for details.
+
+ .. method:: compress(data)
+
+ Compress *data* (a :class:`bytes` object), returning a :class:`bytes`
+ object containing compressed data for at least part of the input. Some of
+ *data* may be buffered internally, for use in later calls to
+ :meth:`compress` and :meth:`flush`. The returned data should be
+ concatenated with the output of any previous calls to :meth:`compress`.
+
+ .. method:: flush()
+
+ Finish the compression process, returning a :class:`bytes` object
+ containing any data stored in the compressor's internal buffers.
+
+ The compressor cannot be used after this method has been called.
+
+
+.. class:: LZMADecompressor(format=FORMAT_AUTO, memlimit=None, filters=None)
+
+ Create a decompressor object, which can be used to decompress data
+ incrementally.
+
+ For a more convenient way of decompressing an entire compressed stream at
+ once, see :func:`decompress`.
+
+ The *format* argument specifies the container format that should be used. The
+ default is :const:`FORMAT_AUTO`, which can decompress both ``.xz`` and
+ ``.lzma`` files. Other possible values are :const:`FORMAT_XZ`,
+ :const:`FORMAT_ALONE`, and :const:`FORMAT_RAW`.
+
+ The *memlimit* argument specifies a limit (in bytes) on the amount of memory
+ that the decompressor can use. When this argument is used, decompression will
+ fail with an :class:`LZMAError` if it is not possible to decompress the input
+ within the given memory limit.
+
+ The *filters* argument specifies the filter chain that was used to create
+ the stream being decompressed. This argument is required if *format* is
+ :const:`FORMAT_RAW`, but should not be used for other formats.
+ See :ref:`filter-chain-specs` for more information about filter chains.
+
+ .. note::
+ This class does not transparently handle inputs containing multiple
+ compressed streams, unlike :func:`decompress` and :class:`LZMAFile`. To
+ decompress a multi-stream input with :class:`LZMADecompressor`, you must
+ create a new decompressor for each stream.
+
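+ A sketch of decompressing input that contains several concatenated
+ streams (assuming ``import lzma`` and that *data* holds only complete
+ streams)::
+
+    results = []
+    while data:
+        decomp = lzma.LZMADecompressor()
+        results.append(decomp.decompress(data))
+        data = decomp.unused_data
+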
+ .. method:: decompress(data)
+
+ Decompress *data* (a :class:`bytes` object), returning a :class:`bytes`
+ object containing the decompressed data for at least part of the input.
+ Some of *data* may be buffered internally, for use in later calls to
+ :meth:`decompress`. The returned data should be concatenated with the
+ output of any previous calls to :meth:`decompress`.
+
+ .. attribute:: check
+
+ The ID of the integrity check used by the input stream. This may be
+ :const:`CHECK_UNKNOWN` until enough of the input has been decoded to
+ determine what integrity check it uses.
+
+ .. attribute:: eof
+
+ True if the end-of-stream marker has been reached.
+
+ .. attribute:: unused_data
+
+ Data found after the end of the compressed stream.
+
+ Before the end of the stream is reached, this will be ``b""``.
+
+
+.. function:: compress(data, format=FORMAT_XZ, check=-1, preset=None, filters=None)
+
+ Compress *data* (a :class:`bytes` object), returning the compressed data as a
+ :class:`bytes` object.
+
+ See :class:`LZMACompressor` above for a description of the *format*, *check*,
+ *preset* and *filters* arguments.
+
+
+.. function:: decompress(data, format=FORMAT_AUTO, memlimit=None, filters=None)
+
+ Decompress *data* (a :class:`bytes` object), returning the uncompressed data
+ as a :class:`bytes` object.
+
+ If *data* is the concatenation of multiple distinct compressed streams,
+ decompress all of these streams, and return the concatenation of the results.
+
+ See :class:`LZMADecompressor` above for a description of the *format*,
+ *memlimit* and *filters* arguments.
+
+
+Miscellaneous
+-------------
+
+.. function:: check_is_supported(check)
+
+ Return ``True`` if the given integrity check is supported on this system.
+
+ :const:`CHECK_NONE` and :const:`CHECK_CRC32` are always supported.
+ :const:`CHECK_CRC64` and :const:`CHECK_SHA256` may be unavailable if you are
+ using a version of :program:`liblzma` that was compiled with a limited
+ feature set.
+
+
+.. _filter-chain-specs:
+
+Specifying custom filter chains
+-------------------------------
+
+A filter chain specifier is a sequence of dictionaries, where each dictionary
+contains the ID and options for a single filter. Each dictionary must contain
+the key ``"id"``, and may contain additional keys to specify filter-dependent
+options. Valid filter IDs are as follows:
+
+* Compression filters:
+ * :const:`FILTER_LZMA1` (for use with :const:`FORMAT_ALONE`)
+ * :const:`FILTER_LZMA2` (for use with :const:`FORMAT_XZ` and :const:`FORMAT_RAW`)
+
+* Delta filter:
+ * :const:`FILTER_DELTA`
+
+* Branch-Call-Jump (BCJ) filters:
+ * :const:`FILTER_X86`
+ * :const:`FILTER_IA64`
+ * :const:`FILTER_ARM`
+ * :const:`FILTER_ARMTHUMB`
+ * :const:`FILTER_POWERPC`
+ * :const:`FILTER_SPARC`
+
+A filter chain can consist of up to 4 filters, and cannot be empty. The last
+filter in the chain must be a compression filter, and any other filters must be
+delta or BCJ filters.
+
+Compression filters support the following options (specified as additional
+entries in the dictionary representing the filter):
+
+ * ``preset``: A compression preset to use as a source of default values for
+ options that are not specified explicitly.
+ * ``dict_size``: Dictionary size in bytes. This should be between 4KiB and
+ 1.5GiB (inclusive).
+ * ``lc``: Number of literal context bits.
+ * ``lp``: Number of literal position bits. The sum ``lc + lp`` must be at
+ most 4.
+ * ``pb``: Number of position bits; must be at most 4.
+ * ``mode``: :const:`MODE_FAST` or :const:`MODE_NORMAL`.
+ * ``nice_len``: What should be considered a "nice length" for a match.
+ This should be 273 or less.
+ * ``mf``: What match finder to use -- :const:`MF_HC3`, :const:`MF_HC4`,
+ :const:`MF_BT2`, :const:`MF_BT3`, or :const:`MF_BT4`.
+ * ``depth``: Maximum search depth used by match finder. 0 (default) means to
+ select automatically based on other filter options.
+
+The delta filter stores the differences between bytes, producing more repetitive
+input for the compressor in certain circumstances. It supports only one
+option, ``dist``. This indicates the distance
+between bytes to be subtracted. The default is 1, i.e. take the differences
+between adjacent bytes.
+
+The BCJ filters are intended to be applied to machine code. They convert
+relative branches, calls and jumps in the code to use absolute addressing, with
+the aim of increasing the redundancy that can be exploited by the compressor.
+These filters support one option, ``start_offset``. This specifies the address
+that should be mapped to the beginning of the input data. The default is 0.
+
+
+Examples
+--------
+
+Reading in a compressed file::
+
+ import lzma
+ with lzma.LZMAFile("file.xz") as f:
+ file_content = f.read()
+
+Creating a compressed file::
+
+ import lzma
+ data = b"Insert Data Here"
+ with lzma.LZMAFile("file.xz", "w") as f:
+ f.write(data)
+
+Compressing data in memory::
+
+ import lzma
+ data_in = b"Insert Data Here"
+ data_out = lzma.compress(data_in)
+
+Incremental compression::
+
+ import lzma
+ lzc = lzma.LZMACompressor()
+ out1 = lzc.compress(b"Some data\n")
+ out2 = lzc.compress(b"Another piece of data\n")
+ out3 = lzc.compress(b"Even more data\n")
+ out4 = lzc.flush()
+ # Concatenate all the partial results:
+ result = b"".join([out1, out2, out3, out4])
+
+Writing compressed data to an already-open file::
+
+ import lzma
+ with open("file.xz", "wb") as f:
+ f.write(b"This data will not be compressed\n")
+ with lzma.LZMAFile(fileobj=f, mode="w") as lzf:
+ lzf.write(b"This *will* be compressed\n")
+ f.write(b"Not compressed\n")
+
+Creating a compressed file using a custom filter chain::
+
+ import lzma
+ my_filters = [
+ {"id": lzma.FILTER_DELTA, "dist": 5},
+ {"id": lzma.FILTER_LZMA2, "preset": 7 | lzma.PRESET_EXTREME},
+ ]
+ with lzma.LZMAFile("file.xz", "w", filters=my_filters) as f:
+ f.write(b"blah blah blah")
diff --git a/Doc/library/math.rst b/Doc/library/math.rst
index 98c5b33..62c0f34 100644
--- a/Doc/library/math.rst
+++ b/Doc/library/math.rst
@@ -184,6 +184,19 @@ Power and logarithmic functions
result is calculated in a way which is accurate for *x* near zero.
+.. function:: log2(x)
+
+ Return the base-2 logarithm of *x*. This is usually more accurate than
+ ``log(x, 2)``.
+
+ .. versionadded:: 3.3
+
+ .. seealso::
+
+ :meth:`int.bit_length` returns the number of bits necessary to represent
+ an integer in binary, excluding the sign and leading zeros.
+
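+ For example (a doctest-style sketch)::
+
+    >>> import math
+    >>> math.log2(1024)
+    10.0
+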
+
.. function:: log10(x)
Return the base-10 logarithm of *x*. This is usually more accurate
diff --git a/Doc/library/mmap.rst b/Doc/library/mmap.rst
index 5f0f004..1a19a7e 100644
--- a/Doc/library/mmap.rst
+++ b/Doc/library/mmap.rst
@@ -196,12 +196,16 @@ To map anonymous memory, -1 should be passed as the fileno along with the length
move will raise a :exc:`TypeError` exception.
- .. method:: read(num)
+ .. method:: read([n])
- Return a :class:`bytes` containing up to *num* bytes starting from the
- current file position; the file position is updated to point after the
- bytes that were returned.
+ Return a :class:`bytes` containing up to *n* bytes starting from the
+ current file position. If the argument is omitted, *None*, or negative,
+ return all bytes from the current file position to the end of the
+ mapping. The file position is updated to point after the bytes that were
+ returned.
+ .. versionchanged:: 3.3
+ Argument can be omitted or *None*.
.. method:: read_byte()
diff --git a/Doc/library/msvcrt.rst b/Doc/library/msvcrt.rst
index 889a0c5..9d23720 100644
--- a/Doc/library/msvcrt.rst
+++ b/Doc/library/msvcrt.rst
@@ -20,6 +20,11 @@ api. The normal API deals only with ASCII characters and is of limited use
for internationalized applications. The wide char API should be used
wherever possible.
+.. versionchanged:: 3.3
+ Operations in this module now raise :exc:`OSError` where :exc:`IOError`
+ was raised.
+
+
.. _msvcrt-files:
File Operations
@@ -29,7 +34,7 @@ File Operations
.. function:: locking(fd, mode, nbytes)
Lock part of a file based on file descriptor *fd* from the C runtime. Raises
- :exc:`IOError` on failure. The locked region of the file extends from the
+ :exc:`OSError` on failure. The locked region of the file extends from the
current file position for *nbytes* bytes, and may continue beyond the end of the
file. *mode* must be one of the :const:`LK_\*` constants listed below. Multiple
regions in a file may be locked at the same time, but may not overlap. Adjacent
@@ -41,13 +46,13 @@ File Operations
Locks the specified bytes. If the bytes cannot be locked, the program
immediately tries again after 1 second. If, after 10 attempts, the bytes cannot
- be locked, :exc:`IOError` is raised.
+ be locked, :exc:`OSError` is raised.
.. data:: LK_NBLCK
LK_NBRLCK
- Locks the specified bytes. If the bytes cannot be locked, :exc:`IOError` is
+ Locks the specified bytes. If the bytes cannot be locked, :exc:`OSError` is
raised.
@@ -73,7 +78,7 @@ File Operations
.. function:: get_osfhandle(fd)
- Return the file handle for the file descriptor *fd*. Raises :exc:`IOError` if
+ Return the file handle for the file descriptor *fd*. Raises :exc:`OSError` if
*fd* is not recognized.
@@ -144,4 +149,4 @@ Other Functions
.. function:: heapmin()
Force the :c:func:`malloc` heap to clean itself up and return unused blocks to
- the operating system. On failure, this raises :exc:`IOError`.
+ the operating system. On failure, this raises :exc:`OSError`.
diff --git a/Doc/library/multiprocessing.rst b/Doc/library/multiprocessing.rst
index 7db2aed..d8e1d92 100644
--- a/Doc/library/multiprocessing.rst
+++ b/Doc/library/multiprocessing.rst
@@ -297,7 +297,7 @@ The :mod:`multiprocessing` package mostly replicates the API of the
:class:`Process` and exceptions
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-.. class:: Process([group[, target[, name[, args[, kwargs]]]]])
+.. class:: Process([group[, target[, name[, args[, kwargs]]]]], *, daemon=None)
Process objects represent activity that is run in a separate process. The
:class:`Process` class has equivalents of all the methods of
@@ -312,13 +312,19 @@ The :mod:`multiprocessing` package mostly replicates the API of the
:sub:`1`,N\ :sub:`2`,...,N\ :sub:`k` is a sequence of integers whose length
is determined by the *generation* of the process. *args* is the argument
tuple for the target invocation. *kwargs* is a dictionary of keyword
- arguments for the target invocation. By default, no arguments are passed to
- *target*.
+ arguments for the target invocation. If provided, the keyword-only *daemon* argument
+ sets the process :attr:`daemon` flag to ``True`` or ``False``. If ``None``
+ (the default), this flag will be inherited from the creating process.
+
+ By default, no arguments are passed to *target*.
If a subclass overrides the constructor, it must make sure it invokes the
base class constructor (:meth:`Process.__init__`) before doing anything else
to the process.
+ .. versionchanged:: 3.3
+ Added the *daemon* argument.
+
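+ For example (a sketch)::
+
+    from multiprocessing import Process
+
+    if __name__ == '__main__':
+        p = Process(target=print, args=('hello',), daemon=True)
+        p.start()
+        p.join()
+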
.. method:: run()
Method representing the process's activity.
@@ -337,10 +343,9 @@ The :mod:`multiprocessing` package mostly replicates the API of the
.. method:: join([timeout])
- Block the calling thread until the process whose :meth:`join` method is
- called terminates or until the optional timeout occurs.
-
- If *timeout* is ``None`` then there is no timeout.
+ If the optional argument *timeout* is ``None`` (the default), the method
+ blocks until the process whose :meth:`join` method is called terminates.
+ If *timeout* is a positive number, it blocks at most *timeout* seconds.
A process can be joined many times.
@@ -405,6 +410,21 @@ The :mod:`multiprocessing` package mostly replicates the API of the
See :ref:`multiprocessing-auth-keys`.
+ .. attribute:: sentinel
+
+ A numeric handle of a system object which will become "ready" when
+ the process ends.
+
+ You can use this value if you want to wait on several events at
+ once using :func:`multiprocessing.connection.wait`. Otherwise
+ calling :meth:`join()` is simpler.
+
+ On Windows, this is an OS handle usable with the ``WaitForSingleObject``
+ and ``WaitForMultipleObjects`` family of API calls. On Unix, this is
+ a file descriptor usable with primitives from the :mod:`select` module.
+
+ .. versionadded:: 3.3
+
.. method:: terminate()
Terminate the process. On Unix this is done using the ``SIGTERM`` signal;
@@ -464,7 +484,7 @@ primitives like locks.
For passing messages one can use :func:`Pipe` (for a connection between two
processes) or a queue (which allows multiple producers and consumers).
-The :class:`Queue`, :class:`multiprocessing.queues.SimpleQueue` and :class:`JoinableQueue` types are multi-producer,
+The :class:`Queue`, :class:`SimpleQueue` and :class:`JoinableQueue` types are multi-producer,
multi-consumer FIFO queues modelled on the :class:`queue.Queue` class in the
standard library. They differ in that :class:`Queue` lacks the
:meth:`~queue.Queue.task_done` and :meth:`~queue.Queue.join` methods introduced
@@ -610,7 +630,7 @@ For an example of the usage of queues for interprocess communication see
exits -- see :meth:`join_thread`.
-.. class:: multiprocessing.queues.SimpleQueue()
+.. class:: SimpleQueue()
It is a simplified :class:`Queue` type, very close to a locked :class:`Pipe`.
@@ -766,6 +786,9 @@ Connection objects are usually created using :func:`Pipe` -- see also
*timeout* is a number then this specifies the maximum time in seconds to
block. If *timeout* is ``None`` then an infinite timeout is used.
+ Note that multiple connection objects may be polled at once by
+ using :func:`multiprocessing.connection.wait`.
+
.. method:: send_bytes(buffer[, offset[, size]])
Send byte data from an object supporting the buffer interface as a
@@ -784,9 +807,14 @@ Connection objects are usually created using :func:`Pipe` -- see also
to receive and the other end has closed.
If *maxlength* is specified and the message is longer than *maxlength*
- then :exc:`IOError` is raised and the connection will no longer be
+ then :exc:`OSError` is raised and the connection will no longer be
readable.
+ .. versionchanged:: 3.3
+ This function used to raise a :exc:`IOError`, which is now an
+ alias of :exc:`OSError`.
+
+
.. method:: recv_bytes_into(buffer[, offset])
Read into *buffer* a complete message of byte data sent from the other end
@@ -1657,6 +1685,24 @@ with the :class:`Pool` class.
returned iterator should be considered arbitrary. (Only when there is
only one worker process is the order guaranteed to be "correct".)
+ .. method:: starmap(func, iterable[, chunksize])
+
+ Like :meth:`map` except that the elements of the *iterable* are expected
+ to be iterables that are unpacked as arguments.
+
+ Hence an *iterable* of ``[(1, 2), (3, 4)]`` results in ``[func(1, 2),
+ func(3, 4)]``.
+
+ .. versionadded:: 3.3
+
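+ For example (a sketch; assumes ``from multiprocessing import Pool``)::
+
+    with Pool(processes=4) as pool:
+        print(pool.starmap(pow, [(2, 5), (3, 2)]))   # prints [32, 9]
+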
+ .. method:: starmap_async(func, iterable[, chunksize[, callback[, error_callback]]])
+
+ A combination of :meth:`starmap` and :meth:`map_async` that iterates over
+ an *iterable* of iterables and calls *func* with the iterables unpacked.
+ Returns a result object.
+
+ .. versionadded:: 3.3
+
.. method:: close()
Prevents any more tasks from being submitted to the pool. Once all the
@@ -1737,8 +1783,9 @@ Usually message passing between processes is done using queues or by using
However, the :mod:`multiprocessing.connection` module allows some extra
flexibility. It basically gives a high level message oriented API for dealing
-with sockets or Windows named pipes, and also has support for *digest
-authentication* using the :mod:`hmac` module.
+with sockets or Windows named pipes. It also has support for *digest
+authentication* using the :mod:`hmac` module, and for polling
+multiple connections at the same time.
.. function:: deliver_challenge(connection, authkey)
@@ -1836,6 +1883,38 @@ authentication* using the :mod:`hmac` module.
The address from which the last accepted connection came. If this is
unavailable then it is ``None``.
+.. function:: wait(object_list, timeout=None)
+
+ Wait till an object in *object_list* is ready. Returns the list of
+ those objects in *object_list* which are ready. If *timeout* is a
+ float then the call blocks for at most that many seconds. If
+ *timeout* is ``None`` then it will block for an unlimited period.
+
+ For both Unix and Windows, an object can appear in *object_list* if
+ it is
+
+ * a readable :class:`~multiprocessing.Connection` object;
+ * a connected and readable :class:`socket.socket` object; or
+ * the :attr:`~multiprocessing.Process.sentinel` attribute of a
+ :class:`~multiprocessing.Process` object.
+
+ A connection or socket object is ready when there is data available
+ to be read from it, or the other end has been closed.
+
+   **Unix**: ``wait(object_list, timeout)`` is almost equivalent to
+ ``select.select(object_list, [], [], timeout)``. The difference is
+ that, if :func:`select.select` is interrupted by a signal, it can
+ raise :exc:`OSError` with an error number of ``EINTR``, whereas
+ :func:`wait` will not.
+
+ **Windows**: An item in *object_list* must either be an integer
+ handle which is waitable (according to the definition used by the
+ documentation of the Win32 function ``WaitForMultipleObjects()``)
+ or it can be an object with a :meth:`fileno` method which returns a
+ socket handle or pipe handle. (Note that pipe handles and socket
+ handles are **not** waitable handles.)
+
+ .. versionadded:: 3.3
The module defines two exceptions:
@@ -1887,6 +1966,41 @@ server::
conn.close()
+The following code uses :func:`~multiprocessing.connection.wait` to
+wait for messages from multiple processes at once::
+
+    import time, random
+    from multiprocessing import Process, Pipe, current_process
+    from multiprocessing.connection import wait
+
+    def foo(w):
+        for i in range(10):
+            w.send((i, current_process().name))
+        w.close()
+
+    if __name__ == '__main__':
+        readers = []
+
+        for i in range(4):
+            r, w = Pipe(duplex=False)
+            readers.append(r)
+            p = Process(target=foo, args=(w,))
+            p.start()
+            # We close the writable end of the pipe now to be sure that
+            # p is the only process which owns a handle for it.  This
+            # ensures that when p closes its handle for the writable end,
+            # wait() will promptly report the readable end as being ready.
+            w.close()
+
+        while readers:
+            for r in wait(readers):
+                try:
+                    msg = r.recv()
+                except EOFError:
+                    readers.remove(r)
+                else:
+                    print(msg)
+
.. _multiprocessing-address-formats:
diff --git a/Doc/library/nntplib.rst b/Doc/library/nntplib.rst
index ef507e1..62da72c 100644
--- a/Doc/library/nntplib.rst
+++ b/Doc/library/nntplib.rst
@@ -70,10 +70,23 @@ The module itself defines the following classes:
connecting to an NNTP server on the local machine and intend to call
reader-specific commands, such as ``group``. If you get unexpected
:exc:`NNTPPermanentError`\ s, you might need to set *readermode*.
+   The :class:`NNTP` class supports the :keyword:`with` statement to
+   unconditionally consume :exc:`socket.error` exceptions and to close the NNTP
+   connection when done. Here is a sample of how to use it:
+
+       >>> from nntplib import NNTP
+       >>> with NNTP('news.gmane.org') as n:
+       ...     n.group('gmane.comp.python.committers')
+       ...
+       ('211 1755 1 1755 gmane.comp.python.committers', 1755, 1, 1755, 'gmane.comp.python.committers')
+       >>>
+
.. versionchanged:: 3.2
*usenetrc* is now False by default.
+ .. versionchanged:: 3.3
+ Support for the :keyword:`with` statement was added.
.. class:: NNTP_SSL(host, port=563, user=None, password=None, ssl_context=None, readermode=None, usenetrc=False, [timeout])
@@ -504,6 +517,9 @@ them have been superseded by newer commands in :rfc:`3977`.
article with message ID *id*. Most of the time, this extension is not
enabled by NNTP server administrators.
+ .. deprecated:: 3.3
+ The XPATH extension is not actively used.
+
.. XXX deprecated:
diff --git a/Doc/library/os.rst b/Doc/library/os.rst
index be322a0..74b89b8 100644
--- a/Doc/library/os.rst
+++ b/Doc/library/os.rst
@@ -221,6 +221,17 @@ process and user.
Availability: Unix.
+.. function:: getgrouplist(user, group)
+
+ Return list of group ids that *user* belongs to. If *group* is not in the
+ list, it is included; typically, *group* is specified as the group ID
+ field from the password record for *user*.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
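+
+   For instance, a sketch listing the groups of the current user::
+
+      import os, pwd
+      entry = pwd.getpwuid(os.getuid())
+      print(os.getgrouplist(entry.pw_name, entry.pw_gid))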
+
+
.. function:: getgroups()
Return list of supplemental group ids associated with the current process.
@@ -288,6 +299,22 @@ process and user.
.. versionchanged:: 3.2
Added support for Windows.
+.. function:: getpriority(which, who)
+
+ .. index:: single: process; scheduling priority
+
+ Get program scheduling priority. The value *which* is one of
+ :const:`PRIO_PROCESS`, :const:`PRIO_PGRP`, or :const:`PRIO_USER`, and *who*
+ is interpreted relative to *which* (a process identifier for
+ :const:`PRIO_PROCESS`, process group identifier for :const:`PRIO_PGRP`, and a
+ user ID for :const:`PRIO_USER`). A zero value for *who* denotes
+ (respectively) the calling process, the process group of the calling process,
+ or the real user ID of the calling process.
+
+   Availability: Unix.
+
+ .. versionadded:: 3.3
+
.. function:: getresuid()
Return a tuple (ruid, euid, suid) denoting the current process's
@@ -338,6 +365,15 @@ process and user.
.. versionadded:: 3.2
+.. data:: PRIO_PROCESS
+ PRIO_PGRP
+ PRIO_USER
+
+ Parameters for :func:`getpriority` and :func:`setpriority` functions.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
.. function:: putenv(key, value)
@@ -407,6 +443,25 @@ process and user.
Availability: Unix.
+.. function:: setpriority(which, who, priority)
+
+ .. index:: single: process; scheduling priority
+
+ Set program scheduling priority. The value *which* is one of
+ :const:`PRIO_PROCESS`, :const:`PRIO_PGRP`, or :const:`PRIO_USER`, and *who*
+ is interpreted relative to *which* (a process identifier for
+ :const:`PRIO_PROCESS`, process group identifier for :const:`PRIO_PGRP`, and a
+ user ID for :const:`PRIO_USER`). A zero value for *who* denotes
+ (respectively) the calling process, the process group of the calling process,
+ or the real user ID of the calling process.
+ *priority* is a value in the range -20 to 19. The default priority is 0;
+ lower priorities cause more favorable scheduling.
+
+   Availability: Unix.
+
+ .. versionadded:: 3.3
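+
+   A small sketch; raising your own nice value needs no privileges, while
+   lowering it back usually does::
+
+      import os
+      nice_now = os.getpriority(os.PRIO_PROCESS, 0)
+      os.setpriority(os.PRIO_PROCESS, 0, nice_now + 1)   # be "nicer"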
+
+
.. function:: setregid(rgid, egid)
Set the current process's real and effective group ids.
@@ -536,7 +591,8 @@ These functions create new :term:`file objects <file object>`. (See also :func:`
the built-in :func:`open` function.
When specified, the *mode* argument must start with one of the letters
- ``'r'``, ``'w'``, or ``'a'``, otherwise a :exc:`ValueError` is raised.
+ ``'r'``, ``'w'``, ``'x'`` or ``'a'``, otherwise a :exc:`ValueError` is
+ raised.
On Unix, when the *mode* argument starts with ``'a'``, the *O_APPEND* flag is
set on the file descriptor (which the :c:func:`fdopen` implementation already
@@ -544,6 +600,8 @@ These functions create new :term:`file objects <file object>`. (See also :func:`
Availability: Unix, Windows.
+ .. versionchanged:: 3.3
+ The ``'x'`` mode was added.
.. _os-fd-ops:
@@ -564,6 +622,21 @@ associated with a :term:`file object` when required. Note that using the file
descriptor directly will bypass the file object methods, ignoring aspects such
as internal buffering of data.
+.. data:: AT_SYMLINK_NOFOLLOW
+ AT_EACCESS
+ AT_FDCWD
+ AT_REMOVEDIR
+ AT_SYMLINK_FOLLOW
+ UTIME_NOW
+ UTIME_OMIT
+
+ These parameters are used as flags to the \*at family of functions.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
+
+
.. function:: close(fd)
Close file descriptor *fd*.
@@ -612,6 +685,19 @@ as internal buffering of data.
Availability: Unix, Windows.
+.. function:: faccessat(dirfd, path, mode, flags=0)
+
+ Like :func:`access` but if *path* is relative, it is taken as relative to *dirfd*.
+ *flags* is optional and can be constructed by ORing together zero or more
+ of these values: :data:`AT_SYMLINK_NOFOLLOW`, :data:`AT_EACCESS`.
+ If *path* is relative and *dirfd* is the special value :data:`AT_FDCWD`, then *path*
+ is interpreted relative to the current working directory.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
+
+
.. function:: fchmod(fd, mode)
Change the mode of the file given by *fd* to the numeric *mode*. See the docs
@@ -620,6 +706,18 @@ as internal buffering of data.
Availability: Unix.
+.. function:: fchmodat(dirfd, path, mode, flags=0)
+
+ Like :func:`chmod` but if *path* is relative, it is taken as relative to *dirfd*.
+ *flags* is optional and may be 0 or :data:`AT_SYMLINK_NOFOLLOW`.
+ If *path* is relative and *dirfd* is the special value :data:`AT_FDCWD`, then *path*
+ is interpreted relative to the current working directory.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
+
+
.. function:: fchown(fd, uid, gid)
Change the owner and group id of the file given by *fd* to the numeric *uid*
@@ -628,6 +726,18 @@ as internal buffering of data.
Availability: Unix.
+.. function:: fchownat(dirfd, path, uid, gid, flags=0)
+
+ Like :func:`chown` but if *path* is relative, it is taken as relative to *dirfd*.
+ *flags* is optional and may be 0 or :data:`AT_SYMLINK_NOFOLLOW`.
+ If *path* is relative and *dirfd* is the special value :data:`AT_FDCWD`, then *path*
+ is interpreted relative to the current working directory.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
+
+
.. function:: fdatasync(fd)
Force write of file with filedescriptor *fd* to disk. Does not force update of
@@ -639,6 +749,47 @@ as internal buffering of data.
This function is not available on MacOS.
+.. function:: fgetxattr(fd, attr)
+
+ This works exactly like :func:`getxattr` but operates on a file descriptor,
+ *fd*, instead of a path.
+
+ Availability: Linux
+
+ .. versionadded:: 3.3
+
+
+.. function:: flistxattr(fd)
+
+ This is exactly like :func:`listxattr` but operates on a file descriptor,
+ *fd*, instead of a path.
+
+ Availability: Linux
+
+ .. versionadded:: 3.3
+
+
+.. function:: flistdir(fd)
+
+ Like :func:`listdir`, but uses a file descriptor instead and always returns
+ strings.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
+
+
+.. function:: fexecve(fd, args, env)
+
+ Execute the program specified by a file descriptor *fd* with arguments given
+ by *args* and environment given by *env*, replacing the current process.
+ *args* and *env* are given as in :func:`execve`.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
+
+
.. function:: fpathconf(fd, name)
Return system configuration information relevant to an open file. *name*
@@ -663,6 +814,17 @@ as internal buffering of data.
Availability: Unix, Windows.
+.. function:: fstatat(dirfd, path, flags=0)
+
+ Like :func:`stat` but if *path* is relative, it is taken as relative to *dirfd*.
+ *flags* is optional and may be 0 or :data:`AT_SYMLINK_NOFOLLOW`.
+ If *path* is relative and *dirfd* is the special value :data:`AT_FDCWD`, then *path*
+ is interpreted relative to the current working directory.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
+
.. function:: fstatvfs(fd)
@@ -692,6 +854,80 @@ as internal buffering of data.
Availability: Unix.
+.. function:: fremovexattr(fd, attr)
+
+ This works exactly like :func:`removexattr` but operates on a file
+ descriptor, *fd*, instead of a path.
+
+ Availability: Linux
+
+ .. versionadded:: 3.3
+
+
+.. function:: fsetxattr(fd, attr, value, flags=0)
+
+   This works exactly like :func:`setxattr` but operates on a file descriptor,
+   *fd*, instead of a path.
+
+ Availability: Linux
+
+ .. versionadded:: 3.3
+
+
+.. function:: futimesat(dirfd, path[, times])
+
+ Like :func:`utime` but if *path* is relative, it is taken as relative to *dirfd*.
+ If *path* is relative and *dirfd* is the special value :data:`AT_FDCWD`, then *path*
+ is interpreted relative to the current working directory. *times* must be a
+   2-tuple of numbers, of the form ``(atime, mtime)``, or ``None``.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
+
+
+.. function:: futimens(fd[, atimes, mtimes])
+
+ Updates the timestamps of a file specified by the file descriptor *fd*, with
+ nanosecond precision.
+ If no second argument is given, set *atime* and *mtime* to the current time.
+ *atimes* and *mtimes* must be 2-tuples of numbers, of the form
+ ``(atime_sec, atime_nsec)`` and ``(mtime_sec, mtime_nsec)`` respectively,
+ or ``None``.
+ If *atime_nsec* or *mtime_nsec* is specified as :data:`UTIME_NOW`, the corresponding
+ timestamp is updated to the current time.
+ If *atime_nsec* or *mtime_nsec* is specified as :data:`UTIME_OMIT`, the corresponding
+ timestamp is not updated.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
+
+
+.. data:: UTIME_NOW
+ UTIME_OMIT
+
+ Flags used with :func:`futimens` to specify that the timestamp must be
+ updated either to the current time or not updated at all.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
+
+
+.. function:: futimes(fd[, times])
+
+   Set the access and modified time of the file specified by the file
+   descriptor *fd* to the given values. *times* must be a 2-tuple of numbers,
+   of the form ``(atime, mtime)``, or ``None``. If no second argument is
+   given, the access and modified times are set to the current time.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
+
+
.. function:: isatty(fd)
Return ``True`` if the file descriptor *fd* is open and connected to a
@@ -700,6 +936,44 @@ as internal buffering of data.
Availability: Unix.
+.. function:: linkat(srcfd, srcpath, dstfd, dstpath, flags=0)
+
+ Like :func:`link` but if *srcpath* is relative, it is taken as relative to *srcfd*
+ and if *dstpath* is relative, it is taken as relative to *dstfd*.
+ *flags* is optional and may be 0 or :data:`AT_SYMLINK_FOLLOW`.
+ If *srcpath* is relative and *srcfd* is the special value :data:`AT_FDCWD`, then
+ *srcpath* is interpreted relative to the current working directory. This
+ also applies for *dstpath*.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
+
+
+.. function:: lockf(fd, cmd, len)
+
+ Apply, test or remove a POSIX lock on an open file descriptor.
+ *fd* is an open file descriptor.
+ *cmd* specifies the command to use - one of :data:`F_LOCK`, :data:`F_TLOCK`,
+ :data:`F_ULOCK` or :data:`F_TEST`.
+ *len* specifies the section of the file to lock.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
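+
+   For example, a sketch taking an exclusive lock on a whole file (the file
+   name is hypothetical; the constants are described below)::
+
+      import os
+      fd = os.open('counter.db', os.O_RDWR | os.O_CREAT)
+      os.lockf(fd, os.F_LOCK, 0)      # a length of 0 locks to end of file
+      try:
+          ...                         # exclusive access to the file here
+      finally:
+          os.lockf(fd, os.F_ULOCK, 0)
+          os.close(fd)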
+
+
+.. data:: F_LOCK
+ F_TLOCK
+ F_ULOCK
+ F_TEST
+
+ Flags that specify what action :func:`lockf` will take.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
+
.. function:: lseek(fd, pos, how)
Set the current position of file descriptor *fd* to position *pos*, modified
@@ -719,6 +993,39 @@ as internal buffering of data.
respectively. Availability: Windows, Unix.
+.. function:: mkdirat(dirfd, path, mode=0o777)
+
+ Like :func:`mkdir` but if *path* is relative, it is taken as relative to *dirfd*.
+ If *path* is relative and *dirfd* is the special value :data:`AT_FDCWD`, then *path*
+ is interpreted relative to the current working directory.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
+
+
+.. function:: mkfifoat(dirfd, path, mode=0o666)
+
+ Like :func:`mkfifo` but if *path* is relative, it is taken as relative to *dirfd*.
+ If *path* is relative and *dirfd* is the special value :data:`AT_FDCWD`, then *path*
+ is interpreted relative to the current working directory.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
+
+
+.. function:: mknodat(dirfd, path, mode=0o600, device=0)
+
+ Like :func:`mknod` but if *path* is relative, it is taken as relative to *dirfd*.
+ If *path* is relative and *dirfd* is the special value :data:`AT_FDCWD`, then *path*
+ is interpreted relative to the current working directory.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
+
+
.. function:: open(file, flags[, mode])
Open the file *file* and set various flags according to *flags* and possibly
@@ -741,6 +1048,17 @@ as internal buffering of data.
wrap a file descriptor in a file object, use :func:`fdopen`.
+.. function:: openat(dirfd, path, flags, mode=0o777)
+
+ Like :func:`open` but if *path* is relative, it is taken as relative to *dirfd*.
+ If *path* is relative and *dirfd* is the special value :data:`AT_FDCWD`, then *path*
+ is interpreted relative to the current working directory.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
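+
+   An illustrative sketch of the *dirfd* pattern shared by the \*at
+   functions (the path names are hypothetical)::
+
+      import os
+      dirfd = os.open('/tmp', os.O_RDONLY)
+      fd = os.openat(dirfd, 'example.txt',
+                     os.O_WRONLY | os.O_CREAT, 0o644)
+      os.write(fd, b'hello')
+      os.close(fd)
+      os.close(dirfd)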
+
+
.. function:: openpty()
.. index:: module: pty
@@ -760,6 +1078,79 @@ as internal buffering of data.
Availability: Unix, Windows.
+.. function:: pipe2(flags)
+
+ Create a pipe with *flags* set atomically.
+ *flags* can be constructed by ORing together one or more of these values:
+ :data:`O_NONBLOCK`, :data:`O_CLOEXEC`.
+ Return a pair of file descriptors ``(r, w)`` usable for reading and writing,
+ respectively.
+
+ Availability: some flavors of Unix.
+
+ .. versionadded:: 3.3
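+
+   A minimal sketch::
+
+      import os
+      r, w = os.pipe2(os.O_CLOEXEC)
+      os.write(w, b'ping')
+      print(os.read(r, 4))   # b'ping'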
+
+
+.. function:: posix_fallocate(fd, offset, len)
+
+ Ensures that enough disk space is allocated for the file specified by *fd*
+ starting from *offset* and continuing for *len* bytes.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
+
+
+.. function:: posix_fadvise(fd, offset, len, advice)
+
+ Announces an intention to access data in a specific pattern thus allowing
+ the kernel to make optimizations.
+ The advice applies to the region of the file specified by *fd* starting at
+ *offset* and continuing for *len* bytes.
+ *advice* is one of :data:`POSIX_FADV_NORMAL`, :data:`POSIX_FADV_SEQUENTIAL`,
+ :data:`POSIX_FADV_RANDOM`, :data:`POSIX_FADV_NOREUSE`,
+ :data:`POSIX_FADV_WILLNEED` or :data:`POSIX_FADV_DONTNEED`.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
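+
+   For instance, a sketch hinting that a large file (name hypothetical) will
+   be read sequentially, using one of the constants described below::
+
+      import os
+      fd = os.open('server.log', os.O_RDONLY)
+      os.posix_fadvise(fd, 0, 0, os.POSIX_FADV_SEQUENTIAL)
+      # ... sequential reads of fd follow ...
+      os.close(fd)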
+
+
+.. data:: POSIX_FADV_NORMAL
+ POSIX_FADV_SEQUENTIAL
+ POSIX_FADV_RANDOM
+ POSIX_FADV_NOREUSE
+ POSIX_FADV_WILLNEED
+ POSIX_FADV_DONTNEED
+
+ Flags that can be used in *advice* in :func:`posix_fadvise` that specify
+ the access pattern that is likely to be used.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
+
+
+.. function:: pread(fd, buffersize, offset)
+
+   Read from a file descriptor, *fd*, at a position of *offset*. It will read
+   up to *buffersize* bytes. The file offset remains unchanged.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
+
+
+.. function:: pwrite(fd, string, offset)
+
+ Write *string* to a file descriptor, *fd*, from *offset*, leaving the file
+ offset unchanged.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
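+
+   Since neither call moves the file offset, the two compose naturally; a
+   small sketch (the file name is hypothetical)::
+
+      import os
+      fd = os.open('demo.bin', os.O_RDWR | os.O_CREAT)
+      os.pwrite(fd, b'hello', 0)
+      print(os.pread(fd, 5, 0))   # b'hello'
+      os.close(fd)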
+
+
.. function:: read(fd, n)
Read at most *n* bytes from file descriptor *fd*. Return a bytestring containing the
@@ -777,6 +1168,93 @@ as internal buffering of data.
:meth:`~file.readline` methods.
+.. function:: sendfile(out, in, offset, nbytes)
+ sendfile(out, in, offset, nbytes, headers=None, trailers=None, flags=0)
+
+ Copy *nbytes* bytes from file descriptor *in* to file descriptor *out*
+ starting at *offset*.
+ Return the number of bytes sent. When EOF is reached return 0.
+
+ The first function notation is supported by all platforms that define
+ :func:`sendfile`.
+
+ On Linux, if *offset* is given as ``None``, the bytes are read from the
+ current position of *in* and the position of *in* is updated.
+
+ The second case may be used on Mac OS X and FreeBSD where *headers* and
+ *trailers* are arbitrary sequences of buffers that are written before and
+ after the data from *in* is written. It returns the same as the first case.
+
+ On Mac OS X and FreeBSD, a value of 0 for *nbytes* specifies to send until
+ the end of *in* is reached.
+
+ On Solaris, *out* may be the file descriptor of a regular file or the file
+ descriptor of a socket. On all other platforms, *out* must be the file
+ descriptor of an open socket.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
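+
+   A rough serving-loop sketch, assuming ``sock`` is an already connected
+   socket and ``fd`` an open regular file::
+
+      offset = 0
+      while True:
+          sent = os.sendfile(sock.fileno(), fd, offset, 65536)
+          if sent == 0:
+              break             # EOF reached
+          offset += sent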
+
+
+.. data:: SF_NODISKIO
+ SF_MNOWAIT
+ SF_SYNC
+
+ Parameters to the :func:`sendfile` function, if the implementation supports
+ them.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
+
+
+.. function:: readlinkat(dirfd, path)
+
+ Like :func:`readlink` but if *path* is relative, it is taken as relative to *dirfd*.
+ If *path* is relative and *dirfd* is the special value :data:`AT_FDCWD`, then *path*
+ is interpreted relative to the current working directory.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
+
+
+.. function:: renameat(olddirfd, oldpath, newdirfd, newpath)
+
+ Like :func:`rename` but if *oldpath* is relative, it is taken as relative to
+ *olddirfd* and if *newpath* is relative, it is taken as relative to *newdirfd*.
+ If *oldpath* is relative and *olddirfd* is the special value :data:`AT_FDCWD`, then
+ *oldpath* is interpreted relative to the current working directory. This
+ also applies for *newpath*.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
+
+
+.. function:: symlinkat(src, dstfd, dst)
+
+ Like :func:`symlink` but if *dst* is relative, it is taken as relative to *dstfd*.
+ If *dst* is relative and *dstfd* is the special value :data:`AT_FDCWD`, then *dst*
+ is interpreted relative to the current working directory.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
+
+
+.. function:: readv(fd, buffers)
+
+ Read from a file descriptor into a number of writable buffers. *buffers* is
+ an arbitrary sequence of writable buffers. Returns the total number of bytes
+ read.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
+
+
.. function:: tcgetpgrp(fd)
Return the process group associated with the terminal given by *fd* (an open
@@ -802,6 +1280,38 @@ as internal buffering of data.
Availability: Unix.
+.. function:: unlinkat(dirfd, path, flags=0)
+
+ Like :func:`unlink` but if *path* is relative, it is taken as relative to *dirfd*.
+ *flags* is optional and may be 0 or :data:`AT_REMOVEDIR`. If :data:`AT_REMOVEDIR` is
+ specified, :func:`unlinkat` behaves like :func:`rmdir`.
+ If *path* is relative and *dirfd* is the special value :data:`AT_FDCWD`, then *path*
+ is interpreted relative to the current working directory.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
+
+
+.. function:: utimensat(dirfd, path[, atime=(atime_sec, atime_nsec), mtime=(mtime_sec, mtime_nsec), flags=0])
+
+ Updates the timestamps of a file with nanosecond precision.
+ The *atime* and *mtime* tuples default to ``None``, which sets those
+ values to the current time.
+ If *atime_nsec* or *mtime_nsec* is specified as :data:`UTIME_NOW`, the corresponding
+ timestamp is updated to the current time.
+ If *atime_nsec* or *mtime_nsec* is specified as :data:`UTIME_OMIT`, the corresponding
+ timestamp is not updated.
+ If *path* is relative, it is taken as relative to *dirfd*.
+ *flags* is optional and may be 0 (the default) or :data:`AT_SYMLINK_NOFOLLOW`.
+ If *path* is relative and *dirfd* is the special value :data:`AT_FDCWD`, then *path*
+ is interpreted relative to the current working directory.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
+
+
.. function:: write(fd, str)
Write the bytestring in *str* to file descriptor *fd*. Return the number of
@@ -818,6 +1328,17 @@ as internal buffering of data.
:meth:`~file.write` method.
+.. function:: writev(fd, buffers)
+
+ Write the contents of *buffers* to file descriptor *fd*, where *buffers*
+ is an arbitrary sequence of buffers.
+ Returns the total number of bytes written.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
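+
+   Together with :func:`readv` this enables scatter/gather I/O; a small
+   sketch over a pipe::
+
+      import os
+      r, w = os.pipe()
+      os.writev(w, [b'Hello, ', b'world!'])
+      head, tail = bytearray(7), bytearray(6)
+      os.readv(r, [head, tail])
+      print(bytes(head), bytes(tail))   # b'Hello, ' b'world!'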
+
+
.. _open-constants:
``open()`` flag constants
@@ -849,9 +1370,12 @@ or `the MSDN <http://msdn.microsoft.com/en-us/library/z0kc8e3z.aspx>`_ on Window
O_NOCTTY
O_SHLOCK
O_EXLOCK
+ O_CLOEXEC
These constants are only available on Unix.
+ .. versionchanged:: 3.3
+ Add :data:`O_CLOEXEC` constant.
.. data:: O_BINARY
O_NOINHERIT
@@ -874,6 +1398,56 @@ or `the MSDN <http://msdn.microsoft.com/en-us/library/z0kc8e3z.aspx>`_ on Window
the C library.
+.. data:: RTLD_LAZY
+ RTLD_NOW
+ RTLD_GLOBAL
+ RTLD_LOCAL
+ RTLD_NODELETE
+ RTLD_NOLOAD
+ RTLD_DEEPBIND
+
+ See the Unix manual page :manpage:`dlopen(3)`.
+
+ .. versionadded:: 3.3
+
+
+.. _terminal-size:
+
+Querying the size of a terminal
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. versionadded:: 3.3
+
+.. function:: get_terminal_size(fd=STDOUT_FILENO)
+
+   Return the size of the terminal window as ``(columns, lines)``, a
+   tuple of type :class:`terminal_size`.
+
+   The optional argument ``fd`` (default ``STDOUT_FILENO``, or standard
+   output) specifies which file descriptor should be queried.
+
+   If the file descriptor is not connected to a terminal, an :exc:`OSError`
+   is raised.
+
+   :func:`shutil.get_terminal_size` is the high-level function which
+   should normally be used; ``os.get_terminal_size`` is the low-level
+   implementation.
+
+ Availability: Unix, Windows.
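+
+   For example (the reported size depends on the actual window)::
+
+      >>> os.get_terminal_size()
+      os.terminal_size(columns=80, lines=24)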
+
+.. class:: terminal_size(tuple)
+
+ A tuple of ``(columns, lines)`` for holding terminal window size.
+
+ .. attribute:: columns
+
+ Width of the terminal window in characters.
+
+ .. attribute:: lines
+
+ Height of the terminal window in characters.
+
+
.. _os-file-dir:
Files and Directories
@@ -909,11 +1483,8 @@ Files and Directories
try:
fp = open("myfile")
- except IOError as e:
- if e.errno == errno.EACCES:
- return "some default data"
- # Not a permission error.
- raise
+ except PermissionError:
+ return "some default data"
else:
with fp:
return fp.read()
@@ -1049,9 +1620,23 @@ Files and Directories
Change the owner and group id of *path* to the numeric *uid* and *gid*. To leave
one of the ids unchanged, set it to -1.
+ See :func:`shutil.chown` for a higher-level function that accepts names in
+ addition to numeric ids.
+
Availability: Unix.
+.. function:: getxattr(path, attr)
+
+ Return the value of the extended filesystem attribute *attr* for
+ *path*. *attr* can be bytes or str. If it is str, it is encoded with the
+ filesystem encoding.
+
+ Availability: Linux
+
+ .. versionadded:: 3.3
+
+
.. function:: lchflags(path, flags)
Set the flags of *path* to the numeric *flags*, like :func:`chflags`, but do not
@@ -1077,6 +1662,15 @@ Files and Directories
Availability: Unix.
+.. function:: lgetxattr(path, attr)
+
+ This works exactly like :func:`getxattr` but doesn't follow symlinks.
+
+ Availability: Linux
+
+ .. versionadded:: 3.3
+
+
.. function:: link(source, link_name)
Create a hard link pointing to *source* named *link_name*.
@@ -1101,6 +1695,44 @@ Files and Directories
.. versionchanged:: 3.2
The *path* parameter became optional.
+
+.. function:: listxattr(path)
+
+ Return a list of the extended filesystem attributes on *path*. Attributes are
+ returned as string decoded with the filesystem encoding.
+
+ Availability: Linux
+
+ .. versionadded:: 3.3
+
+
+.. function:: llistxattr(path)
+
+ This works exactly like :func:`listxattr` but doesn't follow symlinks.
+
+ Availability: Linux
+
+ .. versionadded:: 3.3
+
+
+.. function:: lremovexattr(path, attr)
+
+ This works exactly like :func:`removexattr` but doesn't follow symlinks.
+
+ Availability: Linux
+
+ .. versionadded:: 3.3
+
+
+.. function:: lsetxattr(path, attr, value, flags=0)
+
+ This works exactly like :func:`setxattr` but doesn't follow symlinks.
+
+ Availability: Linux
+
+ .. versionadded:: 3.3
+
+
.. function:: lstat(path)
Perform the equivalent of an :c:func:`lstat` system call on the given path.
@@ -1112,6 +1744,18 @@ Files and Directories
Added support for Windows 6.0 (Vista) symbolic links.
+.. function:: lutimes(path[, times])
+
+ Like :func:`utime`, but if *path* is a symbolic link, it is not
+ dereferenced. *times* must be a 2-tuple of numbers, of the form
+   ``(atime, mtime)``, or ``None``.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
+
+
.. function:: mkfifo(path[, mode])
Create a FIFO (a named pipe) named *path* with numeric mode *mode*. The
@@ -1263,6 +1907,17 @@ Files and Directories
successfully removed.
+.. function:: removexattr(path, attr)
+
+ Removes the extended filesystem attribute *attr* from *path*. *attr* should
+ be bytes or str. If it is a string, it is encoded with the filesystem
+ encoding.
+
+ Availability: Linux
+
+ .. versionadded:: 3.3
+
+
.. function:: rename(src, dst)
Rename the file or directory *src* to *dst*. If *dst* is a directory,
@@ -1271,8 +1926,9 @@ Files and Directories
Unix flavors if *src* and *dst* are on different filesystems. If successful,
the renaming will be an atomic operation (this is a POSIX requirement). On
Windows, if *dst* already exists, :exc:`OSError` will be raised even if it is a
- file; there may be no way to implement an atomic rename when *dst* names an
- existing file.
+ file.
+
+ If you want cross-platform overwriting of the destination, use :func:`replace`.
Availability: Unix, Windows.
@@ -1290,6 +1946,19 @@ Files and Directories
permissions needed to remove the leaf directory or file.
+.. function:: replace(src, dst)
+
+ Rename the file or directory *src* to *dst*. If *dst* is a directory,
+ :exc:`OSError` will be raised. If *dst* exists and is a file, it will
+ be replaced silently if the user has permission. The operation may fail
+ if *src* and *dst* are on different filesystems. If successful,
+ the renaming will be an atomic operation (this is a POSIX requirement).
+
+   Availability: Unix, Windows.
+
+ .. versionadded:: 3.3
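+
+   A typical write-then-swap sketch (the file names are hypothetical)::
+
+      import os
+      with open('config.json.tmp', 'w') as f:
+          f.write('{"debug": false}')
+      os.replace('config.json.tmp', 'config.json')   # atomic swap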
+
+
.. function:: rmdir(path)
Remove (delete) the directory *path*. Only works when the directory is
@@ -1299,6 +1968,44 @@ Files and Directories
Availability: Unix, Windows.
+.. data:: XATTR_SIZE_MAX
+
+ The maximum size the value of an extended attribute can be. Currently, this
+ is 64 kilobytes on Linux.
+
+
+.. data:: XATTR_CREATE
+
+ This is a possible value for the flags argument in :func:`setxattr`. It
+ indicates the operation must create an attribute.
+
+
+.. data:: XATTR_REPLACE
+
+ This is a possible value for the flags argument in :func:`setxattr`. It
+ indicates the operation must replace an existing attribute.
+
+
+.. function:: setxattr(path, attr, value, flags=0)
+
+ Set the extended filesystem attribute *attr* on *path* to *value*. *attr*
+ must be a bytes or str with no embedded NULs. If it is str, it is encoded
+ with the filesystem encoding. *flags* may be :data:`XATTR_REPLACE` or
+ :data:`XATTR_CREATE`. If :data:`XATTR_REPLACE` is given and the attribute
+   does not exist, ``ENODATA`` will be raised. If :data:`XATTR_CREATE` is given
+   and the attribute already exists, the attribute will not be created and
+   ``EEXIST`` will be raised.
+
+ Availability: Linux
+
+ .. note::
+
+ A bug in Linux kernel versions less than 2.6.39 caused the flags argument
+ to be ignored on some filesystems.
+
+ .. versionadded:: 3.3
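+
+   A round-trip sketch, assuming ``draft.txt`` exists on a filesystem with
+   xattr support::
+
+      import os
+      os.setxattr('draft.txt', 'user.status', b'in-review')
+      print(os.getxattr('draft.txt', 'user.status'))   # b'in-review'
+      print(os.listxattr('draft.txt'))                 # ['user.status']
+      os.removexattr('draft.txt', 'user.status')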
+
+
.. function:: stat(path)
Perform the equivalent of a :c:func:`stat` system call on the given path.
@@ -1453,6 +2160,25 @@ Files and Directories
Added support for Windows 6.0 (Vista) symbolic links.
+.. function:: sync()
+
+ Force write of everything to disk.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
+
+
+.. function:: truncate(path, length)
+
+ Truncate the file corresponding to *path*, so that it is at most
+ *length* bytes in size.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
+
+
.. function:: unlink(path)
Remove (delete) the file *path*. This is the same function as
@@ -1462,18 +2188,19 @@ Files and Directories
Availability: Unix, Windows.
-.. function:: utime(path, times)
+.. function:: utime(path[, times])
Set the access and modified times of the file specified by *path*. If *times*
- is ``None``, then the file's access and modified times are set to the current
- time. (The effect is similar to running the Unix program :program:`touch` on
- the path.) Otherwise, *times* must be a 2-tuple of numbers, of the form
- ``(atime, mtime)`` which is used to set the access and modified times,
- respectively. Whether a directory can be given for *path* depends on whether
- the operating system implements directories as files (for example, Windows
- does not). Note that the exact times you set here may not be returned by a
- subsequent :func:`~os.stat` call, depending on the resolution with which your
- operating system records access and modification times; see :func:`~os.stat`.
+ is ``None`` or not specified, then the file's access and modified times are
+ set to the current time. (The effect is similar to running the Unix program
+ :program:`touch` on the path.) Otherwise, *times* must be a 2-tuple of
+ numbers, of the form ``(atime, mtime)`` which is used to set the access and
+ modified times, respectively. Whether a directory can be given for *path*
+ depends on whether the operating system implements directories as files
+ (for example, Windows does not). Note that the exact times you set here may
+ not be returned by a subsequent :func:`~os.stat` call, depending on the
+ resolution with which your operating system records access and modification
+ times; see :func:`~os.stat`.
Availability: Unix, Windows.
@@ -1561,6 +2288,57 @@ Files and Directories
os.rmdir(os.path.join(root, name))
+.. function:: fwalk(top, topdown=True, onerror=None, followlinks=False)
+
+ .. index::
+ single: directory; walking
+ single: directory; traversal
+
+ This behaves exactly like :func:`walk`, except that it yields a 4-tuple
+ ``(dirpath, dirnames, filenames, dirfd)``.
+
+ *dirpath*, *dirnames* and *filenames* are identical to :func:`walk` output,
+ and *dirfd* is a file descriptor referring to the directory *dirpath*.
+
+ .. note::
+
+ Since :func:`fwalk` yields file descriptors, those are only valid until
+ the next iteration step, so you should duplicate them (e.g. with
+ :func:`dup`) if you want to keep them longer.
+
+ This example displays the number of bytes taken by non-directory files in each
+ directory under the starting directory, except that it doesn't look under any
+ CVS subdirectory::
+
+      import os
+      for root, dirs, files, rootfd in os.fwalk('python/Lib/email'):
+          print(root, "consumes", end="")
+          print(sum([os.fstatat(rootfd, name).st_size for name in files]),
+                end="")
+          print("bytes in", len(files), "non-directory files")
+          if 'CVS' in dirs:
+              dirs.remove('CVS')  # don't visit CVS directories
+
+ In the next example, walking the tree bottom-up is essential:
+ :func:`unlinkat` doesn't allow deleting a directory before the directory is
+ empty::
+
+      # Delete everything reachable from the directory named in "top",
+      # assuming there are no symbolic links.
+      # CAUTION:  This is dangerous!  For example, if top == '/', it
+      # could delete all your disk files.
+      import os
+      for root, dirs, files, rootfd in os.fwalk(top, topdown=False):
+          for name in files:
+              os.unlinkat(rootfd, name)
+          for name in dirs:
+              os.unlinkat(rootfd, name, os.AT_REMOVEDIR)
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
+
+
.. _os-process:
Process Management
@@ -1825,6 +2603,8 @@ written in Python, such as a mail server's external command delivery program.
will be set to *sig*. The Windows version of :func:`kill` additionally takes
process handles to be killed.
+ See also :func:`signal.pthread_kill`.
+
.. versionadded:: 3.2
Windows support.
@@ -2035,6 +2815,58 @@ written in Python, such as a mail server's external command delivery program.
Availability: Unix.
+.. function:: waitid(idtype, id, options)
+
+ Wait for the completion of one or more child processes.
+ *idtype* can be :data:`P_PID`, :data:`P_PGID` or :data:`P_ALL`.
+ *id* specifies the pid to wait on.
+ *options* is constructed from the ORing of one or more of :data:`WEXITED`,
+ :data:`WSTOPPED` or :data:`WCONTINUED` and additionally may be ORed with
+ :data:`WNOHANG` or :data:`WNOWAIT`. The return value is an object
+ representing the data contained in the :c:type:`siginfo_t` structure, namely:
+ :attr:`si_pid`, :attr:`si_uid`, :attr:`si_signo`, :attr:`si_status`,
+ :attr:`si_code` or ``None`` if :data:`WNOHANG` is specified and there are no
+ children in a waitable state.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
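+
+   A short sketch that reaps a single child, using the constants described
+   below::
+
+      import os
+      pid = os.fork()
+      if pid == 0:
+          os._exit(7)
+      info = os.waitid(os.P_PID, pid, os.WEXITED)
+      print(info.si_pid == pid, info.si_status)   # True 7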
+
+.. data:: P_PID
+ P_PGID
+ P_ALL
+
+ These are the possible values for *idtype* in :func:`waitid`. They affect
+ how *id* is interpreted.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
+
+.. data:: WEXITED
+ WSTOPPED
+ WNOWAIT
+
+ Flags that can be used in *options* in :func:`waitid` that specify what
+ child signal to wait for.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
+
+
+.. data:: CLD_EXITED
+ CLD_DUMPED
+ CLD_TRAPPED
+ CLD_CONTINUED
+
+ These are the possible values for :attr:`si_code` in the result returned by
+ :func:`waitid`.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
+
.. function:: waitpid(pid, options)
@@ -2176,6 +3008,155 @@ used to determine the disposition of a process.
Availability: Unix.
+Interface to the scheduler
+--------------------------
+
+These functions control how a process is allocated CPU time by the operating
+system. They are only available on some Unix platforms. For more detailed
+information, consult your Unix manpages.
+
+.. versionadded:: 3.3
+
+The following scheduling policies are exposed if they are supported by the
+operating system.
+
+.. data:: SCHED_OTHER
+
+ The default scheduling policy.
+
+.. data:: SCHED_BATCH
+
+ Scheduling policy for CPU-intensive processes that tries to preserve
+ interactivity on the rest of the computer.
+
+.. data:: SCHED_IDLE
+
+ Scheduling policy for extremely low priority background tasks.
+
+.. data:: SCHED_SPORADIC
+
+ Scheduling policy for sporadic server programs.
+
+.. data:: SCHED_FIFO
+
+ A First In First Out scheduling policy.
+
+.. data:: SCHED_RR
+
+ A round-robin scheduling policy.
+
+.. data:: SCHED_RESET_ON_FORK
+
+   This flag can be OR'ed with any other scheduling policy. When a process with
+ this flag set forks, its child's scheduling policy and priority are reset to
+ the default.
+
+
+.. class:: sched_param(sched_priority)
+
+ This class represents tunable scheduling parameters used in
+ :func:`sched_setparam`, :func:`sched_setscheduler`, and
+ :func:`sched_getparam`. It is immutable.
+
+ At the moment, there is only one possible parameter:
+
+ .. attribute:: sched_priority
+
+ The scheduling priority for a scheduling policy.
+
+
+.. function:: sched_get_priority_min(policy)
+
+ Get the minimum priority value for *policy*. *policy* is one of the
+ scheduling policy constants above.
+
+
+.. function:: sched_get_priority_max(policy)
+
+ Get the maximum priority value for *policy*. *policy* is one of the
+ scheduling policy constants above.
+
+
+.. function:: sched_setscheduler(pid, policy, param)
+
+ Set the scheduling policy for the process with PID *pid*. A *pid* of 0 means
+ the calling process. *policy* is one of the scheduling policy constants
+ above. *param* is a :class:`sched_param` instance.
+
+
+.. function:: sched_getscheduler(pid)
+
+ Return the scheduling policy for the process with PID *pid*. A *pid* of 0
+ means the calling process. The result is one of the scheduling policy
+ constants above.
+
+
+.. function:: sched_setparam(pid, param)
+
+   Set the scheduling parameters for the process with PID *pid*. A *pid* of 0 means
+ the calling process. *param* is a :class:`sched_param` instance.
+
+
+.. function:: sched_getparam(pid)
+
+ Return the scheduling parameters as a :class:`sched_param` instance for the
+ process with PID *pid*. A *pid* of 0 means the calling process.
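+
+
+A short sketch tying these together; switching the calling process to a
+real-time policy usually requires privileges::
+
+   import os
+   prio = os.sched_param(os.sched_get_priority_min(os.SCHED_RR))
+   os.sched_setscheduler(0, os.SCHED_RR, prio)
+   print(os.sched_getscheduler(0) == os.SCHED_RR)   # True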
+
+
+.. function:: sched_rr_get_interval(pid)
+
+ Return the round-robin quantum in seconds for the process with PID *pid*. A
+ *pid* of 0 means the calling process.
+
+
+.. function:: sched_yield()
+
+ Voluntarily relinquish the CPU.
+
+
+.. class:: cpu_set(ncpus)
+
+   :class:`cpu_set` represents a set of CPUs on which a process is eligible to
+   run. *ncpus* is the number of CPUs the set should describe. Methods on
+   :class:`cpu_set` allow CPUs to be added or removed.
+
+   :class:`cpu_set` supports the AND, OR, and XOR bitwise operations. For
+   example, given two cpu_sets, ``one`` and ``two``, ``one | two`` returns a
+   :class:`cpu_set` containing the cpus enabled in either ``one`` or ``two``.
+
+ .. method:: set(i)
+
+ Enable CPU *i*.
+
+ .. method:: clear(i)
+
+ Remove CPU *i*.
+
+ .. method:: isset(i)
+
+ Return ``True`` if CPU *i* is enabled in the set.
+
+ .. method:: count()
+
+ Return the number of enabled CPUs in the set.
+
+ .. method:: zero()
+
+ Clear the set completely.
+
+
+.. function:: sched_setaffinity(pid, mask)
+
+ Restrict the process with PID *pid* to a set of CPUs. *mask* is a
+ :class:`cpu_set` instance.
+
+
+.. function:: sched_getaffinity(pid, size)
+
+ Return the :class:`cpu_set` the process with PID *pid* is restricted to. The
+ result will contain *size* CPUs.
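+
+
+A sketch pinning the calling process to its first two CPUs, using the
+:class:`cpu_set` API described above::
+
+   import os
+   mask = os.cpu_set(4)              # describe up to four CPUs
+   mask.set(0)
+   mask.set(1)
+   os.sched_setaffinity(0, mask)     # run only on CPUs 0 and 1
+   print(mask.count())               # 2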
+
+
.. _os-path:
Miscellaneous System Information
diff --git a/Doc/library/ossaudiodev.rst b/Doc/library/ossaudiodev.rst
index 0a08428..51c5857 100644
--- a/Doc/library/ossaudiodev.rst
+++ b/Doc/library/ossaudiodev.rst
@@ -38,6 +38,10 @@ the standard audio interface for Linux and recent versions of FreeBSD.
This probably all warrants a footnote or two, but I don't understand
things well enough right now to write it! --GPW
+.. versionchanged:: 3.3
+ Operations in this module now raise :exc:`OSError` where :exc:`IOError`
+ was raised.
+
.. seealso::
@@ -56,7 +60,7 @@ the standard audio interface for Linux and recent versions of FreeBSD.
what went wrong.
(If :mod:`ossaudiodev` receives an error from a system call such as
- :c:func:`open`, :c:func:`write`, or :c:func:`ioctl`, it raises :exc:`IOError`.
+ :c:func:`open`, :c:func:`write`, or :c:func:`ioctl`, it raises :exc:`OSError`.
Errors detected directly by :mod:`ossaudiodev` result in :exc:`OSSAudioError`.)
(For backwards compatibility, the exception class is also available as
@@ -168,7 +172,7 @@ The following methods each map to exactly one :func:`ioctl` system call. The
correspondence is obvious: for example, :meth:`setfmt` corresponds to the
``SNDCTL_DSP_SETFMT`` ioctl, and :meth:`sync` to ``SNDCTL_DSP_SYNC`` (this can
be useful when consulting the OSS documentation). If the underlying
-:func:`ioctl` fails, they all raise :exc:`IOError`.
+:func:`ioctl` fails, they all raise :exc:`OSError`.
.. method:: oss_audio_device.nonblock()
@@ -344,7 +348,7 @@ The mixer object provides two file-like methods:
.. method:: oss_mixer_device.close()
This method closes the open mixer device file. Any further attempts to use the
- mixer after this file is closed will raise an :exc:`IOError`.
+ mixer after this file is closed will raise an :exc:`OSError`.
.. method:: oss_mixer_device.fileno()
@@ -403,7 +407,7 @@ The remaining methods are specific to audio mixing:
returned, but both volumes are the same.
Raises :exc:`OSSAudioError` if an invalid control was is specified, or
- :exc:`IOError` if an unsupported control is specified.
+ :exc:`OSError` if an unsupported control is specified.
.. method:: oss_mixer_device.set(control, (left, right))
@@ -427,7 +431,7 @@ The remaining methods are specific to audio mixing:
.. method:: oss_mixer_device.set_recsrc(bitmask)
Call this function to specify a recording source. Returns a bitmask indicating
- the new recording source (or sources) if successful; raises :exc:`IOError` if an
+ the new recording source (or sources) if successful; raises :exc:`OSError` if an
invalid source was specified. To set the current recording source to the
microphone input::
diff --git a/Doc/library/packaging-misc.rst b/Doc/library/packaging-misc.rst
new file mode 100644
index 0000000..5e56247
--- /dev/null
+++ b/Doc/library/packaging-misc.rst
@@ -0,0 +1,27 @@
+.. temporary file for modules that don't need a dedicated file yet
+
+:mod:`packaging.errors` --- Packaging exceptions
+================================================
+
+.. module:: packaging.errors
+ :synopsis: Packaging exceptions.
+
+
+Provides exceptions used by the Packaging modules. Note that Packaging modules
+may raise standard exceptions; in particular, :exc:`SystemExit` is usually raised for
+errors that are obviously the end-user's fault (e.g. bad command-line arguments).
+
+This module is safe to use in ``from ... import *`` mode; it only exports
+symbols whose names start with ``Packaging`` and end with ``Error``.
+
+
+:mod:`packaging.manifest` --- The Manifest class
+================================================
+
+.. module:: packaging.manifest
+ :synopsis: The Manifest class, used for poking about the file system and
+ building lists of files.
+
+
+This module provides the :class:`Manifest` class, used for poking about the
+filesystem and building lists of files.
diff --git a/Doc/library/packaging.command.rst b/Doc/library/packaging.command.rst
new file mode 100644
index 0000000..6a85351
--- /dev/null
+++ b/Doc/library/packaging.command.rst
@@ -0,0 +1,111 @@
+:mod:`packaging.command` --- Standard Packaging commands
+========================================================
+
+.. module:: packaging.command
+ :synopsis: Standard packaging commands.
+
+
+This subpackage contains one module for each standard Packaging command, such as
+:command:`build` or :command:`upload`. Each command is implemented as a
+separate module, with the command name as the name of the module and of the
+class defined therein.
+
+
+
+:mod:`packaging.command.cmd` --- Abstract base class for Packaging commands
+===========================================================================
+
+.. module:: packaging.command.cmd
+ :synopsis: Abstract base class for commands.
+
+
+This module supplies the abstract base class :class:`Command`. This class is
+subclassed by the modules in the packaging.command subpackage.
+
+
+.. class:: Command(dist)
+
+ Abstract base class for defining command classes, the "worker bees" of the
+ Packaging. A useful analogy for command classes is to think of them as
+ subroutines with local variables called *options*. The options are declared
+ in :meth:`initialize_options` and defined (given their final values) in
+ :meth:`finalize_options`, both of which must be defined by every command
+ class. The distinction between the two is necessary because option values
+ might come from the outside world (command line, config file, ...), and any
+ options dependent on other options must be computed after these outside
+ influences have been processed --- hence :meth:`finalize_options`. The body
+ of the subroutine, where it does all its work based on the values of its
+ options, is the :meth:`run` method, which must also be implemented by every
+ command class.
+
+ The class constructor takes a single argument *dist*, a
+ :class:`~packaging.dist.Distribution` instance.
+
+
+Creating a new Packaging command
+--------------------------------
+
+This section outlines the steps to create a new Packaging command.
+
+.. XXX the following paragraph is focused on the stdlib; expand it to document
+ how to write and register a command in third-party projects
+
+A new command lives in a module in the :mod:`packaging.command` package. There
+is a sample template in that directory called :file:`command_template`. Copy
+this file to a new module with the same name as the new command you're
+implementing. This module should implement a class with the same name as the
+module (and the command). So, for instance, to create the command
+``peel_banana`` (so that users can run ``setup.py peel_banana``), you'd copy
+:file:`command_template` to :file:`packaging/command/peel_banana.py`, then edit
+it so that it's implementing the class :class:`peel_banana`, a subclass of
+:class:`Command`. It must define the following methods:
+
+.. method:: Command.initialize_options()
+
+ Set default values for all the options that this command supports. Note that
+ these defaults may be overridden by other commands, by the setup script, by
+ config files, or by the command line. Thus, this is not the place to code
+ dependencies between options; generally, :meth:`initialize_options`
+ implementations are just a bunch of ``self.foo = None`` assignments.
+
+
+.. method:: Command.finalize_options()
+
+ Set final values for all the options that this command supports. This is
+ always called as late as possible, i.e. after any option assignments from the
+ command line or from other commands have been done. Thus, this is the place
+ to code option dependencies: if *foo* depends on *bar*, then it is safe to
+ set *foo* from *bar* as long as *foo* still has the same value it was
+ assigned in :meth:`initialize_options`.
+
+
+.. method:: Command.run()
+
+   A command's raison d'être: carry out the action it exists to perform,
+ controlled by the options initialized in :meth:`initialize_options`,
+ customized by other commands, the setup script, the command line, and config
+ files, and finalized in :meth:`finalize_options`. All terminal output and
+ filesystem interaction should be done by :meth:`run`.
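+
+A hedged skeleton following the ``peel_banana`` example above (the
+``description`` string and the ``ripeness`` option are illustrative)::
+
+   from packaging.command.cmd import Command
+
+   class peel_banana(Command):
+
+       description = 'peel a banana'
+
+       def initialize_options(self):
+           self.ripeness = None
+
+       def finalize_options(self):
+           if self.ripeness is None:
+               self.ripeness = 'ripe'
+
+       def run(self):
+           # the actual work would happen here
+           print('peeling a %s banana' % self.ripeness)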
+
+
+Command classes may define this attribute:
+
+
+.. attribute:: Command.sub_commands
+
+ *sub_commands* formalizes the notion of a "family" of commands,
+ e.g. ``install_dist`` as the parent with sub-commands ``install_lib``,
+ ``install_headers``, etc. The parent of a family of commands defines
+ *sub_commands* as a class attribute; it's a list of 2-tuples ``(command_name,
+ predicate)``, with *command_name* a string and *predicate* a function, a
+ string or ``None``. *predicate* is a method of the parent command that
+ determines whether the corresponding command is applicable in the current
+ situation. (E.g. ``install_headers`` is only applicable if we have any C
+ header files to install.) If *predicate* is ``None``, that command is always
+ applicable.
+
+ *sub_commands* is usually defined at the *end* of a class, because
+ predicates can be methods of the class, so they must already have been
+ defined. The canonical example is the :command:`install_dist` command.
+
+.. XXX document how to add a custom command to another one's subcommands
diff --git a/Doc/library/packaging.compiler.rst b/Doc/library/packaging.compiler.rst
new file mode 100644
index 0000000..f23c551
--- /dev/null
+++ b/Doc/library/packaging.compiler.rst
@@ -0,0 +1,681 @@
+:mod:`packaging.compiler` --- Compiler classes
+==============================================
+
+.. module:: packaging.compiler
+ :synopsis: Compiler classes to build C/C++ extensions or libraries.
+
+
+This subpackage contains an abstract base class representing a compiler and
+concrete implementations for common compilers. The compiler classes should not
+be instantiated directly, but created using the :func:`new_compiler` factory
+function. Compiler types provided by Packaging are listed in
+:ref:`packaging-standard-compilers`.
+
+
+Public functions
+----------------
+
+.. function:: new_compiler(plat=None, compiler=None, dry_run=False, force=False)
+
+ Factory function to generate an instance of some
+ :class:`~.ccompiler.CCompiler` subclass for the requested platform or
+ compiler type.
+
+ If no argument is given for *plat* and *compiler*, the default compiler type
+ for the platform (:attr:`os.name`) will be used: ``'unix'`` for Unix and
+ Mac OS X, ``'msvc'`` for Windows.
+
+ If *plat* is given, it must be one of ``'posix'``, ``'darwin'`` or ``'nt'``.
+ An invalid value will not raise an exception but use the default compiler
+ type for the current platform.
+
+ .. XXX errors should never pass silently; this behavior is particularly
+ harmful when a compiler type is given as first argument
+
+ If *compiler* is given, *plat* will be ignored, allowing you to get for
+ example a ``'unix'`` compiler object under Windows or an ``'msvc'`` compiler
+ under Unix. However, not all compiler types can be instantiated on every
+ platform.
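+
+   A hedged sketch (the file names are hypothetical; the methods shown
+   follow the distutils-style :class:`~.ccompiler.CCompiler` interface)::
+
+      from packaging.compiler import new_compiler
+
+      cc = new_compiler()                  # default type for this platform
+      cc.add_include_dir('include')
+      objects = cc.compile(['hello.c'])
+      cc.link_shared_object(objects, 'hello.so')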
+
+
+.. function:: customize_compiler(compiler)
+
+ Do any platform-specific customization of a CCompiler instance. Mainly
+ needed on Unix to plug in the information that varies across Unices and is
+ stored in CPython's Makefile.
+
+
+.. function:: gen_lib_options(compiler, library_dirs, runtime_library_dirs, libraries)
+
+ Generate linker options for searching library directories and linking with
+ specific libraries. *libraries* and *library_dirs* are, respectively, lists
+ of library names (not filenames!) and search directories. Returns a list of
+ command-line options suitable for use with some compiler (depending on the
+ two format strings passed in).
+
+
+.. function:: gen_preprocess_options(macros, include_dirs)
+
+ Generate C preprocessor options (:option:`-D`, :option:`-U`, :option:`-I`) as
+ used by at least two types of compilers: the typical Unix compiler and Visual
+ C++. *macros* is the usual thing, a list of 1- or 2-tuples, where ``(name,)``
+ means undefine (:option:`-U`) macro *name*, and ``(name, value)`` means
+ define (:option:`-D`) macro *name* to *value*. *include_dirs* is just a list
+ of directory names to be added to the header file search path (:option:`-I`).
+ Returns a list of command-line options suitable for either Unix compilers or
+ Visual C++.
+
+
+.. function:: get_default_compiler(osname, platform)
+
+ Determine the default compiler to use for the given platform.
+
+ *osname* should be one of the standard Python OS names (i.e. the ones
+ returned by ``os.name``) and *platform* the common value returned by
+ ``sys.platform`` for the platform in question.
+
+ The default values are ``os.name`` and ``sys.platform``.
+
+
+.. function:: set_compiler(location)
+
+   Add or change a compiler.
+
+
+.. function:: show_compilers()
+
+   Print the list of available compilers (used by the :option:`--help-compiler`
+ options to :command:`build`, :command:`build_ext`, :command:`build_clib`).
+
+
+.. _packaging-standard-compilers:
+
+Standard compilers
+------------------
+
+Concrete subclasses of :class:`~.ccompiler.CCompiler` are provided in submodules
+of the :mod:`packaging.compiler` package. You do not need to import them, using
+:func:`new_compiler` is the public API to use. This table documents the
+standard compilers; be aware that they can be replaced by other classes on your
+platform.
+
+=============== ======================================================== =======
+name description notes
+=============== ======================================================== =======
+``'unix'`` typical Unix-style command-line C compiler [#]_
+``'msvc'`` Microsoft compiler [#]_
+``'bcpp'`` Borland C++ compiler
+``'cygwin'`` Cygwin compiler (Windows port of GCC)
+``'mingw32'`` Mingw32 port of GCC (same as Cygwin in no-Cygwin mode)
+=============== ======================================================== =======
+
+
+.. [#] The Unix compiler class assumes this behavior:
+
+ * macros defined with :option:`-Dname[=value]`
+
+ * macros undefined with :option:`-Uname`
+
+ * include search directories specified with :option:`-Idir`
+
+ * libraries specified with :option:`-llib`
+
+ * library search directories specified with :option:`-Ldir`
+
+ * compile handled by :program:`cc` (or similar) executable with
+ :option:`-c` option: compiles :file:`.c` to :file:`.o`
+
+ * link static library handled by :program:`ar` command (possibly with
+ :program:`ranlib`)
+
+ * link shared library handled by :program:`cc` :option:`-shared`
+
+
+.. [#] On Windows, extension modules typically need to be compiled with the same
+ compiler that was used to compile CPython (for example Microsoft Visual
+ Studio .NET 2003 for CPython 2.4 and 2.5). The AMD64 and Itanium
+ binaries are created using the Platform SDK.
+
+ Under the hood, there are actually two different subclasses of
+ :class:`~.ccompiler.CCompiler` defined: one is compatible with MSVC 2005
+ and 2008, the other works with older versions. This should not be a
+ concern for regular use of the functions in this module.
+
+ Packaging will normally choose the right compiler, linker etc. on its
+ own. To override this choice, the environment variables
+ *DISTUTILS_USE_SDK* and *MSSdk* must be both set. *MSSdk* indicates that
+ the current environment has been setup by the SDK's ``SetEnv.Cmd``
+ script, or that the environment variables had been registered when the
+ SDK was installed; *DISTUTILS_USE_SDK* indicates that the user has made
+ an explicit choice to override the compiler selection done by Packaging.
+
+ .. TODO document the envvars in Doc/using and the man page
+
+
+:mod:`packaging.compiler.ccompiler` --- CCompiler base class
+============================================================
+
+.. module:: packaging.compiler.ccompiler
+ :synopsis: Abstract CCompiler class.
+
+
+This module provides the abstract base class for the :class:`CCompiler`
+classes. A :class:`CCompiler` instance can be used for all the compile and
+link steps needed to build a single project. Methods are provided to set
+options for the compiler --- macro definitions, include directories, link path,
+libraries and the like.
+
+.. class:: CCompiler(dry_run=False, force=False)
+
+ The abstract base class :class:`CCompiler` defines the interface that must be
+ implemented by real compiler classes. The class also has some utility
+ methods used by several compiler classes.
+
+ The basic idea behind a compiler abstraction class is that each instance can
+ be used for all the compile/link steps in building a single project. Thus,
+ attributes common to all of those compile and link steps --- include
+ directories, macros to define, libraries to link against, etc. --- are
+ attributes of the compiler instance. To allow for variability in how
+ individual files are treated, most of those attributes may be varied on a
+ per-compilation or per-link basis.
+
+ The constructor for each subclass creates an instance of the Compiler object.
+ Flags are *dry_run* (don't actually execute
+ the steps) and *force* (rebuild everything, regardless of dependencies). All
+ of these flags default to ``False`` (off). Note that you probably don't want to
+ instantiate :class:`CCompiler` or one of its subclasses directly - use the
+ :func:`new_compiler` factory function instead.
+
+ The following methods allow you to manually alter compiler options for the
+ instance of the Compiler class.
+
+
+ .. method:: CCompiler.add_include_dir(dir)
+
+ Add *dir* to the list of directories that will be searched for header
+ files. The compiler is instructed to search directories in the order in
+ which they are supplied by successive calls to :meth:`add_include_dir`.
+
+
+ .. method:: CCompiler.set_include_dirs(dirs)
+
+ Set the list of directories that will be searched to *dirs* (a list of
+ strings). Overrides any preceding calls to :meth:`add_include_dir`;
+ subsequent calls to :meth:`add_include_dir` add to the list passed to
+ :meth:`set_include_dirs`. This does not affect any list of standard
+ include directories that the compiler may search by default.
+
+
+ .. method:: CCompiler.add_library(libname)
+
+ Add *libname* to the list of libraries that will be included in all links
+ driven by this compiler object. Note that *libname* should *not* be the
+ name of a file containing a library, but the name of the library itself:
+ the actual filename will be inferred by the linker, the compiler, or the
+ compiler class (depending on the platform).
+
+ The linker will be instructed to link against libraries in the order they
+ were supplied to :meth:`add_library` and/or :meth:`set_libraries`. It is
+ perfectly valid to duplicate library names; the linker will be instructed
+ to link against libraries as many times as they are mentioned.
+
+
+ .. method:: CCompiler.set_libraries(libnames)
+
+ Set the list of libraries to be included in all links driven by this
+ compiler object to *libnames* (a list of strings). This does not affect
+ any standard system libraries that the linker may include by default.
+
+
+ .. method:: CCompiler.add_library_dir(dir)
+
+ Add *dir* to the list of directories that will be searched for libraries
+ specified to :meth:`add_library` and :meth:`set_libraries`. The linker
+ will be instructed to search for libraries in the order they are supplied
+ to :meth:`add_library_dir` and/or :meth:`set_library_dirs`.
+
+
+ .. method:: CCompiler.set_library_dirs(dirs)
+
+ Set the list of library search directories to *dirs* (a list of strings).
+ This does not affect any standard library search path that the linker may
+ search by default.
+
+
+ .. method:: CCompiler.add_runtime_library_dir(dir)
+
+ Add *dir* to the list of directories that will be searched for shared
+ libraries at runtime.
+
+
+ .. method:: CCompiler.set_runtime_library_dirs(dirs)
+
+ Set the list of directories to search for shared libraries at runtime to
+ *dirs* (a list of strings). This does not affect any standard search path
+ that the runtime linker may search by default.
+
+
+ .. method:: CCompiler.define_macro(name[, value=None])
+
+ Define a preprocessor macro for all compilations driven by this compiler
+      object. The optional parameter *value* should be a string; if it is not
+      supplied, the macro will be defined without an explicit value, and the
+      exact outcome depends on the compiler used (most compilers treat such a
+      macro as if it had been defined to ``1``).
+
+
+ .. method:: CCompiler.undefine_macro(name)
+
+ Undefine a preprocessor macro for all compilations driven by this compiler
+ object. If the same macro is defined by :meth:`define_macro` and
+ undefined by :meth:`undefine_macro` the last call takes precedence
+ (including multiple redefinitions or undefinitions). If the macro is
+ redefined/undefined on a per-compilation basis (i.e. in the call to
+ :meth:`compile`), then that takes precedence.
+
+
+ .. method:: CCompiler.add_link_object(object)
+
+ Add *object* to the list of object files (or analogues, such as explicitly
+ named library files or the output of "resource compilers") to be included
+ in every link driven by this compiler object.
+
+
+ .. method:: CCompiler.set_link_objects(objects)
+
+ Set the list of object files (or analogues) to be included in every link
+ to *objects*. This does not affect any standard object files that the
+ linker may include by default (such as system libraries).
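+
+   As an illustration, here is a minimal sketch that configures a compiler
+   instance with the methods above, assuming that the :func:`new_compiler`
+   factory mentioned earlier lives in :mod:`packaging.compiler` and picks a
+   suitable default compiler; the paths, macro and library names shown are
+   hypothetical::
+
+      from packaging.compiler import new_compiler
+
+      compiler = new_compiler()
+      # options shared by every compile/link step driven by this instance
+      compiler.add_include_dir('/opt/foo/include')
+      compiler.define_macro('WITH_BAR', '1')
+      compiler.add_library('bar')
+      compiler.add_library_dir('/opt/foo/lib')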
+
+   The following methods allow autodetection of compiler options, providing
+   functionality similar to GNU :program:`autoconf`.
+
+
+ .. method:: CCompiler.detect_language(sources)
+
+ Detect the language of a given file, or list of files. Uses the instance
+ attributes :attr:`language_map` (a dictionary), and :attr:`language_order`
+ (a list) to do the job.
+
+
+ .. method:: CCompiler.find_library_file(dirs, lib[, debug=0])
+
+ Search the specified list of directories for a static or shared library file
+ *lib* and return the full path to that file. If *debug* is true, look for a
+ debugging version (if that makes sense on the current platform). Return
+ ``None`` if *lib* wasn't found in any of the specified directories.
+
+
+ .. method:: CCompiler.has_function(funcname [, includes=None, include_dirs=None, libraries=None, library_dirs=None])
+
+ Return a boolean indicating whether *funcname* is supported on the current
+ platform. The optional arguments can be used to augment the compilation
+ environment by providing additional include files and paths and libraries and
+ paths.
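+
+      For example, a build script might test for an optional C function
+      before defining a feature macro (a sketch; the function, header and
+      macro names are purely illustrative)::
+
+         if compiler.has_function('sendfile', includes=['sys/sendfile.h']):
+             compiler.define_macro('HAVE_SENDFILE')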
+
+
+ .. method:: CCompiler.library_dir_option(dir)
+
+ Return the compiler option to add *dir* to the list of directories searched for
+ libraries.
+
+
+ .. method:: CCompiler.library_option(lib)
+
+      Return the compiler option to add *lib* to the list of libraries linked
+      into the shared library or executable.
+
+
+ .. method:: CCompiler.runtime_library_dir_option(dir)
+
+ Return the compiler option to add *dir* to the list of directories searched for
+ runtime libraries.
+
+
+ .. method:: CCompiler.set_executables(**args)
+
+ Define the executables (and options for them) that will be run to perform the
+ various stages of compilation. The exact set of executables that may be
+ specified here depends on the compiler class (via the 'executables' class
+ attribute), but most will have:
+
+ +--------------+------------------------------------------+
+ | attribute | description |
+ +==============+==========================================+
+ | *compiler* | the C/C++ compiler |
+ +--------------+------------------------------------------+
+ | *linker_so* | linker used to create shared objects and |
+ | | libraries |
+ +--------------+------------------------------------------+
+ | *linker_exe* | linker used to create binary executables |
+ +--------------+------------------------------------------+
+ | *archiver* | static library creator |
+ +--------------+------------------------------------------+
+
+ On platforms with a command line (Unix, DOS/Windows), each of these is a string
+ that will be split into executable name and (optional) list of arguments.
+ (Splitting the string is done similarly to how Unix shells operate: words are
+ delimited by spaces, but quotes and backslashes can override this. See
+ :func:`packaging.util.split_quoted`.)
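+
+      For example, a Unix-style compiler could be redirected to a specific
+      toolchain like this (a sketch; the attribute names follow the table
+      above and the command strings are hypothetical)::
+
+         compiler.set_executables(compiler='gcc -O2 -Wall',
+                                  linker_so='gcc -shared',
+                                  linker_exe='gcc',
+                                  archiver='ar -cr')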
+
+ The following methods invoke stages in the build process.
+
+
+ .. method:: CCompiler.compile(sources[, output_dir=None, macros=None, include_dirs=None, debug=0, extra_preargs=None, extra_postargs=None, depends=None])
+
+      Compile one or more source files. Generates object files (e.g.
+      transforms a :file:`.c` file to a :file:`.o` file).
+
+ *sources* must be a list of filenames, most likely C/C++ files, but in reality
+ anything that can be handled by a particular compiler and compiler class (e.g.
+ an ``'msvc'`` compiler can handle resource files in *sources*). Return a list of
+ object filenames, one per source filename in *sources*. Depending on the
+ implementation, not all source files will necessarily be compiled, but all
+ corresponding object filenames will be returned.
+
+ If *output_dir* is given, object files will be put under it, while retaining
+ their original path component. That is, :file:`foo/bar.c` normally compiles to
+ :file:`foo/bar.o` (for a Unix implementation); if *output_dir* is *build*, then
+ it would compile to :file:`build/foo/bar.o`.
+
+ *macros*, if given, must be a list of macro definitions. A macro definition is
+ either a ``(name, value)`` 2-tuple or a ``(name,)`` 1-tuple. The former defines
+ a macro; if the value is ``None``, the macro is defined without an explicit
+ value. The 1-tuple case undefines a macro. Later
+ definitions/redefinitions/undefinitions take precedence.
+
+ *include_dirs*, if given, must be a list of strings, the directories to add to
+ the default include file search path for this compilation only.
+
+ *debug* is a boolean; if true, the compiler will be instructed to output debug
+ symbols in (or alongside) the object file(s).
+
+ *extra_preargs* and *extra_postargs* are implementation-dependent. On platforms
+ that have the notion of a command line (e.g. Unix, DOS/Windows), they are most
+ likely lists of strings: extra command-line arguments to prepend/append to the
+ compiler command line. On other platforms, consult the implementation class
+ documentation. In any event, they are intended as an escape hatch for those
+ occasions when the abstract compiler framework doesn't cut the mustard.
+
+      *depends*, if given, is a list of filenames that all targets depend on.
+      If a source file is older than any file in *depends*, then the source
+      file will be recompiled. This supports dependency tracking, but only at
+      a coarse granularity.
+
+ Raises :exc:`CompileError` on failure.
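+
+      A typical call looks like this (a sketch; the file, directory and macro
+      names are hypothetical)::
+
+         objects = compiler.compile(['src/foo.c', 'src/bar.c'],
+                                    output_dir='build',
+                                    # define NDEBUG, undefine WITH_BAZ
+                                    macros=[('NDEBUG', None), ('WITH_BAZ',)],
+                                    include_dirs=['include'])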
+
+
+ .. method:: CCompiler.create_static_lib(objects, output_libname[, output_dir=None, debug=0, target_lang=None])
+
+      Link a bunch of stuff together to create a static library file. The
+      "bunch of stuff" consists of the list of object files supplied as
+      *objects*, the extra object files supplied to :meth:`add_link_object`
+      and/or :meth:`set_link_objects`, and the libraries supplied to
+      :meth:`add_library` and/or :meth:`set_libraries`.
+
+ *output_libname* should be a library name, not a filename; the filename will be
+ inferred from the library name. *output_dir* is the directory where the library
+ file will be put. XXX defaults to what?
+
+ *debug* is a boolean; if true, debugging information will be included in the
+ library (note that on most platforms, it is the compile step where this matters:
+ the *debug* flag is included here just for consistency).
+
+ *target_lang* is the target language for which the given objects are being
+ compiled. This allows specific linkage time treatment of certain languages.
+
+ Raises :exc:`LibError` on failure.
+
+
+ .. method:: CCompiler.link(target_desc, objects, output_filename[, output_dir=None, libraries=None, library_dirs=None, runtime_library_dirs=None, export_symbols=None, debug=0, extra_preargs=None, extra_postargs=None, build_temp=None, target_lang=None])
+
+ Link a bunch of stuff together to create an executable or shared library file.
+
+ The "bunch of stuff" consists of the list of object files supplied as *objects*.
+ *output_filename* should be a filename. If *output_dir* is supplied,
+ *output_filename* is relative to it (i.e. *output_filename* can provide
+ directory components if needed).
+
+ *libraries* is a list of libraries to link against. These are library names,
+ not filenames, since they're translated into filenames in a platform-specific
+ way (e.g. *foo* becomes :file:`libfoo.a` on Unix and :file:`foo.lib` on
+ DOS/Windows). However, they can include a directory component, which means the
+ linker will look in that specific directory rather than searching all the normal
+ locations.
+
+ *library_dirs*, if supplied, should be a list of directories to search for
+ libraries that were specified as bare library names (i.e. no directory
+ component). These are on top of the system default and those supplied to
+ :meth:`add_library_dir` and/or :meth:`set_library_dirs`. *runtime_library_dirs*
+ is a list of directories that will be embedded into the shared library and used
+ to search for other shared libraries that \*it\* depends on at run-time. (This
+ may only be relevant on Unix.)
+
+ *export_symbols* is a list of symbols that the shared library will export.
+ (This appears to be relevant only on Windows.)
+
+ *debug* is as for :meth:`compile` and :meth:`create_static_lib`, with the
+ slight distinction that it actually matters on most platforms (as opposed to
+ :meth:`create_static_lib`, which includes a *debug* flag mostly for form's
+ sake).
+
+ *extra_preargs* and *extra_postargs* are as for :meth:`compile` (except of
+ course that they supply command-line arguments for the particular linker being
+ used).
+
+ *target_lang* is the target language for which the given objects are being
+ compiled. This allows specific linkage time treatment of certain languages.
+
+ Raises :exc:`LinkError` on failure.
+
+
+ .. method:: CCompiler.link_executable(objects, output_progname[, output_dir=None, libraries=None, library_dirs=None, runtime_library_dirs=None, debug=0, extra_preargs=None, extra_postargs=None, target_lang=None])
+
+      Link an executable. *output_progname* is the name of the executable,
+      while *objects* is a list of object filenames to link in. Other
+      arguments are as for the :meth:`link` method.
+
+
+ .. method:: CCompiler.link_shared_lib(objects, output_libname[, output_dir=None, libraries=None, library_dirs=None, runtime_library_dirs=None, export_symbols=None, debug=0, extra_preargs=None, extra_postargs=None, build_temp=None, target_lang=None])
+
+ Link a shared library. *output_libname* is the name of the output library,
+ while *objects* is a list of object filenames to link in. Other arguments are
+ as for the :meth:`link` method.
+
+
+ .. method:: CCompiler.link_shared_object(objects, output_filename[, output_dir=None, libraries=None, library_dirs=None, runtime_library_dirs=None, export_symbols=None, debug=0, extra_preargs=None, extra_postargs=None, build_temp=None, target_lang=None])
+
+ Link a shared object. *output_filename* is the name of the shared object that
+ will be created, while *objects* is a list of object filenames to link in.
+ Other arguments are as for the :meth:`link` method.
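+
+      Continuing the :meth:`compile` sketch above, the resulting object files
+      could be turned into a shared object like this (the names are
+      hypothetical)::
+
+         compiler.link_shared_object(objects, 'foo.so',
+                                     output_dir='build',
+                                     libraries=['bar'],
+                                     library_dirs=['/opt/foo/lib'])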
+
+
+ .. method:: CCompiler.preprocess(source[, output_file=None, macros=None, include_dirs=None, extra_preargs=None, extra_postargs=None])
+
+      Preprocess a single C/C++ source file, named in *source*. Output will be
+      written to a file named *output_file*, or to *stdout* if *output_file*
+      is not supplied.
+ *macros* is a list of macro definitions as for :meth:`compile`, which will
+ augment the macros set with :meth:`define_macro` and :meth:`undefine_macro`.
+ *include_dirs* is a list of directory names that will be added to the default
+ list, in the same way as :meth:`add_include_dir`.
+
+ Raises :exc:`PreprocessError` on failure.
+
+ The following utility methods are defined by the :class:`CCompiler` class, for
+ use by the various concrete subclasses.
+
+
+ .. method:: CCompiler.executable_filename(basename[, strip_dir=0, output_dir=''])
+
+ Returns the filename of the executable for the given *basename*. Typically for
+ non-Windows platforms this is the same as the basename, while Windows will get
+ a :file:`.exe` added.
+
+
+ .. method:: CCompiler.library_filename(libname[, lib_type='static', strip_dir=0, output_dir=''])
+
+      Returns the filename for the given library name on the current platform.
+      On Unix, a library with *lib_type* of ``'static'`` will typically be of
+      the form :file:`lib{libname}.a`, while a *lib_type* of ``'dynamic'`` will
+      be of the form :file:`lib{libname}.so`.
+
+
+ .. method:: CCompiler.object_filenames(source_filenames[, strip_dir=0, output_dir=''])
+
+ Returns the name of the object files for the given source files.
+ *source_filenames* should be a list of filenames.
+
+
+ .. method:: CCompiler.shared_object_filename(basename[, strip_dir=0, output_dir=''])
+
+ Returns the name of a shared object file for the given file name *basename*.
+
+
+ .. method:: CCompiler.execute(func, args[, msg=None, level=1])
+
+      Invokes :func:`packaging.util.execute`. This method invokes a Python
+      function *func* with the given arguments *args*, after logging and
+      taking into account the *dry_run* flag. XXX see also.
+
+
+ .. method:: CCompiler.spawn(cmd)
+
+ Invokes :func:`packaging.util.spawn`. This invokes an external process to run
+ the given command. XXX see also.
+
+
+ .. method:: CCompiler.mkpath(name[, mode=511])
+
+ Invokes :func:`packaging.dir_util.mkpath`. This creates a directory and any
+ missing ancestor directories. XXX see also.
+
+
+ .. method:: CCompiler.move_file(src, dst)
+
+ Invokes :meth:`packaging.file_util.move_file`. Renames *src* to *dst*. XXX see
+ also.
+
+
+:mod:`packaging.compiler.extension` --- The Extension class
+===========================================================
+
+.. module:: packaging.compiler.extension
+ :synopsis: Class used to represent C/C++ extension modules.
+
+
+This module provides the :class:`Extension` class, used to represent C/C++
+extension modules.
+
+.. class:: Extension
+
+ The Extension class describes a single C or C++ extension module. It accepts
+ the following keyword arguments in its constructor:
+
+ +------------------------+--------------------------------+---------------------------+
+ | argument name | value | type |
+ +========================+================================+===========================+
+ | *name* | the full name of the | string |
+ | | extension, including any | |
+ | | packages --- i.e. *not* a | |
+ | | filename or pathname, but | |
+ | | Python dotted name | |
+ +------------------------+--------------------------------+---------------------------+
+ | *sources* | list of source filenames, | list of strings |
+ | | relative to the distribution | |
+ | | root (where the setup script | |
+ | | lives), in Unix form (slash- | |
+ | | separated) for portability. | |
+ | | Source files may be C, C++, | |
+ | | SWIG (.i), platform-specific | |
+ | | resource files, or whatever | |
+ | | else is recognized by the | |
+ | | :command:`build_ext` command | |
+ | | as source for a Python | |
+ | | extension. | |
+ +------------------------+--------------------------------+---------------------------+
+ | *include_dirs* | list of directories to search | list of strings |
+ | | for C/C++ header files (in | |
+ | | Unix form for portability) | |
+ +------------------------+--------------------------------+---------------------------+
+ | *define_macros* | list of macros to define; each | list of tuples |
+ | | macro is defined using a | |
+ | | 2-tuple ``(name, value)``, | |
+ | | where *value* is | |
+ | | either the string to define it | |
+ | | to or ``None`` to define it | |
+ | | without a particular value | |
+ | | (equivalent of ``#define FOO`` | |
+ | | in source or :option:`-DFOO` | |
+ | | on Unix C compiler command | |
+ | | line) | |
+ +------------------------+--------------------------------+---------------------------+
+ | *undef_macros* | list of macros to undefine | list of strings |
+ | | explicitly | |
+ +------------------------+--------------------------------+---------------------------+
+ | *library_dirs* | list of directories to search | list of strings |
+ | | for C/C++ libraries at link | |
+ | | time | |
+ +------------------------+--------------------------------+---------------------------+
+ | *libraries* | list of library names (not | list of strings |
+ | | filenames or paths) to link | |
+ | | against | |
+ +------------------------+--------------------------------+---------------------------+
+ | *runtime_library_dirs* | list of directories to search | list of strings |
+ | | for C/C++ libraries at run | |
+ | | time (for shared extensions, | |
+ | | this is when the extension is | |
+ | | loaded) | |
+ +------------------------+--------------------------------+---------------------------+
+ | *extra_objects* | list of extra files to link | list of strings |
+ | | with (e.g. object files not | |
+ | | implied by 'sources', static | |
+ | | library that must be | |
+ | | explicitly specified, binary | |
+ | | resource files, etc.) | |
+ +------------------------+--------------------------------+---------------------------+
+ | *extra_compile_args* | any extra platform- and | list of strings |
+ | | compiler-specific information | |
+ | | to use when compiling the | |
+ | | source files in 'sources'. For | |
+ | | platforms and compilers where | |
+ | | a command line makes sense, | |
+ | | this is typically a list of | |
+ | | command-line arguments, but | |
+ | | for other platforms it could | |
+ | | be anything. | |
+ +------------------------+--------------------------------+---------------------------+
+ | *extra_link_args* | any extra platform- and | list of strings |
+ | | compiler-specific information | |
+ | | to use when linking object | |
+ | | files together to create the | |
+ | | extension (or to create a new | |
+ | | static Python interpreter). | |
+ | | Similar interpretation as for | |
+ | | 'extra_compile_args'. | |
+ +------------------------+--------------------------------+---------------------------+
+ | *export_symbols* | list of symbols to be exported | list of strings |
+ | | from a shared extension. Not | |
+ | | used on all platforms, and not | |
+ | | generally necessary for Python | |
+ | | extensions, which typically | |
+ | | export exactly one symbol: | |
+ | | ``init`` + extension_name. | |
+ +------------------------+--------------------------------+---------------------------+
+ | *depends* | list of files that the | list of strings |
+ | | extension depends on | |
+ +------------------------+--------------------------------+---------------------------+
+ | *language* | extension language (i.e. | string |
+ | | ``'c'``, ``'c++'``, | |
+ | | ``'objc'``). Will be detected | |
+ | | from the source extensions if | |
+ | | not provided. | |
+ +------------------------+--------------------------------+---------------------------+
+ | *optional* | specifies that a build failure | boolean |
+ | | in the extension should not | |
+ | | abort the build process, but | |
+ | | simply skip the extension. | |
+ +------------------------+--------------------------------+---------------------------+
+
+To distribute extension modules that live in a package (e.g. ``package.ext``),
+you need to create a :file:`{package}/__init__.py` file to let Python recognize
+and import your module.
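+
+For example, an extension module ``package.ext`` built from one C file could
+be described like this (a sketch; the file names, macro and library are
+hypothetical)::
+
+   from packaging.compiler.extension import Extension
+
+   ext = Extension(name='package.ext',
+                   sources=['src/ext.c'],
+                   include_dirs=['include'],
+                   define_macros=[('WITH_FROB', '1')],
+                   libraries=['frob'],
+                   optional=True)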
diff --git a/Doc/library/packaging.database.rst b/Doc/library/packaging.database.rst
new file mode 100644
index 0000000..9d750f0
--- /dev/null
+++ b/Doc/library/packaging.database.rst
@@ -0,0 +1,345 @@
+:mod:`packaging.database` --- Database of installed distributions
+=================================================================
+
+.. module:: packaging.database
+ :synopsis: Functions to query and manipulate installed distributions.
+
+
+This module provides an implementation of :PEP:`376`. It was originally
+intended to land in :mod:`pkgutil`, but with the inclusion of Packaging in the
+standard library, it was thought best to include it in a submodule of
+:mod:`packaging`, leaving :mod:`pkgutil` to deal with imports.
+
+Installed Python distributions are represented by instances of
+:class:`Distribution`, or :class:`EggInfoDistribution` for legacy egg formats.
+Most functions also provide an extra argument ``use_egg_info`` to take legacy
+distributions into account.
+
+For the purpose of this module, "installed" means that the distribution's
+:file:`.dist-info`, :file:`.egg-info` or :file:`egg` directory or file is found
+on :data:`sys.path`. For example, if the parent directory of a
+:file:`dist-info` directory is added to :envvar:`PYTHONPATH`, then it will be
+available in the database.
+
+Classes representing installed distributions
+--------------------------------------------
+
+.. class:: Distribution(path)
+
+ Class representing an installed distribution. It is different from
+ :class:`packaging.dist.Distribution` which holds the list of files, the
+ metadata and options during the run of a Packaging command.
+
+ Instantiate with the *path* to a ``.dist-info`` directory. Instances can be
+ compared and sorted. Other available methods are:
+
+ .. XXX describe how comparison works
+
+ .. method:: get_distinfo_file(path, binary=False)
+
+ Return a read-only file object for a file located at
+ :file:`{project}-{version}.dist-info/{path}`. *path* should be a
+ ``'/'``-separated path relative to the ``.dist-info`` directory or an
+ absolute path; if it is an absolute path and doesn't start with the path
+      to the :file:`.dist-info` directory, a :exc:`PackagingError` is raised.
+
+ If *binary* is ``True``, the file is opened in binary mode.
+
+ .. method:: get_resource_path(relative_path)
+
+ .. TODO
+
+ .. method:: list_distinfo_files(local=False)
+
+ Return an iterator over all files located in the :file:`.dist-info`
+ directory. If *local* is ``True``, each returned path is transformed into
+ a local absolute path, otherwise the raw value found in the :file:`RECORD`
+ file is returned.
+
+ .. method:: list_installed_files(local=False)
+
+ Iterate over the files installed with the distribution and registered in
+ the :file:`RECORD` file and yield a tuple ``(path, md5, size)`` for each
+ line. If *local* is ``True``, the returned path is transformed into a
+ local absolute path, otherwise the raw value is returned.
+
+ A local absolute path is an absolute path in which occurrences of ``'/'``
+ have been replaced by :data:`os.sep`.
+
+ .. method:: uses(path)
+
+ Check whether *path* was installed by this distribution (i.e. if the path
+ is present in the :file:`RECORD` file). *path* can be a local absolute
+ path or a relative ``'/'``-separated path. Returns a boolean.
+
+ Available attributes:
+
+ .. attribute:: metadata
+
+ Instance of :class:`packaging.metadata.Metadata` filled with the contents
+ of the :file:`{project}-{version}.dist-info/METADATA` file.
+
+ .. attribute:: name
+
+ Shortcut for ``metadata['Name']``.
+
+ .. attribute:: version
+
+ Shortcut for ``metadata['Version']``.
+
+ .. attribute:: requested
+
+      Boolean indicating whether this distribution was requested by the user
+      or automatically installed as a dependency.
+
+
+.. class:: EggInfoDistribution(path)
+
+ Class representing a legacy distribution. It is compatible with distutils'
+ and setuptools' :file:`.egg-info` and :file:`.egg` files and directories.
+
+ .. FIXME should be named EggDistribution
+
+ Instantiate with the *path* to an egg file or directory. Instances can be
+ compared and sorted. Other available methods are:
+
+ .. method:: list_installed_files(local=False)
+
+ .. method:: uses(path)
+
+ Available attributes:
+
+ .. attribute:: metadata
+
+ Instance of :class:`packaging.metadata.Metadata` filled with the contents
+      of the :file:`{project}-{version}.egg-info/PKG-INFO` or
+      :file:`{project}-{version}.egg` file.
+
+ .. attribute:: name
+
+ Shortcut for ``metadata['Name']``.
+
+ .. attribute:: version
+
+ Shortcut for ``metadata['Version']``.
+
+
+Functions to work with the database
+-----------------------------------
+
+.. function:: get_distribution(name, use_egg_info=False, paths=None)
+
+ Return an instance of :class:`Distribution` or :class:`EggInfoDistribution`
+ for the first installed distribution matching *name*. Egg distributions are
+ considered only if *use_egg_info* is true; if both a dist-info and an egg
+ file are found, the dist-info prevails. The directories to be searched are
+ given in *paths*, which defaults to :data:`sys.path`. Returns ``None`` if no
+ matching distribution is found.
+
+ .. FIXME param should be named use_egg
+
+
+.. function:: get_distributions(use_egg_info=False, paths=None)
+
+ Return an iterator of :class:`Distribution` instances for all installed
+ distributions found in *paths* (defaults to :data:`sys.path`). If
+ *use_egg_info* is true, also return instances of :class:`EggInfoDistribution`
+ for legacy distributions found.
+
+
+.. function:: get_file_users(path)
+
+ Return an iterator over all distributions using *path*, a local absolute path
+ or a relative ``'/'``-separated path.
+
+ .. XXX does this work with prefixes or full file path only?
+
+
+.. function:: obsoletes_distribution(name, version=None, use_egg_info=False)
+
+ Return an iterator over all distributions that declare they obsolete *name*.
+ *version* is an optional argument to match only specific releases (see
+ :mod:`packaging.version`). If *use_egg_info* is true, legacy egg
+ distributions will be considered as well.
+
+
+.. function:: provides_distribution(name, version=None, use_egg_info=False)
+
+ Return an iterator over all distributions that declare they provide *name*.
+ *version* is an optional argument to match only specific releases (see
+ :mod:`packaging.version`). If *use_egg_info* is true, legacy egg
+ distributions will be considered as well.
+
+
+Utility functions
+-----------------
+
+.. function:: distinfo_dirname(name, version)
+
+   Escape *name* and *version* into a filename-safe form and return the
+   directory name built from them, for example
+   :file:`{safename}-{safeversion}.dist-info`. In *name*, runs of
+   non-alphanumeric characters are replaced with one ``'_'``; in *version*,
+   spaces become dots, and runs of other non-alphanumeric characters (except
+   dots) are replaced by one ``'-'``.
+
+ .. XXX wth spaces in version numbers?
+
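+   For example, applying these rules (a sketch derived from the escaping
+   rules above, not actual interpreter output)::
+
+      >>> from packaging.database import distinfo_dirname
+      >>> distinfo_dirname('my-project', '1.0 beta')
+      'my_project-1.0.beta.dist-info'
+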
+For performance purposes, the list of distributions is cached internally.
+Caching is enabled by default, but you can control it with these functions:
+
+.. function:: clear_cache()
+
+ Clear the cache.
+
+.. function:: disable_cache()
+
+ Disable the cache, without clearing it.
+
+.. function:: enable_cache()
+
+ Enable the internal cache, without clearing it.
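+
+For example, a program that installs or removes distributions and then queries
+the database again in the same process may want to reset the cache first (a
+sketch)::
+
+   from packaging.database import clear_cache, get_distribution
+
+   # ... install or remove distributions here ...
+   clear_cache()  # force the next query to re-scan sys.path
+   dist = get_distribution('chocolate')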
+
+
+Examples
+--------
+
+Printing all information about a distribution
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Given the name of an installed distribution, we shall print out all
+information that can be obtained using functions provided in this module::
+
+ import sys
+ import packaging.database
+
+   try:
+       name = sys.argv[1]
+   except IndexError:
+       sys.exit('Not enough arguments')
+
+   # first, fetch the installed distribution from the database
+   dist = packaging.database.get_distribution(name)
+ if dist is None:
+ sys.exit('No such distribution')
+
+ print('Information about %r' % dist.name)
+ print()
+
+ print('Files')
+ print('=====')
+ for path, md5, size in dist.list_installed_files():
+ print('* Path: %s' % path)
+ print(' Hash %s, Size: %s bytes' % (md5, size))
+ print()
+
+ print('Metadata')
+ print('========')
+ for key, value in dist.metadata.items():
+ print('%20s: %s' % (key, value))
+ print()
+
+ print('Extra')
+ print('=====')
+ if dist.requested:
+ print('* It was installed by user request')
+ else:
+ print('* It was installed as a dependency')
+
+If we save the script above as ``print_info.py``, we can use it to extract
+information from a :file:`.dist-info` directory. By typing in the console:
+
+.. code-block:: sh
+
+ python print_info.py choxie
+
+we get the following output:
+
+.. code-block:: none
+
+ Information about 'choxie'
+
+ Files
+ =====
+ * Path: ../tmp/distutils2/tests/fake_dists/choxie-2.0.0.9/truffles.py
+ Hash 5e052db6a478d06bad9ae033e6bc08af, Size: 111 bytes
+ * Path: ../tmp/distutils2/tests/fake_dists/choxie-2.0.0.9/choxie/chocolate.py
+ Hash ac56bf496d8d1d26f866235b95f31030, Size: 214 bytes
+ * Path: ../tmp/distutils2/tests/fake_dists/choxie-2.0.0.9/choxie/__init__.py
+ Hash 416aab08dfa846f473129e89a7625bbc, Size: 25 bytes
+ * Path: ../tmp/distutils2/tests/fake_dists/choxie-2.0.0.9.dist-info/INSTALLER
+ Hash d41d8cd98f00b204e9800998ecf8427e, Size: 0 bytes
+ * Path: ../tmp/distutils2/tests/fake_dists/choxie-2.0.0.9.dist-info/METADATA
+ Hash 696a209967fef3c8b8f5a7bb10386385, Size: 225 bytes
+ * Path: ../tmp/distutils2/tests/fake_dists/choxie-2.0.0.9.dist-info/REQUESTED
+ Hash d41d8cd98f00b204e9800998ecf8427e, Size: 0 bytes
+ * Path: ../tmp/distutils2/tests/fake_dists/choxie-2.0.0.9.dist-info/RECORD
+ Hash None, Size: None bytes
+
+ Metadata
+ ========
+ Metadata-Version: 1.2
+ Name: choxie
+ Version: 2.0.0.9
+ Platform: []
+ Supported-Platform: UNKNOWN
+ Summary: Chocolate with a kick!
+ Description: UNKNOWN
+ Keywords: []
+ Home-page: UNKNOWN
+ Author: UNKNOWN
+ Author-email: UNKNOWN
+ Maintainer: UNKNOWN
+ Maintainer-email: UNKNOWN
+ License: UNKNOWN
+ Classifier: []
+ Download-URL: UNKNOWN
+ Obsoletes-Dist: ['truffles (<=0.8,>=0.5)', 'truffles (<=0.9,>=0.6)']
+ Project-URL: []
+ Provides-Dist: ['truffles (1.0)']
+ Requires-Dist: ['towel-stuff (0.1)']
+ Requires-Python: UNKNOWN
+ Requires-External: []
+
+ Extra
+ =====
+ * It was installed as a dependency
+
+
+Getting metadata about a distribution
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Sometimes you're not interested in the full packaging information contained
+in a :class:`Distribution` object and just want to do something with its
+:attr:`~Distribution.metadata`::
+
+ >>> from packaging.database import get_distribution
+ >>> info = get_distribution('chocolate').metadata
+ >>> info['Keywords']
+ ['cooking', 'happiness']
+
+
+Finding out obsoleted distributions
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Now we tackle a different problem: we are interested in finding out which
+distributions have been obsoleted. This can be done as follows::
+
+ import packaging.database
+
+ # iterate over all distributions in the system
+ for dist in packaging.database.get_distributions():
+ name, version = dist.name, dist.version
+ # find out which distributions obsolete this name/version combination
+ replacements = packaging.database.obsoletes_distribution(name, version)
+ if replacements:
+ print('%r %s is obsoleted by' % (name, version),
+ ', '.join(repr(r.name) for r in replacements))
+
+This is what the output might look like:
+
+.. code-block:: none
+
+ 'strawberry' 0.6 is obsoleted by 'choxie'
+ 'grammar' 1.0a4 is obsoleted by 'towel-stuff'
diff --git a/Doc/library/packaging.depgraph.rst b/Doc/library/packaging.depgraph.rst
new file mode 100644
index 0000000..c384788
--- /dev/null
+++ b/Doc/library/packaging.depgraph.rst
@@ -0,0 +1,199 @@
+:mod:`packaging.depgraph` --- Dependency graph builder
+======================================================
+
+.. module:: packaging.depgraph
+ :synopsis: Graph builder for dependencies between releases.
+
+
+This module provides the means to analyse the dependencies between various
+distributions and to create a graph representing these dependency relationships.
+In this document, "distribution" refers to an instance of
+:class:`packaging.database.Distribution` or
+:class:`packaging.database.EggInfoDistribution`.
+
+.. XXX terminology problem with dist vs. release: dists are installed, but deps
+ use releases
+
+.. XXX explain how to use it with dists not installed: Distribution can only be
+ instantiated with a path, but this module is useful for remote dist too
+
+.. XXX functions should accept and return iterators, not lists
+
+
+The :class:`DependencyGraph` class
+----------------------------------
+
+.. class:: DependencyGraph
+
+   Represent a dependency graph between releases. The nodes are distribution
+   instances; the edges model dependencies. An edge from ``a`` to ``b`` means
+   that ``a`` depends on ``b``.
+
+ .. method:: add_distribution(distribution)
+
+ Add *distribution* to the graph.
+
+ .. method:: add_edge(x, y, label=None)
+
+ Add an edge from distribution *x* to distribution *y* with the given
+ *label* (string).
+
+ .. method:: add_missing(distribution, requirement)
+
+ Add a missing *requirement* (string) for the given *distribution*.
+
+ .. method:: repr_node(dist, level=1)
+
+ Print a subgraph starting from *dist*. *level* gives the depth of the
+ subgraph.
+
+ Direct access to the graph nodes and edges is provided through these
+ attributes:
+
+ .. attribute:: adjacency_list
+
+      Dictionary mapping distributions to a list of ``(other, label)`` tuples
+      where ``other`` is a distribution and the edge is labeled with ``label``
+      (i.e. the version specifier, if one was provided).
+
+ .. attribute:: reverse_list
+
+ Dictionary mapping distributions to a list of predecessors. This allows
+ efficient traversal.
+
+ .. attribute:: missing
+
+ Dictionary mapping distributions to a list of requirements that were not
+ provided by any distribution.
+
+
+Auxiliary functions
+-------------------
+
+.. function:: dependent_dists(dists, dist)
+
+ Recursively generate a list of distributions from *dists* that are dependent
+ on *dist*.
+
+ .. XXX what does member mean here: "dist is a member of *dists* for which we
+ are interested"
+
+.. function:: generate_graph(dists)
+
+ Generate a :class:`DependencyGraph` from the given list of distributions.
+
+ .. XXX make this alternate constructor a DepGraph classmethod or rename;
+ 'generate' can suggest it creates a file or an image, use 'make'
+
+.. function:: graph_to_dot(graph, f, skip_disconnected=True)
+
+ Write a DOT output for the graph to the file-like object *f*.
+
+ If *skip_disconnected* is true, all distributions that are not dependent on
+ any other distribution are skipped.
+
+ .. XXX why is this not a DepGraph method?
+
+
+Example Usage
+-------------
+
+Depict all dependencies in the system
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+First, we shall generate a graph of all the distributions on the system
+and then create an image out of it using the tools provided by
+`Graphviz <http://www.graphviz.org/>`_::
+
+ from packaging.database import get_distributions
+ from packaging.depgraph import generate_graph
+
+ dists = list(get_distributions())
+ graph = generate_graph(dists)
+
+It would be interesting to print out the missing requirements. This can be done
+as follows::
+
+   for dist, reqs in graph.missing.items():
+       if reqs:
+           reqs = ', '.join(repr(req) for req in reqs)
+           print('Missing dependencies for %r: %s' % (dist.name, reqs))
+
+Example output is:
+
+.. code-block:: none
+
+ Missing dependencies for 'TurboCheetah': 'Cheetah'
+ Missing dependencies for 'TurboGears': 'ConfigObj', 'DecoratorTools', 'RuleDispatch'
+ Missing dependencies for 'jockey': 'PyKDE4.kdecore', 'PyKDE4.kdeui', 'PyQt4.QtCore', 'PyQt4.QtGui'
+ Missing dependencies for 'TurboKid': 'kid'
+   Missing dependencies for 'TurboJson': 'DecoratorTools', 'RuleDispatch'
+
+Now, we proceed with generating a graphical representation of the graph. First
+we write it to a file, and then we generate a PNG image using the
+:program:`dot` command-line tool::
+
+ from packaging.depgraph import graph_to_dot
+ with open('output.dot', 'w') as f:
+ # only show the interesting distributions, skipping the disconnected ones
+ graph_to_dot(graph, f, skip_disconnected=True)
+
+We can create the final picture using:
+
+.. code-block:: sh
+
+ $ dot -Tpng output.dot > output.png
+
+An example result is:
+
+.. figure:: depgraph-output.png
+ :alt: Example PNG output from packaging.depgraph and dot
+
+If you want to include egg distributions as well, then the code requires only
+one change, namely the line::
+
+ dists = list(packaging.database.get_distributions())
+
+has to be replaced with::
+
+ dists = list(packaging.database.get_distributions(use_egg_info=True))
+
+On many platforms, a richer graph is obtained because at the moment most
+distributions are provided in the egg rather than the new standard
+``.dist-info`` format.
+
+.. XXX missing image
+
+ An example of a more involved graph for illustrative reasons can be seen
+ here:
+
+ .. image:: depgraph_big.png
+
+
+List all dependent distributions
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+We will list all distributions that are dependent on some given distribution.
+This time, egg distributions will be considered as well::
+
+ import sys
+ from packaging.database import get_distribution, get_distributions
+ from packaging.depgraph import dependent_dists
+
+ dists = list(get_distributions(use_egg_info=True))
+ dist = get_distribution('bacon', use_egg_info=True)
+ if dist is None:
+ sys.exit('No such distribution in the system')
+
+ deps = dependent_dists(dists, dist)
+ deps = ', '.join(repr(x.name) for x in deps)
+ print('Distributions depending on %r: %s' % (dist.name, deps))
+
+And this is an example of the output:
+
+.. with the dependency relationships as in the previous section
+ (depgraph_big)
+
+.. code-block:: none
+
+ Distributions depending on 'bacon': 'towel-stuff', 'choxie', 'grammar'
diff --git a/Doc/library/packaging.dist.rst b/Doc/library/packaging.dist.rst
new file mode 100644
index 0000000..25cb62b
--- /dev/null
+++ b/Doc/library/packaging.dist.rst
@@ -0,0 +1,108 @@
+:mod:`packaging.dist` --- The Distribution class
+================================================
+
+.. module:: packaging.dist
+ :synopsis: Core Distribution class.
+
+
+This module provides the :class:`Distribution` class, which represents the
+module distribution being built/packaged/distributed/installed.
+
+.. class:: Distribution(arguments)
+
+ A :class:`Distribution` describes how to build, package, distribute and
+ install a Python project.
+
+ The arguments accepted by the constructor are laid out in the following
+   table. Some of them will end up in a metadata object; the rest will become
+ data attributes of the :class:`Distribution` instance.
+
+ .. TODO improve constructor to take a Metadata object + named params?
+ (i.e. Distribution(metadata, cmdclass, py_modules, etc)
+ .. TODO also remove obsolete(?) script_name, etc. parameters? see what
+ py2exe and other tools need
+
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | argument name | value | type |
+ +====================+================================+=============================================================+
+ | *name* | The name of the project | a string |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *version* | The version number of the | a string |
+ | | release; see | |
+ | | :mod:`packaging.version` | |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *summary* | A single line describing the | a string |
+ | | project | |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *description* | Longer description of the | a string |
+ | | project | |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *author* | The name of the project author | a string |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *author_email* | The email address of the | a string |
+ | | project author | |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *maintainer* | The name of the current | a string |
+ | | maintainer, if different from | |
+ | | the author | |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *maintainer_email* | The email address of the | a string |
+ | | current maintainer, if | |
+ | | different from the author | |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+   | *home_page*        | A URL for the project          | a string                                                    |
+ | | (homepage) | |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *download_url* | A URL to download the project | a string |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *packages* | A list of Python packages that | a list of strings |
+ | | packaging will manipulate | |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *py_modules* | A list of Python modules that | a list of strings |
+ | | packaging will manipulate | |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *scripts* | A list of standalone scripts | a list of strings |
+ | | to be built and installed | |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *ext_modules* | A list of Python extensions to | a list of instances of |
+ | | be built | :class:`packaging.compiler.extension.Extension` |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+   | *classifiers*      | A list of categories for the   | a list of strings; valid classifiers are listed on `PyPI    |
+ | | distribution | <http://pypi.python.org/pypi?:action=list_classifiers>`_. |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *distclass* | the :class:`Distribution` | a subclass of |
+ | | class to use | :class:`packaging.dist.Distribution` |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *script_name* | The name of the setup.py | a string |
+ | | script - defaults to | |
+ | | ``sys.argv[0]`` | |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *script_args* | Arguments to supply to the | a list of strings |
+ | | setup script | |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+   | *options*          | default options for the setup  | a dictionary                                                |
+ | | script | |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *license* | The license for the | a string |
+ | | distribution; should be used | |
+ | | when there is no suitable | |
+ | | License classifier, or to | |
+ | | refine a classifier | |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *keywords* | Descriptive keywords; used by | a list of strings or a comma-separated string |
+ | | catalogs such as PyPI | |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *platforms* | Platforms compatible with this | a list of strings or a comma-separated string |
+ | | distribution; should be used | |
+ | | when there is no suitable | |
+ | | Platform classifier | |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *cmdclass* | A mapping of command names to | a dictionary |
+ | | :class:`Command` subclasses | |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *data_files* | A list of data files to | a list |
+ | | install | |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *package_dir* | A mapping of Python packages | a dictionary |
+ | | to directory names | |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
diff --git a/Doc/library/packaging.fancy_getopt.rst b/Doc/library/packaging.fancy_getopt.rst
new file mode 100644
index 0000000..2c69341
--- /dev/null
+++ b/Doc/library/packaging.fancy_getopt.rst
@@ -0,0 +1,75 @@
+:mod:`packaging.fancy_getopt` --- Wrapper around the getopt module
+==================================================================
+
+.. module:: packaging.fancy_getopt
+ :synopsis: Additional getopt functionality.
+
+
+.. warning::
+ This module is deprecated and will be replaced with :mod:`optparse`.
+
+This module provides a wrapper around the standard :mod:`getopt` module that
+provides the following additional features:
+
+* short and long options are tied together
+
+* options have help strings, so :func:`fancy_getopt` could potentially create a
+ complete usage summary
+
+* options set attributes of a passed-in object
+
+* boolean options can have "negative aliases" --- e.g. if :option:`--quiet` is
+ the "negative alias" of :option:`--verbose`, then :option:`--quiet` on the
+ command line sets *verbose* to false.
+
+.. function:: fancy_getopt(options, negative_opt, object, args)
+
+   Wrapper function. *options* is a list of ``(long_option, short_option,
+   help_string)`` 3-tuples as described in the constructor for
+   :class:`FancyGetopt`. *negative_opt* should be a dictionary mapping option
+   names to option names; both the key and the value should be in the
+   *options* list. *object* is an object which will be used to store values
+   (see the :meth:`getopt` method of the :class:`FancyGetopt` class). *args*
+   is the argument list; ``sys.argv[1:]`` is used if you pass ``None``.
+
+
+.. class:: FancyGetopt([option_table=None])
+
+   The *option_table* is a list of 3-tuples: ``(long_option, short_option,
+   help_string)``.
+
+   If an option takes an argument, its *long_option* should have ``'='``
+   appended; *short_option* should just be a single character, with no ``':'``
+   in any case. *short_option* should be ``None`` if a *long_option* doesn't
+   have a corresponding *short_option*. All option tuples must have long
+   options.
+
+The :class:`FancyGetopt` class provides the following methods:
+
+
+.. method:: FancyGetopt.getopt([args=None, object=None])
+
+ Parse command-line options in args. Store as attributes on *object*.
+
+ If *args* is ``None`` or not supplied, uses ``sys.argv[1:]``. If *object* is
+ ``None`` or not supplied, creates a new :class:`OptionDummy` instance, stores
+ option values there, and returns a tuple ``(args, object)``. If *object* is
+ supplied, it is modified in place and :func:`getopt` just returns *args*; in
+ both cases, the returned *args* is a modified copy of the passed-in *args* list,
+ which is left untouched.
+
+ .. TODO and args returned are?
+
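+   For example (a sketch; the option table and the arguments are
+   hypothetical)::
+
+      from packaging.fancy_getopt import FancyGetopt
+
+      options = [('verbose', 'v', 'run verbosely'),
+                 ('output=', 'o', 'write output to FILE')]
+      parser = FancyGetopt(options)
+      args, obj = parser.getopt(['-v', '--output=out.txt', 'input.txt'])
+      # args == ['input.txt']; obj.verbose and obj.output hold the values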
+
+.. method:: FancyGetopt.get_option_order()
+
+   Returns the list of ``(option, value)`` tuples processed by the previous
+   run of :meth:`getopt`. Raises :exc:`RuntimeError` if :meth:`getopt` hasn't
+   been called yet.
+
+
+.. method:: FancyGetopt.generate_help([header=None])
+
+ Generate help text (a list of strings, one per suggested line of output) from
+ the option table for this :class:`FancyGetopt` object.
+
+   If supplied, the *header* is included at the top of the generated help.
diff --git a/Doc/library/packaging.install.rst b/Doc/library/packaging.install.rst
new file mode 100644
index 0000000..3e00750
--- /dev/null
+++ b/Doc/library/packaging.install.rst
@@ -0,0 +1,112 @@
+:mod:`packaging.install` --- Installation tools
+===============================================
+
+.. module:: packaging.install
+ :synopsis: Download and installation building blocks
+
+
+Packaging provides a set of tools to deal with downloads and installation of
+distributions. Their role is to download the distribution from indexes, resolve
+the dependencies, and provide a safe way to install distributions. An operation
+that fails will cleanly roll back, not leave half-installed distributions on the
+system. Here's the basic process followed:
+
+#. Move all distributions that will be removed to a temporary location.
+
+#. Install all the distributions that will be installed in a temporary location.
+
+#. If the installation fails, move the saved distributions back to their
+ location and delete the installed distributions.
+
+#. Otherwise, move the installed distributions to the right location and delete
+ the temporary locations.
+
+This is a higher-level module built on :mod:`packaging.database` and
+:mod:`packaging.pypi`.
+
+
+Public functions
+----------------
+
+.. function:: get_infos(requirements, index=None, installed=None, \
+ prefer_final=True)
+
+ Return information about what's going to be installed and upgraded.
+ *requirements* is a string containing the requirements for this
+ project, for example ``'FooBar 1.1'`` or ``'BarBaz (<1.2)'``.
+
+ .. XXX are requirements comma-separated?
+
+ If you want to use another index than the main PyPI, give its URI as *index*
+ argument.
+
+   *installed* is a list of already installed distributions used to find
+   satisfied dependencies, obsoleted distributions and possible conflicts.
+
+ By default, alpha, beta and candidate versions are not picked up. Set
+ *prefer_final* to false to accept them too.
+
+ The results are returned in a dictionary containing all the information
+ needed to perform installation of the requirements with the
+ :func:`install_from_infos` function:
+
+      >>> get_infos("FooBar (<=1.2)")
+      {'install': [<FooBar 1.1>], 'remove': [], 'conflict': []}
+
+ .. TODO should return tuple or named tuple, not dict
+ .. TODO use "predicate" or "requirement" consistently in version and here
+ .. FIXME "info" cannot be plural in English, s/infos/info/
+
+
+.. function:: install(project)
+
+
+.. function:: install_dists(dists, path, paths=None)
+
+ Safely install all distributions provided in *dists* into *path*. *paths* is
+ a list of paths where already-installed distributions will be looked for to
+ find satisfied dependencies and conflicts (default: :data:`sys.path`).
+ Returns a list of installed dists.
+
+ .. FIXME dists are instances of what?
+
+
+.. function:: install_from_infos(install_path=None, install=[], remove=[], \
+ conflicts=[], paths=None)
+
+ Safely install and remove given distributions. This function is designed to
+ work with the return value of :func:`get_infos`: *install*, *remove* and
+   *conflicts* should be lists of distributions returned by :func:`get_infos`.
+ If *install* is not empty, *install_path* must be given to specify the path
+ where the distributions should be installed. *paths* is a list of paths
+ where already-installed distributions will be looked for (default:
+ :data:`sys.path`).
+
+ This function is a very basic installer; if *conflicts* is not empty, the
+ system will be in a conflicting state after the function completes. It is a
+ building block for more sophisticated installers with conflict resolution
+ systems.
+
+ .. TODO document typical value for install_path
+ .. TODO document integration with default schemes, esp. user site-packages
+
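+   A minimal sketch tying this function to :func:`get_infos` (the install
+   path is hypothetical, and real code would handle conflicts more
+   gracefully)::
+
+      from packaging.install import get_infos, install_from_infos
+
+      info = get_infos('FooBar (<=1.2)')
+      if info['conflict']:
+          raise SystemExit('conflicting distributions, not installing')
+      install_from_infos(install_path='/opt/site-packages',
+                         install=info['install'],
+                         remove=info['remove'])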
+
+.. function:: install_local_project(path)
+
+ Install a distribution from a source directory, which must contain either a
+ Packaging-compliant :file:`setup.cfg` file or a legacy Distutils
+ :file:`setup.py` script (in which case Distutils will be used under the hood
+ to perform the installation).
+
+
+.. function:: remove(project_name, paths=None, auto_confirm=True)
+
+ Remove one distribution from the system.
+
+ .. FIXME this is the only function using "project" instead of dist/release
+
+..
+ Example usage
+ --------------
+
+   Get the scheme of what's going to be installed if we install "foobar":
diff --git a/Doc/library/packaging.metadata.rst b/Doc/library/packaging.metadata.rst
new file mode 100644
index 0000000..332d69d
--- /dev/null
+++ b/Doc/library/packaging.metadata.rst
@@ -0,0 +1,122 @@
+:mod:`packaging.metadata` --- Metadata handling
+===============================================
+
+.. module:: packaging.metadata
+ :synopsis: Class holding the metadata of a release.
+
+
+.. TODO use sphinx-autogen to generate basic doc from the docstrings
+
+.. class:: Metadata
+
+ This class can read and write metadata files complying with any of the
+ defined versions: 1.0 (:PEP:`241`), 1.1 (:PEP:`314`) and 1.2 (:PEP:`345`). It
+ implements methods to parse Metadata files and write them, and a mapping
+ interface to its contents.
+
+ The :PEP:`345` implementation supports the micro-language for the environment
+ markers, and displays warnings when versions that are supposed to be
+ :PEP:`386`-compliant are violating the specification.
+
+
+Reading metadata
+----------------
+
+The :class:`Metadata` class can be instantiated
+with the path of the metadata file, and provides a dict-like interface to the
+values::
+
+ >>> from packaging.metadata import Metadata
+ >>> metadata = Metadata('PKG-INFO')
+ >>> metadata.keys()[:5]
+ ('Metadata-Version', 'Name', 'Version', 'Platform', 'Supported-Platform')
+ >>> metadata['Name']
+ 'CLVault'
+ >>> metadata['Version']
+ '0.5'
+ >>> metadata['Requires-Dist']
+ ["pywin32; sys.platform == 'win32'", "Sphinx"]
+
+
+The fields that support environment markers can be automatically ignored if
+the object is instantiated using the ``platform_dependent`` option. In that
+case, :class:`Metadata` will interpret the markers and automatically remove
+the fields that are not compliant with the running environment. Here's an
+example under Mac OS X; the win32 dependency we saw earlier is ignored::
+
+ >>> from packaging.metadata import Metadata
+ >>> metadata = Metadata('PKG-INFO', platform_dependent=True)
+ >>> metadata['Requires-Dist']
+ ['Sphinx']
+
+
+If you want to provide your own execution context, say to test the metadata
+under a particular environment that is not the current one, you can pass
+your own values in the ``execution_context`` option, a dict that may contain
+one or more keys of the context the micro-language expects.
+
+Here's an example, simulating a win32 environment::
+
+ >>> from packaging.metadata import Metadata
+ >>> context = {'sys.platform': 'win32'}
+ >>> metadata = Metadata('PKG-INFO', platform_dependent=True,
+ ... execution_context=context)
+ ...
+ >>> metadata['Requires-Dist'] = ["pywin32; sys.platform == 'win32'",
+ ... "Sphinx"]
+ ...
+ >>> metadata['Requires-Dist']
+ ['pywin32', 'Sphinx']
+
+
+Writing metadata
+----------------
+
+Writing metadata can be done using the ``write`` method::
+
+ >>> metadata.write('/to/my/PKG-INFO')
+
+The class will pick the best version for the metadata, depending on the values
+provided. If all the values provided exist in all versions, the class will
+use :attr:`PKG_INFO_PREFERRED_VERSION`. It is set by default to 1.0, the most
+widespread version.
+
+
+Conflict checking and best version
+----------------------------------
+
+Some fields in :PEP:`345` have to comply with the version number specification
+defined in :PEP:`386`. When they don't comply, a warning is emitted::
+
+ >>> from packaging.metadata import Metadata
+ >>> metadata = Metadata()
+ >>> metadata['Requires-Dist'] = ['Funky (Groovie)']
+ "Funky (Groovie)" is not a valid predicate
+ >>> metadata['Requires-Dist'] = ['Funky (1.2)']
+
+See also :mod:`packaging.version`.
+
+
+.. TODO talk about check()
+
+
+:mod:`packaging.markers` --- Environment markers
+================================================
+
+.. module:: packaging.markers
+ :synopsis: Micro-language for environment markers
+
+
+This is an implementation of environment markers `as defined in PEP 345
+<http://www.python.org/dev/peps/pep-0345/#environment-markers>`_. It is used
+for some metadata fields.
+
+.. function:: interpret(marker, execution_context=None)
+
+ Interpret a marker and return a boolean result depending on the environment.
+ Example:
+
+ >>> interpret("python_version > '1.0'")
+ True
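+
+ A marker can also be evaluated against a custom context rather than the
+ running environment; the keys follow the :PEP:`345` micro-language (a
+ hedged sketch)::
+
+ >>> context = {'sys.platform': 'win32'}
+ >>> interpret("sys.platform == 'win32'", context)
+ True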
diff --git a/Doc/library/packaging.pypi.dist.rst b/Doc/library/packaging.pypi.dist.rst
new file mode 100644
index 0000000..aaaaab7
--- /dev/null
+++ b/Doc/library/packaging.pypi.dist.rst
@@ -0,0 +1,114 @@
+:mod:`packaging.pypi.dist` --- Classes representing query results
+=================================================================
+
+.. module:: packaging.pypi.dist
+ :synopsis: Classes representing the results of queries to indexes.
+
+
+Information coming from the indexes is held in instances of the classes defined
+in this module.
+
+Keep in mind that each project (e.g. FooBar) can have several releases
+(e.g. 1.1, 1.2, 1.3), and each of these releases can be provided in multiple
+distributions (e.g. a source distribution, a binary one, etc.).
+
+
+ReleaseInfo
+-----------
+
+Each release has a project name, version, metadata, and related distributions.
+
+This information is stored in :class:`ReleaseInfo`
+objects.
+
+.. class:: ReleaseInfo
+
+
+DistInfo
+---------
+
+:class:`DistInfo` is a simple class that contains
+information related to distributions; mainly the URLs where distributions
+can be found.
+
+.. class:: DistInfo
+
+
+ReleasesList
+------------
+
+The :mod:`~packaging.pypi.dist` module provides a class which works
+with lists of :class:`ReleaseInfo` objects;
+it is used to filter and order results.
+
+.. class:: ReleasesList
+
+
+Example usage
+-------------
+
+Build a list of releases and order them
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Assuming we have a list of releases::
+
+ >>> from packaging.pypi.dist import ReleasesList, ReleaseInfo
+ >>> fb10 = ReleaseInfo("FooBar", "1.0")
+ >>> fb11 = ReleaseInfo("FooBar", "1.1")
+ >>> fb11a = ReleaseInfo("FooBar", "1.1a1")
+ >>> releases = ReleasesList("FooBar", [fb11, fb11a, fb10])
+ >>> releases.sort_releases()
+ >>> releases.get_versions()
+ ['1.1', '1.1a1', '1.0']
+ >>> releases.add_release("1.2a1")
+ >>> releases.get_versions()
+ ['1.1', '1.1a1', '1.0', '1.2a1']
+ >>> releases.sort_releases()
+ >>> releases.get_versions()
+ ['1.2a1', '1.1', '1.1a1', '1.0']
+ >>> releases.sort_releases(prefer_final=True)
+ >>> releases.get_versions()
+ ['1.1', '1.0', '1.2a1', '1.1a1']
+
+
+Add distribution related information to releases
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+It's easy to add distribution information to releases::
+
+ >>> from packaging.pypi.dist import ReleasesList, ReleaseInfo
+ >>> r = ReleaseInfo("FooBar", "1.0")
+ >>> r.add_distribution("sdist", url="http://example.org/foobar-1.0.tar.gz")
+ >>> r.dists
+ {'sdist': FooBar 1.0 sdist}
+ >>> r['sdist'].url
+ {'url': 'http://example.org/foobar-1.0.tar.gz', 'hashname': None, 'hashval':
+ None, 'is_external': True}
+
+
+Getting attributes from the dist objects
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+To abstract querying information returned from the indexes, attributes and
+release information can be retrieved directly from dist objects.
+
+For instance, if you have a release instance that does not contain the metadata
+attribute, it can be fetched by using the "fetch_metadata" method::
+
+ >>> r = ReleaseInfo("FooBar", "1.1")
+ >>> print(r.metadata)
+ None
+ >>> r.fetch_metadata()
+ <Metadata for FooBar 1.1>
+
+.. XXX add proper roles to these constructs
+
+
+It's possible to retrieve a project's releases (`fetch_releases`),
+metadata (`fetch_metadata`) and distributions (`fetch_distributions`) using
+a similar workflow.
+
+.. XXX what is possible?
+
+Internally, this is possible because while retrieving information about
+projects, releases or distributions, a reference to the client used is
+stored, which can be accessed using the objects' `_index` attribute.
diff --git a/Doc/library/packaging.pypi.rst b/Doc/library/packaging.pypi.rst
new file mode 100644
index 0000000..14602ce
--- /dev/null
+++ b/Doc/library/packaging.pypi.rst
@@ -0,0 +1,74 @@
+:mod:`packaging.pypi` --- Interface to projects indexes
+=======================================================
+
+.. module:: packaging.pypi
+ :synopsis: Low-level and high-level APIs to query projects indexes.
+
+
+Packaging queries PyPI to get information about projects or download them. The
+low-level facilities used internally are also part of the public API designed to
+be used by other tools.
+
+The :mod:`packaging.pypi` package provides those facilities, which can be
+used to access information about Python projects registered at indexes, the
+main one being PyPI, located at http://pypi.python.org/.
+
+There are two ways to retrieve data from these indexes: a screen-scraping
+interface called the "simple API", and XML-RPC. The first one uses HTML pages
+located under http://pypi.python.org/simple/, the second one makes XML-RPC
+requests to http://pypi.python.org/pypi/. All functions and classes also work
+with other indexes such as mirrors, which typically implement only the simple
+interface.
+
+Packaging provides a class that wraps both APIs to provide full query and
+download functionality: :class:`packaging.pypi.client.ClientWrapper`. If you
+want more control, you can use the underlying classes
+:class:`packaging.pypi.simple.Crawler` and :class:`packaging.pypi.xmlrpc.Client`
+to connect to one specific interface.
+
+
+:mod:`packaging.pypi.client` --- High-level query API
+=====================================================
+
+.. module:: packaging.pypi.client
+ :synopsis: Wrapper around :mod:`packaging.pypi.xmlrpc` and
+ :mod:`packaging.pypi.simple` to query indexes.
+
+
+This module provides a high-level API to query indexes and search
+for releases and distributions. The aim of this module is to choose the best
+way to query the API automatically, either using XML-RPC or the simple index,
+with a preference toward the latter.
+
+.. class:: ClientWrapper
+
+ Instances of this class will use the simple interface or XML-RPC requests to
+ query indexes and return :class:`packaging.pypi.dist.ReleaseInfo` and
+ :class:`packaging.pypi.dist.ReleasesList` objects.
+
+ .. method:: find_projects
+
+ .. method:: get_release
+
+ .. method:: get_releases
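+
+ A short usage sketch (hedged; the project name and the returned release
+ are illustrative)::
+
+ >>> from packaging.pypi.client import ClientWrapper
+ >>> client = ClientWrapper()
+ >>> client.get_release("FooBar (<= 1.2)")
+ <ReleaseInfo "FooBar 1.1">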
+
+
+:mod:`packaging.pypi.base` --- Base class for index crawlers
+============================================================
+
+.. module:: packaging.pypi.base
+ :synopsis: Base class used to implement crawlers.
+
+
+.. class:: BaseClient(prefer_final, prefer_source)
+
+ Base class containing common methods for the index crawlers or clients. One
+ method is currently defined:
+
+ .. method:: download_distribution(requirements, temp_path=None, \
+ prefer_source=None, prefer_final=None)
+
+ Download a distribution from the last release according to the
+ requirements. If *temp_path* is provided, download to this path,
+ otherwise, create a temporary directory for the download. If a release is
+ found, the full path to the downloaded file is returned.
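+
+ A hedged sketch, using the simple-interface crawler documented below as
+ the concrete client (the requirement and returned path are illustrative)::
+
+ >>> from packaging.pypi.simple import Crawler
+ >>> crawler = Crawler()
+ >>> crawler.download_distribution("FooBar (<= 1.2)")
+ '/tmp/tmpXYZ/FooBar-1.1.tar.gz'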
diff --git a/Doc/library/packaging.pypi.simple.rst b/Doc/library/packaging.pypi.simple.rst
new file mode 100644
index 0000000..f579b18
--- /dev/null
+++ b/Doc/library/packaging.pypi.simple.rst
@@ -0,0 +1,218 @@
+:mod:`packaging.pypi.simple` --- Crawler using the PyPI "simple" interface
+==========================================================================
+
+.. module:: packaging.pypi.simple
+ :synopsis: Crawler using the screen-scraping "simple" interface to fetch info
+ and distributions.
+
+
+The class provided by :mod:`packaging.pypi.simple` can access project indexes
+and provide useful information about distributions. PyPI, other indexes and
+local indexes are supported.
+
+You should use this module to search for distributions by name and version,
+to process external index pages, and to download distributions. It is not
+suited for queries requiring overly long index processing (like "finding all
+distributions with a specific version, no matter the name"); use
+:mod:`packaging.pypi.xmlrpc` for that.
+
+
+API
+---
+
+.. class:: Crawler(index_url=DEFAULT_SIMPLE_INDEX_URL, \
+ prefer_final=False, prefer_source=True, \
+ hosts=('*',), follow_externals=False, \
+ mirrors_url=None, mirrors=None, timeout=15, \
+ mirrors_max_tries=0)
+
+ *index_url* is the address of the index to use for requests.
+
+ The first two parameters control the query results. *prefer_final*
+ indicates whether a final version (not alpha, beta or candidate) is to be
+ preferred over a newer but non-final version (for example, whether to pick
+ up 1.0 over 2.0a3). It is used only for queries that don't give a version
+ argument. Likewise, *prefer_source* tells whether to prefer a source
+ distribution over a binary one, if no distribution argument was provided.
+
+ Other parameters are related to external links (that is links that go
+ outside the simple index): *hosts* is a list of hosts allowed to be
+ processed if *follow_externals* is true (default behavior is to follow all
+ hosts), *follow_externals* enables or disables following external links
+ (default is false, meaning disabled).
+
+ The remaining parameters are related to the mirroring infrastructure
+ defined in :PEP:`381`. *mirrors_url* gives a URL to look on for DNS
+ records giving mirror addresses; *mirrors* is a list of mirror URLs (see
+ the PEP). If both *mirrors* and *mirrors_url* are given, *mirrors_url*
+ will only be used if *mirrors* is set to ``None``. *timeout* is the time
+ (in seconds) to wait before considering a URL has timed out;
+ *mirrors_max_tries"* is the number of times to try requesting informations
+ on mirrors before switching.
+
+ The following methods are defined:
+
+ .. method:: get_distributions(project_name, version)
+
+ Return the distributions found in the index for the given release.
+
+ .. method:: get_metadata(project_name, version)
+
+ Return the metadata found on the index for this project name and
+ version. Currently downloads and unpacks a distribution to read the
+ PKG-INFO file.
+
+ .. method:: get_release(requirements, prefer_final=None)
+
+ Return one release that fulfills the given requirements.
+
+ .. method:: get_releases(requirements, prefer_final=None, force_update=False)
+
+ Search for releases and return a
+ :class:`~packaging.pypi.dist.ReleasesList` object containing the
+ results.
+
+ .. method:: search_projects(name=None)
+
+ Search the index for projects containing the given name and return a
+ list of matching names.
+
+ See also the base class :class:`packaging.pypi.base.BaseClient` for inherited
+ methods.
+
+
+.. data:: DEFAULT_SIMPLE_INDEX_URL
+
+ The address used by default by the crawler class. It is currently
+ ``'http://a.pypi.python.org/simple/'``, the main PyPI installation.
+
+
+
+
+Usage Examples
+---------------
+
+To help you understand how to use the `Crawler` class, here are some basic
+usage examples.
+
+Request the simple index to get a specific distribution
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Suppose you want to scan an index to get a list of releases for
+the "foobar" project. You can use the "get_releases" method for that.
+The get_releases method will browse the project page, and return
+:class:`ReleaseInfo` objects for each download link it finds. ::
+
+ >>> from packaging.pypi.simple import Crawler
+ >>> crawler = Crawler()
+ >>> crawler.get_releases("FooBar")
+ [<ReleaseInfo "Foobar 1.1">, <ReleaseInfo "Foobar 1.2">]
+
+
+Note that you can also ask the client for specific versions, using version
+specifiers (described in `PEP 345
+<http://www.python.org/dev/peps/pep-0345/#version-specifiers>`_)::
+
+ >>> client.get_releases("FooBar < 1.2")
+ [<ReleaseInfo "FooBar 1.1">, ]
+
+
+`get_releases` returns a list of :class:`ReleaseInfo` objects, but you can
+also get the single best release that fulfills your requirements, using
+"get_release"::
+
+ >>> client.get_release("FooBar < 1.2")
+ <ReleaseInfo "FooBar 1.1">
+
+
+Download distributions
+^^^^^^^^^^^^^^^^^^^^^^
+
+As it can get the URLs of distributions provided by PyPI, the `Crawler`
+client can also download the distributions and put them for you in a
+temporary destination::
+
+ >>> client.download("foobar")
+ /tmp/temp_dir/foobar-1.2.tar.gz
+
+
+You can also specify the directory you want to download to::
+
+ >>> client.download("foobar", "/path/to/my/dir")
+ /path/to/my/dir/foobar-1.2.tar.gz
+
+
+While downloading, the MD5 hash of the archive is checked; if it does not
+match, the download is tried a second time, and if it fails again,
+`MD5HashDoesNotMatchError` is raised.
+
+Internally, it is not the Crawler that downloads the distributions, but the
+`DistributionInfo` class. Please refer to its documentation for more details.
+
+
+Following PyPI external links
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The default behavior for packaging is to *not* follow the links provided
+by HTML pages in the "simple index" when looking for distribution
+downloads.
+
+It's possible to tell the crawler to follow external links by setting the
+`follow_externals` attribute, at instantiation time or afterwards::
+
+ >>> client = Crawler(follow_externals=True)
+
+or ::
+
+ >>> client = Crawler()
+ >>> client.follow_externals = True
+
+
+Working with external indexes, and mirrors
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The default `Crawler` behavior is to rely on the Python Package Index stored
+at PyPI (http://pypi.python.org/simple).
+
+If you need to work with a local index, or with private indexes, you can
+specify it using the *index_url* parameter::
+
+ >>> client = Crawler(index_url="file://filesystem/path/")
+
+or ::
+
+ >>> client = Crawler(index_url="http://some.specific.url/")
+
+
+You can also specify mirrors to fall back on in case the first *index_url*
+you provided does not respond, or does not respond correctly. The default
+behavior for `Crawler` is to use the list provided by Python.org DNS records,
+as described in :PEP:`381` about the mirroring infrastructure.
+
+If you don't want to rely on these, you can specify the list of mirrors you
+want to try via the `mirrors` parameter. It's a simple iterable::
+
+ >>> mirrors = ["http://first.mirror", "http://second.mirror"]
+ >>> client = Crawler(mirrors=mirrors)
+
+
+Searching in the simple index
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+It's possible to search for projects with specific names in the package index.
+Assuming you want to find all projects containing the "distutils" keyword::
+
+ >>> c.search_projects("distutils")
+ [<Project "collective.recipe.distutils">, <Project "Distutils">, <Project
+ "Packaging">, <Project "distutilscross">, <Project "lpdistutils">, <Project
+ "taras.recipe.distutils">, <Project "zerokspot.recipe.distutils">]
+
+
+You can also search for projects whose names start or end with a specific
+text, using a wildcard::
+
+ >>> c.search_projects("distutils*")
+ [<Project "Distutils">, <Project "Packaging">, <Project "distutilscross">]
+
+ >>> c.search_projects("*distutils")
+ [<Project "collective.recipe.distutils">, <Project "Distutils">, <Project
+ "lpdistutils">, <Project "taras.recipe.distutils">, <Project
+ "zerokspot.recipe.distutils">]
diff --git a/Doc/library/packaging.pypi.xmlrpc.rst b/Doc/library/packaging.pypi.xmlrpc.rst
new file mode 100644
index 0000000..5242e4c
--- /dev/null
+++ b/Doc/library/packaging.pypi.xmlrpc.rst
@@ -0,0 +1,143 @@
+:mod:`packaging.pypi.xmlrpc` --- Crawler using the PyPI XML-RPC interface
+=========================================================================
+
+.. module:: packaging.pypi.xmlrpc
+ :synopsis: Client using XML-RPC requests to fetch info and distributions.
+
+
+Indexes can be queried using XML-RPC calls, and Packaging provides a simple
+way to interface with XML-RPC.
+
+You should **use** XML-RPC when:
+
+* Searching the index for projects **on fields other than project
+ names**. For instance, you can search for projects based on the
+ author_email field.
+* Searching all the versions that have existed for a project.
+* Retrieving metadata information from releases or distributions.
+
+
+You should **avoid using** XML-RPC method calls when:
+
+* Retrieving the last version of a project.
+* Getting projects with a specific name and version.
+* The simple index can match your needs.
+
+
+When dealing with indexes, keep in mind that the index queries will always
+return :class:`packaging.pypi.dist.ReleaseInfo` and
+:class:`packaging.pypi.dist.ReleasesList` objects.
+
+Some methods here share a common API with the ones you can find in
+:mod:`packaging.pypi.simple`; internally, :class:`Client` builds on the
+same base class (see :mod:`packaging.pypi.base`).
+
+
+API
+---
+
+.. class:: Client
+
+
+Usage examples
+--------------
+
+The use cases described here are those that are not common to the other
+clients. If you want to see all the methods, please refer to the API or to
+the usage examples described in :class:`packaging.pypi.client.ClientWrapper`.
+
+
+Finding releases
+^^^^^^^^^^^^^^^^
+
+It's a common use case to search for "things" within the index. We can
+basically search for projects by their name, which is the most common way for
+users (e.g. "give me the latest version of the FooBar project").
+
+This can be accomplished using the following syntax::
+
+ >>> client = xmlrpc.Client()
+ >>> client.get_release("Foobar (<= 1.3))
+ <FooBar 1.2.1>
+ >>> client.get_releases("FooBar (<= 1.3)")
+ [FooBar 1.1, FooBar 1.1.1, FooBar 1.2, FooBar 1.2.1]
+
+
+We can also search on specific fields::
+
+ >>> client.search_projects(field=value)
+
+
+You can specify the operator to use; the default is "or"::
+
+ >>> client.search_projects(field=value, operator="and")
+
+
+The specific fields you can search are (a combined example follows the list):
+
+* name
+* version
+* author
+* author_email
+* maintainer
+* maintainer_email
+* home_page
+* license
+* summary
+* description
+* keywords
+* platform
+* download_url
+
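+For example, to find projects whose name contains "cache" and whose summary
+mentions "disk" (the field values here are illustrative)::
+
+ >>> client.search_projects(name="cache", summary="disk", operator="and")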
+
+Getting metadata information
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+XML-RPC is a preferred way to retrieve metadata information from indexes.
+It's really simple to do so::
+
+ >>> client = xmlrpc.Client()
+ >>> client.get_metadata("FooBar", "1.1")
+ <ReleaseInfo FooBar 1.1>
+
+
+Assuming we already have a :class:`packaging.pypi.dist.ReleaseInfo` object defined,
+it's possible to pass it to the xmlrpc client to retrieve and complete its
+metadata::
+
+ >>> foobar11 = ReleaseInfo("FooBar", "1.1")
+ >>> client = xmlrpc.Client()
+ >>> returned_release = client.get_metadata(release=foobar11)
+ >>> returned_release
+ <ReleaseInfo FooBar 1.1>
+
+
+Get all the releases of a project
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+To retrieve all the releases for a project, you can build them using
+`get_releases`::
+
+ >>> client = xmlrpc.Client()
+ >>> client.get_releases("FooBar")
+ [<ReleaseInfo FooBar 0.9>, <ReleaseInfo FooBar 1.0>, <ReleaseInfo FooBar 1.1>]
+
+
+Get information about distributions
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Indexes have information about projects, releases **and** distributions.
+If you're not familiar with those, please refer to the documentation of
+:mod:`packaging.pypi.dist`.
+
+It's possible to retrieve information about distributions, e.g. "what are the
+existing distributions for this release? How can I retrieve them?"::
+
+ >>> client = xmlrpc.Client()
+ >>> release = client.get_distributions("FooBar", "1.1")
+ >>> release.dists
+ {'sdist': <FooBar 1.1 sdist>, 'bdist': <FooBar 1.1 bdist>}
+
+As you see, this does not return a list of distributions, but a release,
+because a release can be used like a list of distributions.
diff --git a/Doc/library/packaging.rst b/Doc/library/packaging.rst
new file mode 100644
index 0000000..c6bff47
--- /dev/null
+++ b/Doc/library/packaging.rst
@@ -0,0 +1,75 @@
+:mod:`packaging` --- Packaging support
+======================================
+
+.. module:: packaging
+ :synopsis: Packaging system and building blocks for other packaging systems.
+.. sectionauthor:: Fred L. Drake, Jr. <fdrake@acm.org>, distutils and packaging
+ contributors
+
+
+The :mod:`packaging` package provides support for building, packaging,
+distributing and installing additional projects into a Python installation.
+Projects may include Python modules, extension modules, packages and scripts.
+:mod:`packaging` also provides building blocks for other packaging systems
+that are not tied to the command system.
+
+This manual is the reference documentation for those standalone building
+blocks and for extending Packaging. If you're looking for the user-centric
+guides to install a project or package your own code, head to `See also`__.
+
+
+Building blocks
+---------------
+
+.. toctree::
+ :maxdepth: 2
+
+ packaging-misc
+ packaging.version
+ packaging.metadata
+ packaging.database
+ packaging.depgraph
+ packaging.pypi
+ packaging.pypi.dist
+ packaging.pypi.simple
+ packaging.pypi.xmlrpc
+ packaging.install
+
+
+The command machinery
+---------------------
+
+.. toctree::
+ :maxdepth: 2
+
+ packaging.dist
+ packaging.command
+ packaging.compiler
+ packaging.fancy_getopt
+
+
+Other utilities
+----------------
+
+.. toctree::
+ :maxdepth: 2
+
+ packaging.util
+ packaging.tests.pypi_server
+
+.. XXX missing: compat config create (dir_util) run pypi.{base,mirrors}
+
+
+.. __:
+
+.. seealso::
+
+ :ref:`packaging-index`
+ The manual for developers of Python projects who want to package and
+ distribute them. This describes how to use :mod:`packaging` to make
+ projects easily found and added to an existing Python installation.
+
+ :ref:`packaging-install-index`
+ A user-centered manual which includes information on adding projects
+ into an existing Python installation. You do not need to be a Python
+ programmer to read this manual.
diff --git a/Doc/library/packaging.tests.pypi_server.rst b/Doc/library/packaging.tests.pypi_server.rst
new file mode 100644
index 0000000..f3b7720
--- /dev/null
+++ b/Doc/library/packaging.tests.pypi_server.rst
@@ -0,0 +1,105 @@
+:mod:`packaging.tests.pypi_server` --- PyPI mock server
+=======================================================
+
+.. module:: packaging.tests.pypi_server
+ :synopsis: Mock server used to test PyPI-related modules and commands.
+
+
+When you are testing code that works with Packaging, you might find these tools
+useful.
+
+
+The mock server
+---------------
+
+.. class:: PyPIServer
+
+ PyPIServer is a class that implements an HTTP server running in a separate
+ thread. All it does is record the requests for further inspection. The
+ recorded data is available in the ``requests`` attribute. The default
+ HTTP response can be overridden with the ``default_response_status``,
+ ``default_response_headers`` and ``default_response_data`` attributes.
+
+ By default, when accessing the server with URLs beginning with `/simple/`,
+ the server also records your requests, but will look for files under
+ the `/tests/pypiserver/simple/` path.
+
+ You can tell the server to serve static files for other paths. This could be
+ accomplished by using the `static_uri_paths` parameter, as below::
+
+ server = PyPIServer(static_uri_paths=["first_path", "second_path"])
+
+
+ You need to create the content that will be served under the
+ `/tests/pypiserver/default` path. If you want to serve content from another
+ place, you can also specify another filesystem path (which needs to be under
+ `tests/pypiserver/`). This will replace the default behavior of the server,
+ and it will not serve content from the `default` dir ::
+
+ server = PyPIServer(static_filesystem_paths=["path/to/your/dir"])
+
+
+ If you just need to add some paths to the existing ones, you can do as shown,
+ keeping in mind that the server will always try to load paths in reverse order
+ (e.g. here, try "another/super/path" then the default one) ::
+
+ server = PyPIServer(test_static_path="another/super/path")
+ server = PyPIServer("another/super/path")
+ # or
+ server.static_filesystem_paths.append("another/super/path")
+
+
+ As a result, when your tests need to use the PyPIServer, the best practice
+ for isolating test cases is to place the common files in the `default`
+ folder and to create a directory for each specific test case::
+
+ server = PyPIServer(static_filesystem_paths=["default", "test_pypi_server"],
+ static_uri_paths=["simple", "external"])
+
+
+Base class and decorator for tests
+----------------------------------
+
+.. class:: PyPIServerTestCase
+
+ ``PyPIServerTestCase`` is a test case class with setUp and tearDown methods that
+ take care of a single PyPIServer instance attached as a ``pypi`` attribute on
+ the test class. Use it as one of the base classes in your test case::
+
+
+ class UploadTestCase(PyPIServerTestCase):
+
+ def test_something(self):
+ cmd = self.prepare_command()
+ cmd.ensure_finalized()
+ cmd.repository = self.pypi.full_address
+ cmd.run()
+
+ environ, request_data = self.pypi.requests[-1]
+ self.assertEqual(request_data, EXPECTED_REQUEST_DATA)
+
+
+.. decorator:: use_pypi_server
+
+ You can also use a decorator for your tests, if you do not need the same
+ server instance across your whole test case. With it you can specify, for
+ each test method, some initialization parameters for the server.
+
+ For this, you need to add a `server` parameter to your method, like this::
+
+ class SampleTestCase(TestCase):
+
+ @use_pypi_server()
+ def test_something(self, server):
+ ...
+
+
+ The decorator will instantiate the server for you, and start and stop it
+ just before and after your method call. You can also pass arguments to the
+ server initializer, just like this::
+
+ class SampleTestCase(TestCase):
+
+ @use_pypi_server("test_case_name")
+ def test_something(self, server):
+ ...
diff --git a/Doc/library/packaging.util.rst b/Doc/library/packaging.util.rst
new file mode 100644
index 0000000..e628c32
--- /dev/null
+++ b/Doc/library/packaging.util.rst
@@ -0,0 +1,155 @@
+:mod:`packaging.util` --- Miscellaneous utility functions
+=========================================================
+
+.. module:: packaging.util
+ :synopsis: Miscellaneous utility functions.
+
+
+This module contains various helpers for the other modules.
+
+.. XXX a number of functions are missing, but the module may be split first
+ (it's ginormous right now, some things could go to compat for example)
+
+.. function:: get_platform()
+
+ Return a string that identifies the current platform. This is used mainly to
+ distinguish platform-specific build directories and platform-specific built
+ distributions. Typically includes the OS name and version and the
+ architecture (as supplied by 'os.uname()'), although the exact information
+ included depends on the OS; e.g. for IRIX the architecture isn't particularly
+ important (IRIX only runs on SGI hardware), but for Linux the kernel version
+ isn't particularly important.
+
+ Examples of returned values:
+
+ * ``linux-i586``
+ * ``linux-alpha``
+ * ``solaris-2.6-sun4u``
+ * ``irix-5.3``
+ * ``irix64-6.2``
+
+ For non-POSIX platforms, currently just returns ``sys.platform``.
+
+ For Mac OS X systems the OS version reflects the minimal version on which
+ binaries will run (that is, the value of ``MACOSX_DEPLOYMENT_TARGET``
+ during the build of Python), not the OS version of the current system.
+
+ For universal binary builds on Mac OS X the architecture value reflects
+ the universal binary status instead of the architecture of the current
+ processor. For 32-bit universal binaries the architecture is ``fat``,
+ for 64-bit universal binaries the architecture is ``fat64``, and
+ for 4-way universal binaries the architecture is ``universal``. Starting
+ from Python 2.7 and Python 3.2 the architecture ``fat3`` is used for
+ a 3-way universal build (ppc, i386, x86_64) and ``intel`` is used for
+ a universal build with the i386 and x86_64 architectures.
+
+ Examples of returned values on Mac OS X:
+
+ * ``macosx-10.3-ppc``
+
+ * ``macosx-10.3-fat``
+
+ * ``macosx-10.5-universal``
+
+ * ``macosx-10.6-intel``
+
+ .. XXX reinvention of platform module?
+
+
+.. function:: convert_path(pathname)
+
+ Return 'pathname' as a name that will work on the native filesystem, i.e.
+ split it on '/' and put it back together again using the current directory
+ separator. Needed because filenames in the setup script are always supplied
+ in Unix style, and have to be converted to the local convention before we
+ can actually use them in the filesystem. Raises :exc:`ValueError` on
+ non-Unix-ish systems if *pathname* either starts or ends with a slash.
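+
+ For instance, on Windows (an illustrative sketch)::
+
+ >>> convert_path('doc/images/logo.png')
+ 'doc\\images\\logo.png'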
+
+
+.. function:: change_root(new_root, pathname)
+
+ Return *pathname* with *new_root* prepended. If *pathname* is relative, this
+ is equivalent to ``os.path.join(new_root, pathname)``. Otherwise, it requires
+ making *pathname* relative and then joining the two, which is tricky on
+ DOS/Windows.
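+
+ For example, on a POSIX system (paths illustrative)::
+
+ >>> change_root('/tmp/stage', 'usr/lib/python')
+ '/tmp/stage/usr/lib/python'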
+
+
+.. function:: check_environ()
+
+ Ensure that 'os.environ' has all the environment variables we guarantee that
+ users can use in config files, command-line options, etc. Currently this
+ includes:
+
+ * :envvar:`HOME` - user's home directory (Unix only)
+ * :envvar:`PLAT` - description of the current platform, including hardware
+ and OS (see :func:`get_platform`)
+
+
+.. function:: find_executable(executable, path=None)
+
+ Search the path for a given executable name.
+
+
+.. function:: execute(func, args, msg=None, dry_run=False)
+
+ Perform some action that affects the outside world (for instance, writing to
+ the filesystem). Such actions are special because they are disabled by the
+ *dry_run* flag. This method takes care of all that bureaucracy for you;
+ all you have to do is supply the function to call and an argument tuple for
+ it (to embody the "external action" being performed), and an optional message
+ to print.
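+
+ A hedged sketch (the action and message are illustrative)::
+
+ >>> import os
+ >>> from packaging.util import execute
+ >>> execute(os.remove, ('build/stamp.txt',),
+ ... msg="removing build/stamp.txt", dry_run=True)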
+
+
+.. function:: newer(source, target)
+
+ Return true if *source* exists and is more recently modified than *target*,
+ or if *source* exists and *target* doesn't. Return false if both exist and
+ *target* is the same age or newer than *source*. Raise
+ :exc:`PackagingFileError` if *source* does not exist.
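+
+ A typical guard in a build step (a hedged sketch; the paths and the
+ ``regenerate`` helper are hypothetical)::
+
+ from packaging.util import newer
+ if newer('setup.cfg', 'PKG-INFO'): regenerate() # hypothetical rebuild step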
+
+
+.. function:: strtobool(val)
+
+ Convert a string representation of truth to true (1) or false (0).
+
+ True values are ``y``, ``yes``, ``t``, ``true``, ``on`` and ``1``; false
+ values are ``n``, ``no``, ``f``, ``false``, ``off`` and ``0``. Raises
+ :exc:`ValueError` if *val* is anything else.
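+
+ For example::
+
+ >>> strtobool('yes')
+ 1
+ >>> strtobool('off')
+ 0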
+
+
+.. function:: byte_compile(py_files, optimize=0, force=0, prefix=None, \
+ base_dir=None, dry_run=0, direct=None)
+
+ Byte-compile a collection of Python source files to either :file:`.pyc` or
+ :file:`.pyo` files in a :file:`__pycache__` subdirectory (see :pep:`3147`),
+ or to the same directory when using the distutils2 backport on Python
+ versions older than 3.2.
+
+ *py_files* is a list of files to compile; any files that don't end in
+ :file:`.py` are silently skipped. *optimize* must be one of the following:
+
+ * ``0`` - don't optimize (generate :file:`.pyc`)
+ * ``1`` - normal optimization (like ``python -O``)
+ * ``2`` - extra optimization (like ``python -OO``)
+
+ This function is independent from the running Python's :option:`-O` or
+ :option:`-B` options; it is fully controlled by the parameters passed in.
+
+ If *force* is true, all files are recompiled regardless of timestamps.
+
+ The source filename encoded in each :term:`bytecode` file defaults to the filenames
+ listed in *py_files*; you can modify these with *prefix* and *base_dir*.
+ *prefix* is a string that will be stripped off of each source filename, and
+ *base_dir* is a directory name that will be prepended (after *prefix* is
+ stripped). You can supply either or both (or neither) of *prefix* and
+ *base_dir*, as you wish.
+
+ If *dry_run* is true, doesn't actually do anything that would affect the
+ filesystem.
+
+ Byte-compilation is either done directly in this interpreter process with the
+ standard :mod:`py_compile` module, or indirectly by writing a temporary
+ script and executing it. Normally, you should let :func:`byte_compile`
+ figure out whether to use direct compilation or not (see the source for details).
+ The *direct* flag is used by the script generated in indirect mode; unless
+ you know what you're doing, leave it set to ``None``.
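+
+ A minimal sketch (the file names are illustrative)::
+
+ from packaging.util import byte_compile
+ byte_compile(['src/foo.py', 'src/bar.py'], optimize=1, force=True)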
diff --git a/Doc/library/packaging.version.rst b/Doc/library/packaging.version.rst
new file mode 100644
index 0000000..f36cdab
--- /dev/null
+++ b/Doc/library/packaging.version.rst
@@ -0,0 +1,104 @@
+:mod:`packaging.version` --- Version number classes
+===================================================
+
+.. module:: packaging.version
+ :synopsis: Classes that represent project version numbers.
+
+
+This module contains classes and functions useful to deal with version numbers.
+It's an implementation of version specifiers `as defined in PEP 345
+<http://www.python.org/dev/peps/pep-0345/#version-specifiers>`_.
+
+
+Version numbers
+---------------
+
+.. class:: NormalizedVersion(self, s, error_on_huge_major_num=True)
+
+ A specific version of a distribution, as described in PEP 345. *s* is a
+ string object containing the version number (for example ``'1.2b1'``),
+ *error_on_huge_major_num* a boolean specifying whether to consider an
+ apparent use of a year or full date as the major version number an error.
+
+ The rationale for the second argument is that there were projects using years
+ or full dates as version numbers, which could cause problems with some
+ packaging systems sorting.
+
+ Instances of this class can be compared and sorted::
+
+ >>> NormalizedVersion('1.2b1') < NormalizedVersion('1.2')
+ True
+
+ :class:`NormalizedVersion` is used internally by :class:`VersionPredicate` to
+ do its work.
+
+
+.. class:: IrrationalVersionError
+
+ Exception raised when an invalid string is given to
+ :class:`NormalizedVersion`.
+
+ >>> NormalizedVersion("irrational_version_number")
+ ...
+ IrrationalVersionError: irrational_version_number
+
+
+.. function:: suggest_normalized_version(s)
+
+ Before standardization in PEP 386, various schemes were in use. Packaging
+ provides a function to try to convert any string to a valid, normalized
+ version::
+
+ >>> suggest_normalized_version('2.1-rc1')
+ 2.1c1
+
+
+ If :func:`suggest_normalized_version` can't make sense of the given string,
+ it will return ``None``::
+
+ >>> print(suggest_normalized_version('not a version'))
+ None
+
+
+Version predicates
+------------------
+
+.. class:: VersionPredicate(predicate)
+
+ This class deals with the parsing of field values like
+ ``ProjectName (>=version)``.
+
+ .. method:: match(version)
+
+ Test if a version number matches the predicate:
+
+ >>> version = VersionPredicate("ProjectName (<1.2, >1.0)")
+ >>> version.match("1.2.1")
+ False
+ >>> version.match("1.1.1")
+ True
+
+
+Validation helpers
+------------------
+
+If you want to use :term:`LBYL`-style checks instead of instantiating the
+classes and catching :class:`IrrationalVersionError` and :class:`ValueError`,
+you can use these functions:
+
+.. function:: is_valid_version(predicate)
+
+ Check whether the given string is a valid version number. Example of valid
+ strings: ``'1.2'``, ``'4.2.0.dev4'``, ``'2.5.4.post2'``.
+
+
+.. function:: is_valid_versions(predicate)
+
+ Check whether the given string is a valid value for specifying multiple
+ versions, such as in the Requires-Python field. Example: ``'2.7, >=3.2'``.
+
+
+.. function:: is_valid_predicate(predicate)
+
+ Check whether the given string is a valid version predicate. Examples:
+ ``'some.project == 4.5, <= 4.7'``, ``'speciallib (> 1.0, != 1.4.2, < 2.0)'``.
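+
+For example (a quick sketch)::
+
+ >>> is_valid_version('1.2b1')
+ True
+ >>> is_valid_predicate('FooBar (>= 1.0, < 2.0)')
+ True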
diff --git a/Doc/library/pdb.rst b/Doc/library/pdb.rst
index 1e9de63..f4e37ac 100644
--- a/Doc/library/pdb.rst
+++ b/Doc/library/pdb.rst
@@ -38,6 +38,11 @@ of the debugger is::
> <string>(1)?()
(Pdb)
+.. versionchanged:: 3.3
+ Tab-completion via the :mod:`readline` module is available for commands and
+ command arguments, e.g. the current global and local names are offered as
+ arguments of the ``print`` command.
+
:file:`pdb.py` can also be invoked as a script to debug other scripts. For
example::
diff --git a/Doc/library/pickle.rst b/Doc/library/pickle.rst
index bf0a72e..1b85bfa 100644
--- a/Doc/library/pickle.rst
+++ b/Doc/library/pickle.rst
@@ -285,6 +285,29 @@ The :mod:`pickle` module exports two classes, :class:`Pickler` and
See :ref:`pickle-persistent` for details and examples of uses.
+ .. attribute:: dispatch_table
+
+ A pickler object's dispatch table is a registry of *reduction
+ functions* of the kind which can be declared using
+ :func:`copyreg.pickle`. It is a mapping whose keys are classes
+ and whose values are reduction functions. A reduction function
+ takes a single argument of the associated class and should
+ conform to the same interface as a :meth:`~object.__reduce__`
+ method.
+
+ By default, a pickler object will not have a
+ :attr:`dispatch_table` attribute, and it will instead use the
+ global dispatch table managed by the :mod:`copyreg` module.
+ However, to customize the pickling for a specific pickler object
+ one can set the :attr:`dispatch_table` attribute to a dict-like
+ object. Alternatively, if a subclass of :class:`Pickler` has a
+ :attr:`dispatch_table` attribute then this will be used as the
+ default dispatch table for instances of that class.
+
+ See :ref:`pickle-dispatch` for usage examples.
+
+ .. versionadded:: 3.3
+
.. attribute:: fast
Deprecated. Enable fast mode if set to a true value. The fast mode
@@ -575,6 +598,44 @@ pickle external objects by reference.
.. literalinclude:: ../includes/dbpickle.py
+.. _pickle-dispatch:
+
+Dispatch Tables
+^^^^^^^^^^^^^^^
+
+If one wants to customize pickling of some classes without disturbing
+any other code which depends on pickling, then one can create a
+pickler with a private dispatch table.
+
+The global dispatch table managed by the :mod:`copyreg` module is
+available as :data:`copyreg.dispatch_table`. Therefore, one may
+choose to use a modified copy of :data:`copyreg.dispatch_table` as a
+private dispatch table.
+
+For example ::
+
+ f = io.BytesIO()
+ p = pickle.Pickler(f)
+ p.dispatch_table = copyreg.dispatch_table.copy()
+ p.dispatch_table[SomeClass] = reduce_SomeClass
+
+creates an instance of :class:`pickle.Pickler` with a private dispatch
+table which handles the ``SomeClass`` class specially. Alternatively,
+the code ::
+
+ class MyPickler(pickle.Pickler):
+ dispatch_table = copyreg.dispatch_table.copy()
+ dispatch_table[SomeClass] = reduce_SomeClass
+ f = io.BytesIO()
+ p = MyPickler(f)
+
+does the same, but all instances of ``MyPickler`` will by default
+share the same dispatch table. The equivalent code using the
+:mod:`copyreg` module is ::
+
+ copyreg.pickle(SomeClass, reduce_SomeClass)
+ f = io.BytesIO()
+ p = pickle.Pickler(f)
.. _pickle-state:
diff --git a/Doc/library/platform.rst b/Doc/library/platform.rst
index 85eca9a..938c270 100644
--- a/Doc/library/platform.rst
+++ b/Doc/library/platform.rst
@@ -214,6 +214,10 @@ Win95/98 specific
preferring :func:`win32pipe.popen`. On Windows NT, :func:`win32pipe.popen`
should work; on Windows 9x it hangs due to bugs in the MS C library.
+ .. deprecated:: 3.3
+ This function is obsolete. Use the :mod:`subprocess` module. Check
+ especially the :ref:`subprocess-replacements` section.
+
Mac OS Platform
---------------
diff --git a/Doc/library/python.rst b/Doc/library/python.rst
index b67fbfc..07eadb4 100644
--- a/Doc/library/python.rst
+++ b/Doc/library/python.rst
@@ -25,4 +25,5 @@ overview:
inspect.rst
site.rst
fpectl.rst
+ packaging.rst
distutils.rst
diff --git a/Doc/library/random.rst b/Doc/library/random.rst
index 31cb945..2b10e6e 100644
--- a/Doc/library/random.rst
+++ b/Doc/library/random.rst
@@ -43,6 +43,12 @@ The :mod:`random` module also provides the :class:`SystemRandom` class which
uses the system function :func:`os.urandom` to generate random numbers
from sources provided by the operating system.
+.. warning::
+
+ The generators of the :mod:`random` module should not be used for security
+ purposes. Use :func:`ssl.RAND_bytes` if you require a cryptographically
+ secure pseudorandom number generator.
+
Bookkeeping functions:
diff --git a/Doc/library/re.rst b/Doc/library/re.rst
index 5a14408..c4ecd46 100644
--- a/Doc/library/re.rst
+++ b/Doc/library/re.rst
@@ -681,9 +681,12 @@ form.
.. function:: escape(string)
- Return *string* with all non-alphanumerics backslashed; this is useful if you
- want to match an arbitrary literal string that may have regular expression
- metacharacters in it.
+ Escape all the characters in pattern except ASCII letters, numbers and ``'_'``.
+ This is useful if you want to match an arbitrary literal string that may
+ have regular expression metacharacters in it.
+
+ .. versionchanged:: 3.3
+ The ``'_'`` character is no longer escaped.
.. function:: purge()
diff --git a/Doc/library/readline.rst b/Doc/library/readline.rst
index ab55197..1134619 100644
--- a/Doc/library/readline.rst
+++ b/Doc/library/readline.rst
@@ -199,7 +199,7 @@ normally be executed automatically during interactive sessions from the user's
histfile = os.path.join(os.path.expanduser("~"), ".pyhist")
try:
readline.read_history_file(histfile)
- except IOError:
+ except FileNotFoundError:
pass
import atexit
atexit.register(readline.write_history_file, histfile)
@@ -224,7 +224,7 @@ support history save/restore. ::
if hasattr(readline, "read_history_file"):
try:
readline.read_history_file(histfile)
- except IOError:
+ except FileNotFoundError:
pass
atexit.register(self.save_history, histfile)
diff --git a/Doc/library/resource.rst b/Doc/library/resource.rst
index c16b013..03a7cb5 100644
--- a/Doc/library/resource.rst
+++ b/Doc/library/resource.rst
@@ -14,13 +14,15 @@ resources utilized by a program.
Symbolic constants are used to specify particular system resources and to
request usage information about either the current process or its children.
-A single exception is defined for errors:
+An :exc:`OSError` is raised on syscall failure.
.. exception:: error
- The functions described below may raise this error if the underlying system call
- failures unexpectedly.
+ A deprecated alias of :exc:`OSError`.
+
+ .. versionchanged:: 3.3
+ Following :pep:`3151`, this class was made an alias of :exc:`OSError`.
Resource Limits
diff --git a/Doc/library/sched.rst b/Doc/library/sched.rst
index 000dba0..d6c86c7 100644
--- a/Doc/library/sched.rst
+++ b/Doc/library/sched.rst
@@ -14,7 +14,7 @@
The :mod:`sched` module defines a class which implements a general purpose event
scheduler:
-.. class:: scheduler(timefunc, delayfunc)
+.. class:: scheduler(timefunc=time.time, delayfunc=time.sleep)
The :class:`scheduler` class defines a generic interface to scheduling events.
It needs two functions to actually deal with the "outside world" --- *timefunc*
@@ -25,6 +25,12 @@ scheduler:
event is run to allow other threads an opportunity to run in multi-threaded
applications.
+ .. versionchanged:: 3.3
+ *timefunc* and *delayfunc* parameters are optional.
+
+ .. versionchanged:: 3.3
+ The :class:`scheduler` class can be safely used in multi-threaded
+ environments.
+
Example::
>>> import sched, time
@@ -44,33 +50,6 @@ Example::
From print_time 930343700.273
930343700.276
-In multi-threaded environments, the :class:`scheduler` class has limitations
-with respect to thread-safety, inability to insert a new task before
-the one currently pending in a running scheduler, and holding up the main
-thread until the event queue is empty. Instead, the preferred approach
-is to use the :class:`threading.Timer` class instead.
-
-Example::
-
- >>> import time
- >>> from threading import Timer
- >>> def print_time():
- ... print("From print_time", time.time())
- ...
- >>> def print_some_times():
- ... print(time.time())
- ... Timer(5, print_time, ()).start()
- ... Timer(10, print_time, ()).start()
- ... time.sleep(11) # sleep while time-delay events execute
- ... print(time.time())
- ...
- >>> print_some_times()
- 930343690.257
- From print_time 930343695.274
- From print_time 930343700.273
- 930343701.301
-
-
.. _scheduler-objects:
Scheduler Objects
@@ -79,26 +58,38 @@ Scheduler Objects
:class:`scheduler` instances have the following methods and attributes:
-.. method:: scheduler.enterabs(time, priority, action, argument)
+.. method:: scheduler.enterabs(time, priority, action, argument=[], kwargs={})
Schedule a new event. The *time* argument should be a numeric type compatible
with the return value of the *timefunc* function passed to the constructor.
Events scheduled for the same *time* will be executed in the order of their
*priority*.
- Executing the event means executing ``action(*argument)``. *argument* must be a
- sequence holding the parameters for *action*.
+ Executing the event means executing ``action(*argument, **kwargs)``.
+ *argument* must be a sequence holding the parameters for *action*.
+ *kwargs* must be a dictionary holding the keyword parameters for *action*.
Return value is an event which may be used for later cancellation of the event
(see :meth:`cancel`).
+ .. versionchanged:: 3.3
+ *argument* parameter is optional.
-.. method:: scheduler.enter(delay, priority, action, argument)
+ .. versionadded:: 3.3
+ *kwargs* parameter was added.
+
+
+.. method:: scheduler.enter(delay, priority, action, argument=[], kwargs={})
Schedule an event for *delay* more time units. Other than the relative time, the
other arguments, the effect and the return value are the same as those for
:meth:`enterabs`.
+ .. versionchanged:: 3.3
+ *argument* parameter is optional.
+
+ .. versionadded:: 3.3
+ *kwargs* parameter was added.
.. method:: scheduler.cancel(event)
@@ -111,12 +102,16 @@ Scheduler Objects
Return true if the event queue is empty.
-.. method:: scheduler.run()
+.. method:: scheduler.run(blocking=True)
- Run all scheduled events. This function will wait (using the :func:`delayfunc`
+ Run all scheduled events. This method will wait (using the :func:`delayfunc`
function passed to the constructor) for the next event, then execute it and so
on until there are no more scheduled events.
+ If *blocking* is false, this executes the scheduled events due to expire
+ soonest (if any) and then returns the deadline of the next scheduled call
+ in the scheduler (if any).
+
Either *action* or *delayfunc* can raise an exception. In either case, the
scheduler will maintain a consistent state and propagate the exception. If an
exception is raised by *action*, the event will not be attempted in future calls
@@ -127,6 +122,9 @@ Scheduler Objects
the calling code is responsible for canceling events which are no longer
pertinent.
+ .. versionadded:: 3.3
+ *blocking* parameter was added.
+
.. attribute:: scheduler.queue
Read-only attribute returning a list of upcoming events in the order they
diff --git a/Doc/library/select.rst b/Doc/library/select.rst
index f1fd126..d9d802b 100644
--- a/Doc/library/select.rst
+++ b/Doc/library/select.rst
@@ -6,7 +6,8 @@
This module provides access to the :c:func:`select` and :c:func:`poll` functions
-available in most operating systems, :c:func:`epoll` available on Linux 2.5+ and
+available in most operating systems, :c:func:`devpoll` available on
+Solaris and derivatives, :c:func:`epoll` available on Linux 2.5+ and
:c:func:`kqueue` available on most BSD.
Note that on Windows, it only works for sockets; on other operating systems,
it also works for other file types (in particular, on Unix, it works on pipes).
@@ -18,17 +19,39 @@ The module defines the following:
.. exception:: error
- The exception raised when an error occurs. The accompanying value is a pair
- containing the numeric error code from :c:data:`errno` and the corresponding
- string, as would be printed by the C function :c:func:`perror`.
+ A deprecated alias of :exc:`OSError`.
+ .. versionchanged:: 3.3
+ Following :pep:`3151`, this class was made an alias of :exc:`OSError`.
-.. function:: epoll(sizehint=-1)
- (Only supported on Linux 2.5.44 and newer.) Returns an edge polling object,
- which can be used as Edge or Level Triggered interface for I/O events; see
- section :ref:`epoll-objects` below for the methods supported by epolling
- objects.
+.. function:: devpoll()
+
+ (Only supported on Solaris and derivatives.) Returns a ``/dev/poll``
+ polling object; see section :ref:`devpoll-objects` below for the
+ methods supported by devpoll objects.
+
+ :c:func:`devpoll` objects are linked to the number of file
+ descriptors allowed at the time of instantiation. If your program
+ reduces this value, :c:func:`devpoll` will fail. If your program
+ increases this value, :c:func:`devpoll` may return an
+ incomplete list of active file descriptors.
+
+ .. versionadded:: 3.3
+
+.. function:: epoll(sizehint=-1, flags=0)
+
+ (Only supported on Linux 2.5.44 and newer.) Return an edge polling object,
+ which can be used as Edge or Level Triggered interface for I/O
+ events. *sizehint* is deprecated and completely ignored. *flags* can be set
+ to :const:`EPOLL_CLOEXEC`, which causes the epoll descriptor to be closed
+ automatically when :func:`os.execve` is called. See section
+ :ref:`epoll-objects` below for the methods supported by epolling objects.
+
+
+ .. versionchanged:: 3.3
+
+ Added the *flags* parameter.
.. function:: poll()
@@ -106,6 +129,74 @@ The module defines the following:
.. versionadded:: 3.2
+.. _devpoll-objects:
+
+``/dev/poll`` Polling Objects
+----------------------------------------------
+
+ http://developers.sun.com/solaris/articles/using_devpoll.html
+ http://developers.sun.com/solaris/articles/polling_efficient.html
+
+Solaris and derivatives have ``/dev/poll``. While :c:func:`select` is
+O(highest file descriptor) and :c:func:`poll` is O(number of file
+descriptors), ``/dev/poll`` is O(active file descriptors).
+
+``/dev/poll`` behaviour is very close to the standard :c:func:`poll`
+object.
+
+
+.. method:: devpoll.register(fd[, eventmask])
+
+ Register a file descriptor with the polling object. Future calls to the
+ :meth:`poll` method will then check whether the file descriptor has any pending
+ I/O events. *fd* can be either an integer, or an object with a :meth:`fileno`
+ method that returns an integer. File objects implement :meth:`fileno`, so they
+ can also be used as the argument.
+
+ *eventmask* is an optional bitmask describing the type of events you want to
+ check for. The constants are the same as those used with :c:func:`poll`
+ objects. The default value is a combination of the constants :const:`POLLIN`,
+ :const:`POLLPRI`, and :const:`POLLOUT`.
+
+ .. warning::
+
+ Registering a file descriptor that's already registered is not an
+ error, but the result is undefined. The appropriate action is to
+ unregister or modify it first. This is an important difference
+ compared with :c:func:`poll`.
+
+
+.. method:: devpoll.modify(fd[, eventmask])
+
+ This method does an :meth:`unregister` followed by a
+ :meth:`register`. It is (a bit) more efficient than doing the same
+ explicitly.
+
+
+.. method:: devpoll.unregister(fd)
+
+ Remove a file descriptor being tracked by a polling object. Just like the
+ :meth:`register` method, *fd* can be an integer or an object with a
+ :meth:`fileno` method that returns an integer.
+
+ Attempting to remove a file descriptor that was never registered is
+ safely ignored.
+
+
+.. method:: devpoll.poll([timeout])
+
+ Polls the set of registered file descriptors, and returns a possibly-empty list
+ containing ``(fd, event)`` 2-tuples for the descriptors that have events or
+ errors to report. *fd* is the file descriptor, and *event* is a bitmask with
+ bits set for the reported events for that descriptor --- :const:`POLLIN` for
+ waiting input, :const:`POLLOUT` to indicate that the descriptor can be written
+ to, and so forth. An empty list indicates that the call timed out and no file
+ descriptors had any events to report. If *timeout* is given, it specifies the
+ length of time in milliseconds which the system will wait for events before
+ returning. If *timeout* is omitted, -1, or :const:`None`, the call will
+ block until there is an event for this poll object.
+
+
.. _epoll-objects:
Edge and Level Trigger Polling (epoll) Objects
@@ -165,11 +256,6 @@ Edge and Level Trigger Polling (epoll) Objects
Register a fd descriptor with the epoll object.
- .. note::
-
- Registering a file descriptor that's already registered raises an
- IOError -- contrary to :ref:`poll-objects`'s register.
-
.. method:: epoll.modify(fd, eventmask)
diff --git a/Doc/library/shlex.rst b/Doc/library/shlex.rst
index 0113fb7..941e090 100644
--- a/Doc/library/shlex.rst
+++ b/Doc/library/shlex.rst
@@ -34,6 +34,40 @@ The :mod:`shlex` module defines the following functions:
passing ``None`` for *s* will read the string to split from standard
input.
+
+.. function:: quote(s)
+
+ Return a shell-escaped version of the string *s*. The returned value is a
+ string that can safely be used as one token in a shell command line, for
+ cases where you cannot use a list.
+
+ This idiom would be unsafe::
+
+ >>> filename = 'somefile; rm -rf ~'
+ >>> command = 'ls -l {}'.format(filename)
+ >>> print(command) # executed by a shell: boom!
+ ls -l somefile; rm -rf ~
+
+ :func:`quote` lets you plug the security hole::
+
+ >>> command = 'ls -l {}'.format(quote(filename))
+ >>> print(command)
+ ls -l 'somefile; rm -rf ~'
+ >>> remote_command = 'ssh home {}'.format(quote(command))
+ >>> print(remote_command)
+ ssh home 'ls -l '"'"'somefile; rm -rf ~'"'"''
+
+ The quoting is compatible with UNIX shells and with :func:`split`:
+
+ >>> remote_command = split(remote_command)
+ >>> remote_command
+ ['ssh', 'home', "ls -l 'somefile; rm -rf ~'"]
+ >>> command = split(remote_command[-1])
+ >>> command
+ ['ls', '-l', 'somefile; rm -rf ~']
+
+ .. versionadded:: 3.3
+
The :mod:`shlex` module defines the following class:
@@ -282,5 +316,4 @@ parsing rules.
* EOF is signaled with a :const:`None` value;
-* Quoted empty strings (``''``) are allowed;
-
+* Quoted empty strings (``''``) are allowed.
diff --git a/Doc/library/shutil.rst b/Doc/library/shutil.rst
index 18f6485..21ee94f 100644
--- a/Doc/library/shutil.rst
+++ b/Doc/library/shutil.rst
@@ -47,45 +47,69 @@ Directory and files operations
be copied.
-.. function:: copyfile(src, dst)
+.. function:: copyfile(src, dst[, symlinks=False])
Copy the contents (no metadata) of the file named *src* to a file named
*dst*. *dst* must be the complete target file name; look at
:func:`shutil.copy` for a copy that accepts a target directory path. If
*src* and *dst* are the same files, :exc:`Error` is raised.
- The destination location must be writable; otherwise, an :exc:`IOError` exception
+
+ The destination location must be writable; otherwise, an :exc:`OSError` exception
will be raised. If *dst* already exists, it will be replaced. Special files
such as character or block devices and pipes cannot be copied with this
function. *src* and *dst* are path names given as strings.
+ If *symlinks* is true and *src* is a symbolic link, a new symbolic link will
+ be created instead of copying the file *src* points to.
+
+ .. versionchanged:: 3.3
+ :exc:`IOError` used to be raised instead of :exc:`OSError`.
+ Added *symlinks* argument.
-.. function:: copymode(src, dst)
+
+.. function:: copymode(src, dst[, symlinks=False])
Copy the permission bits from *src* to *dst*. The file contents, owner, and
- group are unaffected. *src* and *dst* are path names given as strings.
+ group are unaffected. *src* and *dst* are path names given as strings. If
+ *symlinks* is true, *src* is a symbolic link, and the operating system
+ supports modes for symbolic links (for example, BSD-based systems), the mode
+ of the link itself will be copied.
+
+ .. versionchanged:: 3.3
+ Added *symlinks* argument.
-.. function:: copystat(src, dst)
+.. function:: copystat(src, dst[, symlinks=False])
Copy the permission bits, last access time, last modification time, and flags
from *src* to *dst*. The file contents, owner, and group are unaffected. *src*
- and *dst* are path names given as strings.
+ and *dst* are path names given as strings. If *src* and *dst* are both
+ symbolic links and *symlinks* is true, the stats of the link itself will be
+ copied as far as the platform allows.
+
+ .. versionchanged:: 3.3
+ Added *symlinks* argument.
-.. function:: copy(src, dst)
+.. function:: copy(src, dst[, symlinks=False])
Copy the file *src* to the file or directory *dst*. If *dst* is a directory, a
file with the same basename as *src* is created (or overwritten) in the
directory specified. Permission bits are copied. *src* and *dst* are path
- names given as strings.
+ names given as strings. If *symlinks* is true, symbolic links won't be
+ followed but recreated instead -- this resembles GNU's :program:`cp -P`.
+
+ .. versionchanged:: 3.3
+ Added *symlinks* argument.
-.. function:: copy2(src, dst)
+.. function:: copy2(src, dst[, symlinks=False])
Similar to :func:`shutil.copy`, but metadata is copied as well -- in fact,
this is just :func:`shutil.copy` followed by :func:`copystat`. This is
- similar to the Unix command :program:`cp -p`.
+ similar to the Unix command :program:`cp -p`. If *symlinks* is true,
+ symbolic links won't be followed but recreated instead -- this resembles
+ GNU's :program:`cp -P`.
+
+ .. versionchanged:: 3.3
+ Added *symlinks* argument.
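+
+ As an illustrative sketch (file names hypothetical), copying a link rather
+ than its target::
+
+    import os, shutil
+
+    os.symlink('target.txt', 'link.txt')
+    shutil.copy2('link.txt', 'link_copy', symlinks=True)
+    print(os.path.islink('link_copy'))     # True: the link itself was copied
+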
.. function:: ignore_patterns(\*patterns)
@@ -103,9 +127,9 @@ Directory and files operations
:func:`shutil.copy2`.
If *symlinks* is true, symbolic links in the source tree are represented as
- symbolic links in the new tree, but the metadata of the original links is NOT
- copied; if false or omitted, the contents and metadata of the linked files
- are copied to the new tree.
+ symbolic links in the new tree and the metadata of the original links will
+ be copied as far as the platform allows; if false or omitted, the contents
+ and metadata of the linked files are copied to the new tree.
When *symlinks* is false, if the file pointed to by the symlink doesn't
exist, an exception will be added to the list of errors raised in
@@ -129,7 +153,7 @@ Directory and files operations
If *copy_function* is given, it must be a callable that will be used to copy
each file. It will be called with the source path and the destination path
as arguments. By default, :func:`shutil.copy2` is used, but any function
- that supports the same signature (like :func:`copy`) can be used.
+ that supports the same signature (like :func:`shutil.copy`) can be used.
.. versionchanged:: 3.2
Added the *copy_function* argument to be able to provide a custom copy
@@ -139,6 +163,9 @@ Directory and files operations
Added the *ignore_dangling_symlinks* argument to silence dangling symlinks
errors when *symlinks* is false.
+ .. versionchanged:: 3.3
+ Copy metadata when *symlinks* is false.
+
.. function:: rmtree(path, ignore_errors=False, onerror=None)
@@ -173,7 +200,35 @@ Directory and files operations
If the destination is on the current filesystem, then :func:`os.rename` is
used. Otherwise, *src* is copied (using :func:`shutil.copy2`) to *dst* and
- then removed.
+ then removed. If *src* is a symlink, a new symlink pointing to the target
+ of *src* will be created in or as *dst*, and *src* will be removed.
+
+ .. versionchanged:: 3.3
+ Added explicit symlink handling for foreign filesystems, thus adapting
+ it to the behavior of GNU's :program:`mv`.
+
+.. function:: disk_usage(path)
+
+ Return disk usage statistics about the given path as a :term:`named tuple`
+ with the attributes *total*, *used* and *free*, which are the amount of
+ total, used and free space, in bytes.
+
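+ For instance, a quick sketch that reports the free space on the root
+ partition as a percentage::
+
+    import shutil
+
+    usage = shutil.disk_usage('/')
+    print('%.1f%% free' % (100 * usage.free / usage.total))
+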
+ .. versionadded:: 3.3
+
+ Availability: Unix, Windows.
+
+.. function:: chown(path, user=None, group=None)
+
+ Change owner *user* and/or *group* of the given *path*.
+
+ *user* can be a system user name or a uid; the same applies to *group*. At
+ least one argument is required.
+
+ See also :func:`os.chown`, the underlying function.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
.. exception:: Error
@@ -406,3 +461,36 @@ The resulting archive contains::
-rw------- tarek/staff 1675 2008-06-09 13:26:54 ./id_rsa
-rw-r--r-- tarek/staff 397 2008-06-09 13:26:54 ./id_rsa.pub
-rw-r--r-- tarek/staff 37192 2010-02-06 18:23:10 ./known_hosts
+
+
+Querying the size of the output terminal
+----------------------------------------
+
+.. versionadded:: 3.3
+
+.. function:: get_terminal_size(fallback=(columns, lines))
+
+ Get the size of the terminal window.
+
+ For each of the two dimensions, the environment variable, ``COLUMNS``
+ and ``LINES`` respectively, is checked. If the variable is defined and
+ the value is a positive integer, it is used.
+
+ When ``COLUMNS`` or ``LINES`` is not defined, which is the common case,
+ the terminal connected to :data:`sys.__stdout__` is queried
+ by invoking :func:`os.get_terminal_size`.
+
+ If the terminal size cannot be successfully queried, either because
+ the system doesn't support querying, or because we are not
+ connected to a terminal, the value given in the ``fallback`` parameter
+ is used. ``fallback`` defaults to ``(80, 24)``, which is the default
+ size used by many terminal emulators.
+
+ The value returned is a named tuple of type :class:`os.terminal_size`.
+
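+ For example, a small sketch that draws a horizontal rule spanning the
+ full width of the terminal::
+
+    import shutil
+
+    columns, lines = shutil.get_terminal_size()
+    print('-' * columns)
+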
+ See also: The Single UNIX Specification, Version 2,
+ `Other Environment Variables`_.
+
+.. _`Other Environment Variables`:
+ http://pubs.opengroup.org/onlinepubs/7908799/xbd/envvar.html#tag_002_003
+
diff --git a/Doc/library/signal.rst b/Doc/library/signal.rst
index 698b1e7..04afd9e 100644
--- a/Doc/library/signal.rst
+++ b/Doc/library/signal.rst
@@ -13,9 +13,6 @@ rules for working with signals and their handlers:
underlying implementation), with the exception of the handler for
:const:`SIGCHLD`, which follows the underlying implementation.
-* There is no way to "block" signals temporarily from critical sections (since
- this is not supported by all Unix flavors).
-
* Although Python signal handlers are called asynchronously as far as the Python
user is concerned, they can only occur between the "atomic" instructions of the
Python interpreter. This means that signals arriving during long calculations
@@ -119,6 +116,28 @@ The variables defined in the :mod:`signal` module are:
in user and kernel space. SIGPROF is delivered upon expiration.
+.. data:: SIG_BLOCK
+
+ A possible value for the *how* parameter to :func:`pthread_sigmask`
+ indicating that signals are to be blocked.
+
+ .. versionadded:: 3.3
+
+.. data:: SIG_UNBLOCK
+
+ A possible value for the *how* parameter to :func:`pthread_sigmask`
+ indicating that signals are to be unblocked.
+
+ .. versionadded:: 3.3
+
+.. data:: SIG_SETMASK
+
+ A possible value for the *how* parameter to :func:`pthread_sigmask`
+ indicating that the signal mask is to be replaced.
+
+ .. versionadded:: 3.3
+
+
The :mod:`signal` module defines one exception:
.. exception:: ItimerError
@@ -126,7 +145,11 @@ The :mod:`signal` module defines one exception:
Raised to signal an error from the underlying :func:`setitimer` or
:func:`getitimer` implementation. Expect this error if an invalid
interval timer or a negative time is passed to :func:`setitimer`.
- This error is a subtype of :exc:`IOError`.
+ This error is a subtype of :exc:`OSError`.
+
+ .. versionchanged:: 3.3
+ This error used to be a subtype of :exc:`IOError`, which is now an
+ alias of :exc:`OSError`.
The :mod:`signal` module defines the following functions:
@@ -160,6 +183,60 @@ The :mod:`signal` module defines the following functions:
will then be called. Returns nothing. Not on Windows. (See the Unix man page
:manpage:`signal(2)`.)
+ See also :func:`sigwait`, :func:`sigwaitinfo`, :func:`sigtimedwait` and
+ :func:`sigpending`.
+
+
+.. function:: pthread_kill(thread_id, signum)
+
+ Send the signal *signum* to the thread *thread_id*, another thread in the
+ same process as the caller. The signal is asynchronously directed to the
+ thread.
+
+ Use :func:`threading.get_ident` or the :attr:`~threading.Thread.ident`
+ attribute of a :class:`threading.Thread` object to get a suitable value
+ for *thread_id*.
+
+ If *signum* is 0, then no signal is sent, but error checking is still
+ performed; this can be used to check if a thread is still running.
+
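+ For instance, a minimal sketch that checks whether a worker thread still
+ exists by sending signal 0::
+
+    import signal, threading, time
+
+    worker = threading.Thread(target=time.sleep, args=(1,))
+    worker.start()
+    signal.pthread_kill(worker.ident, 0)   # signum 0: error checking only
+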
+ Availability: Unix (see the man page :manpage:`pthread_kill(3)` for further
+ information).
+
+ See also :func:`os.kill`.
+
+ .. versionadded:: 3.3
+
+
+.. function:: pthread_sigmask(how, mask)
+
+ Fetch and/or change the signal mask of the calling thread. The signal mask
+ is the set of signals whose delivery is currently blocked for the caller.
+ Return the old signal mask as a set of signals.
+
+ The behavior of the call is dependent on the value of *how*, as follows.
+
+ * :data:`SIG_BLOCK`: The set of blocked signals is the union of the current
+ set and the *mask* argument.
+ * :data:`SIG_UNBLOCK`: The signals in *mask* are removed from the current
+ set of blocked signals. It is permissible to attempt to unblock a
+ signal which is not blocked.
+ * :data:`SIG_SETMASK`: The set of blocked signals is set to the *mask*
+ argument.
+
+ *mask* is a set of signal numbers (e.g. {:const:`signal.SIGINT`,
+ :const:`signal.SIGTERM`}). Use ``range(1, signal.NSIG)`` for a full mask
+ including all signals.
+
+ For example, ``signal.pthread_sigmask(signal.SIG_BLOCK, [])`` reads the
+ signal mask of the calling thread.
+
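+ As a further sketch, a thread can block :const:`SIGINT` around a critical
+ section and restore the previous mask afterwards::
+
+    import signal
+
+    old_mask = signal.pthread_sigmask(signal.SIG_BLOCK, {signal.SIGINT})
+    try:
+        pass    # critical section: SIGINT delivery is deferred here
+    finally:
+        signal.pthread_sigmask(signal.SIG_SETMASK, old_mask)
+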
+ Availability: Unix. See the man page :manpage:`sigprocmask(3)` and
+ :manpage:`pthread_sigmask(3)` for further information.
+
+ See also :func:`pause`, :func:`sigpending` and :func:`sigwait`.
+
+ .. versionadded:: 3.3
+
.. function:: setitimer(which, seconds[, interval])
@@ -189,13 +266,17 @@ The :mod:`signal` module defines the following functions:
.. function:: set_wakeup_fd(fd)
- Set the wakeup fd to *fd*. When a signal is received, a ``'\0'`` byte is
- written to the fd. This can be used by a library to wakeup a poll or select
- call, allowing the signal to be fully processed.
+ Set the wakeup file descriptor to *fd*. When a signal is received, the
+ signal number is written as a single byte into the fd. This can be used by
+ a library to wakeup a poll or select call, allowing the signal to be fully
+ processed.
The old wakeup fd is returned. *fd* must be non-blocking. It is up to the
library to remove any bytes before calling poll or select again.
+ Use for example ``struct.unpack('%uB' % len(data), data)`` to decode the
+ list of signal numbers.
+
When threads are enabled, this function can only be called from the main thread;
attempting to call it from other threads will cause a :exc:`ValueError`
exception to be raised.
@@ -235,6 +316,73 @@ The :mod:`signal` module defines the following functions:
:const:`SIGTERM`. A :exc:`ValueError` will be raised in any other case.
+.. function:: sigpending()
+
+ Examine the set of signals that are pending for delivery to the calling
+ thread (i.e., the signals which have been raised while blocked). Return the
+ set of the pending signals.
+
+ Availability: Unix (see the man page :manpage:`sigpending(2)` for further
+ information).
+
+ See also :func:`pause`, :func:`pthread_sigmask` and :func:`sigwait`.
+
+ .. versionadded:: 3.3
+
+
+.. function:: sigwait(sigset)
+
+ Suspend execution of the calling thread until the delivery of one of the
+ signals specified in the signal set *sigset*. The function accepts the signal
+ (removes it from the pending list of signals), and returns the signal number.
+
+ Availability: Unix (see the man page :manpage:`sigwait(3)` for further
+ information).
+
+ See also :func:`pause`, :func:`pthread_sigmask`, :func:`sigpending`,
+ :func:`sigwaitinfo` and :func:`sigtimedwait`.
+
+ .. versionadded:: 3.3
+
+
+.. function:: sigwaitinfo(sigset)
+
+ Suspend execution of the calling thread until the delivery of one of the
+ signals specified in the signal set *sigset*. The function accepts the
+ signal and removes it from the pending list of signals. If one of the
+ signals in *sigset* is already pending for the calling thread, the function
+ will return immediately with information about that signal. The signal
+ handler is not called for the delivered signal. The function raises an
+ :exc:`InterruptedError` if it is interrupted by a signal that is not in
+ *sigset*.
+
+ The return value is an object representing the data contained in the
+ :c:type:`siginfo_t` structure, namely: :attr:`si_signo`, :attr:`si_code`,
+ :attr:`si_errno`, :attr:`si_pid`, :attr:`si_uid`, :attr:`si_status`,
+ :attr:`si_band`.
+
+ Availability: Unix (see the man page :manpage:`sigwaitinfo(2)` for further
+ information).
+
+ See also :func:`pause`, :func:`sigwait` and :func:`sigtimedwait`.
+
+ .. versionadded:: 3.3
+
+
+.. function:: sigtimedwait(sigset, timeout)
+
+ Like :func:`sigwaitinfo`, but takes an additional *timeout* argument
+ specifying a timeout. If *timeout* is specified as :const:`0`, a poll is
+ performed. Returns :const:`None` if a timeout occurs.
+
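+ A minimal sketch, assuming :const:`SIGALRM` is blocked first so that it
+ stays pending rather than terminating the process::
+
+    import signal
+
+    signal.pthread_sigmask(signal.SIG_BLOCK, {signal.SIGALRM})
+    signal.alarm(2)                                  # SIGALRM in two seconds
+    info = signal.sigtimedwait({signal.SIGALRM}, 1)  # give up after one second
+    print('timed out' if info is None else info.si_signo)
+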
+ Availability: Unix (see the man page :manpage:`sigtimedwait(2)` for further
+ information).
+
+ See also :func:`pause`, :func:`sigwait` and :func:`sigwaitinfo`.
+
+ .. versionadded:: 3.3
+
+
.. _signal-example:
Example
@@ -251,7 +399,7 @@ be sent, and the handler raises an exception. ::
def handler(signum, frame):
print('Signal handler called with signal', signum)
- raise IOError("Couldn't open device!")
+ raise OSError("Couldn't open device!")
# Set the signal handler and a 5-second alarm
signal.signal(signal.SIGALRM, handler)
diff --git a/Doc/library/site.rst b/Doc/library/site.rst
index db96add..b987897 100644
--- a/Doc/library/site.rst
+++ b/Doc/library/site.rst
@@ -16,7 +16,14 @@ import can be suppressed using the interpreter's :option:`-S` option.
.. index:: triple: module; search; path
Importing this module will append site-specific paths to the module search path
-and add a few builtins.
+and add a few builtins, unless :option:`-S` was used. In that case, this module
+can be safely imported with no automatic modifications to the module search path
+or additions to the builtins. To explicitly trigger the usual site-specific
+additions, call the :func:`site.main` function.
+
+.. versionchanged:: 3.3
+ Importing the module used to trigger path manipulation even when using
+ :option:`-S`.
.. index::
pair: site-python; directory
@@ -127,10 +134,21 @@ empty, and the path manipulations are skipped; however the import of
:func:`getuserbase` hasn't been called yet. Default value is
:file:`~/.local` for UNIX and Mac OS X non-framework builds,
:file:`~/Library/Python/{X.Y}` for Mac framework builds, and
- :file:`{%APPDATA%}\\Python` for Windows. This value is used by Distutils to
+ :file:`{%APPDATA%}\\Python` for Windows. This value is used by Packaging to
compute the installation directories for scripts, data files, Python modules,
- etc. for the :ref:`user installation scheme <inst-alt-install-user>`. See
- also :envvar:`PYTHONUSERBASE`.
+ etc. for the :ref:`user installation scheme <packaging-alt-install-user>`.
+ See also :envvar:`PYTHONUSERBASE`.
+
+
+.. function:: main()
+
+ Adds all the standard site-specific directories to the module search
+ path. This function is called automatically when this module is imported,
+ unless the :program:`python` interpreter was started with the :option:`-S`
+ flag.
+
+ .. versionchanged:: 3.3
+ This function used to be called unconditionally.
.. function:: addsitedir(sitedir, known_paths=None)
diff --git a/Doc/library/smtplib.rst b/Doc/library/smtplib.rst
index 3101ab7..45c5d6d 100644
--- a/Doc/library/smtplib.rst
+++ b/Doc/library/smtplib.rst
@@ -20,7 +20,7 @@ details of SMTP and ESMTP operation, consult :rfc:`821` (Simple Mail Transfer
Protocol) and :rfc:`1869` (SMTP Service Extensions).
-.. class:: SMTP(host='', port=0, local_hostname=None[, timeout])
+.. class:: SMTP(host='', port=0, local_hostname=None[, timeout], source_address=None)
A :class:`SMTP` instance encapsulates an SMTP connection. It has methods
that support a full repertoire of SMTP and ESMTP operations. If the optional
@@ -29,13 +29,34 @@ Protocol) and :rfc:`1869` (SMTP Service Extensions).
raised if the specified host doesn't respond correctly. The optional
*timeout* parameter specifies a timeout in seconds for blocking operations
like the connection attempt (if not specified, the global default timeout
- setting will be used).
+ setting will be used). The optional *source_address* parameter allows
+ binding to a specific source address on a machine with multiple network
+ interfaces, and/or to a specific source TCP port. It takes a 2-tuple
+ ``(host, port)`` for the socket to bind to as its source address before
+ connecting. If omitted (or if *host* or *port* are ``''`` and/or ``0``
+ respectively), the OS default behavior will be used.
For normal use, you should only require the initialization/connect,
:meth:`sendmail`, and :meth:`quit` methods. An example is included below.
+ The :class:`SMTP` class supports the :keyword:`with` statement. When used
+ like this, the SMTP ``QUIT`` command is issued automatically when the
+ :keyword:`with` statement exits. E.g.::
-.. class:: SMTP_SSL(host='', port=0, local_hostname=None, keyfile=None, certfile=None[, timeout])
+ >>> from smtplib import SMTP
+ >>> with SMTP("domain.org") as smtp:
+ ... smtp.noop()
+ ...
+ (250, b'Ok')
+ >>>
+
+ .. versionchanged:: 3.3
+ Support for the :keyword:`with` statement was added.
+
+ .. versionchanged:: 3.3
+ The *source_address* argument was added.
+
+.. class:: SMTP_SSL(host='', port=0, local_hostname=None, keyfile=None, certfile=None[, timeout], context=None, source_address=None)
A :class:`SMTP_SSL` instance behaves exactly the same as instances of
:class:`SMTP`. :class:`SMTP_SSL` should be used for situations where SSL is
@@ -43,18 +64,33 @@ Protocol) and :rfc:`1869` (SMTP Service Extensions).
not appropriate. If *host* is not specified, the local host is used. If
*port* is zero, the standard SMTP-over-SSL port (465) is used. *keyfile*
and *certfile* are also optional, and can contain a PEM formatted private key
- and certificate chain file for the SSL connection. The optional *timeout*
+ and certificate chain file for the SSL connection. *context*, also
+ optional, can contain an :class:`~ssl.SSLContext`, and is an alternative to
+ *keyfile* and *certfile*; if it is specified, both *keyfile* and *certfile*
+ must be ``None``. The optional *timeout*
parameter specifies a timeout in seconds for blocking operations like the
connection attempt (if not specified, the global default timeout setting
- will be used).
+ will be used). The optional *source_address* parameter allows binding to a
+ specific source address on a machine with multiple network interfaces,
+ and/or to a specific source TCP port. It takes a 2-tuple ``(host, port)``
+ for the socket to bind to as its source address before connecting. If
+ omitted (or if *host* or *port* are ``''`` and/or ``0`` respectively), the
+ OS default behavior will be used.
+
+ .. versionchanged:: 3.3
+ *context* was added.
+
+ .. versionchanged:: 3.3
+ The *source_address* argument was added.
-.. class:: LMTP(host='', port=LMTP_PORT, local_hostname=None)
+
+.. class:: LMTP(host='', port=LMTP_PORT, local_hostname=None, source_address=None)
The LMTP protocol, which is very similar to ESMTP, is heavily based on the
- standard SMTP client. It's common to use Unix sockets for LMTP, so our :meth:`connect`
- method must support that as well as a regular host:port server. To specify a
- Unix socket, you must use an absolute path for *host*, starting with a '/'.
+ standard SMTP client. It's common to use Unix sockets for LMTP, so our
+ :meth:`connect` method must support that as well as a regular host:port
+ server. The optional arguments *local_hostname* and *source_address* have
+ the same meaning as they do in the :class:`SMTP` class. To specify a Unix
+ socket, you must use an absolute path for *host*, starting with a ``'/'``.
Authentication is supported, using the regular SMTP mechanism. When using a Unix
socket, LMTP servers generally don't support or require any authentication, but your
@@ -242,7 +278,7 @@ An :class:`SMTP` instance has the following methods:
No suitable authentication method was found.
-.. method:: SMTP.starttls(keyfile=None, certfile=None)
+.. method:: SMTP.starttls(keyfile=None, certfile=None, context=None)
Put the SMTP connection in TLS (Transport Layer Security) mode. All SMTP
commands that follow will be encrypted. You should then call :meth:`ehlo`
@@ -251,6 +287,9 @@ An :class:`SMTP` instance has the following methods:
If *keyfile* and *certfile* are provided, these are passed to the :mod:`socket`
module's :func:`ssl` function.
+ The optional *context* parameter is a :class:`ssl.SSLContext` object; this
+ is an alternative to using a keyfile and a certfile, and if it is specified,
+ both *keyfile* and *certfile* should be ``None``.
+
If there has been no previous ``EHLO`` or ``HELO`` command this session,
this method tries ESMTP ``EHLO`` first.
@@ -263,6 +302,9 @@ An :class:`SMTP` instance has the following methods:
:exc:`RuntimeError`
SSL/TLS support is not available to your Python interpreter.
+ .. versionchanged:: 3.3
+ *context* was added.
+
.. method:: SMTP.sendmail(from_addr, to_addrs, msg, mail_options=[], rcpt_options=[])
diff --git a/Doc/library/socket.rst b/Doc/library/socket.rst
index f236d30..69fa378 100644
--- a/Doc/library/socket.rst
+++ b/Doc/library/socket.rst
@@ -40,9 +40,23 @@ Socket families
Depending on the system and the build options, various socket families
are supported by this module.
-Socket addresses are represented as follows:
-
-- A single string is used for the :const:`AF_UNIX` address family.
+The address format required by a particular socket object is automatically
+selected based on the address family specified when the socket object was
+created. Socket addresses are represented as follows:
+
+- The address of an :const:`AF_UNIX` socket bound to a file system node
+ is represented as a string, using the file system encoding and the
+ ``'surrogateescape'`` error handler (see :pep:`383`). An address in
+ Linux's abstract namespace is returned as a :class:`bytes` object with
+ an initial null byte; note that sockets in this namespace can
+ communicate with normal file system sockets, so programs intended to
+ run on Linux may need to deal with both types of address. A string or
+ :class:`bytes` object can be used for either type of address when
+ passing it as an argument.
+
+ .. versionchanged:: 3.3
+ Previously, :const:`AF_UNIX` socket paths were assumed to use UTF-8
+ encoding.
- A pair ``(host, port)`` is used for the :const:`AF_INET` address family,
where *host* is a string representing either a hostname in Internet domain
@@ -80,6 +94,19 @@ Socket addresses are represented as follows:
If *addr_type* is :const:`TIPC_ADDR_ID`, then *v1* is the node, *v2* is the
reference, and *v3* should be set to 0.
+- A tuple ``(interface, )`` is used for the :const:`AF_CAN` address family,
+ where *interface* is a string representing a network interface name like
+ ``'can0'``. The network interface name ``''`` can be used to receive packets
+ from all network interfaces of this family.
+
+- A string or a tuple ``(id, unit)`` is used for the :const:`SYSPROTO_CONTROL`
+ protocol of the :const:`PF_SYSTEM` family. The string is the name of a
+ kernel control using a dynamically-assigned ID. The tuple can be used if
+ the ID and unit number of the kernel control are known or if a registered
+ ID is used.
+
+ .. versionadded:: 3.3
+
- Certain other address families (:const:`AF_BLUETOOTH`, :const:`AF_PACKET`)
support specific representations.
@@ -99,8 +126,9 @@ resolution and/or the host configuration. For deterministic behavior use a
numeric address in *host* portion.
All errors raise exceptions. The normal exceptions for invalid argument types
-and out-of-memory conditions can be raised; errors related to socket or address
-semantics raise :exc:`socket.error` or one of its subclasses.
+and out-of-memory conditions can be raised; starting from Python 3.3, errors
+related to socket or address semantics raise :exc:`OSError` or one of its
+subclasses (they used to raise :exc:`socket.error`).
Non-blocking mode is supported through :meth:`~socket.setblocking`. A
generalization of this based on timeouts is supported through
@@ -115,20 +143,15 @@ The module :mod:`socket` exports the following constants and functions:
.. exception:: error
- .. index:: module: errno
-
- A subclass of :exc:`IOError`, this exception is raised for socket-related
- errors. It is recommended that you inspect its ``errno`` attribute to
- discriminate between different kinds of errors.
+ A deprecated alias of :exc:`OSError`.
- .. seealso::
- The :mod:`errno` module contains symbolic names for the error codes
- defined by the underlying operating system.
+ .. versionchanged:: 3.3
+ Following :pep:`3151`, this class was made an alias of :exc:`OSError`.
.. exception:: herror
- A subclass of :exc:`socket.error`, this exception is raised for
+ A subclass of :exc:`OSError`, this exception is raised for
address-related errors, i.e. for functions that use *h_errno* in the POSIX
C API, including :func:`gethostbyname_ex` and :func:`gethostbyaddr`.
The accompanying value is a pair ``(h_errno, string)`` representing an
@@ -136,10 +159,12 @@ The module :mod:`socket` exports the following constants and functions:
*string* represents the description of *h_errno*, as returned by the
:c:func:`hstrerror` C function.
+ .. versionchanged:: 3.3
+ This class was made a subclass of :exc:`OSError`.
.. exception:: gaierror
- A subclass of :exc:`socket.error`, this exception is raised for
+ A subclass of :exc:`OSError`, this exception is raised for
address-related errors by :func:`getaddrinfo` and :func:`getnameinfo`.
The accompanying value is a pair ``(error, string)`` representing an error
returned by a library call. *string* represents the description of
@@ -147,15 +172,19 @@ The module :mod:`socket` exports the following constants and functions:
numeric *error* value will match one of the :const:`EAI_\*` constants
defined in this module.
+ .. versionchanged:: 3.3
+ This class was made a subclass of :exc:`OSError`.
.. exception:: timeout
- A subclass of :exc:`socket.error`, this exception is raised when a timeout
+ A subclass of :exc:`OSError`, this exception is raised when a timeout
occurs on a socket which has had timeouts enabled via a prior call to
:meth:`~socket.settimeout` (or implicitly through
:func:`~socket.setdefaulttimeout`). The accompanying value is a string
whose value is currently always "timed out".
+ .. versionchanged:: 3.3
+ This class was made a subclass of :exc:`OSError`.
.. data:: AF_UNIX
AF_INET
@@ -198,6 +227,7 @@ The module :mod:`socket` exports the following constants and functions:
SOMAXCONN
MSG_*
SOL_*
+ SCM_*
IPPROTO_*
IPPORT_*
INADDR_*
@@ -215,6 +245,32 @@ The module :mod:`socket` exports the following constants and functions:
in the Unix header files are defined; for a few symbols, default values are
provided.
+.. data:: AF_CAN
+ PF_CAN
+ SOL_CAN_*
+ CAN_*
+
+ Many constants of these forms, documented in the Linux documentation, are
+ also defined in the socket module.
+
+ Availability: Linux >= 2.6.25.
+
+ .. versionadded:: 3.3
+
+
+.. data:: AF_RDS
+ PF_RDS
+ SOL_RDS
+ RDS_*
+
+ Many constants of these forms, documented in the Linux documentation, are
+ also defined in the socket module.
+
+ Availability: Linux >= 2.6.30.
+
+ .. versionadded:: 3.3
+
+
.. data:: SIO_*
RCVALL_*
@@ -393,10 +449,15 @@ The module :mod:`socket` exports the following constants and functions:
Create a new socket using the given address family, socket type and protocol
number. The address family should be :const:`AF_INET` (the default),
- :const:`AF_INET6` or :const:`AF_UNIX`. The socket type should be
- :const:`SOCK_STREAM` (the default), :const:`SOCK_DGRAM` or perhaps one of the
- other ``SOCK_`` constants. The protocol number is usually zero and may be
- omitted in that case.
+ :const:`AF_INET6`, :const:`AF_UNIX`, :const:`AF_CAN` or :const:`AF_RDS`. The
+ socket type should be :const:`SOCK_STREAM` (the default),
+ :const:`SOCK_DGRAM`, :const:`SOCK_RAW` or perhaps one of the other ``SOCK_``
+ constants. The protocol number is usually zero and may be omitted in that
+ case, or it should be :const:`CAN_RAW` when the address family is
+ :const:`AF_CAN`.
+
+ .. versionchanged:: 3.3
+ The AF_CAN and AF_RDS families were added.
.. function:: socketpair([family[, type[, proto]]])
@@ -464,7 +525,7 @@ The module :mod:`socket` exports the following constants and functions:
Unix manual page :manpage:`inet(3)` for details.
If the IPv4 address string passed to this function is invalid,
- :exc:`socket.error` will be raised. Note that exactly what is valid depends on
+ :exc:`OSError` will be raised. Note that exactly what is valid depends on
the underlying C implementation of :c:func:`inet_aton`.
:func:`inet_aton` does not support IPv6, and :func:`inet_pton` should be used
@@ -481,7 +542,7 @@ The module :mod:`socket` exports the following constants and functions:
argument.
If the byte sequence passed to this function is not exactly 4 bytes in
- length, :exc:`socket.error` will be raised. :func:`inet_ntoa` does not
+ length, :exc:`OSError` will be raised. :func:`inet_ntoa` does not
support IPv6, and :func:`inet_ntop` should be used instead for IPv4/v6 dual
stack support.
@@ -495,7 +556,7 @@ The module :mod:`socket` exports the following constants and functions:
Supported values for *address_family* are currently :const:`AF_INET` and
:const:`AF_INET6`. If the IP address string *ip_string* is invalid,
- :exc:`socket.error` will be raised. Note that exactly what is valid depends on
+ :exc:`OSError` will be raised. Note that exactly what is valid depends on
both the value of *address_family* and the underlying implementation of
:c:func:`inet_pton`.
@@ -513,11 +574,54 @@ The module :mod:`socket` exports the following constants and functions:
Supported values for *address_family* are currently :const:`AF_INET` and
:const:`AF_INET6`. If the string *packed_ip* is not the correct length for the
specified address family, :exc:`ValueError` will be raised. A
- :exc:`socket.error` is raised for errors from the call to :func:`inet_ntop`.
+ :exc:`OSError` is raised for errors from the call to :func:`inet_ntop`.
Availability: Unix (maybe not all platforms).
+..
+ XXX: Are sendmsg(), recvmsg() and CMSG_*() available on any
+ non-Unix platforms? The old (obsolete?) 4.2BSD form of the
+ interface, in which struct msghdr has no msg_control or
+ msg_controllen members, is not currently supported.
+
+.. function:: CMSG_LEN(length)
+
+ Return the total length, without trailing padding, of an ancillary
+ data item with associated data of the given *length*. This value
+ can often be used as the buffer size for :meth:`~socket.recvmsg` to
+ receive a single item of ancillary data, but :rfc:`3542` requires
+ portable applications to use :func:`CMSG_SPACE` and thus include
+ space for padding, even when the item will be the last in the
+ buffer. Raises :exc:`OverflowError` if *length* is outside the
+ permissible range of values.
+
+ Availability: most Unix platforms, possibly others.
+
+ .. versionadded:: 3.3
+
+
+.. function:: CMSG_SPACE(length)
+
+ Return the buffer size needed for :meth:`~socket.recvmsg` to
+ receive an ancillary data item with associated data of the given
+ *length*, along with any trailing padding. The buffer space needed
+ to receive multiple items is the sum of the :func:`CMSG_SPACE`
+ values for their associated data lengths. Raises
+ :exc:`OverflowError` if *length* is outside the permissible range
+ of values.
+
+ Note that some systems might support ancillary data without
+ providing this function. Also note that setting the buffer size
+ using the results of this function may not precisely limit the
+ amount of ancillary data that can be received, since additional
+ data may be able to fit into the padding area.
+
+ Availability: most Unix platforms, possibly others.
+
+ .. versionadded:: 3.3
+
+
.. function:: getdefaulttimeout()
Return the default timeout in seconds (float) for new socket objects. A value
@@ -533,6 +637,49 @@ The module :mod:`socket` exports the following constants and functions:
meanings.
+.. function:: sethostname(name)
+
+ Set the machine's hostname to *name*. This will raise an
+ :exc:`OSError` if you don't have enough rights.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
+
+
+.. function:: if_nameindex()
+
+ Return a list of network interface information ``(index, name)`` tuples,
+ where *index* is an integer and *name* a string.
+ Raises :exc:`OSError` if the system call fails.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
+
+
+.. function:: if_nametoindex(if_name)
+
+ Return a network interface index number corresponding to an
+ interface name.
+ Raises :exc:`OSError` if no interface with the given name exists.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
+
+
+.. function:: if_indextoname(if_index)
+
+ Return a network interface name corresponding to an
+ interface index number.
+ Raises :exc:`OSError` if no interface with the given index exists.
+
+ Availability: Unix.
+
+ .. versionadded:: 3.3
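+
+ Taken together, a small consistency sketch for these three functions::
+
+    import socket
+
+    for index, name in socket.if_nameindex():
+        assert socket.if_nametoindex(name) == index
+        assert socket.if_indextoname(index) == name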
+
+
.. data:: SocketType
This is a Python type object that represents the socket object type. It is the
@@ -706,6 +853,109 @@ correspond to Unix system calls applicable to sockets.
to zero. (The format of *address* depends on the address family --- see above.)
+.. method:: socket.recvmsg(bufsize[, ancbufsize[, flags]])
+
+ Receive normal data (up to *bufsize* bytes) and ancillary data from
+ the socket. The *ancbufsize* argument sets the size in bytes of
+ the internal buffer used to receive the ancillary data; it defaults
+ to 0, meaning that no ancillary data will be received. Appropriate
+ buffer sizes for ancillary data can be calculated using
+ :func:`CMSG_SPACE` or :func:`CMSG_LEN`, and items which do not fit
+ into the buffer might be truncated or discarded. The *flags*
+ argument defaults to 0 and has the same meaning as for
+ :meth:`recv`.
+
+ The return value is a 4-tuple: ``(data, ancdata, msg_flags,
+ address)``. The *data* item is a :class:`bytes` object holding the
+ non-ancillary data received. The *ancdata* item is a list of zero
+ or more tuples ``(cmsg_level, cmsg_type, cmsg_data)`` representing
+ the ancillary data (control messages) received: *cmsg_level* and
+ *cmsg_type* are integers specifying the protocol level and
+ protocol-specific type respectively, and *cmsg_data* is a
+ :class:`bytes` object holding the associated data. The *msg_flags*
+ item is the bitwise OR of various flags indicating conditions on
+ the received message; see your system documentation for details.
+ If the receiving socket is unconnected, *address* is the address of
+ the sending socket, if available; otherwise, its value is
+ unspecified.
+
+ On some systems, :meth:`sendmsg` and :meth:`recvmsg` can be used to
+ pass file descriptors between processes over an :const:`AF_UNIX`
+ socket. When this facility is used (it is often restricted to
+ :const:`SOCK_STREAM` sockets), :meth:`recvmsg` will return, in its
+ ancillary data, items of the form ``(socket.SOL_SOCKET,
+ socket.SCM_RIGHTS, fds)``, where *fds* is a :class:`bytes` object
+ representing the new file descriptors as a binary array of the
+ native C :c:type:`int` type. If :meth:`recvmsg` raises an
+ exception after the system call returns, it will first attempt to
+ close any file descriptors received via this mechanism.
+
+ Some systems do not indicate the truncated length of ancillary data
+ items which have been only partially received. If an item appears
+ to extend beyond the end of the buffer, :meth:`recvmsg` will issue
+ a :exc:`RuntimeWarning`, and will return the part of it which is
+ inside the buffer provided it has not been truncated before the
+ start of its associated data.
+
+ On systems which support the :const:`SCM_RIGHTS` mechanism, the
+ following function will receive up to *maxfds* file descriptors,
+ returning the message data and a list containing the descriptors
+ (while ignoring unexpected conditions such as unrelated control
+ messages being received). See also :meth:`sendmsg`. ::
+
+ import socket, array
+
+ def recv_fds(sock, msglen, maxfds):
+ fds = array.array("i") # Array of ints
+ msg, ancdata, flags, addr = sock.recvmsg(msglen, socket.CMSG_LEN(maxfds * fds.itemsize))
+ for cmsg_level, cmsg_type, cmsg_data in ancdata:
+ if (cmsg_level == socket.SOL_SOCKET and cmsg_type == socket.SCM_RIGHTS):
+ # Append data, ignoring any truncated integers at the end.
+ fds.fromstring(cmsg_data[:len(cmsg_data) - (len(cmsg_data) % fds.itemsize)])
+ return msg, list(fds)
+
+ Availability: most Unix platforms, possibly others.
+
+ .. versionadded:: 3.3
+
+
+.. method:: socket.recvmsg_into(buffers[, ancbufsize[, flags]])
+
+ Receive normal data and ancillary data from the socket, behaving as
+ :meth:`recvmsg` would, but scatter the non-ancillary data into a
+ series of buffers instead of returning a new bytes object. The
+ *buffers* argument must be an iterable of objects that export
+ writable buffers (e.g. :class:`bytearray` objects); these will be
+ filled with successive chunks of the non-ancillary data until it
+ has all been written or there are no more buffers. The operating
+ system may set a limit (:func:`~os.sysconf` value ``SC_IOV_MAX``)
+ on the number of buffers that can be used. The *ancbufsize* and
+ *flags* arguments have the same meaning as for :meth:`recvmsg`.
+
+ The return value is a 4-tuple: ``(nbytes, ancdata, msg_flags,
+ address)``, where *nbytes* is the total number of bytes of
+ non-ancillary data written into the buffers, and *ancdata*,
+ *msg_flags* and *address* are the same as for :meth:`recvmsg`.
+
+ Example::
+
+ >>> import socket
+ >>> s1, s2 = socket.socketpair()
+ >>> b1 = bytearray(b'----')
+ >>> b2 = bytearray(b'0123456789')
+ >>> b3 = bytearray(b'--------------')
+ >>> s1.send(b'Mary had a little lamb')
+ 22
+ >>> s2.recvmsg_into([b1, memoryview(b2)[2:9], b3])
+ (22, [], 0, None)
+ >>> [b1, b2, b3]
+ [bytearray(b'Mary'), bytearray(b'01 had a 9'), bytearray(b'little lamb---')]
+
+ Availability: most Unix platforms, possibly others.
+
+ .. versionadded:: 3.3
+
+
.. method:: socket.recvfrom_into(buffer[, nbytes[, flags]])
Receive data from the socket, writing it into *buffer* instead of creating a
@@ -754,6 +1004,41 @@ correspond to Unix system calls applicable to sockets.
above.)
+.. method:: socket.sendmsg(buffers[, ancdata[, flags[, address]]])
+
+ Send normal and ancillary data to the socket, gathering the
+ non-ancillary data from a series of buffers and concatenating it
+ into a single message. The *buffers* argument specifies the
+ non-ancillary data as an iterable of buffer-compatible objects
+ (e.g. :class:`bytes` objects); the operating system may set a limit
+ (:func:`~os.sysconf` value ``SC_IOV_MAX``) on the number of buffers
+ that can be used. The *ancdata* argument specifies the ancillary
+ data (control messages) as an iterable of zero or more tuples
+ ``(cmsg_level, cmsg_type, cmsg_data)``, where *cmsg_level* and
+ *cmsg_type* are integers specifying the protocol level and
+ protocol-specific type respectively, and *cmsg_data* is a
+ buffer-compatible object holding the associated data. Note that
+ some systems (in particular, systems without :func:`CMSG_SPACE`)
+ might support sending only one control message per call. The
+ *flags* argument defaults to 0 and has the same meaning as for
+ :meth:`send`. If *address* is supplied and not ``None``, it sets a
+ destination address for the message. The return value is the
+ number of bytes of non-ancillary data sent.
+
+ The following function sends the list of file descriptors *fds*
+ over an :const:`AF_UNIX` socket, on systems which support the
+ :const:`SCM_RIGHTS` mechanism. See also :meth:`recvmsg`. ::
+
+ import socket, array
+
+ def send_fds(sock, msg, fds):
+ return sock.sendmsg([msg], [(socket.SOL_SOCKET, socket.SCM_RIGHTS, array.array("i", fds))])
+
+ Availability: most Unix platforms, possibly others.
+
+ .. versionadded:: 3.3
+
+
.. method:: socket.setblocking(flag)
Set blocking or non-blocking mode of the socket: if *flag* is false, the
@@ -795,9 +1080,7 @@ correspond to Unix system calls applicable to sockets.
Shut down one or both halves of the connection. If *how* is :const:`SHUT_RD`,
further receives are disallowed. If *how* is :const:`SHUT_WR`, further sends
are disallowed. If *how* is :const:`SHUT_RDWR`, further sends and receives are
- disallowed. Depending on the platform, shutting down one half of the connection
- can also close the opposite half (e.g. on Mac OS X, ``shutdown(SHUT_WR)`` does
- not allow further reads on the other end of the connection).
+ disallowed.
Note that there are no methods :meth:`read` or :meth:`write`; use
:meth:`~socket.recv` and :meth:`~socket.send` without *flags* argument instead.
@@ -942,13 +1225,13 @@ sends traffic to the first one connected successfully. ::
af, socktype, proto, canonname, sa = res
try:
s = socket.socket(af, socktype, proto)
- except socket.error as msg:
+ except OSError as msg:
s = None
continue
try:
s.bind(sa)
s.listen(1)
- except socket.error as msg:
+ except OSError as msg:
s.close()
s = None
continue
@@ -977,12 +1260,12 @@ sends traffic to the first one connected successfully. ::
af, socktype, proto, canonname, sa = res
try:
s = socket.socket(af, socktype, proto)
- except socket.error as msg:
+ except OSError as msg:
s = None
continue
try:
s.connect(sa)
- except socket.error as msg:
+ except OSError as msg:
s.close()
s = None
continue
@@ -996,7 +1279,7 @@ sends traffic to the first one connected successfully. ::
print('Received', repr(data))
-The last example shows how to write a very simple network sniffer with raw
+The next example shows how to write a very simple network sniffer with raw
sockets on Windows. The example requires administrator privileges to modify
the interface::
@@ -1021,11 +1304,51 @@ the interface::
# disabled promiscuous mode
s.ioctl(socket.SIO_RCVALL, socket.RCVALL_OFF)
+The last example shows how to use the socket interface to communicate with a
+CAN network. This example might require special privileges::
+
+ import socket
+ import struct
+
+
+ # CAN frame packing/unpacking (see 'struct can_frame' in <linux/can.h>)
+
+ can_frame_fmt = "=IB3x8s"
+ can_frame_size = struct.calcsize(can_frame_fmt)
+
+ def build_can_frame(can_id, data):
+ can_dlc = len(data)
+ data = data.ljust(8, b'\x00')
+ return struct.pack(can_frame_fmt, can_id, can_dlc, data)
+
+ def dissect_can_frame(frame):
+ can_id, can_dlc, data = struct.unpack(can_frame_fmt, frame)
+ return (can_id, can_dlc, data[:can_dlc])
+
+
+ # create a raw socket and bind it to the 'vcan0' interface
+ s = socket.socket(socket.AF_CAN, socket.SOCK_RAW, socket.CAN_RAW)
+ s.bind(('vcan0',))
+
+ while True:
+ cf, addr = s.recvfrom(can_frame_size)
+
+ print('Received: can_id=%x, can_dlc=%x, data=%s' % dissect_can_frame(cf))
+
+ try:
+ s.send(cf)
+ except OSError:
+ print('Error sending CAN frame')
+
+ try:
+ s.send(build_can_frame(0x01, b'\x01\x02\x03'))
+ except OSError:
+ print('Error sending CAN frame')
Running an example several times with too small a delay between executions
could lead to this error::
- socket.error: [Errno 98] Address already in use
+ OSError: [Errno 98] Address already in use
This is because the previous execution has left the socket in a ``TIME_WAIT``
state, and can't be immediately reused.
diff --git a/Doc/library/socketserver.rst b/Doc/library/socketserver.rst
index 5287f17..7dc0cc7 100644
--- a/Doc/library/socketserver.rst
+++ b/Doc/library/socketserver.rst
@@ -154,9 +154,21 @@ Server Objects
.. method:: BaseServer.serve_forever(poll_interval=0.5)
Handle requests until an explicit :meth:`shutdown` request.
- Poll for shutdown every *poll_interval* seconds. Ignores :attr:`self.timeout`.
- If you need to do periodic tasks, do them in another thread.
+ Poll for shutdown every *poll_interval* seconds. Ignores :attr:`self.timeout`.
+ It also calls :meth:`service_actions`, which may be used by a subclass or
+ mixin to provide various cleanup actions. For example, the
+ :class:`ForkingMixIn` class uses :meth:`service_actions` to clean up zombie
+ child processes.
+
+ .. versionchanged:: 3.3
+ Added the ``service_actions`` call to the ``serve_forever`` method.
+
+
+.. method:: BaseServer.service_actions()
+
+ This is called in the :meth:`serve_forever` loop. This method can be
+ overridden by mixins to add cleanup or service-specific actions.
+
+ .. versionadded:: 3.3
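+
+ A minimal sketch of a subclass hooking into the loop (the housekeeping
+ logic is hypothetical)::
+
+    import socketserver
+
+    class HousekeepingServer(socketserver.TCPServer):
+        def service_actions(self):
+            super().service_actions()
+            print('housekeeping')   # periodic work, once per poll interval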
.. method:: BaseServer.shutdown()
diff --git a/Doc/library/sqlite3.rst b/Doc/library/sqlite3.rst
index 41db5c3..ea92032 100644
--- a/Doc/library/sqlite3.rst
+++ b/Doc/library/sqlite3.rst
@@ -369,6 +369,22 @@ Connection Objects
method with :const:`None` for *handler*.
+.. method:: Connection.set_trace_callback(trace_callback)
+
+ Registers *trace_callback* to be called for each SQL statement that is
+ actually executed by the SQLite backend.
+
+ The only argument passed to the callback is the statement (as a string) that
+ is being executed. The return value of the callback is ignored. Note that
+ the backend does not only run statements passed to the :meth:`Cursor.execute`
+ methods. Other sources include the transaction management of the Python
+ module and the execution of triggers defined in the current database.
+
+ Passing :const:`None` as *trace_callback* will disable the trace callback.
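+
+ As a minimal sketch, echoing every statement the backend executes::
+
+    import sqlite3
+
+    con = sqlite3.connect(':memory:')
+    con.set_trace_callback(print)
+    con.execute('create table lang(name)')
+    con.execute("insert into lang values ('Python')")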
+
+ .. versionadded:: 3.3
+
+
.. method:: Connection.enable_load_extension(enabled)
This routine allows/disallows the SQLite engine to load SQLite extensions
@@ -420,10 +436,6 @@ Connection Objects
:mod:`sqlite3` module will return Unicode objects for ``TEXT``. If you want to
return bytestrings instead, you can set it to :class:`bytes`.
- For efficiency reasons, there's also a way to return :class:`str` objects
- only for non-ASCII data, and :class:`bytes` otherwise. To activate it, set
- this attribute to :const:`sqlite3.OptimizedUnicode`.
-
You can also set it to any other callable that accepts a single bytestring
parameter and returns the resulting object.
diff --git a/Doc/library/ssl.rst b/Doc/library/ssl.rst
index 8cd07d7..7691996 100644
--- a/Doc/library/ssl.rst
+++ b/Doc/library/ssl.rst
@@ -53,9 +53,53 @@ Functions, Constants, and Exceptions
(currently provided by the OpenSSL library). This signifies some
problem in the higher-level encryption and authentication layer that's
superimposed on the underlying network connection. This error
- is a subtype of :exc:`socket.error`, which in turn is a subtype of
- :exc:`IOError`. The error code and message of :exc:`SSLError` instances
- are provided by the OpenSSL library.
+ is a subtype of :exc:`OSError`. The error code and message of
+ :exc:`SSLError` instances are provided by the OpenSSL library.
+
+ .. versionchanged:: 3.3
+ :exc:`SSLError` used to be a subtype of :exc:`socket.error`.
+
+.. exception:: SSLZeroReturnError
+
+ A subclass of :exc:`SSLError` raised when trying to read or write and
+ the SSL connection has been closed cleanly. Note that this doesn't
+ mean that the underlying transport (read TCP) has been closed.
+
+ .. versionadded:: 3.3
+
+.. exception:: SSLWantReadError
+
+ A subclass of :exc:`SSLError` raised by a :ref:`non-blocking SSL socket
+ <ssl-nonblocking>` when trying to read or write data, but more data needs
+ to be received on the underlying TCP transport before the request can be
+ fulfilled.
+
+ .. versionadded:: 3.3
+
+.. exception:: SSLWantWriteError
+
+ A subclass of :exc:`SSLError` raised by a :ref:`non-blocking SSL socket
+ <ssl-nonblocking>` when trying to read or write data, but more data needs
+ to be sent on the underlying TCP transport before the request can be
+ fulfilled.
+
+ .. versionadded:: 3.3
+
+.. exception:: SSLSyscallError
+
+ A subclass of :exc:`SSLError` raised when a system error was encountered
+ while trying to fulfill an operation on an SSL socket. Unfortunately,
+ there is no easy way to inspect the original errno number.
+
+ .. versionadded:: 3.3
+
+.. exception:: SSLEOFError
+
+ A subclass of :exc:`SSLError` raised when the SSL connection has been
+ terminated abruptly. Generally, you shouldn't try to reuse the underlying
+ transport when this error is encountered.
+
+ .. versionadded:: 3.3
.. exception:: CertificateError
@@ -161,6 +205,35 @@ instead.
Random generation
^^^^^^^^^^^^^^^^^
+.. function:: RAND_bytes(num)
+
+ Returns *num* cryptographically strong pseudo-random bytes. Raises an
+ :class:`SSLError` if the PRNG has not been seeded with enough data or if the
+ operation is not supported by the current RAND method. :func:`RAND_status`
+ can be used to check the status of the PRNG and :func:`RAND_add` can be used
+ to seed the PRNG.
+
+ Read the Wikipedia article, `Cryptographically secure pseudorandom number
+ generator (CSPRNG)
+ <http://en.wikipedia.org/wiki/Cryptographically_secure_pseudorandom_number_generator>`_,
+ to get the requirements of a cryptographically strong generator.
+
+ .. versionadded:: 3.3
+
+.. function:: RAND_pseudo_bytes(num)
+
+ Returns a pair ``(bytes, is_cryptographic)``: *bytes* are *num* pseudo-random
+ bytes, *is_cryptographic* is ``True`` if the bytes generated are
+ cryptographically strong. Raises an :class:`SSLError` if the operation is not
+ supported by the current RAND method.
+
+ Generated pseudo-random byte sequences will be unique if they are of
+ sufficient length, but are not necessarily unpredictable. They can be used
+ for non-cryptographic purposes and for certain purposes in cryptographic
+ protocols, but usually not for key generation etc.
+
+ .. versionadded:: 3.3
+
.. function:: RAND_status()
Returns True if the SSL pseudo-random number generator has been seeded with
@@ -170,7 +243,7 @@ Random generation
.. function:: RAND_egd(path)
- If you are running an entropy-gathering daemon (EGD) somewhere, and ``path``
+ If you are running an entropy-gathering daemon (EGD) somewhere, and *path*
is the pathname of a socket connection open to it, this will read 256 bytes
of randomness from the socket, and add it to the SSL pseudo-random number
generator to increase the security of generated secret keys. This is
@@ -181,8 +254,8 @@ Random generation
.. function:: RAND_add(bytes, entropy)
- Mixes the given ``bytes`` into the SSL pseudo-random number generator. The
- parameter ``entropy`` (a float) is a lower bound on the entropy contained in
+ Mixes the given *bytes* into the SSL pseudo-random number generator. The
+ parameter *entropy* (a float) is a lower bound on the entropy contained in
string (so you can always use :const:`0.0`). See :rfc:`1750` for more
information on sources of entropy.
@@ -238,6 +311,9 @@ Certificate handling
will attempt to validate the server certificate against that set of root
certificates, and will fail if the validation attempt fails.
+ .. versionchanged:: 3.3
+ This function is now IPv6-compatible.
+
.. function:: DER_cert_to_PEM_cert(DER_cert_bytes)
Given a certificate as a DER-encoded blob of bytes, returns a PEM-encoded
@@ -345,6 +421,46 @@ Constants
.. versionadded:: 3.2
+.. data:: OP_CIPHER_SERVER_PREFERENCE
+
+ Use the server's cipher ordering preference, rather than the client's.
+ This option has no effect on client sockets and SSLv2 server sockets.
+
+ .. versionadded:: 3.3
+
+.. data:: OP_SINGLE_DH_USE
+
+ Prevents re-use of the same DH key for distinct SSL sessions. This
+ improves forward secrecy but requires more computational resources.
+ This option only applies to server sockets.
+
+ .. versionadded:: 3.3
+
+.. data:: OP_SINGLE_ECDH_USE
+
+ Prevents re-use of the same ECDH key for distinct SSL sessions. This
+ improves forward secrecy but requires more computational resources.
+ This option only applies to server sockets.
+
+ .. versionadded:: 3.3
+
+.. data:: OP_NO_COMPRESSION
+
+ Disable compression on the SSL channel. This is useful if the application
+ protocol supports its own compression scheme.
+
+ This option is only available with OpenSSL 1.0.0 and later.
+
+ .. versionadded:: 3.3
+
+.. data:: HAS_ECDH
+
+ Whether the OpenSSL library has built-in support for Elliptic Curve-based
+ Diffie-Hellman key exchange. This should be true unless the feature was
+ explicitly disabled by the distributor.
+
+ .. versionadded:: 3.3
+
.. data:: HAS_SNI
Whether the OpenSSL library has built-in support for the *Server Name
@@ -354,6 +470,13 @@ Constants
.. versionadded:: 3.2
+.. data:: CHANNEL_BINDING_TYPES
+
+ List of supported TLS channel binding types. Strings in this list
+ can be used as arguments to :meth:`SSLSocket.get_channel_binding`.
+
+ .. versionadded:: 3.3
+
.. data:: OPENSSL_VERSION
The version string of the OpenSSL library loaded by the interpreter::
@@ -463,6 +586,28 @@ SSL sockets also have the following additional methods and attributes:
version of the SSL protocol that defines its use, and the number of secret
bits being used. If no connection has been established, returns ``None``.
+.. method:: SSLSocket.compression()
+
+ Return the compression algorithm being used as a string, or ``None``
+ if the connection isn't compressed.
+
+ If the higher-level protocol supports its own compression mechanism,
+ you can use :data:`OP_NO_COMPRESSION` to disable SSL-level compression.
+
+ .. versionadded:: 3.3
+
+.. method:: SSLSocket.get_channel_binding(cb_type="tls-unique")
+
+ Get channel binding data for the current connection, as a bytes object.
+ Returns ``None`` if not connected or the handshake has not been completed.
+
+ The *cb_type* parameter allows selection of the desired channel binding
+ type. Valid channel binding types are listed in the
+ :data:`CHANNEL_BINDING_TYPES` list. Currently only the 'tls-unique' channel
+ binding, defined by :rfc:`5929`, is supported. :exc:`ValueError` will be
+ raised if an unsupported channel binding type is requested.
+
+ .. versionadded:: 3.3
.. method:: SSLSocket.unwrap()
@@ -502,7 +647,7 @@ to speed up repeated connections from the same clients.
:class:`SSLContext` objects have the following methods and attributes:
-.. method:: SSLContext.load_cert_chain(certfile, keyfile=None)
+.. method:: SSLContext.load_cert_chain(certfile, keyfile=None, password=None)
Load a private key and the corresponding certificate. The *certfile*
string must be the path to a single file in PEM format containing the
@@ -513,9 +658,25 @@ to speed up repeated connections from the same clients.
:ref:`ssl-certificates` for more information on how the certificate
is stored in the *certfile*.
+ The *password* argument may be a function to call to get the password for
+ decrypting the private key. It will only be called if the private key is
+ encrypted and a password is necessary. It will be called with no arguments,
+ and it should return a string, bytes, or bytearray. If the return value is
+ a string it will be encoded as UTF-8 before using it to decrypt the key.
+ Alternatively a string, bytes, or bytearray value may be supplied directly
+ as the *password* argument. It will be ignored if the private key is not
+ encrypted and no password is needed.
+
+ If the *password* argument is not specified and a password is required,
+ OpenSSL's built-in password prompting mechanism will be used to
+ interactively prompt the user for a password.
+
An :class:`SSLError` is raised if the private key doesn't
match with the certificate.
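+
+ As an illustrative sketch (file names and passphrase hypothetical)::
+
+    import ssl
+
+    context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
+    context.load_cert_chain('cert.pem', 'key.pem',
+                            password=lambda: 'my key passphrase')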
+ .. versionchanged:: 3.3
+ New optional argument *password*.
+
.. method:: SSLContext.load_verify_locations(cafile=None, capath=None)
Load a set of "certification authority" (CA) certificates used to validate
@@ -554,6 +715,38 @@ to speed up repeated connections from the same clients.
when connected, the :meth:`SSLSocket.cipher` method of SSL sockets will
give the currently selected cipher.
+.. method:: SSLContext.load_dh_params(dhfile)
+
+ Load the key generation parameters for Diffie-Hellman (DH) key exchange.
+ Using DH key exchange improves forward secrecy at the expense of
+ computational resources (both on the server and on the client).
+ The *dhfile* parameter should be the path to a file containing DH
+ parameters in PEM format.
+
+ This setting doesn't apply to client sockets. You can also use the
+ :data:`OP_SINGLE_DH_USE` option to further improve security.
+
+ .. versionadded:: 3.3
+
+.. method:: SSLContext.set_ecdh_curve(curve_name)
+
+ Set the curve name for Elliptic Curve-based Diffie-Hellman (ECDH) key
+ exchange. ECDH is significantly faster than regular DH while arguably
+ as secure. The *curve_name* parameter should be a string describing
+ a well-known elliptic curve, for example ``prime256v1`` for a widely
+ supported curve.
+
+ This setting doesn't apply to client sockets. You can also use the
+ :data:`OP_SINGLE_ECDH_USE` option to further improve security.
+
+ This method is not available if :data:`HAS_ECDH` is False.
+
+ .. versionadded:: 3.3
+
+ .. seealso::
+ `SSL/TLS & Perfect Forward Secrecy <http://vincent.bernat.im/en/blog/2011-ssl-perfect-forward-secrecy.html>`_
+ Vincent Bernat.
+
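+ A server-side sketch combining this method with
+ :meth:`~SSLContext.load_dh_params` (the certificate and parameter file
+ names are placeholders)::
+
+    context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
+    context.load_cert_chain('cert.pem', 'key.pem')
+    context.load_dh_params('dhparams.pem')
+    context.options |= ssl.OP_SINGLE_DH_USE
+    if ssl.HAS_ECDH:
+        context.set_ecdh_curve('prime256v1')
+        context.options |= ssl.OP_SINGLE_ECDH_USE
+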
.. method:: SSLContext.wrap_socket(sock, server_side=False, \
do_handshake_on_connect=True, suppress_ragged_eofs=True, \
server_hostname=None)
@@ -968,13 +1161,10 @@ to be aware of:
    try:
        sock.do_handshake()
        break
-   except ssl.SSLError as err:
-       if err.args[0] == ssl.SSL_ERROR_WANT_READ:
-           select.select([sock], [], [])
-       elif err.args[0] == ssl.SSL_ERROR_WANT_WRITE:
-           select.select([], [sock], [])
-       else:
-           raise
+   except ssl.SSLWantReadError:
+       select.select([sock], [], [])
+   except ssl.SSLWantWriteError:
+       select.select([], [sock], [])
.. _ssl-security:
diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst
index 153ee44..a513d40 100644
--- a/Doc/library/stdtypes.rst
+++ b/Doc/library/stdtypes.rst
@@ -15,6 +15,10 @@ interpreter.
The principal built-in types are numerics, sequences, mappings, classes,
instances and exceptions.
+Some collection classes are mutable. The methods that add, subtract, or
+rearrange their members in place, and don't return a specific item, never return
+the collection instance itself but ``None``.
+
Some operations are supported by several object types; in particular,
practically all objects can be compared, tested for truth value, and converted
to a string (with the :func:`repr` function or the slightly different
@@ -998,6 +1002,23 @@ functions based on regular expressions.
rest lowercased.
+.. method:: str.casefold()
+
+ Return a casefolded copy of the string. Casefolded strings may be used for
+ caseless matching.
+
+ Casefolding is similar to lowercasing but more aggressive because it is
+ intended to remove all case distinctions in a string. For example, the German
+ lowercase letter ``'ß'`` is equivalent to ``"ss"``. Since it is already
+ lowercase, :meth:`lower` would do nothing to ``'ß'``; :meth:`casefold`
+ converts it to ``"ss"``.
+
+ The casefolding algorithm is described in section 3.13 of the Unicode
+ Standard.
+
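+ For example::
+
+    >>> 'Straße'.casefold()
+    'strasse'
+    >>> 'Straße'.lower()
+    'straße'
+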
+ .. versionadded:: 3.3
+
+
.. method:: str.center(width[, fillchar])
Return centered in a string of length *width*. Padding is done using the
@@ -1205,6 +1226,9 @@ functions based on regular expressions.
Return a copy of the string with all the cased characters [4]_ converted to
lowercase.
+ The lowercasing algorithm used is described in section 3.13 of the Unicode
+ Standard.
+
.. method:: str.lstrip([chars])
@@ -1277,7 +1301,7 @@ functions based on regular expressions.
two empty strings, followed by the string itself.
-.. method:: str.rsplit([sep[, maxsplit]])
+.. method:: str.rsplit(sep=None, maxsplit=-1)
Return a list of the words in the string, using *sep* as the delimiter string.
If *maxsplit* is given, at most *maxsplit* splits are done, the *rightmost*
@@ -1299,7 +1323,7 @@ functions based on regular expressions.
'mississ'
-.. method:: str.split([sep[, maxsplit]])
+.. method:: str.split(sep=None, maxsplit=-1)
Return a list of the words in the string, using *sep* as the delimiter
string. If *maxsplit* is given, at most *maxsplit* splits are done (thus,
@@ -1356,7 +1380,8 @@ functions based on regular expressions.
.. method:: str.swapcase()
Return a copy of the string with uppercase characters converted to lowercase and
- vice versa.
+ vice versa. Note that it is not necessarily true that
+ ``s.swapcase().swapcase() == s``.
.. method:: str.title()
@@ -1407,7 +1432,11 @@ functions based on regular expressions.
Return a copy of the string with all the cased characters [4]_ converted to
uppercase. Note that ``str.upper().isupper()`` might be ``False`` if ``s``
contains uncased characters or if the Unicode category of the resulting
- character(s) is not "Lu" (Letter, uppercase), but e.g. "Lt" (Letter, titlecase).
+ character(s) is not "Lu" (Letter, uppercase), but e.g. "Lt" (Letter,
+ titlecase).
+
+ The uppercasing algorithm used is described in section 3.13 of the Unicode
+ Standard.
.. method:: str.zfill(width)
@@ -1672,6 +1701,8 @@ Note that while lists allow their items to be of any type, bytearray object
single: append() (sequence method)
single: extend() (sequence method)
single: count() (sequence method)
+ single: clear() (sequence method)
+ single: copy() (sequence method)
single: index() (sequence method)
single: insert() (sequence method)
single: pop() (sequence method)
@@ -1703,6 +1734,12 @@ Note that while lists allow their items to be of any type, bytearray object
| ``s.extend(x)``              | same as ``s[len(s):len(s)] =   | \(2)                |
|                              | x``                            |                     |
+------------------------------+--------------------------------+---------------------+
+| ``s.clear()``                | remove all items from ``s``    |                     |
+|                              |                                |                     |
++------------------------------+--------------------------------+---------------------+
+| ``s.copy()``                 | return a shallow copy of ``s`` |                     |
+|                              |                                |                     |
++------------------------------+--------------------------------+---------------------+
| ``s.count(x)``               | return number of *i*'s for     |                     |
|                              | which ``s[i] == x``            |                     |
+------------------------------+--------------------------------+---------------------+
@@ -1781,6 +1818,9 @@ Notes:
(8)
:meth:`sort` is not supported by :class:`bytearray` objects.
+ .. versionadded:: 3.3
+ :meth:`clear` and :meth:`!copy` methods.
+
.. _bytes-methods:
@@ -1798,6 +1838,12 @@ the objects to strings, they have a :func:`decode` method.
Wherever one of these methods needs to interpret the bytes as characters
(e.g. the :func:`is...` methods), the ASCII character set is assumed.
+.. versionadded:: 3.3
+ The functions :func:`count`, :func:`find`, :func:`index`,
+ :func:`rfind` and :func:`rindex` have additional semantics compared to
+ the corresponding string functions: they also accept an integer in the
+ range 0 to 255 (a byte) as their first argument.
+
.. note::
The methods on bytes and bytearray objects don't accept strings as their
@@ -2162,6 +2208,10 @@ pairs within braces, for example: ``{'jack': 4098, 'sjoerd': 4127}`` or ``{4098:
See :class:`collections.Counter` for a complete implementation including
other methods helpful for accumulating and managing tallies.
+ .. versionchanged:: 3.3
+ If the dict is modified during the lookup, a :exc:`RuntimeError`
+ exception is now raised.
+
.. describe:: d[key] = value
Set ``d[key]`` to *value*.
@@ -2336,7 +2386,7 @@ memoryview type
:class:`memoryview` objects allow Python code to access the internal data
of an object that supports the :ref:`buffer protocol <bufferobjects>` without
-copying. Memory is generally interpreted as simple bytes.
+copying.
.. class:: memoryview(obj)
@@ -2350,43 +2400,92 @@ copying. Memory is generally interpreted as simple bytes.
is a single byte, but other types such as :class:`array.array` may have
bigger elements.
- ``len(view)`` returns the total number of elements in the memoryview,
- *view*. The :class:`~memoryview.itemsize` attribute will give you the
+ ``len(view)`` is equal to the length of :class:`~memoryview.tolist`.
+ If ``view.ndim == 0``, the length is 1. If ``view.ndim == 1``, the length
+ is equal to the number of elements in the view. For higher dimensions,
+ the length is equal to the length of the nested list representation of
+ the view. The :class:`~memoryview.itemsize` attribute will give you the
number of bytes in a single element.
- A :class:`memoryview` supports slicing to expose its data. Taking a single
- index will return a single element as a :class:`bytes` object. Full
- slicing will result in a subview::
-
- >>> v = memoryview(b'abcefg')
- >>> v[1]
- b'b'
- >>> v[-1]
- b'g'
- >>> v[1:4]
- <memory at 0x77ab28>
- >>> bytes(v[1:4])
- b'bce'
-
- If the object the memoryview is over supports changing its data, the
- memoryview supports slice assignment::
+ A :class:`memoryview` supports slicing to expose its data. If
+ :class:`~memoryview.format` is one of the native format specifiers
+ from the :mod:`struct` module, indexing will return a single element
+ with the correct type. Full slicing will result in a subview::
+
+ >>> v = memoryview(b'abcefg')
+ >>> v[1]
+ 98
+ >>> v[-1]
+ 103
+ >>> v[1:4]
+ <memory at 0x7f3ddc9f4350>
+ >>> bytes(v[1:4])
+ b'bce'
+
+ Other native formats::
+
+ >>> import array
+ >>> a = array.array('l', [-11111111, 22222222, -33333333, 44444444])
+ >>> a[0]
+ -11111111
+ >>> a[-1]
+ 44444444
+ >>> a[2:3].tolist()
+ [-33333333]
+ >>> a[::2].tolist()
+ [-11111111, -33333333]
+ >>> a[::-1].tolist()
+ [44444444, -33333333, 22222222, -11111111]
+
+ .. versionadded:: 3.3
+
+ If the underlying object is writable, the memoryview supports slice
+ assignment. Resizing is not allowed::
>>> data = bytearray(b'abcefg')
>>> v = memoryview(data)
>>> v.readonly
False
- >>> v[0] = b'z'
+ >>> v[0] = ord(b'z')
>>> data
bytearray(b'zbcefg')
>>> v[1:4] = b'123'
>>> data
bytearray(b'z123fg')
- >>> v[2] = b'spam'
+ >>> v[2:3] = b'spam'
Traceback (most recent call last):
- File "<stdin>", line 1, in <module>
- ValueError: cannot modify size of memoryview object
+ File "<stdin>", line 1, in <module>
+ ValueError: memoryview assignment: lvalue and rvalue have different structures
+ >>> v[2:6] = b'spam'
+ >>> data
+ bytearray(b'z1spam')
+
+ Memoryviews of hashable (read-only) types are also hashable. The hash
+ is defined as ``hash(m) == hash(m.tobytes())``::
+
+ >>> v = memoryview(b'abcefg')
+ >>> hash(v) == hash(b'abcefg')
+ True
+ >>> hash(v[2:4]) == hash(b'ce')
+ True
+ >>> hash(v[::-2]) == hash(b'abcefg'[::-2])
+ True
+
+ Hashing of multi-dimensional objects is supported::
+
+ >>> buf = bytes(list(range(12)))
+ >>> x = memoryview(buf)
+ >>> y = x.cast('B', shape=[2,2,3])
+ >>> x.tolist()
+ [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
+ >>> y.tolist()
+ [[[0, 1, 2], [3, 4, 5]], [[6, 7, 8], [9, 10, 11]]]
+ >>> hash(x) == hash(y) == hash(y.tobytes())
+ True
+
+ .. versionchanged:: 3.3
+ Memoryview objects are now hashable.
- Notice how the size of the memoryview object cannot be changed.
:class:`memoryview` has several methods:
@@ -2401,12 +2500,20 @@ copying. Memory is generally interpreted as simple bytes.
>>> bytes(m)
b'abc'
+ For non-contiguous arrays the result is equal to the flattened list
+ representation with all elements converted to bytes.
+
.. method:: tolist()
- Return the data in the buffer as a list of integers. ::
+ Return the data in the buffer as a list of elements. ::
>>> memoryview(b'abc').tolist()
[97, 98, 99]
+ >>> import array
+ >>> a = array.array('d', [1.1, 2.2, 3.3])
+ >>> m = memoryview(a)
+ >>> m.tolist()
+ [1.1, 2.2, 3.3]
.. method:: release()
@@ -2433,7 +2540,7 @@ copying. Memory is generally interpreted as simple bytes.
>>> with memoryview(b'abc') as m:
... m[0]
...
- b'a'
+ 97
>>> m[0]
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
@@ -2441,45 +2548,219 @@ copying. Memory is generally interpreted as simple bytes.
.. versionadded:: 3.2
+ .. method:: cast(format[, shape])
+
+ Cast a memoryview to a new format or shape. *shape* defaults to
+ ``[byte_length//new_itemsize]``, which means that the result view
+ will be one-dimensional. The return value is a new memoryview, but
+ the buffer itself is not copied. Supported casts are 1D -> C-contiguous
+ and C-contiguous -> 1D. One of the formats must be a byte format
+ ('B', 'b' or 'c'). The byte length of the result must be the same
+ as the original length.
+
+ Cast 1D/long to 1D/unsigned bytes::
+
+ >>> import array
+ >>> a = array.array('l', [1,2,3])
+ >>> x = memoryview(a)
+ >>> x.format
+ 'l'
+ >>> x.itemsize
+ 8
+ >>> len(x)
+ 3
+ >>> x.nbytes
+ 24
+ >>> y = x.cast('B')
+ >>> y.format
+ 'B'
+ >>> y.itemsize
+ 1
+ >>> len(y)
+ 24
+ >>> y.nbytes
+ 24
+
+ Cast 1D/unsigned bytes to 1D/char::
+
+ >>> b = bytearray(b'zyz')
+ >>> x = memoryview(b)
+ >>> x[0] = b'a'
+ Traceback (most recent call last):
+ File "<stdin>", line 1, in <module>
+ ValueError: memoryview: invalid value for format "B"
+ >>> y = x.cast('c')
+ >>> y[0] = b'a'
+ >>> b
+ bytearray(b'ayz')
+
+ Cast 1D/bytes to 3D/ints to 1D/signed char::
+
+ >>> import struct
+ >>> buf = struct.pack("i"*12, *list(range(12)))
+ >>> x = memoryview(buf)
+ >>> y = x.cast('i', shape=[2,2,3])
+ >>> y.tolist()
+ [[[0, 1, 2], [3, 4, 5]], [[6, 7, 8], [9, 10, 11]]]
+ >>> y.format
+ 'i'
+ >>> y.itemsize
+ 4
+ >>> len(y)
+ 2
+ >>> y.nbytes
+ 48
+ >>> z = y.cast('b')
+ >>> z.format
+ 'b'
+ >>> z.itemsize
+ 1
+ >>> len(z)
+ 48
+ >>> z.nbytes
+ 48
+
+ Cast 1D/unsigned char to 2D/unsigned long::
+
+ >>> buf = struct.pack("L"*6, *list(range(6)))
+ >>> x = memoryview(buf)
+ >>> y = x.cast('L', shape=[2,3])
+ >>> len(y)
+ 2
+ >>> y.nbytes
+ 48
+ >>> y.tolist()
+ [[0, 1, 2], [3, 4, 5]]
+
+ .. versionadded:: 3.3
+
There are also several readonly attributes available:
+ .. attribute:: obj
+
+ The underlying object of the memoryview::
+
+ >>> b = bytearray(b'xyz')
+ >>> m = memoryview(b)
+ >>> m.obj is b
+ True
+
+ .. versionadded:: 3.3
+
+ .. attribute:: nbytes
+
+ ``nbytes == product(shape) * itemsize == len(m.tobytes())``. This is
+ the amount of space in bytes that the array would use in a contiguous
+ representation. It is not necessarily equal to ``len(m)``::
+
+ >>> import array
+ >>> a = array.array('i', [1,2,3,4,5])
+ >>> m = memoryview(a)
+ >>> len(m)
+ 5
+ >>> m.nbytes
+ 20
+ >>> y = m[::2]
+ >>> len(y)
+ 3
+ >>> y.nbytes
+ 12
+ >>> len(y.tobytes())
+ 12
+
+ Multi-dimensional arrays::
+
+ >>> import struct
+ >>> buf = struct.pack("d"*12, *[1.5*x for x in range(12)])
+ >>> x = memoryview(buf)
+ >>> y = x.cast('d', shape=[3,4])
+ >>> y.tolist()
+ [[0.0, 1.5, 3.0, 4.5], [6.0, 7.5, 9.0, 10.5], [12.0, 13.5, 15.0, 16.5]]
+ >>> len(y)
+ 3
+ >>> y.nbytes
+ 96
+
+ .. versionadded:: 3.3
+
+ .. attribute:: readonly
+
+ A bool indicating whether the memory is read only.
+
.. attribute:: format
A string containing the format (in :mod:`struct` module style) for each
- element in the view. This defaults to ``'B'``, a simple bytestring.
+ element in the view. A memoryview can be created from exporters with
+ arbitrary format strings, but some methods (e.g. :meth:`tolist`) are
+ restricted to native single element formats. Special care must be taken
+ when comparing memoryviews. Since comparisons are required to return a
+ value for ``==`` and ``!=``, two memoryviews referencing the same
+ exporter can compare as not-equal if the exporter's format is not
+ understood::
+
+ >>> from ctypes import BigEndianStructure, c_long
+ >>> class BEPoint(BigEndianStructure):
+ ... _fields_ = [("x", c_long), ("y", c_long)]
+ ...
+ >>> point = BEPoint(100, 200)
+ >>> a = memoryview(point)
+ >>> b = memoryview(point)
+ >>> a == b
+ False
+ >>> a.tolist()
+ Traceback (most recent call last):
+ File "<stdin>", line 1, in <module>
+ NotImplementedError: memoryview: unsupported format T{>l:x:>l:y:}
.. attribute:: itemsize
The size in bytes of each element of the memoryview::
- >>> m = memoryview(array.array('H', [1,2,3]))
+ >>> import array, struct
+ >>> m = memoryview(array.array('H', [32000, 32001, 32002]))
>>> m.itemsize
2
>>> m[0]
- b'\x01\x00'
- >>> len(m[0]) == m.itemsize
+ 32000
+ >>> struct.calcsize('H') == m.itemsize
True
- .. attribute:: shape
-
- A tuple of integers the length of :attr:`ndim` giving the shape of the
- memory as a N-dimensional array.
-
.. attribute:: ndim
An integer indicating how many dimensions of a multi-dimensional array the
memory represents.
+ .. attribute:: shape
+
+ A tuple of integers the length of :attr:`ndim` giving the shape of the
+ memory as an N-dimensional array.
+
.. attribute:: strides
A tuple of integers the length of :attr:`ndim` giving the size in bytes to
access each element for each dimension of the array.
- .. attribute:: readonly
+ .. attribute:: suboffsets
- A bool indicating whether the memory is read only.
+ Used internally for PIL-style arrays. The value is informational only.
+
+ .. attribute:: c_contiguous
+
+ A bool indicating whether the memory is C-contiguous.
+
+ .. versionadded:: 3.3
+
+ .. attribute:: f_contiguous
- .. memoryview.suboffsets isn't documented because it only seems useful for C
+ A bool indicating whether the memory is Fortran contiguous.
+
+ .. versionadded:: 3.3
+
+ .. attribute:: contiguous
+
+ A bool indicating whether the memory is contiguous.
+
+ .. versionadded:: 3.3
.. _typecontextmanager:
@@ -2703,7 +2984,7 @@ The Null Object
This object is returned by functions that don't explicitly return a value. It
supports no special operations. There is exactly one null object, named
-``None`` (a built-in name).
+``None`` (a built-in name). ``type(None)()`` produces the same singleton.
It is written as ``None``.
@@ -2713,9 +2994,11 @@ It is written as ``None``.
The Ellipsis Object
-------------------
-This object is commonly used by slicing (see :ref:`slicings`). It supports no
-special operations. There is exactly one ellipsis object, named
-:const:`Ellipsis` (a built-in name).
+This object is commonly used by slicing (see :ref:`slicings`), but may also
+be used in other situations where a sentinel value other than :const:`None`
+is needed. It supports no special operations. There is exactly one ellipsis
+object, named :const:`Ellipsis` (a built-in name). ``type(Ellipsis)()``
+produces the :const:`Ellipsis` singleton.
It is written as ``Ellipsis`` or ``...``.
@@ -2727,7 +3010,8 @@ The NotImplemented Object
This object is returned from comparisons and binary operations when they are
asked to operate on types they don't support. See :ref:`comparisons` for more
-information.
+information. There is exactly one ``NotImplemented`` object.
+``type(NotImplemented)()`` produces the singleton instance.
It is written as ``NotImplemented``.
@@ -2793,6 +3077,13 @@ types, where they are relevant. Some of these are not reported by the
The name of the class or type.
+.. attribute:: class.__qualname__
+
+ The :term:`qualified name` of the class or type.
+
+ .. versionadded:: 3.3
+
+
.. attribute:: class.__mro__
This attribute is a tuple of classes that are considered when looking for
diff --git a/Doc/library/struct.rst b/Doc/library/struct.rst
index 12820e0..994506c 100644
--- a/Doc/library/struct.rst
+++ b/Doc/library/struct.rst
@@ -187,17 +187,24 @@ platform-dependent.
| ``Q``  | :c:type:`unsigned long   | integer            | 8              | \(2), \(3) |
|        | long`                    |                    |                |            |
+--------+--------------------------+--------------------+----------------+------------+
-| ``f``  | :c:type:`float`          | float              | 4              | \(4)       |
+| ``n``  | :c:type:`ssize_t`        | integer            |                | \(4)       |
+--------+--------------------------+--------------------+----------------+------------+
-| ``d``  | :c:type:`double`         | float              | 8              | \(4)       |
+| ``N``  | :c:type:`size_t`         | integer            |                | \(4)       |
++--------+--------------------------+--------------------+----------------+------------+
+| ``f``  | :c:type:`float`          | float              | 4              | \(5)       |
++--------+--------------------------+--------------------+----------------+------------+
+| ``d``  | :c:type:`double`         | float              | 8              | \(5)       |
+--------+--------------------------+--------------------+----------------+------------+
| ``s``  | :c:type:`char[]`         | bytes              |                |            |
+--------+--------------------------+--------------------+----------------+------------+
| ``p``  | :c:type:`char[]`         | bytes              |                |            |
+--------+--------------------------+--------------------+----------------+------------+
-| ``P``  | :c:type:`void \*`        | integer            |                | \(5)       |
+| ``P``  | :c:type:`void \*`        | integer            |                | \(6)       |
+--------+--------------------------+--------------------+----------------+------------+
+.. versionchanged:: 3.3
+ Added support for the ``'n'`` and ``'N'`` formats.
+
Notes:
(1)
@@ -219,11 +226,17 @@ Notes:
Use of the :meth:`__index__` method for non-integers is new in 3.2.
(4)
+ The ``'n'`` and ``'N'`` conversion codes are only available for the native
+ size (selected as the default or with the ``'@'`` byte order character).
+ For the standard size, you can use whichever of the other integer formats
+ fits your application.
+
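+ An illustrative sketch (the size shown assumes a platform with 64-bit
+ ``ssize_t``)::
+
+    >>> import struct
+    >>> struct.calcsize('@n')
+    8
+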
+(5)
For the ``'f'`` and ``'d'`` conversion codes, the packed representation uses
the IEEE 754 binary32 (for ``'f'``) or binary64 (for ``'d'``) format,
regardless of the floating-point format used by the platform.
-(5)
+(6)
The ``'P'`` format character is only available for the native byte ordering
(selected as the default or with the ``'@'`` byte order character). The byte
order character ``'='`` chooses to use little- or big-endian ordering based
diff --git a/Doc/library/subprocess.rst b/Doc/library/subprocess.rst
index b7e87ab..ee88379 100644
--- a/Doc/library/subprocess.rst
+++ b/Doc/library/subprocess.rst
@@ -30,16 +30,21 @@ convenience functions for all use cases they can handle. For more advanced
use cases, the underlying :class:`Popen` interface can be used directly.
-.. function:: call(args, *, stdin=None, stdout=None, stderr=None, shell=False)
+.. function:: call(args, *, stdin=None, stdout=None, stderr=None, shell=False, timeout=None)
Run the command described by *args*. Wait for command to complete, then
return the :attr:`returncode` attribute.
The arguments shown above are merely the most common ones, described below
- in :ref:`frequently-used-arguments` (hence the slightly odd notation in
- the abbreviated signature). The full function signature is the same as
- that of the :class:`Popen` constructor - this functions passes all
- supplied arguments directly through to that interface.
+ in :ref:`frequently-used-arguments` (hence the use of keyword-only notation
+ in the abbreviated signature). The full function signature is largely the
+ same as that of the :class:`Popen` constructor - this function passes all
+ supplied arguments other than *timeout* directly through to that interface.
+
+ The *timeout* argument is passed to :meth:`Popen.wait`. If the timeout
+ expires, the child process will be killed and then waited for again. The
+ :exc:`TimeoutExpired` exception will be re-raised after the child process
+ has terminated.
Examples::
@@ -62,8 +67,11 @@ use cases, the underlying :class:`Popen` interface can be used directly.
process may block if it generates enough output to a pipe to fill up
the OS pipe buffer.
+ .. versionchanged:: 3.3
+ *timeout* was added.
+
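+ For example (a sketch; assumes a Unix ``sleep`` command)::
+
+    try:
+        retcode = subprocess.call(["sleep", "60"], timeout=5)
+    except subprocess.TimeoutExpired:
+        print("command timed out and was killed")
+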
-.. function:: check_call(args, *, stdin=None, stdout=None, stderr=None, shell=False)
+.. function:: check_call(args, *, stdin=None, stdout=None, stderr=None, shell=False, timeout=None)
Run command with arguments. Wait for command to complete. If the return
code was zero then return, otherwise raise :exc:`CalledProcessError`. The
@@ -71,10 +79,15 @@ use cases, the underlying :class:`Popen` interface can be used directly.
:attr:`returncode` attribute.
The arguments shown above are merely the most common ones, described below
- in :ref:`frequently-used-arguments` (hence the slightly odd notation in
- the abbreviated signature). The full function signature is the same as
- that of the :class:`Popen` constructor - this functions passes all
- supplied arguments directly through to that interface.
+ in :ref:`frequently-used-arguments` (hence the use of keyword-only notation
+ in the abbreviated signature). The full function signature is largely the
+ same as that of the :class:`Popen` constructor - this function passes all
+ supplied arguments other than *timeout* directly through to that interface.
+
+ The *timeout* argument is passed to :meth:`Popen.wait`. If the timeout
+ expires, the child process will be killed and then waited for again. The
+ :exc:`TimeoutExpired` exception will be re-raised after the child process
+ has terminated.
Examples::
@@ -86,8 +99,6 @@ use cases, the underlying :class:`Popen` interface can be used directly.
...
subprocess.CalledProcessError: Command 'exit 1' returned non-zero exit status 1
- .. versionadded:: 2.5
-
.. warning::
Invoking the system shell with ``shell=True`` can be a security hazard
@@ -101,8 +112,11 @@ use cases, the underlying :class:`Popen` interface can be used directly.
process may block if it generates enough output to a pipe to fill up
the OS pipe buffer.
+ .. versionchanged:: 3.3
+ *timeout* was added.
+
-.. function:: check_output(args, *, stdin=None, stderr=None, shell=False, universal_newlines=False)
+.. function:: check_output(args, *, stdin=None, stderr=None, shell=False, universal_newlines=False, timeout=None)
Run command with arguments and return its output as a byte string.
@@ -112,11 +126,17 @@ use cases, the underlying :class:`Popen` interface can be used directly.
attribute.
The arguments shown above are merely the most common ones, described below
- in :ref:`frequently-used-arguments` (hence the slightly odd notation in
- the abbreviated signature). The full function signature is largely the
- same as that of the :class:`Popen` constructor, except that *stdout* is
- not permitted as it is used internally. All other supplied arguments are
- passed directly through to the :class:`Popen` constructor.
+ in :ref:`frequently-used-arguments` (hence the use of keyword-only notation
+ in the abbreviated signature). The full function signature is largely the
+ same as that of the :class:`Popen` constructor - this function passes all
+ supplied arguments other than *timeout* directly through to that interface.
+ In addition, *stdout* is not permitted as an argument, as it is used
+ internally to collect the output from the subprocess.
+
+ The *timeout* argument is passed to :meth:`Popen.wait`. If the timeout
+ expires, the child process will be killed and then waited for again. The
+ :exc:`TimeoutExpired` exception will be re-raised after the child process
+ has terminated.
Examples::
@@ -147,7 +167,7 @@ use cases, the underlying :class:`Popen` interface can be used directly.
... shell=True)
'ls: non_existent_file: No such file or directory\n'
- .. versionadded:: 2.7
+ .. versionadded:: 3.1
.. warning::
@@ -161,6 +181,18 @@ use cases, the underlying :class:`Popen` interface can be used directly.
read in the current process, the child process may block if it
generates enough output to the pipe to fill up the OS pipe buffer.
+ .. versionchanged:: 3.3
+ *timeout* was added.
+
+
+.. data:: DEVNULL
+
+ Special value that can be used as the *stdin*, *stdout* or *stderr* argument
+ to :class:`Popen` and indicates that the special file :data:`os.devnull`
+ will be used.
+
+ .. versionadded:: 3.3
+
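+ For example, to run a command while discarding all of its output (a
+ sketch; the command is a placeholder)::
+
+    subprocess.check_call(["make", "clean"],
+                          stdout=subprocess.DEVNULL,
+                          stderr=subprocess.DEVNULL)
+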
.. data:: PIPE
@@ -196,13 +228,14 @@ default values. The arguments that are most commonly needed are:
*stdin*, *stdout* and *stderr* specify the executed program's standard input,
standard output and standard error file handles, respectively. Valid values
- are :data:`PIPE`, an existing file descriptor (a positive integer), an
- existing file object, and ``None``. :data:`PIPE` indicates that a new pipe
- to the child should be created. With the default settings of ``None``, no
- redirection will occur; the child's file handles will be inherited from the
- parent. Additionally, *stderr* can be :data:`STDOUT`, which indicates that
- the stderr data from the child process should be captured into the same file
- handle as for stdout.
+ are :data:`PIPE`, :data:`DEVNULL`, an existing file descriptor (a positive
+ integer), an existing file object, and ``None``. :data:`PIPE` indicates
+ that a new pipe to the child should be created. :data:`DEVNULL` indicates
+ that the special file :data:`os.devnull` will be used. With the default
+ settings of ``None``, no redirection will occur; the child's file handles
+ will be inherited from the parent. Additionally, *stderr* can be
+ :data:`STDOUT`, which indicates that the stderr data from the child
+ process should be captured into the same file handle as for *stdout*.
When *stdout* or *stderr* are pipes and *universal_newlines* is
:const:`True` then the output data is assumed to be encoded as UTF-8 and
@@ -331,13 +364,14 @@ functions.
*stdin*, *stdout* and *stderr* specify the executed program's standard input,
standard output and standard error file handles, respectively. Valid values
- are :data:`PIPE`, an existing file descriptor (a positive integer), an
- existing :term:`file object`, and ``None``. :data:`PIPE` indicates that a
- new pipe to the child should be created. With the default settings of
- ``None``, no redirection will occur; the child's file handles will be
- inherited from the parent. Additionally, *stderr* can be :data:`STDOUT`,
- which indicates that the stderr data from the applications should be
- captured into the same file handle as for stdout.
+ are :data:`PIPE`, :data:`DEVNULL`, an existing file descriptor (a positive
+ integer), an existing :term:`file object`, and ``None``. :data:`PIPE`
+ indicates that a new pipe to the child should be created. :data:`DEVNULL`
+ indicates that the special file :data:`os.devnull` will be used. With the
+ default settings of ``None``, no redirection will occur; the child's file
+ handles will be inherited from the parent. Additionally, *stderr* can be
+ :data:`STDOUT`, which indicates that the stderr data from the applications
+ should be captured into the same file handle as for stdout.
If *preexec_fn* is set to a callable object, this object will be called in the
child process just before the child is executed.
@@ -456,6 +490,15 @@ arguments.
:exc:`CalledProcessError` if the called process returns a non-zero return
code.
+All of the functions and methods that accept a *timeout* parameter, such as
+:func:`call` and :meth:`Popen.communicate`, will raise :exc:`TimeoutExpired` if
+the timeout expires before the process exits.
+
+Exceptions defined in this module all inherit from :exc:`SubprocessError`.
+
+ .. versionadded:: 3.3
+ The :exc:`SubprocessError` base class was added.
+
Security
^^^^^^^^
@@ -479,11 +522,15 @@ Instances of the :class:`Popen` class have the following methods:
attribute.
-.. method:: Popen.wait()
+.. method:: Popen.wait(timeout=None)
Wait for child process to terminate. Set and return :attr:`returncode`
attribute.
+ If the process does not terminate after *timeout* seconds, raise a
+ :exc:`TimeoutExpired` exception. It is safe to catch this exception and
+ retry the wait.
+
.. warning::
This will deadlock when using ``stdout=PIPE`` and/or
@@ -491,13 +538,17 @@ Instances of the :class:`Popen` class have the following methods:
a pipe such that it blocks waiting for the OS pipe buffer to
accept more data. Use :meth:`communicate` to avoid that.
+ .. versionchanged:: 3.3
+ *timeout* was added.
-.. method:: Popen.communicate(input=None)
+
+.. method:: Popen.communicate(input=None, timeout=None)
Interact with process: Send data to stdin. Read data from stdout and stderr,
- until end-of-file is reached. Wait for process to terminate. The optional
- *input* argument should be a byte string to be sent to the child process, or
- ``None``, if no data should be sent to the child.
+ until end-of-file is reached. Wait for process to terminate. The optional
+ *input* argument should be data to be sent to the child process, or
+ ``None``, if no data should be sent to the child. The type of *input*
+ must be bytes or, if *universal_newlines* was ``True``, a string.
:meth:`communicate` returns a tuple ``(stdoutdata, stderrdata)``.
@@ -506,11 +557,29 @@ Instances of the :class:`Popen` class have the following methods:
``None`` in the result tuple, you need to give ``stdout=PIPE`` and/or
``stderr=PIPE`` too.
+ If the process does not terminate after *timeout* seconds, a
+ :exc:`TimeoutExpired` exception will be raised. Catching this exception and
+ retrying communication will not lose any output.
+
+ The child process is not killed if the timeout expires, so in order to
+ clean up properly a well-behaved application should kill the child process
+ and finish communication::
+
+    proc = subprocess.Popen(...)
+    try:
+        outs, errs = proc.communicate(timeout=15)
+    except TimeoutExpired:
+        proc.kill()
+        outs, errs = proc.communicate()
+
.. note::
The data read is buffered in memory, so do not use this method if the data
size is large or unlimited.
+ .. versionchanged:: 3.3
+ *timeout* was added.
+
.. method:: Popen.send_signal(signal)
@@ -952,3 +1021,9 @@ runtime):
backslash. If the number of backslashes is odd, the last
backslash escapes the next double quotation mark as
described in rule 3.
+
+
+.. seealso::
+
+ :mod:`shlex`
+ Module which provides functions to parse and escape command lines.
diff --git a/Doc/library/sys.rst b/Doc/library/sys.rst
index 0e4adec..96450c5 100644
--- a/Doc/library/sys.rst
+++ b/Doc/library/sys.rst
@@ -235,14 +235,13 @@ always available.
.. data:: flags
- The struct sequence *flags* exposes the status of command line flags. The
- attributes are read only.
+ The :term:`struct sequence` *flags* exposes the status of command line
+ flags. The attributes are read only.
============================= =============================
attribute                     flag
============================= =============================
:const:`debug`                :option:`-d`
- :const:`division_warning`   :option:`-Q`
:const:`inspect`              :option:`-i`
:const:`interactive`          :option:`-i`
:const:`optimize`             :option:`-O` or :option:`-OO`
@@ -262,15 +261,18 @@ always available.
.. versionadded:: 3.2.3
The ``hash_randomization`` attribute.
+ .. versionchanged:: 3.3
+ Removed obsolete ``division_warning`` attribute.
+
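+ A quick illustration (the output assumes the interpreter was started
+ without ``-O``)::
+
+    >>> import sys
+    >>> sys.flags.optimize
+    0
+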
.. data:: float_info
- A structseq holding information about the float type. It contains low level
- information about the precision and internal representation. The values
- correspond to the various floating-point constants defined in the standard
- header file :file:`float.h` for the 'C' programming language; see section
- 5.2.4.2.2 of the 1999 ISO/IEC C standard [C99]_, 'Characteristics of
- floating types', for details.
+ A :term:`struct sequence` holding information about the float type. It
+ contains low level information about the precision and internal
+ representation. The values correspond to the various floating-point
+ constants defined in the standard header file :file:`float.h` for the 'C'
+ programming language; see section 5.2.4.2.2 of the 1999 ISO/IEC C standard
+ [C99]_, 'Characteristics of floating types', for details.
+---------------------+----------------+--------------------------------------------------+
| attribute           | float.h macro  | explanation                                      |
@@ -520,8 +522,9 @@ always available.
.. data:: hash_info
- A structseq giving parameters of the numeric hash implementation. For
- more details about hashing of numeric types, see :ref:`numeric-hash`.
+ A :term:`struct sequence` giving parameters of the numeric hash
+ implementation. For more details about hashing of numeric types, see
+ :ref:`numeric-hash`.
+---------------------+--------------------------------------------------+
| attribute           | explanation                                      |
@@ -556,8 +559,8 @@ always available.
This is called ``hexversion`` since it only really looks meaningful when viewed
as the result of passing it to the built-in :func:`hex` function. The
- struct sequence :data:`sys.version_info` may be used for a more human-friendly
- encoding of the same information.
+ :term:`struct sequence` :data:`sys.version_info` may be used for a more
+ human-friendly encoding of the same information.
The ``hexversion`` is a 32-bit number with the following layout:
@@ -585,8 +588,8 @@ always available.
.. data:: int_info
- A struct sequence that holds information about Python's
- internal representation of integers. The attributes are read only.
+ A :term:`struct sequence` that holds information about Python's internal
+ representation of integers. The attributes are read only.
+-------------------------+----------------------------------------------+
| Attribute               | Explanation                                  |
@@ -641,9 +644,13 @@ always available.
.. data:: maxunicode
- An integer giving the largest supported code point for a Unicode character. The
- value of this depends on the configuration option that specifies whether Unicode
- characters are stored as UCS-2 or UCS-4.
+ An integer giving the value of the largest Unicode code point,
+ i.e. ``1114111`` (``0x10FFFF`` in hexadecimal).
+
+ .. versionchanged:: 3.3
+ Before :pep:`393`, ``sys.maxunicode`` used to be either ``0xFFFF``
+ or ``0x10FFFF``, depending on the configuration option that specified
+ whether Unicode characters were stored as UCS-2 or UCS-4.
.. data:: meta_path
@@ -718,36 +725,35 @@ always available.
This string contains a platform identifier that can be used to append
platform-specific components to :data:`sys.path`, for instance.
- For most Unix systems, this is the lowercased OS name as returned by ``uname
- -s`` with the first part of the version as returned by ``uname -r`` appended,
- e.g. ``'sunos5'``, *at the time when Python was built*. Unless you want to
- test for a specific system version, it is therefore recommended to use the
- following idiom::
+ For Unix systems, except on Linux, this is the lowercased OS name as
+ returned by ``uname -s`` with the first part of the version as returned by
+ ``uname -r`` appended, e.g. ``'sunos5'`` or ``'freebsd8'``, *at the time
+ when Python was built*. Unless you want to test for a specific system
+ version, it is therefore recommended to use the following idiom::
if sys.platform.startswith('freebsd'):
    # FreeBSD-specific code here...
elif sys.platform.startswith('linux'):
    # Linux-specific code here...
- .. versionchanged:: 3.2.2
- Since lots of code check for ``sys.platform == 'linux2'``, and there is
- no essential change between Linux 2.x and 3.x, ``sys.platform`` is always
- set to ``'linux2'``, even on Linux 3.x. In Python 3.3 and later, the
- value will always be set to ``'linux'``, so it is recommended to always
- use the ``startswith`` idiom presented above.
-
For other systems, the values are:
- ====================== ===========================
- System                 :data:`platform` value
- ====================== ===========================
- Linux (2.x *and* 3.x)  ``'linux2'``
- Windows                ``'win32'``
- Windows/Cygwin         ``'cygwin'``
- Mac OS X               ``'darwin'``
- OS/2                   ``'os2'``
- OS/2 EMX               ``'os2emx'``
- ====================== ===========================
+ ================ ===========================
+ System           :data:`platform` value
+ ================ ===========================
+ Linux            ``'linux'``
+ Windows          ``'win32'``
+ Windows/Cygwin   ``'cygwin'``
+ Mac OS X         ``'darwin'``
+ OS/2             ``'os2'``
+ OS/2 EMX         ``'os2emx'``
+ ================ ===========================
+
+ .. versionchanged:: 3.3
+ On Linux, :attr:`sys.platform` doesn't contain the major version anymore.
+ It is always ``'linux'``, instead of ``'linux2'`` or ``'linux3'``. Since
+ older Python versions include the version number, it is recommended to
+ always use the ``startswith`` idiom presented above.
.. seealso::
@@ -764,7 +770,7 @@ always available.
independent Python files are installed; by default, this is the string
``'/usr/local'``. This can be set at build time with the ``--prefix``
argument to the :program:`configure` script. The main collection of Python
- library modules is installed in the directory :file:`{prefix}/lib/python{X.Y}``
+ library modules is installed in the directory :file:`{prefix}/lib/python{X.Y}`
while the platform independent header files (all except :file:`pyconfig.h`) are
stored in :file:`{prefix}/include/python{X.Y}`, where *X.Y* is the version
number of Python, for example ``3.2``.
@@ -806,11 +812,11 @@ always available.
the interpreter loads extension modules. Among other things, this will enable a
lazy resolving of symbols when importing a module, if called as
``sys.setdlopenflags(0)``. To share symbols across extension modules, call as
- ``sys.setdlopenflags(ctypes.RTLD_GLOBAL)``. Symbolic names for the
- flag modules can be either found in the :mod:`ctypes` module, or in the :mod:`DLFCN`
- module. If :mod:`DLFCN` is not available, it can be generated from
- :file:`/usr/include/dlfcn.h` using the :program:`h2py` script. Availability:
- Unix.
+ ``sys.setdlopenflags(os.RTLD_GLOBAL)``. Symbolic names for the flag modules
+ can be found in the :mod:`os` module (``RTLD_xxx`` constants, e.g.
+ :data:`os.RTLD_LAZY`).
+
+ Availability: Unix.
.. function:: setprofile(profilefunc)
@@ -1003,22 +1009,33 @@ always available.
to a console and Python apps started with :program:`pythonw`.
-.. data:: subversion
+.. data:: thread_info
- A triple (repo, branch, version) representing the Subversion information of the
- Python interpreter. *repo* is the name of the repository, ``'CPython'``.
- *branch* is a string of one of the forms ``'trunk'``, ``'branches/name'`` or
- ``'tags/name'``. *version* is the output of ``svnversion``, if the interpreter
- was built from a Subversion checkout; it contains the revision number (range)
- and possibly a trailing 'M' if there were local modifications. If the tree was
- exported (or svnversion was not available), it is the revision of
- ``Include/patchlevel.h`` if the branch is a tag. Otherwise, it is ``None``.
+ A :term:`struct sequence` holding information about the thread
+ implementation.
- .. deprecated:: 3.2.1
- Python is now `developed <http://docs.python.org/devguide/>`_ using
- Mercurial. In recent Python 3.2 bugfix releases, :data:`subversion`
- therefore contains placeholder information. It is removed in Python
- 3.3.
+ +------------------+---------------------------------------------------------+
+ | Attribute        | Explanation                                             |
+ +==================+=========================================================+
+ | :const:`name`    | Name of the thread implementation:                      |
+ |                  |                                                         |
+ |                  | * ``'nt'``: Windows threads                             |
+ |                  | * ``'os2'``: OS/2 threads                               |
+ |                  | * ``'pthread'``: POSIX threads                          |
+ |                  | * ``'solaris'``: Solaris threads                        |
+ +------------------+---------------------------------------------------------+
+ | :const:`lock`    | Name of the lock implementation:                        |
+ |                  |                                                         |
+ |                  | * ``'semaphore'``: a lock uses a semaphore              |
+ |                  | * ``'mutex+cond'``: a lock uses a mutex                 |
+ |                  |   and a condition variable                              |
+ |                  | * ``None`` if this information is unknown               |
+ +------------------+---------------------------------------------------------+
+ | :const:`version` | Name and version of the thread library. It is a string, |
+ |                  | or ``None`` if this information is unknown.             |
+ +------------------+---------------------------------------------------------+
+
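+ Illustrative output on a POSIX build (the values are
+ platform-dependent)::
+
+    >>> import sys
+    >>> sys.thread_info.name
+    'pthread'
+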
+ .. versionadded:: 3.3
.. data:: tracebacklimit
diff --git a/Doc/library/tarfile.rst b/Doc/library/tarfile.rst
index 46e4900..92e9df7 100644
--- a/Doc/library/tarfile.rst
+++ b/Doc/library/tarfile.rst
@@ -13,13 +13,13 @@
--------------
The :mod:`tarfile` module makes it possible to read and write tar
-archives, including those using gzip or bz2 compression.
+archives, including those using gzip, bz2 and lzma compression.
Use the :mod:`zipfile` module to read or write :file:`.zip` files, or the
higher-level functions in :ref:`shutil <archiving-operations>`.
Some facts and figures:
-* reads and writes :mod:`gzip` and :mod:`bz2` compressed archives.
+* reads and writes :mod:`gzip`, :mod:`bz2` and :mod:`lzma` compressed archives.
* read/write support for the POSIX.1-1988 (ustar) format.
@@ -33,6 +33,9 @@ Some facts and figures:
character devices and block devices and is able to acquire and restore file
information like timestamp, access permissions and owner.
+.. versionchanged:: 3.3
+ Added support for :mod:`lzma` compression.
+
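+For example, an ``xz`` archive can be read with the new mode strings (a
+sketch; the archive name is a placeholder)::
+
+   import tarfile
+
+   with tarfile.open("sample.tar.xz", "r:xz") as tar:
+       tar.extractall()
+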
.. function:: open(name=None, mode='r', fileobj=None, bufsize=10240, \*\*kwargs)
@@ -56,6 +59,8 @@ Some facts and figures:
+------------------+---------------------------------------------+
| ``'r:bz2'``      | Open for reading with bzip2 compression.    |
+------------------+---------------------------------------------+
+ | ``'r:xz'``       | Open for reading with lzma compression.     |
+ +------------------+---------------------------------------------+
| ``'a' or 'a:'``  | Open for appending with no compression. The |
|                  | file is created if it does not exist.       |
+------------------+---------------------------------------------+
@@ -65,11 +70,13 @@ Some facts and figures:
+------------------+---------------------------------------------+
| ``'w:bz2'``      | Open for bzip2 compressed writing.          |
+------------------+---------------------------------------------+
+ | ``'w:xz'``       | Open for lzma compressed writing.           |
+ +------------------+---------------------------------------------+
- Note that ``'a:gz'`` or ``'a:bz2'`` is not possible. If *mode* is not suitable
- to open a certain (compressed) file for reading, :exc:`ReadError` is raised. Use
- *mode* ``'r'`` to avoid this. If a compression method is not supported,
- :exc:`CompressionError` is raised.
+ Note that ``'a:gz'``, ``'a:bz2'`` or ``'a:xz'`` is not possible. If *mode*
+ is not suitable to open a certain (compressed) file for reading,
+ :exc:`ReadError` is raised. Use *mode* ``'r'`` to avoid this. If a
+ compression method is not supported, :exc:`CompressionError` is raised.
If *fileobj* is specified, it is used as an alternative to a :term:`file object`
opened in binary mode for *name*. It is supposed to be at position 0.
@@ -100,6 +107,9 @@ Some facts and figures:
| ``'r|bz2'`` | Open a bzip2 compressed *stream* for       |
|             | reading.                                   |
+-------------+--------------------------------------------+
+ | ``'r|xz'``  | Open an lzma compressed *stream* for       |
+ |             | reading.                                   |
+ +-------------+--------------------------------------------+
| ``'w|'``    | Open an uncompressed *stream* for writing. |
+-------------+--------------------------------------------+
| ``'w|gz'``  | Open a gzip compressed *stream* for        |
|             | writing.                                   |
+-------------+--------------------------------------------+
| ``'w|bz2'`` | Open a bzip2 compressed *stream* for       |
|             | writing.                                   |
+-------------+--------------------------------------------+
+ | ``'w|xz'``  | Open an lzma compressed *stream* for       |
+ |             | writing.                                   |
.. class:: TarFile
@@ -263,9 +276,9 @@ be finalized; only the internally used file object will be closed. See the
If *errorlevel* is ``0``, all errors are ignored when using :meth:`TarFile.extract`.
Nevertheless, they appear as error messages in the debug output, when debugging
- is enabled. If ``1``, all *fatal* errors are raised as :exc:`OSError` or
- :exc:`IOError` exceptions. If ``2``, all *non-fatal* errors are raised as
- :exc:`TarError` exceptions as well.
+ is enabled. If ``1``, all *fatal* errors are raised as :exc:`OSError`
+ exceptions. If ``2``, all *non-fatal* errors are raised as :exc:`TarError`
+ exceptions as well.
The *encoding* and *errors* arguments define the character encoding to be
used for reading or writing the archive and how conversion errors are going
diff --git a/Doc/library/telnetlib.rst b/Doc/library/telnetlib.rst
index 646634d..9bc79c5 100644
--- a/Doc/library/telnetlib.rst
+++ b/Doc/library/telnetlib.rst
@@ -162,9 +162,13 @@ Telnet Objects
.. method:: Telnet.write(buffer)
Write a byte string to the socket, doubling any IAC characters. This can
- block if the connection is blocked. May raise :exc:`socket.error` if the
+ block if the connection is blocked. May raise :exc:`OSError` if the
connection is closed.
+ .. versionchanged:: 3.3
+ This method used to raise :exc:`socket.error`, which is now an alias
+ of :exc:`OSError`.
+
.. method:: Telnet.interact()
diff --git a/Doc/library/tempfile.rst b/Doc/library/tempfile.rst
index fff6c4e..dfeb250 100644
--- a/Doc/library/tempfile.rst
+++ b/Doc/library/tempfile.rst
@@ -25,7 +25,7 @@ instead a string of six random characters is used.
Also, all the user-callable functions now take additional arguments which
allow direct control over the location and name of temporary files. It is
-no longer necessary to use the global *tempdir* and *template* variables.
+no longer necessary to use the global *tempdir* variable.
To maintain backward compatibility, the argument order is somewhat odd; it
is recommended to use keyword arguments for clarity.
diff --git a/Doc/library/test.rst b/Doc/library/test.rst
index c27ee08..5e4a1cb 100644
--- a/Doc/library/test.rst
+++ b/Doc/library/test.rst
@@ -223,14 +223,14 @@ The :mod:`test.support` module defines the following constants:
.. data:: verbose
- :const:`True` when verbose output is enabled. Should be checked when more
+ ``True`` when verbose output is enabled. Should be checked when more
detailed information is desired about a running test. *verbose* is set by
:mod:`test.regrtest`.
.. data:: is_jython
- :const:`True` if the running interpreter is Jython.
+ ``True`` if the running interpreter is Jython.
.. data:: TESTFN
@@ -249,7 +249,7 @@ The :mod:`test.support` module defines the following functions:
.. function:: is_resource_enabled(resource)
- Return :const:`True` if *resource* is enabled and available. The list of
+ Return ``True`` if *resource* is enabled and available. The list of
available resources is only set when :mod:`test.regrtest` is executing the
tests.
@@ -258,7 +258,7 @@ The :mod:`test.support` module defines the following functions:
Raise :exc:`ResourceDenied` if *resource* is not available. *msg* is the
argument to :exc:`ResourceDenied` if it is raised. Always returns
- :const:`True` if called by a function whose ``__name__`` is ``'__main__'``.
+ ``True`` if called by a function whose ``__name__`` is ``'__main__'``.
Used when tests are executed by :mod:`test.regrtest`.
@@ -286,6 +286,15 @@ The :mod:`test.support` module defines the following functions:
This will run all tests defined in the named module.
+.. function:: run_doctest(module, verbosity=None)
+
+ Run :func:`doctest.testmod` on the given *module*. Return
+ ``(failure_count, test_count)``.
+
+ If *verbosity* is ``None``, :func:`doctest.testmod` is run with verbosity
+ set to :data:`verbose`. Otherwise, it is run with verbosity set to
+ ``None``.
+
.. function:: check_warnings(\*filters, quiet=True)
A convenience wrapper for :func:`warnings.catch_warnings()` that makes it
@@ -296,12 +305,12 @@ The :mod:`test.support` module defines the following functions:
``check_warnings`` accepts 2-tuples of the form ``("message regexp",
WarningCategory)`` as positional arguments. If one or more *filters* are
- provided, or if the optional keyword argument *quiet* is :const:`False`,
+ provided, or if the optional keyword argument *quiet* is ``False``,
it checks to make sure the warnings are as expected: each specified filter
must match at least one of the warnings raised by the enclosed code or the
test fails, and if any warnings are raised that do not match any of the
specified filters the test fails. To disable the first of these checks,
- set *quiet* to :const:`True`.
+ set *quiet* to ``True``.
If no arguments are specified, it defaults to::
@@ -316,7 +325,7 @@ The :mod:`test.support` module defines the following functions:
representing the most recent warning can also be accessed directly through
the recorder object (see example below). If no warning has been raised,
then any of the attributes that would otherwise be expected on an object
- representing a warning will return :const:`None`.
+ representing a warning will return ``None``.
The recorder object also has a :meth:`reset` method, which clears the
warnings list.
@@ -354,7 +363,7 @@ The :mod:`test.support` module defines the following functions:
.. function:: captured_stdout()
- This is a context manager that runs the :keyword:`with` statement body using
+ A context manager that runs the :keyword:`with` statement body using
a :class:`StringIO.StringIO` object as sys.stdout. That object can be
retrieved using the ``as`` clause of the :keyword:`with` statement.
@@ -365,6 +374,57 @@ The :mod:`test.support` module defines the following functions:
assert s.getvalue() == "hello"
+.. function:: temp_cwd(name='tempcwd', quiet=False, path=None)
+
+ A context manager that temporarily changes the current working
+ directory (CWD).
+
+ An existing path may be provided as *path*, in which case this function
+ makes no changes to the file system.
+
+ Otherwise, the new CWD is created in the current directory and named
+ *name*. If *quiet* is ``False`` and it is not possible to create or
+ change the CWD, an error is raised. If it is ``True``, only a warning
+ is raised and the original CWD is used.
+
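+ A sketch of typical use (assuming ``from test import support``; the
+ context manager is expected to yield the new working directory)::
+
+    with support.temp_cwd() as path:
+        ...   # files created here live in the temporary CWD
+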
+
+.. function:: temp_umask(umask)
+
+ A context manager that temporarily sets the process umask.
+
+
+.. function:: can_symlink()
+
+ Return ``True`` if the OS supports symbolic links, ``False``
+ otherwise.
+
+
+.. decorator:: skip_unless_symlink()
+
+ A decorator for running tests that require support for symbolic links.
+
+
+.. decorator:: anticipate_failure(condition)
+
+ A decorator to conditionally mark tests with
+ :func:`unittest.expectedFailure`. Any use of this decorator should
+ have an associated comment identifying the relevant tracker issue.
+
+
+.. decorator:: run_with_locale(catstr, *locales)
+
+ A decorator for running a function in a different locale, correctly
+ resetting it after it has finished. *catstr* is the locale category as
+ a string (for example ``"LC_ALL"``). The *locales* passed will be tried
+ sequentially, and the first valid locale will be used.
+
+
+.. function:: make_bad_fd()
+
+ Create an invalid file descriptor by opening and closing a temporary file,
+ and returning its descriptor.
+
+
.. function:: import_module(name, deprecated=False)
This function imports and returns the named module. Unlike a normal
@@ -372,7 +432,7 @@ The :mod:`test.support` module defines the following functions:
cannot be imported.
Module and package deprecation messages are suppressed during this import
- if *deprecated* is :const:`True`.
+ if *deprecated* is ``True``.
.. versionadded:: 3.1
@@ -396,9 +456,9 @@ The :mod:`test.support` module defines the following functions:
``sys.modules`` when the fresh import is complete.
Module and package deprecation messages are suppressed during this import
- if *deprecated* is :const:`True`.
+ if *deprecated* is ``True``.
- This function will raise :exc:`unittest.SkipTest` is the named module
+ This function will raise :exc:`unittest.SkipTest` if the named module
cannot be imported.
Example use::
@@ -413,6 +473,48 @@ The :mod:`test.support` module defines the following functions:
.. versionadded:: 3.1
+.. function:: bind_port(sock, host=HOST)
+
+ Bind the socket to a free port and return the port number. Relies on
+ ephemeral ports in order to ensure we are using an unbound port. This is
+ important as many tests may be running simultaneously, especially in a
+ buildbot environment. This method raises an exception if the
+ ``sock.family`` is :const:`~socket.AF_INET` and ``sock.type`` is
+ :const:`~socket.SOCK_STREAM`, and the socket has
+ :const:`~socket.SO_REUSEADDR` or :const:`~socket.SO_REUSEPORT` set on it.
+ Tests should never set these socket options for TCP/IP sockets.
+ The only case for setting these options is testing multicasting via
+ multiple UDP sockets.
+
+ Additionally, if the :const:`~socket.SO_EXCLUSIVEADDRUSE` socket option is
+ available (i.e. on Windows), it will be set on the socket. This will
+ prevent anyone else from binding to our host/port for the duration of the
+ test.
+
+
+.. function:: find_unused_port(family=socket.AF_INET, socktype=socket.SOCK_STREAM)
+
+ Returns an unused port that should be suitable for binding. This is
+ achieved by creating a temporary socket with the given *family* and
+ *socktype* (which default to :const:`~socket.AF_INET` and
+ :const:`~socket.SOCK_STREAM`), binding it to the specified host address
+ (defaults to ``0.0.0.0``) with the port set to 0, eliciting an unused
+ ephemeral port from the OS. The temporary socket is then closed and
+ deleted, and the ephemeral port is returned.
+
+ Either this method or :func:`bind_port` should be used for any tests
+ where a server socket needs to be bound to a particular port for the
+ duration of the test.
+ Which one to use depends on whether the calling code is creating a Python
+ socket, or if an unused port needs to be provided in a constructor
+ or passed to an external program (e.g. the ``-accept`` argument to
+ openssl's s_server mode). Always prefer :func:`bind_port` over
+ :func:`find_unused_port` where possible. Using a hard-coded port is
+ discouraged since it can make multiple instances of the test impossible to
+ run simultaneously, which is a problem for buildbots.
+
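+ A sketch of the second case, where the port is handed to an external
+ program (the program itself is a placeholder)::
+
+    from test import support
+
+    port = support.find_unused_port()
+    # pass *port* on the command line of the program under test
+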
+
The :mod:`test.support` module defines the following classes:
.. class:: TransientResource(exc, **kwargs)
diff --git a/Doc/library/threading.rst b/Doc/library/threading.rst
index 9b3affd..2b43ee5 100644
--- a/Doc/library/threading.rst
+++ b/Doc/library/threading.rst
@@ -20,17 +20,6 @@ The :mod:`dummy_threading` module is provided for situations where
methods and functions in this module in the Python 2.x series are still
supported by this module.
-.. impl-detail::
-
- Due to the :term:`Global Interpreter Lock`, in CPython only one thread
- can execute Python code at once (even though certain performance-oriented
- libraries might overcome this limitation).
- If you want your application to make better of use of the computational
- resources of multi-core machines, you are advised to use
- :mod:`multiprocessing` or :class:`concurrent.futures.ProcessPoolExecutor`.
- However, threading is still an appropriate model if you want to run
- multiple I/O-bound tasks simultaneously.
-
This module defines the following functions and objects:
@@ -59,6 +48,17 @@ This module defines the following functions and objects:
returned.
+.. function:: get_ident()
+
+ Return the 'thread identifier' of the current thread. This is a nonzero
+ integer. Its value has no direct meaning; it is intended as a magic cookie
+ to be used e.g. to index a dictionary of thread-specific data. Thread
+ identifiers may be recycled when a thread exits and another thread is
+ created.
+
+ .. versionadded:: 3.3
+
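For illustration, a sketch of per-thread bookkeeping keyed by the identifier (:class:`threading.local` is usually the higher-level alternative)::

    import threading

    _per_thread = {}

    def remember(value):
        # The identifier is a handy dictionary key for thread-specific data.
        _per_thread[threading.get_ident()] = value

    def recall():
        return _per_thread.get(threading.get_ident())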
+
.. function:: enumerate()
Return a list of all :class:`Thread` objects currently alive. The list
@@ -241,7 +241,7 @@ changed through the :attr:`name` attribute.
A thread can be flagged as a "daemon thread". The significance of this flag is
that the entire Python program exits when only daemon threads are left. The
initial value is inherited from the creating thread. The flag can be set
-through the :attr:`daemon` property.
+through the :attr:`daemon` property or the *daemon* constructor argument.
There is a "main thread" object; this corresponds to the initial thread of
control in the Python program. It is not a daemon thread.
@@ -254,7 +254,8 @@ daemonic, and cannot be :meth:`join`\ ed. They are never deleted, since it is
impossible to detect the termination of alien threads.
-.. class:: Thread(group=None, target=None, name=None, args=(), kwargs={})
+.. class:: Thread(group=None, target=None, name=None, args=(), kwargs={},
+ verbose=None, *, daemon=None)
This constructor should always be called with keyword arguments. Arguments
are:
@@ -273,10 +274,19 @@ impossible to detect the termination of alien threads.
*kwargs* is a dictionary of keyword arguments for the target invocation.
Defaults to ``{}``.
+ *verbose* is a flag used for debugging messages.
+
+ If not ``None``, *daemon* explicitly sets whether the thread is daemonic.
+ If ``None`` (the default), the daemonic property is inherited from the
+ current thread.
+
If the subclass overrides the constructor, it must make sure to invoke the
base class constructor (``Thread.__init__()``) before doing anything else to
the thread.
+ .. versionchanged:: 3.3
+ Added the *daemon* argument.
+
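A brief sketch of the new constructor argument::

    import threading
    import time

    def heartbeat():
        while True:
            time.sleep(1)

    # daemon=True: this thread will not keep the program alive on exit.
    t = threading.Thread(target=heartbeat, daemon=True)
    t.start()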
.. method:: start()
Start the thread's activity.
@@ -333,10 +343,10 @@ impossible to detect the termination of alien threads.
.. attribute:: ident
The 'thread identifier' of this thread or ``None`` if the thread has not
- been started. This is a nonzero integer. See the
- :func:`thread.get_ident()` function. Thread identifiers may be recycled
- when a thread exits and another thread is created. The identifier is
- available even after the thread has exited.
+ been started. This is a nonzero integer. See the :func:`get_ident()`
+ function. Thread identifiers may be recycled when a thread exits and
+ another thread is created. The identifier is available even after the
+ thread has exited.
.. method:: is_alive()
@@ -364,6 +374,18 @@ impossible to detect the termination of alien threads.
property instead.
+.. impl-detail::
+
+ Due to the :term:`Global Interpreter Lock`, in CPython only one thread
+ can execute Python code at once (even though certain performance-oriented
+ libraries might overcome this limitation).
+ If you want your application to make better use of the computational
+ resources of multi-core machines, you are advised to use
+ :mod:`multiprocessing` or :class:`concurrent.futures.ProcessPoolExecutor`.
+ However, threading is still an appropriate model if you want to run
+ multiple I/O-bound tasks simultaneously.
+
+
.. _lock-objects:
Lock Objects
diff --git a/Doc/library/time.rst b/Doc/library/time.rst
index 7854fbd..3e666cf 100644
--- a/Doc/library/time.rst
+++ b/Doc/library/time.rst
@@ -41,25 +41,6 @@ An explanation of some terminology and conventions is in order.
parsed, they are converted according to the POSIX and ISO C standards: values
69--99 are mapped to 1969--1999, and values 0--68 are mapped to 2000--2068.
- For backward compatibility, years with less than 4 digits are treated
- specially by :func:`asctime`, :func:`mktime`, and :func:`strftime` functions
- that operate on a 9-tuple or :class:`struct_time` values. If year (the first
- value in the 9-tuple) is specified with less than 4 digits, its interpretation
- depends on the value of ``accept2dyear`` variable.
-
- If ``accept2dyear`` is true (default), a backward compatibility behavior is
- invoked as follows:
-
- - for 2-digit year, century is guessed according to POSIX rules for
- ``%y`` strptime format. A deprecation warning is issued when century
- information is guessed in this way.
-
- - for 3-digit or negative year, a :exc:`ValueError` exception is raised.
-
- If ``accept2dyear`` is false (set by the program or as a result of a
- non-empty value assigned to ``PYTHONY2K`` environment variable) all year
- values are interpreted as given.
-
.. index::
single: UTC
single: Coordinated Universal Time
@@ -117,24 +98,6 @@ An explanation of some terminology and conventions is in order.
The module defines the following functions and data items:
-
-.. data:: accept2dyear
-
- Boolean value indicating whether two-digit year values will be
- mapped to 1969--2068 range by :func:`asctime`, :func:`mktime`, and
- :func:`strftime` functions. This is true by default, but will be
- set to false if the environment variable :envvar:`PYTHONY2K` has
- been set to a non-empty string. It may also be modified at run
- time.
-
- .. deprecated:: 3.2
- Mapping of 2-digit year values by :func:`asctime`,
- :func:`mktime`, and :func:`strftime` functions to 1969--2068
- range is deprecated. Programs that need to process 2-digit
- years should use ``%y`` code available in :func:`strptime`
- function or convert 2-digit year values to 4-digit themselves.
-
-
.. data:: altzone
The offset of the local DST timezone, in seconds west of UTC, if one is defined.
@@ -152,7 +115,8 @@ The module defines the following functions and data items:
.. note::
- Unlike the C function of the same name, there is no trailing newline.
+ Unlike the C function of the same name, :func:`asctime` does not add a
+ trailing newline.
.. function:: clock()
@@ -173,6 +137,53 @@ The module defines the following functions and data items:
microsecond.
+.. function:: clock_getres(clk_id)
+
+ Return the resolution (precision) of the specified clock *clk_id*.
+
+ .. versionadded:: 3.3
+
+.. function:: clock_gettime(clk_id)
+
+ Return the time of the specified clock *clk_id*.
+
+ .. versionadded:: 3.3
+
+.. data:: CLOCK_REALTIME
+
+ System-wide real-time clock. Setting this clock requires appropriate
+ privileges.
+
+ .. versionadded:: 3.3
+
+.. data:: CLOCK_MONOTONIC
+
+ Clock that cannot be set and represents monotonic time since some
+ unspecified starting point.
+
+ .. versionadded:: 3.3
+
+.. data:: CLOCK_MONOTONIC_RAW
+
+ Similar to :data:`CLOCK_MONOTONIC`, but provides access to a raw
+ hardware-based time that is not subject to NTP adjustments.
+
+ Availability: Linux 2.6.28 or later.
+
+ .. versionadded:: 3.3
+
+.. data:: CLOCK_PROCESS_CPUTIME_ID
+
+ High-resolution per-process timer from the CPU.
+
+ .. versionadded:: 3.3
+
+.. data:: CLOCK_THREAD_CPUTIME_ID
+
+ Thread-specific CPU-time clock.
+
+ .. versionadded:: 3.3
+
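An illustrative sketch of the new clock functions (availability of the individual clocks is platform-dependent, typically Unix)::

    import time

    # CLOCK_MONOTONIC cannot be set, so it is suited to measuring
    # intervals even if the system clock is adjusted in between.
    print("resolution:", time.clock_getres(time.CLOCK_MONOTONIC))
    start = time.clock_gettime(time.CLOCK_MONOTONIC)
    time.sleep(0.25)
    print("elapsed: %.3f s" % (time.clock_gettime(time.CLOCK_MONOTONIC) - start))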
.. function:: ctime([secs])
Convert a time expressed in seconds since the epoch to a string representing
@@ -215,6 +226,24 @@ The module defines the following functions and data items:
The earliest date for which it can generate a time is platform-dependent.
+.. function:: steady(strict=False)
+
+ .. index::
+ single: benchmarking
+
+ Return the current time as a floating point number expressed in seconds.
+ This clock advances at a steady rate relative to real time and cannot be
+ adjusted. The reference point of the returned value is undefined, so only
+ the difference between the results of consecutive calls is valid.
+
+ If available, a monotonic clock is used. By default, if *strict* is False,
+ the function falls back to another clock if the monotonic clock failed or is
+ not available. If *strict* is True, the function raises :exc:`OSError` on
+ error, or :exc:`NotImplementedError` if no monotonic clock is available.
+
+ .. versionadded:: 3.3
+
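A minimal sketch, assuming :func:`steady` is available as documented above::

    import time

    start = time.steady()
    time.sleep(0.5)                  # stand-in for the code being timed
    print("took %.3f s" % (time.steady() - start))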
+
.. function:: sleep(secs)
Suspend execution for the given number of seconds. The argument may be a
@@ -308,7 +337,7 @@ The module defines the following functions and data items:
| ``%y`` | Year without century as a decimal number | |
| | [00,99]. | |
+-----------+------------------------------------------------+-------+
- | ``%Y`` | Year with century as a decimal number. | \(4) |
+ | ``%Y`` | Year with century as a decimal number. | |
| | | |
+-----------+------------------------------------------------+-------+
| ``%Z`` | Time zone name (no characters if no time zone | |
@@ -332,12 +361,6 @@ The module defines the following functions and data items:
When used with the :func:`strptime` function, ``%U`` and ``%W`` are only used in
calculations when the day of the week and the year are specified.
- (4)
- Produces different results depending on the value of
- ``time.accept2dyear`` variable. See :ref:`Year 2000 (Y2K)
- issues <time-y2kissues>` for details.
-
-
Here is an example, a format for dates compatible with that specified in the
:rfc:`2822` Internet email standard. [#]_ ::
@@ -418,8 +441,7 @@ The module defines the following functions and data items:
+-------+-------------------+---------------------------------+
Note that unlike the C structure, the month value is a range of [1, 12], not
- [0, 11]. A year value will be handled as described under :ref:`Year 2000
- (Y2K) issues <time-y2kissues>` above. A ``-1`` argument as the daylight
+ [0, 11]. A ``-1`` argument as the daylight
savings flag, passed to :func:`mktime`, will usually result in the correct
daylight savings state being filled in.
diff --git a/Doc/library/tkinter.rst b/Doc/library/tkinter.rst
index ae5635f..62eedff 100644
--- a/Doc/library/tkinter.rst
+++ b/Doc/library/tkinter.rst
@@ -179,35 +179,30 @@ A Simple Hello World Program
::
- from tkinter import *
-
- class Application(Frame):
- def say_hi(self):
- print("hi there, everyone!")
-
- def createWidgets(self):
- self.QUIT = Button(self)
- self.QUIT["text"] = "QUIT"
- self.QUIT["fg"] = "red"
- self.QUIT["command"] = self.quit
-
- self.QUIT.pack({"side": "left"})
-
- self.hi_there = Button(self)
- self.hi_there["text"] = "Hello",
- self.hi_there["command"] = self.say_hi
-
- self.hi_there.pack({"side": "left"})
-
- def __init__(self, master=None):
- Frame.__init__(self, master)
- self.pack()
- self.createWidgets()
-
- root = Tk()
- app = Application(master=root)
- app.mainloop()
- root.destroy()
+ import tkinter as tk
+
+ class Application(tk.Frame):
+ def __init__(self, master=None):
+ tk.Frame.__init__(self, master)
+ self.pack()
+ self.createWidgets()
+
+ def createWidgets(self):
+ self.hi_there = tk.Button(self)
+ self.hi_there["text"] = "Hello World\n(click me)"
+ self.hi_there["command"] = self.say_hi
+ self.hi_there.pack(side="top")
+
+ self.QUIT = tk.Button(self, text="QUIT", fg="red",
+                       command=self.master.destroy)
+ self.QUIT.pack(side="bottom")
+
+ def say_hi(self):
+ print("hi there, everyone!")
+
+ root = tk.Tk()
+ app = Application(master=root)
+ app.mainloop()
A (Very) Quick Look at Tcl/Tk
diff --git a/Doc/library/tokenize.rst b/Doc/library/tokenize.rst
index 70919ca..37d9f41 100644
--- a/Doc/library/tokenize.rst
+++ b/Doc/library/tokenize.rst
@@ -17,9 +17,11 @@ colorizers for on-screen displays.
To simplify token stream handling, all :ref:`operators` and :ref:`delimiters`
tokens are returned using the generic :data:`token.OP` token type. The exact
-type can be determined by checking the token ``string`` field on the
-:term:`named tuple` returned from :func:`tokenize.tokenize` for the character
-sequence that identifies a specific operator token.
+type can be determined by checking the ``exact_type`` property on the
+:term:`named tuple` returned from :func:`tokenize.tokenize`.
+
+Tokenizing Input
+----------------
The primary entry point is a :term:`generator`:
@@ -39,9 +41,17 @@ The primary entry point is a :term:`generator`:
returned as a :term:`named tuple` with the field names:
``type string start end line``.
+ The returned :term:`named tuple` has an additional property named
+ ``exact_type`` that contains the exact operator type for
+ :data:`token.OP` tokens. For all other token types ``exact_type``
+ equals the named tuple ``type`` field.
+
.. versionchanged:: 3.1
Added support for named tuples.
+ .. versionchanged:: 3.3
+ Added support for ``exact_type``.
+
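A short sketch of the difference between ``type`` and ``exact_type``::

    import io
    import token
    import tokenize

    source = b"x = 1 + 2"
    for tok in tokenize.tokenize(io.BytesIO(source).readline):
        if tok.type == token.OP:
            # type reports the generic OP; exact_type identifies the
            # specific operator (here EQUAL and PLUS).
            print(tok.string, token.tok_name[tok.exact_type])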
:func:`tokenize` determines the source encoding of the file by looking for a
UTF-8 BOM or encoding cookie, according to :pep:`263`.
@@ -122,6 +132,38 @@ function it uses to do this is available:
.. versionadded:: 3.2
+.. _tokenize-cli:
+
+Command-Line Usage
+------------------
+
+.. versionadded:: 3.3
+
+The :mod:`tokenize` module can be executed as a script from the command line.
+It is as simple as:
+
+.. code-block:: sh
+
+ python -m tokenize [-e] [filename.py]
+
+The following options are accepted:
+
+.. program:: tokenize
+
+.. cmdoption:: -h, --help
+
+ show this help message and exit
+
+.. cmdoption:: -e, --exact
+
+ display token names using the exact type
+
+If :file:`filename.py` is specified, its contents are tokenized to stdout.
+Otherwise, tokenization is performed on stdin.
+
+Examples
+--------
+
Example of a script rewriter that transforms float literals into Decimal
objects::
@@ -164,3 +206,63 @@ objects::
result.append((toknum, tokval))
return untokenize(result).decode('utf-8')
+Example of tokenizing from the command line. The script::
+
+ def say_hello():
+ print("Hello, World!")
+
+ say_hello()
+
+will be tokenized to the following output, where the first column is the range
+of the line/column coordinates where the token is found, the second column is
+the name of the token, and the final column is the value of the token (if any).
+
+.. code-block:: sh
+
+ $ python -m tokenize hello.py
+ 0,0-0,0: ENCODING 'utf-8'
+ 1,0-1,3: NAME 'def'
+ 1,4-1,13: NAME 'say_hello'
+ 1,13-1,14: OP '('
+ 1,14-1,15: OP ')'
+ 1,15-1,16: OP ':'
+ 1,16-1,17: NEWLINE '\n'
+ 2,0-2,4: INDENT ' '
+ 2,4-2,9: NAME 'print'
+ 2,9-2,10: OP '('
+ 2,10-2,25: STRING '"Hello, World!"'
+ 2,25-2,26: OP ')'
+ 2,26-2,27: NEWLINE '\n'
+ 3,0-3,1: NL '\n'
+ 4,0-4,0: DEDENT ''
+ 4,0-4,9: NAME 'say_hello'
+ 4,9-4,10: OP '('
+ 4,10-4,11: OP ')'
+ 4,11-4,12: NEWLINE '\n'
+ 5,0-5,0: ENDMARKER ''
+
+The exact token type names can be displayed using the ``-e`` option:
+
+.. code-block:: sh
+
+ $ python -m tokenize -e hello.py
+ 0,0-0,0: ENCODING 'utf-8'
+ 1,0-1,3: NAME 'def'
+ 1,4-1,13: NAME 'say_hello'
+ 1,13-1,14: LPAR '('
+ 1,14-1,15: RPAR ')'
+ 1,15-1,16: COLON ':'
+ 1,16-1,17: NEWLINE '\n'
+ 2,0-2,4: INDENT ' '
+ 2,4-2,9: NAME 'print'
+ 2,9-2,10: LPAR '('
+ 2,10-2,25: STRING '"Hello, World!"'
+ 2,25-2,26: RPAR ')'
+ 2,26-2,27: NEWLINE '\n'
+ 3,0-3,1: NL '\n'
+ 4,0-4,0: DEDENT ''
+ 4,0-4,9: NAME 'say_hello'
+ 4,9-4,10: LPAR '('
+ 4,10-4,11: RPAR ')'
+ 4,11-4,12: NEWLINE '\n'
+ 5,0-5,0: ENDMARKER ''
diff --git a/Doc/library/unicodedata.rst b/Doc/library/unicodedata.rst
index 42400df..b735a69 100644
--- a/Doc/library/unicodedata.rst
+++ b/Doc/library/unicodedata.rst
@@ -15,8 +15,8 @@
This module provides access to the Unicode Character Database (UCD) which
defines character properties for all Unicode characters. The data contained in
-this database is compiled from the `UCD version 6.0.0
-<http://www.unicode.org/Public/6.0.0/ucd>`_.
+this database is compiled from the `UCD version 6.1.0
+<http://www.unicode.org/Public/6.1.0/ucd>`_.
The module uses the same names and symbols as defined by Unicode
Standard Annex #44, `"Unicode Character Database"
@@ -29,6 +29,9 @@ following functions:
Look up character by name. If a character with the given name is found, return
the corresponding character. If not found, :exc:`KeyError` is raised.
+ .. versionchanged:: 3.3
+ Support for name aliases [#]_ and named sequences [#]_ has been added.
+
.. function:: name(chr[, default])
@@ -160,3 +163,9 @@ Examples:
>>> unicodedata.bidirectional('\u0660') # 'A'rabic, 'N'umber
'AN'
+
+.. rubric:: Footnotes
+
+.. [#] http://www.unicode.org/Public/6.1.0/ucd/NameAliases.txt
+
+.. [#] http://www.unicode.org/Public/6.1.0/ucd/NamedSequences.txt
diff --git a/Doc/library/unittest.rst b/Doc/library/unittest.rst
index bdf07a4..bf58cad 100644
--- a/Doc/library/unittest.rst
+++ b/Doc/library/unittest.rst
@@ -792,11 +792,14 @@ Test cases
Run the test, collecting the result into the test result object passed as
*result*. If *result* is omitted or ``None``, a temporary result
object is created (by calling the :meth:`defaultTestResult` method) and
- used. The result object is not returned to :meth:`run`'s caller.
+ used. The result object is returned to :meth:`run`'s caller.
The same effect may be had by simply calling the :class:`TestCase`
instance.
+ .. versionchanged:: 3.3
+ Previous versions of ``run`` did not return the result. Neither did
+ calling an instance.
.. method:: skipTest(reason)
@@ -857,10 +860,11 @@ Test cases
| <TestCase.assertNotIsInstance>` | | |
+-----------------------------------------+-----------------------------+---------------+
- All the assert methods (except :meth:`assertRaises`,
- :meth:`assertRaisesRegex`, :meth:`assertWarns`, :meth:`assertWarnsRegex`)
- accept a *msg* argument that, if specified, is used as the error message on
- failure (see also :data:`longMessage`).
+ All the assert methods accept a *msg* argument that, if specified, is used
+ as the error message on failure (see also :data:`longMessage`).
+ Note that the *msg* keyword argument can be passed to :meth:`assertRaises`,
+ :meth:`assertRaisesRegex`, :meth:`assertWarns` and :meth:`assertWarnsRegex`
+ only when they are used as context managers.
.. method:: assertEqual(first, second, msg=None)
@@ -955,7 +959,7 @@ Test cases
+---------------------------------------------------------+--------------------------------------+------------+
.. method:: assertRaises(exception, callable, *args, **kwds)
- assertRaises(exception)
+ assertRaises(exception, msg=None)
Test that an exception is raised when *callable* is called with any
positional or keyword arguments that are also passed to
@@ -964,12 +968,16 @@ Test cases
To catch any of a group of exceptions, a tuple containing the exception
classes may be passed as *exception*.
- If only the *exception* argument is given, returns a context manager so
- that the code under test can be written inline rather than as a function::
+ If only the *exception* and possibly the *msg* arguments are given,
+ return a context manager so that the code under test can be written
+ inline rather than as a function::
with self.assertRaises(SomeException):
do_something()
+ When used as a context manager, :meth:`assertRaises` accepts the
+ additional keyword argument *msg*.
+
The context manager will store the caught exception object in its
:attr:`exception` attribute. This can be useful if the intention
is to perform additional checks on the exception raised::
@@ -986,9 +994,12 @@ Test cases
.. versionchanged:: 3.2
Added the :attr:`exception` attribute.
+ .. versionchanged:: 3.3
+ Added the *msg* keyword argument when used as a context manager.
+
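For instance, a sketch of the new keyword in context-manager form::

    with self.assertRaises(ValueError, msg='conversion should fail'):
        int('not a number')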
.. method:: assertRaisesRegex(exception, regex, callable, *args, **kwds)
- assertRaisesRegex(exception, regex)
+ assertRaisesRegex(exception, regex, msg=None)
Like :meth:`assertRaises` but also tests that *regex* matches
on the string representation of the raised exception. *regex* may be
@@ -1005,12 +1016,16 @@ Test cases
.. versionadded:: 3.1
under the name ``assertRaisesRegexp``.
+
.. versionchanged:: 3.2
Renamed to :meth:`assertRaisesRegex`.
+ .. versionchanged:: 3.3
+ Added the *msg* keyword argument when used as a context manager.
+
.. method:: assertWarns(warning, callable, *args, **kwds)
- assertWarns(warning)
+ assertWarns(warning, msg=None)
Test that a warning is triggered when *callable* is called with any
positional or keyword arguments that are also passed to
@@ -1019,12 +1034,16 @@ Test cases
To catch any of a group of warnings, a tuple containing the warning
classes may be passed as *warning*.
- If only the *warning* argument is given, returns a context manager so
- that the code under test can be written inline rather than as a function::
+ If only the *warning* and possibly the *msg* arguments are given,
+ returns a context manager so that the code under test can be written
+ inline rather than as a function::
with self.assertWarns(SomeWarning):
do_something()
+ When used as a context manager, :meth:`assertWarns` accepts the
+ additional keyword argument *msg*.
+
The context manager will store the caught warning object in its
:attr:`warning` attribute, and the source line which triggered the
warnings in the :attr:`filename` and :attr:`lineno` attributes.
@@ -1042,9 +1061,12 @@ Test cases
.. versionadded:: 3.2
+ .. versionchanged:: 3.3
+ Added the *msg* keyword argument when used as a context manager.
+
.. method:: assertWarnsRegex(warning, regex, callable, *args, **kwds)
- assertWarnsRegex(warning, regex)
+ assertWarnsRegex(warning, regex, msg=None)
Like :meth:`assertWarns` but also tests that *regex* matches on the
message of the triggered warning. *regex* may be a regular expression
@@ -1062,6 +1084,8 @@ Test cases
.. versionadded:: 3.2
+ .. versionchanged:: 3.3
+ Added the *msg* keyword argument when used as a context manager.
There are also other methods used to perform more specific checks, such as:
@@ -1151,21 +1175,6 @@ Test cases
:meth:`.assertNotRegex`.
- .. method:: assertDictContainsSubset(subset, dictionary, msg=None)
-
- Tests whether the key/value pairs in *dictionary* are a superset of
- those in *subset*. If not, an error message listing the missing keys
- and mismatched values is generated.
-
- Note, the arguments are in the opposite order of what the method name
- dictates. Instead, consider using the set-methods on :ref:`dictionary
- views <dict-views>`, for example: ``d.keys() <= e.keys()`` or
- ``d.items() <= d.items()``.
-
- .. versionadded:: 3.1
- .. deprecated:: 3.2
-
-
.. method:: assertCountEqual(first, second, msg=None)
Test that sequence *first* contains the same elements as *second*,
@@ -1180,21 +1189,6 @@ Test cases
.. versionadded:: 3.2
- .. method:: assertSameElements(first, second, msg=None)
-
- Test that sequence *first* contains the same elements as *second*,
- regardless of their order. When they don't, an error message listing
- the differences between the sequences will be generated.
-
- Duplicate elements are ignored when comparing *first* and *second*.
- It is the equivalent of ``assertEqual(set(first), set(second))``
- but it works with sequences of unhashable objects as well. Because
- duplicates are ignored, this method has been deprecated in favour of
- :meth:`assertCountEqual`.
-
- .. versionadded:: 3.1
- .. deprecated:: 3.2
-
.. _type-specific-methods:
diff --git a/Doc/library/urllib.error.rst b/Doc/library/urllib.error.rst
index 282329f..e20db27 100644
--- a/Doc/library/urllib.error.rst
+++ b/Doc/library/urllib.error.rst
@@ -8,21 +8,23 @@
The :mod:`urllib.error` module defines the exception classes for exceptions
-raised by :mod:`urllib.request`. The base exception class is :exc:`URLError`,
-which inherits from :exc:`IOError`.
+raised by :mod:`urllib.request`. The base exception class is :exc:`URLError`.
The following exceptions are raised by :mod:`urllib.error` as appropriate:
.. exception:: URLError
The handlers raise this exception (or derived exceptions) when they run into
- a problem. It is a subclass of :exc:`IOError`.
+ a problem. It is a subclass of :exc:`OSError`.
.. attribute:: reason
The reason for this error. It can be a message string or another
- exception instance (:exc:`socket.error` for remote URLs, :exc:`OSError`
- for local URLs).
+ exception instance.
+
+ .. versionchanged:: 3.3
+ :exc:`URLError` has been made a subclass of :exc:`OSError` instead
+ of :exc:`IOError`.
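An illustrative sketch of the practical effect (the hostname is a deliberately invalid placeholder)::

    import urllib.error
    import urllib.request

    try:
        urllib.request.urlopen('http://nonexistent.invalid/')
    except urllib.error.URLError as err:
        # URLError now subclasses OSError, so a plain
        # "except OSError" clause would also catch this.
        print(err.reason)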
.. exception:: HTTPError
diff --git a/Doc/library/urllib.request.rst b/Doc/library/urllib.request.rst
index 58f33e3..7aaadedc 100644
--- a/Doc/library/urllib.request.rst
+++ b/Doc/library/urllib.request.rst
@@ -145,7 +145,7 @@ The :mod:`urllib.request` module defines the following functions:
The following classes are provided:
-.. class:: Request(url, data=None, headers={}, origin_req_host=None, unverifiable=False)
+.. class:: Request(url, data=None, headers={}, origin_req_host=None, unverifiable=False, method=None)
This class is an abstraction of a URL request.
@@ -198,6 +198,13 @@ The following classes are provided:
document, and the user had no option to approve the automatic
fetching of the image, this should be true.
+ *method* should be a string that indicates the HTTP request method that
+ will be used (e.g. ``'HEAD'``). Its value is stored in the
+ :attr:`~Request.method` attribute and is used by :meth:`get_method()`.
+
+ .. versionchanged:: 3.3
+ Added the *method* argument to the Request class.
+
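A brief sketch of the new argument (the URL is a placeholder)::

    import urllib.request

    req = urllib.request.Request('http://www.example.com/', method='HEAD')
    print(req.get_method())          # -> 'HEAD'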
.. class:: OpenerDirector()
@@ -263,10 +270,11 @@ The following classes are provided:
.. class:: HTTPBasicAuthHandler(password_mgr=None)
- Handle authentication with the remote host. *password_mgr*, if given, should be
- something that is compatible with :class:`HTTPPasswordMgr`; refer to section
- :ref:`http-password-mgr` for information on the interface that must be
- supported.
+ Handle authentication with the remote host. *password_mgr*, if given, should
+ be something that is compatible with :class:`HTTPPasswordMgr`; refer to
+ section :ref:`http-password-mgr` for information on the interface that must
+ be supported. HTTPBasicAuthHandler will raise a :exc:`ValueError` when
+ presented with an unsupported authentication scheme.
.. class:: ProxyBasicAuthHandler(password_mgr=None)
@@ -288,10 +296,19 @@ The following classes are provided:
.. class:: HTTPDigestAuthHandler(password_mgr=None)
- Handle authentication with the remote host. *password_mgr*, if given, should be
- something that is compatible with :class:`HTTPPasswordMgr`; refer to section
- :ref:`http-password-mgr` for information on the interface that must be
- supported.
+ Handle authentication with the remote host. *password_mgr*, if given, should
+ be something that is compatible with :class:`HTTPPasswordMgr`; refer to
+ section :ref:`http-password-mgr` for information on the interface that must
+ be supported. When both a digest authentication handler and a basic
+ authentication handler are added, digest authentication is always tried
+ first. If the server returns a 40x response again, the request is passed
+ on to the basic authentication handler. This handler will raise a
+ :exc:`ValueError` when presented with an authentication scheme other than
+ Digest or Basic.
+
+ .. versionchanged:: 3.3
+ Raise :exc:`ValueError` on unsupported authentication schemes.
+
.. class:: ProxyDigestAuthHandler(password_mgr=None)
@@ -382,27 +399,25 @@ request.
boolean, indicates whether the request is unverifiable as defined
by RFC 2965.
-.. method:: Request.add_data(data)
-
- Set the :class:`Request` data to *data*. This is ignored by all handlers except
- HTTP handlers --- and there it should be a byte string, and will change the
- request to be ``POST`` rather than ``GET``.
-
+.. attribute:: Request.method
-.. method:: Request.get_method()
-
- Return a string indicating the HTTP request method. This is only meaningful for
- HTTP requests, and currently always returns ``'GET'`` or ``'POST'``.
+ The HTTP request method to use. This value is used by
+ :meth:`~Request.get_method` to override the computed HTTP request
+ method that would otherwise be returned. This attribute is initialized with
+ the value of the *method* argument passed to the constructor.
+ .. versionadded:: 3.3
-.. method:: Request.has_data()
-
- Return whether the instance has a non-\ ``None`` data.
+.. method:: Request.get_method()
-.. method:: Request.get_data()
+ Return a string indicating the HTTP request method. If
+ :attr:`Request.method` is not ``None``, return its value, otherwise return
+ ``'GET'`` if :attr:`Request.data` is ``None``, or ``'POST'`` if it's not.
+ This is only meaningful for HTTP requests.
- Return the instance's data.
+ .. versionchanged:: 3.3
+ :meth:`get_method` now looks at the value of :attr:`Request.method`.
.. method:: Request.add_header(key, val)
@@ -432,38 +447,78 @@ request.
Return the URL given in the constructor.
+.. method:: Request.set_proxy(host, type)
+
+ Prepare the request by connecting to a proxy server. The *host* and *type* will
+ replace those of the instance, and the instance's selector will be the original
+ URL given in the constructor.
+
+
+.. method:: Request.add_data(data)
+
+ Set the :class:`Request` data to *data*. This is ignored by all handlers except
+ HTTP handlers --- and there it should be a byte string, and will change the
+ request to be ``POST`` rather than ``GET``. Deprecated in 3.3, use
+ :attr:`Request.data`.
+
+ .. deprecated:: 3.3
+
+
+.. method:: Request.has_data()
+
+ Return whether the instance has non-\ ``None`` data. Deprecated in 3.3,
+ use :attr:`Request.data`.
+
+ .. deprecated:: 3.3
+
+
+.. method:: Request.get_data()
+
+ Return the instance's data. Deprecated in 3.3, use :attr:`Request.data`.
+
+ .. deprecated:: 3.3
+
+
.. method:: Request.get_type()
- Return the type of the URL --- also known as the scheme.
+ Return the type of the URL --- also known as the scheme. Deprecated in 3.3,
+ use :attr:`Request.type`.
+
+ .. deprecated:: 3.3
.. method:: Request.get_host()
- Return the host to which a connection will be made.
+ Return the host to which a connection will be made. Deprecated in 3.3, use
+ :attr:`Request.host`.
+
+ .. deprecated:: 3.3
.. method:: Request.get_selector()
Return the selector --- the part of the URL that is sent to the server.
+ Deprecated in 3.3, use :attr:`Request.selector`.
-
-.. method:: Request.set_proxy(host, type)
-
- Prepare the request by connecting to a proxy server. The *host* and *type* will
- replace those of the instance, and the instance's selector will be the original
- URL given in the constructor.
+ .. deprecated:: 3.3
.. method:: Request.get_origin_req_host()
- Return the request-host of the origin transaction, as defined by :rfc:`2965`.
- See the documentation for the :class:`Request` constructor.
+ Return the request-host of the origin transaction, as defined by
+ :rfc:`2965`. See the documentation for the :class:`Request` constructor.
+ Deprecated in 3.3, use :attr:`Request.origin_req_host`.
+
+ .. deprecated:: 3.3
.. method:: Request.is_unverifiable()
Return whether the request is unverifiable, as defined by RFC 2965. See the
- documentation for the :class:`Request` constructor.
+ documentation for the :class:`Request` constructor. Deprecated in 3.3, use
+ :attr:`Request.unverifiable`.
+
+ .. deprecated:: 3.3
.. _opener-director-objects:
@@ -1128,16 +1183,14 @@ The following functions and classes are ported from the Python 2 module
``urllib`` (as opposed to ``urllib2``). They might become deprecated at
some point in the future.
-
.. function:: urlretrieve(url, filename=None, reporthook=None, data=None)
- Copy a network object denoted by a URL to a local file, if necessary. If the URL
- points to a local file, or a valid cached copy of the object exists, the object
- is not copied. Return a tuple ``(filename, headers)`` where *filename* is the
+ Copy a network object denoted by a URL to a local file. If the URL
+ points to a local file, the object will not be copied unless *filename* is supplied.
+ Return a tuple ``(filename, headers)`` where *filename* is the
local file name under which the object can be found, and *headers* is whatever
the :meth:`info` method of the object returned by :func:`urlopen` returned (for
- a remote object, possibly cached). Exceptions are the same as for
- :func:`urlopen`.
+ a remote object). Exceptions are the same as for :func:`urlopen`.
The second argument, if present, specifies the file location to copy to (if
absent, the location will be a tempfile with a generated name). The third
@@ -1148,6 +1201,13 @@ some point in the future.
third argument may be ``-1`` on older FTP servers which do not return a file
size in response to a retrieval request.
+ The following example illustrates the most common usage scenario::
+
+ >>> import urllib.request
+ >>> local_filename, headers = urllib.request.urlretrieve('http://python.org/')
+ >>> with open(local_filename) as html:
+ ...     page = html.read()
+
If the *url* uses the :file:`http:` scheme identifier, the optional *data*
argument may be given to specify a ``POST`` request (normally the request
type is ``GET``). The *data* argument must be a bytes object in standard
@@ -1160,20 +1220,20 @@ some point in the future.
the download is interrupted.
The *Content-Length* is treated as a lower bound: if there's more data to read,
- :func:`urlretrieve` reads more data, but if less data is available, it raises
- the exception.
+ :func:`urlretrieve` reads more data, but if less data is available, it raises the
+ exception.
You can still retrieve the downloaded data in this case; it is stored in the
:attr:`content` attribute of the exception instance.
- If no *Content-Length* header was supplied, :func:`urlretrieve` can not check
- the size of the data it has downloaded, and just returns it. In this case
- you just have to assume that the download was successful.
+ If no *Content-Length* header was supplied, :func:`urlretrieve` cannot check
+ the size of the data it has downloaded, and just returns it. In this case
+ you just have to assume that the download was successful.
.. function:: urlcleanup()
- Clear the cache that may have been built up by previous calls to
- :func:`urlretrieve`.
+ Cleans up temporary files that may have been left behind by previous
+ calls to :func:`urlretrieve`.
.. class:: URLopener(proxies=None, **x509)
@@ -1197,7 +1257,7 @@ some point in the future.
*key_file* and *cert_file* are supported to provide an SSL key and certificate;
both are needed to support client authentication.
- :class:`URLopener` objects will raise an :exc:`IOError` exception if the server
+ :class:`URLopener` objects will raise an :exc:`OSError` exception if the server
returns an error code.
.. method:: open(fullurl, data=None)
diff --git a/Doc/library/warnings.rst b/Doc/library/warnings.rst
index 8af19a2..8387f5a 100644
--- a/Doc/library/warnings.rst
+++ b/Doc/library/warnings.rst
@@ -339,8 +339,7 @@ Available Functions
Write a warning to a file. The default implementation calls
``formatwarning(message, category, filename, lineno, line)`` and writes the
resulting string to *file*, which defaults to ``sys.stderr``. You may replace
- this function with an alternative implementation by assigning to
- ``warnings.showwarning``.
+ this function with any callable by assigning to ``warnings.showwarning``.
*line* is a line of source code to be included in the warning
message; if *line* is not supplied, :func:`showwarning` will
try to read the line specified by *filename* and *lineno*.
diff --git a/Doc/library/webbrowser.rst b/Doc/library/webbrowser.rst
index 23ba6c5..9cf8c50 100644
--- a/Doc/library/webbrowser.rst
+++ b/Doc/library/webbrowser.rst
@@ -96,47 +96,55 @@ A number of browser types are predefined. This table gives the type names that
may be passed to the :func:`get` function and the corresponding instantiations
for the controller classes, all defined in this module.
-+-----------------------+-----------------------------------------+-------+
-| Type Name | Class Name | Notes |
-+=======================+=========================================+=======+
-| ``'mozilla'`` | :class:`Mozilla('mozilla')` | |
-+-----------------------+-----------------------------------------+-------+
-| ``'firefox'`` | :class:`Mozilla('mozilla')` | |
-+-----------------------+-----------------------------------------+-------+
-| ``'netscape'`` | :class:`Mozilla('netscape')` | |
-+-----------------------+-----------------------------------------+-------+
-| ``'galeon'`` | :class:`Galeon('galeon')` | |
-+-----------------------+-----------------------------------------+-------+
-| ``'epiphany'`` | :class:`Galeon('epiphany')` | |
-+-----------------------+-----------------------------------------+-------+
-| ``'skipstone'`` | :class:`BackgroundBrowser('skipstone')` | |
-+-----------------------+-----------------------------------------+-------+
-| ``'kfmclient'`` | :class:`Konqueror()` | \(1) |
-+-----------------------+-----------------------------------------+-------+
-| ``'konqueror'`` | :class:`Konqueror()` | \(1) |
-+-----------------------+-----------------------------------------+-------+
-| ``'kfm'`` | :class:`Konqueror()` | \(1) |
-+-----------------------+-----------------------------------------+-------+
-| ``'mosaic'`` | :class:`BackgroundBrowser('mosaic')` | |
-+-----------------------+-----------------------------------------+-------+
-| ``'opera'`` | :class:`Opera()` | |
-+-----------------------+-----------------------------------------+-------+
-| ``'grail'`` | :class:`Grail()` | |
-+-----------------------+-----------------------------------------+-------+
-| ``'links'`` | :class:`GenericBrowser('links')` | |
-+-----------------------+-----------------------------------------+-------+
-| ``'elinks'`` | :class:`Elinks('elinks')` | |
-+-----------------------+-----------------------------------------+-------+
-| ``'lynx'`` | :class:`GenericBrowser('lynx')` | |
-+-----------------------+-----------------------------------------+-------+
-| ``'w3m'`` | :class:`GenericBrowser('w3m')` | |
-+-----------------------+-----------------------------------------+-------+
-| ``'windows-default'`` | :class:`WindowsDefault` | \(2) |
-+-----------------------+-----------------------------------------+-------+
-| ``'internet-config'`` | :class:`InternetConfig` | \(3) |
-+-----------------------+-----------------------------------------+-------+
-| ``'macosx'`` | :class:`MacOSX('default')` | \(4) |
-+-----------------------+-----------------------------------------+-------+
++------------------------+-----------------------------------------+-------+
+| Type Name | Class Name | Notes |
++========================+=========================================+=======+
+| ``'mozilla'`` | :class:`Mozilla('mozilla')` | |
++------------------------+-----------------------------------------+-------+
+| ``'firefox'`` | :class:`Mozilla('mozilla')` | |
++------------------------+-----------------------------------------+-------+
+| ``'netscape'`` | :class:`Mozilla('netscape')` | |
++------------------------+-----------------------------------------+-------+
+| ``'galeon'`` | :class:`Galeon('galeon')` | |
++------------------------+-----------------------------------------+-------+
+| ``'epiphany'`` | :class:`Galeon('epiphany')` | |
++------------------------+-----------------------------------------+-------+
+| ``'skipstone'`` | :class:`BackgroundBrowser('skipstone')` | |
++------------------------+-----------------------------------------+-------+
+| ``'kfmclient'`` | :class:`Konqueror()` | \(1) |
++------------------------+-----------------------------------------+-------+
+| ``'konqueror'`` | :class:`Konqueror()` | \(1) |
++------------------------+-----------------------------------------+-------+
+| ``'kfm'`` | :class:`Konqueror()` | \(1) |
++------------------------+-----------------------------------------+-------+
+| ``'mosaic'`` | :class:`BackgroundBrowser('mosaic')` | |
++------------------------+-----------------------------------------+-------+
+| ``'opera'`` | :class:`Opera()` | |
++------------------------+-----------------------------------------+-------+
+| ``'grail'`` | :class:`Grail()` | |
++------------------------+-----------------------------------------+-------+
+| ``'links'`` | :class:`GenericBrowser('links')` | |
++------------------------+-----------------------------------------+-------+
+| ``'elinks'`` | :class:`Elinks('elinks')` | |
++------------------------+-----------------------------------------+-------+
+| ``'lynx'`` | :class:`GenericBrowser('lynx')` | |
++------------------------+-----------------------------------------+-------+
+| ``'w3m'`` | :class:`GenericBrowser('w3m')` | |
++------------------------+-----------------------------------------+-------+
+| ``'windows-default'`` | :class:`WindowsDefault` | \(2) |
++------------------------+-----------------------------------------+-------+
+| ``'internet-config'`` | :class:`InternetConfig` | \(3) |
++------------------------+-----------------------------------------+-------+
+| ``'macosx'`` | :class:`MacOSX('default')` | \(4) |
++------------------------+-----------------------------------------+-------+
+| ``'google-chrome'`` | :class:`Chrome('google-chrome')` | |
++------------------------+-----------------------------------------+-------+
+| ``'chrome'`` | :class:`Chrome('chrome')` | |
++------------------------+-----------------------------------------+-------+
+| ``'chromium'`` | :class:`Chromium('chromium')` | |
++------------------------+-----------------------------------------+-------+
+| ``'chromium-browser'`` | :class:`Chromium('chromium-browser')` | |
++------------------------+-----------------------------------------+-------+
Notes:
@@ -156,12 +164,15 @@ Notes:
(4)
Only on Mac OS X platform.
+.. versionadded:: 3.3
+ Support for Chrome/Chromium has been added.
+
Here are some simple examples::
- url = 'http://www.python.org/'
+ url = 'http://docs.python.org/'
# Open URL in a new tab, if a browser window is already open.
- webbrowser.open_new_tab(url + 'doc/')
+ webbrowser.open_new_tab(url)
# Open URL in new window, raising the window if possible.
webbrowser.open_new(url)
diff --git a/Doc/library/winreg.rst b/Doc/library/winreg.rst
index 5cf30ee..376752e 100644
--- a/Doc/library/winreg.rst
+++ b/Doc/library/winreg.rst
@@ -38,7 +38,11 @@ This module offers the following functions:
*key* is the predefined handle to connect to.
The return value is the handle of the opened key. If the function fails, an
- :exc:`WindowsError` exception is raised.
+ :exc:`OSError` exception is raised.
+
+ .. versionchanged:: 3.3
+ This function used to raise a :exc:`WindowsError`, which is now an
+ alias of :exc:`OSError`.
.. function:: CreateKey(key, sub_key)
@@ -57,7 +61,11 @@ This module offers the following functions:
If the key already exists, this function opens the existing key.
The return value is the handle of the opened key. If the function fails, an
- :exc:`WindowsError` exception is raised.
+ :exc:`OSError` exception is raised.
+
+ .. versionchanged:: 3.3
+ This function used to raise a :exc:`WindowsError`, which is now an
+ alias of :exc:`OSError`.
.. function:: CreateKeyEx(key, sub_key, reserved=0, access=KEY_ALL_ACCESS)
@@ -82,10 +90,14 @@ This module offers the following functions:
If the key already exists, this function opens the existing key.
The return value is the handle of the opened key. If the function fails, an
- :exc:`WindowsError` exception is raised.
+ :exc:`OSError` exception is raised.
.. versionadded:: 3.2
+ .. versionchanged:: 3.3
+ This function used to raise a :exc:`WindowsError`, which is now an
+ alias of :exc:`OSError`.
+
.. function:: DeleteKey(key, sub_key)
@@ -100,7 +112,11 @@ This module offers the following functions:
*This method can not delete keys with subkeys.*
If the method succeeds, the entire key, including all of its values, is removed.
- If the method fails, a :exc:`WindowsError` exception is raised.
+ If the method fails, an :exc:`OSError` exception is raised.
+
+ .. versionchanged:: 3.3
+ This function used to raise a :exc:`WindowsError`, which is now an
+ alias of :exc:`OSError`.
.. function:: DeleteKeyEx(key, sub_key, access=KEY_ALL_ACCESS, reserved=0)
@@ -129,12 +145,16 @@ This module offers the following functions:
*This method can not delete keys with subkeys.*
If the method succeeds, the entire key, including all of its values, is
- removed. If the method fails, a :exc:`WindowsError` exception is raised.
+ removed. If the method fails, an :exc:`OSError` exception is raised.
On unsupported Windows versions, :exc:`NotImplementedError` is raised.
.. versionadded:: 3.2
+ .. versionchanged:: 3.3
+ This function used to raise a :exc:`WindowsError`, which is now an
+ alias of :exc:`OSError`.
+
.. function:: DeleteValue(key, value)
@@ -156,9 +176,13 @@ This module offers the following functions:
*index* is an integer that identifies the index of the key to retrieve.
The function retrieves the name of one subkey each time it is called. It is
- typically called repeatedly until a :exc:`WindowsError` exception is
+ typically called repeatedly until an :exc:`OSError` exception is
raised, indicating no more values are available.
+ .. versionchanged:: 3.3
+ This function used to raise a :exc:`WindowsError`, which is now an
+ alias of :exc:`OSError`.
+
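A Windows-only sketch of the usual enumeration loop under the new exception::

    import winreg

    with winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, "SOFTWARE") as key:
        index = 0
        while True:
            try:
                print(winreg.EnumKey(key, index))
            except OSError:
                break                # no more subkeys
            index += 1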
.. function:: EnumValue(key, index)
@@ -170,7 +194,7 @@ This module offers the following functions:
*index* is an integer that identifies the index of the value to retrieve.
The function retrieves the name of one value each time it is called. It is
- typically called repeatedly, until a :exc:`WindowsError` exception is
+ typically called repeatedly, until an :exc:`OSError` exception is
raised, indicating no more values.
The result is a tuple of 3 items:
@@ -189,6 +213,10 @@ This module offers the following functions:
| | :meth:`SetValueEx`) |
+-------+--------------------------------------------+
+ .. versionchanged:: 3.3
+ This function used to raise a :exc:`WindowsError`, which is now an
+ alias of :exc:`OSError`.
+
.. function:: ExpandEnvironmentStrings(str)
@@ -260,10 +288,14 @@ This module offers the following functions:
The result is a new handle to the specified key.
- If the function fails, :exc:`WindowsError` is raised.
+ If the function fails, :exc:`OSError` is raised.
.. versionchanged:: 3.2 Allow the use of named arguments.
+ .. versionchanged:: 3.3
+ This function used to raise a :exc:`WindowsError`, which is now an
+ alias of :exc:`OSError`.
+
.. function:: OpenKeyEx()
diff --git a/Doc/library/xml.etree.elementtree.rst b/Doc/library/xml.etree.elementtree.rst
index c5c8802..6fe81c9 100644
--- a/Doc/library/xml.etree.elementtree.rst
+++ b/Doc/library/xml.etree.elementtree.rst
@@ -32,17 +32,18 @@ To create an element instance, use the :class:`Element` constructor or the
The :class:`ElementTree` class can be used to wrap an element structure, and
convert it from and to XML.
-A C implementation of this API is available as :mod:`xml.etree.cElementTree`.
-
See http://effbot.org/zone/element-index.htm for tutorials and links to other
-docs. Fredrik Lundh's page is also the location of the development version of
-the xml.etree.ElementTree.
+docs.
.. versionchanged:: 3.2
The ElementTree API is updated to 1.3. For more information, see
`Introducing ElementTree 1.3
<http://effbot.org/zone/elementtree-13-intro.htm>`_.
+.. versionchanged:: 3.3
+ This module will use a fast implementation whenever available.
+ The :mod:`xml.etree.cElementTree` module is deprecated.
+
.. _elementtree-functions:
@@ -200,7 +201,6 @@ Functions
Element Objects
---------------
-
.. class:: Element(tag, attrib={}, **extra)
Element class. This class defines the Element interface, and provides a
@@ -645,6 +645,24 @@ This is an example of counting the maximum depth of an XML file::
>>> parser.close()
4
+Exceptions
+----------
+
+.. exception:: ParseError
+
+ XML parse error, raised by the various parsing methods in this module when
+ parsing fails. The string representation of an instance of this exception
+ will contain a user-friendly error message. In addition, it will have
+ the following attributes available:
+
+ .. attribute:: code
+
+ A numeric error code from the expat parser. See the documentation of
+ :mod:`xml.parsers.expat` for the list of error codes and their meanings.
+
+ .. attribute:: position
+
+ A tuple of *line*, *column* numbers, specifying where the error occurred.
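A small sketch of catching the exception::

    import xml.etree.ElementTree as ET

    try:
        ET.fromstring("<root><child></root>")   # mismatched tag
    except ET.ParseError as err:
        print(err.position, err.code)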
.. rubric:: Footnotes
diff --git a/Doc/library/xmlrpc.client.rst b/Doc/library/xmlrpc.client.rst
index e72770a..1871c99 100644
--- a/Doc/library/xmlrpc.client.rst
+++ b/Doc/library/xmlrpc.client.rst
@@ -8,7 +8,7 @@
.. XXX Not everything is documented yet. It might be good to describe
- Marshaller, Unmarshaller, getparser, dumps, loads, and Transport.
+ Marshaller, Unmarshaller, getparser and Transport.
**Source code:** :source:`Lib/xmlrpc/client.py`
@@ -21,7 +21,12 @@ supports writing XML-RPC client code; it handles all the details of translating
between conformable Python objects and XML on the wire.
-.. class:: ServerProxy(uri, transport=None, encoding=None, verbose=False, allow_none=False, use_datetime=False)
+.. class:: ServerProxy(uri, transport=None, encoding=None, verbose=False, \
+ allow_none=False, use_datetime=False, \
+ use_builtin_types=False)
+
+ .. versionchanged:: 3.3
+ The *use_builtin_types* flag was added.
A :class:`ServerProxy` instance is an object that manages communication with a
remote XML-RPC server. The required first argument is a URI (Uniform Resource
@@ -34,9 +39,13 @@ between conformable Python objects and XML on the wire.
XML; the default behaviour is for ``None`` to raise a :exc:`TypeError`. This is
a commonly-used extension to the XML-RPC specification, but isn't supported by
all clients and servers; see http://ontosys.com/xml-rpc/extensions.php for a
- description. The *use_datetime* flag can be used to cause date/time values to
- be presented as :class:`datetime.datetime` objects; this is false by default.
- :class:`datetime.datetime` objects may be passed to calls.
+ description. The *use_builtin_types* flag can be used to cause date/time values
+ to be presented as :class:`datetime.datetime` objects and binary data to be
+ presented as :class:`bytes` objects; this flag is false by default.
+ :class:`datetime.datetime` and :class:`bytes` objects may be passed to calls.
+
+ The obsolete *use_datetime* flag is similar to *use_builtin_types* but it
+ applies only to date/time values.
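A minimal sketch (the server URL is a placeholder)::

    import xmlrpc.client

    # Date/time values arrive as datetime.datetime and binary data as
    # bytes when use_builtin_types is true.
    proxy = xmlrpc.client.ServerProxy('http://localhost:8000/',
                                      use_builtin_types=True)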
Both the HTTP and HTTPS transports support the URL syntax extension for HTTP
Basic Authentication: ``http://user:pass@host:port/path``. The ``user:pass``
@@ -78,12 +87,12 @@ between conformable Python objects and XML on the wire.
| | only their *__dict__* attribute is |
| | transmitted. |
+---------------------------------+---------------------------------------------+
- | :const:`dates` | in seconds since the epoch (pass in an |
- | | instance of the :class:`DateTime` class) or |
+ | :const:`dates` | In seconds since the epoch. Pass in an |
+ | | instance of the :class:`DateTime` class or |
| | a :class:`datetime.datetime` instance. |
+---------------------------------+---------------------------------------------+
- | :const:`binary data` | pass in an instance of the :class:`Binary` |
- | | wrapper class |
+ | :const:`binary data` | Pass in an instance of the :class:`Binary` |
+ | | wrapper class or a :class:`bytes` instance. |
+---------------------------------+---------------------------------------------+
This is the full set of data types supported by XML-RPC. Method calls may also
@@ -98,8 +107,9 @@ between conformable Python objects and XML on the wire.
ensure that the string is free of characters that aren't allowed in XML, such as
the control characters with ASCII values between 0 and 31 (except, of course,
tab, newline and carriage return); failing to do this will result in an XML-RPC
- request that isn't well-formed XML. If you have to pass arbitrary strings via
- XML-RPC, use the :class:`Binary` wrapper class described below.
+ request that isn't well-formed XML. If you have to pass arbitrary bytes
+ via XML-RPC, use the :class:`bytes` class or the :class:`Binary` wrapper class
+ described below.
:class:`Server` is retained as an alias for :class:`ServerProxy` for backwards
compatibility. New code should use :class:`ServerProxy`.
@@ -249,7 +259,7 @@ The client code for the preceding server::
Binary Objects
--------------
-This class may be initialized from string data (which may include NULs). The
+This class may be initialized from bytes data (which may include NULs). The
primary access to the content of a :class:`Binary` object is provided by an
attribute:
@@ -257,15 +267,15 @@ attribute:
.. attribute:: Binary.data
The binary data encapsulated by the :class:`Binary` instance. The data is
- provided as an 8-bit string.
+ provided as a :class:`bytes` object.
:class:`Binary` objects have the following methods, supported mainly for
internal use by the marshalling/unmarshalling code:
-.. method:: Binary.decode(string)
+.. method:: Binary.decode(bytes)
- Accept a base64 string and decode it as the instance's new data.
+ Accept a base64 :class:`bytes` object and decode it as the instance's new data.
.. method:: Binary.encode(out)
@@ -471,14 +481,21 @@ Convenience Functions
it via an extension, provide a true value for *allow_none*.
-.. function:: loads(data, use_datetime=False)
+.. function:: loads(data, use_datetime=False, use_builtin_types=False)
Convert an XML-RPC request or response into Python objects, a ``(params,
methodname)``. *params* is a tuple of arguments; *methodname* is a string, or
``None`` if no method name is present in the packet. If the XML-RPC packet
represents a fault condition, this function will raise a :exc:`Fault` exception.
- The *use_datetime* flag can be used to cause date/time values to be presented as
- :class:`datetime.datetime` objects; this is false by default.
+ The *use_builtin_types* flag can be used to cause date/time values to be
+ presented as :class:`datetime.datetime` objects and binary data to be
+ presented as :class:`bytes` objects; this flag is false by default.
+
+ The obsolete *use_datetime* flag is similar to *use_builtin_types* but it
+ applies only to date/time values.
+
+ .. versionchanged:: 3.3
+ The *use_builtin_types* flag was added.
.. _xmlrpc-client-example:
diff --git a/Doc/library/xmlrpc.server.rst b/Doc/library/xmlrpc.server.rst
index 67feba6..6493fd4 100644
--- a/Doc/library/xmlrpc.server.rst
+++ b/Doc/library/xmlrpc.server.rst
@@ -16,7 +16,9 @@ servers written in Python. Servers can either be free standing, using
:class:`CGIXMLRPCRequestHandler`.
-.. class:: SimpleXMLRPCServer(addr, requestHandler=SimpleXMLRPCRequestHandler, logRequests=True, allow_none=False, encoding=None, bind_and_activate=True)
+.. class:: SimpleXMLRPCServer(addr, requestHandler=SimpleXMLRPCRequestHandler,\
+ logRequests=True, allow_none=False, encoding=None,\
+ bind_and_activate=True, use_builtin_types=False)
Create a new server instance. This class provides methods for registration of
functions that can be called by the XML-RPC protocol. The *requestHandler*
@@ -25,18 +27,31 @@ servers written in Python. Servers can either be free standing, using
are passed to the :class:`socketserver.TCPServer` constructor. If *logRequests*
is true (the default), requests will be logged; setting this parameter to false
will turn off logging. The *allow_none* and *encoding* parameters are passed
- on to :mod:`xmlrpc.client` and control the XML-RPC responses that will be returned
+ on to :mod:`xmlrpc.client` and control the XML-RPC responses that will be returned
from the server. The *bind_and_activate* parameter controls whether
:meth:`server_bind` and :meth:`server_activate` are called immediately by the
constructor; it defaults to true. Setting it to false allows code to manipulate
the *allow_reuse_address* class variable before the address is bound.
+ The *use_builtin_types* parameter is passed to the
+ :func:`~xmlrpc.client.loads` function and controls which types are processed
+ when date/time values or binary data are received; it defaults to false.
+ .. versionchanged:: 3.3
+ The *use_builtin_types* flag was added.
-.. class:: CGIXMLRPCRequestHandler(allow_none=False, encoding=None)
+
+.. class:: CGIXMLRPCRequestHandler(allow_none=False, encoding=None,\
+ use_builtin_types=False)
Create a new instance to handle XML-RPC requests in a CGI environment. The
*allow_none* and *encoding* parameters are passed on to :mod:`xmlrpc.client`
and control the XML-RPC responses that will be returned from the server.
+ The *use_builtin_types* parameter is passed to the
+ :func:`~xmlrpc.client.loads` function and controls which types are processed
+ when date/time values or binary data are received; it defaults to false.
+
+ .. versionchanged:: 3.3
+ The *use_builtin_types* flag was added.
.. class:: SimpleXMLRPCRequestHandler()
@@ -233,12 +248,17 @@ to HTTP GET requests. Servers can either be free standing, using
:class:`DocCGIXMLRPCRequestHandler`.
-.. class:: DocXMLRPCServer(addr, requestHandler=DocXMLRPCRequestHandler, logRequests=True, allow_none=False, encoding=None, bind_and_activate=True)
+.. class:: DocXMLRPCServer(addr, requestHandler=DocXMLRPCRequestHandler,\
+ logRequests=True, allow_none=False, encoding=None,\
+ bind_and_activate=True, use_builtin_types=True)
Create a new server instance. All parameters have the same meaning as for
:class:`SimpleXMLRPCServer`; *requestHandler* defaults to
:class:`DocXMLRPCRequestHandler`.
+ .. versionchanged:: 3.3
+ The *use_builtin_types* flag was added.
+
.. class:: DocCGIXMLRPCRequestHandler()
diff --git a/Doc/library/zipimport.rst b/Doc/library/zipimport.rst
index 4f17092..b47c35b 100644
--- a/Doc/library/zipimport.rst
+++ b/Doc/library/zipimport.rst
@@ -85,9 +85,12 @@ zipimporter Objects
.. method:: get_data(pathname)
- Return the data associated with *pathname*. Raise :exc:`IOError` if the
+ Return the data associated with *pathname*. Raise :exc:`OSError` if the
file wasn't found.
+ .. versionchanged:: 3.3
+ :exc:`IOError` used to be raised instead of :exc:`OSError`.
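+
+ A minimal sketch (the archive and file names are illustrative)::
+
+    import zipimport
+
+    importer = zipimport.zipimporter('example.zip')
+    data = importer.get_data('README.txt')   # bytes read from inside the zip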
+
.. method:: get_filename(fullname)
diff --git a/Doc/library/zlib.rst b/Doc/library/zlib.rst
index 897d919..1e9a2bc 100644
--- a/Doc/library/zlib.rst
+++ b/Doc/library/zlib.rst
@@ -120,6 +120,7 @@ The available exception and functions in this module are:
won't fit into memory at once. The *wbits* parameter controls the size of the
window buffer.
+
Compression objects support the following methods:
@@ -150,7 +151,7 @@ Compression objects support the following methods:
compress a set of data that share a common initial prefix.
-Decompression objects support the following methods, and two attributes:
+Decompression objects support the following methods and attributes:
.. attribute:: Decompress.unused_data
@@ -160,13 +161,6 @@ Decompression objects support the following methods, and two attributes:
available. If the whole bytestring turned out to contain compressed data, this is
``b""``, an empty bytes object.
- The only way to determine where a bytestring of compressed data ends is by actually
- decompressing it. This means that when compressed data is contained part of a
- larger file, you can only find the end of it by reading data and feeding it
- followed by some non-empty bytestring into a decompression object's
- :meth:`decompress` method until the :attr:`unused_data` attribute is no longer
- empty.
-
.. attribute:: Decompress.unconsumed_tail
@@ -177,6 +171,17 @@ Decompression objects support the following methods, and two attributes:
:meth:`decompress` method call in order to get correct output.
+.. attribute:: Decompress.eof
+
+ A boolean indicating whether the end of the compressed data stream has been
+ reached.
+
+ This makes it possible to distinguish between a properly formed compressed
+ stream and an incomplete or truncated one.
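+
+ A minimal sketch of checking for truncation (``compressed_chunk`` is
+ illustrative)::
+
+    import zlib
+
+    d = zlib.decompressobj()
+    output = d.decompress(compressed_chunk)
+    if not d.eof:
+        # the stream ended prematurely: it is incomplete or truncated
+        handle_truncated_input()   # hypothetical error handler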
+
+ .. versionadded:: 3.3
+
+
.. method:: Decompress.decompress(data[, max_length])
Decompress *data*, returning a bytes object containing the uncompressed data
@@ -211,6 +216,24 @@ Decompression objects support the following methods, and two attributes:
seeks into the stream at a future point.
+Information about the version of the zlib library in use is available through
+the following constants:
+
+
+.. data:: ZLIB_VERSION
+
+ The version string of the zlib library that was used for building the module.
+ This may be different from the zlib library actually used at runtime, which
+ is available as :const:`ZLIB_RUNTIME_VERSION`.
+
+
+.. data:: ZLIB_RUNTIME_VERSION
+
+ The version string of the zlib library actually loaded by the interpreter.
+
+ .. versionadded:: 3.3
+
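+For example, to detect a mismatch between the build-time and runtime
+libraries (a minimal sketch)::
+
+   import zlib
+
+   if zlib.ZLIB_VERSION != zlib.ZLIB_RUNTIME_VERSION:
+       print('built against zlib', zlib.ZLIB_VERSION,
+             'but running with', zlib.ZLIB_RUNTIME_VERSION)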
+
.. seealso::
Module :mod:`gzip`
diff --git a/Doc/license.rst b/Doc/license.rst
index cb20c83..9d6ef24 100644
--- a/Doc/license.rst
+++ b/Doc/license.rst
@@ -118,7 +118,7 @@ been GPL-compatible; the table below summarizes the various releases.
+----------------+--------------+------------+------------+-----------------+
| 3.2.2 | 3.2.1 | 2011 | PSF | yes |
+----------------+--------------+------------+------------+-----------------+
-| 3.2.3 | 3.2.2 | 2012 | PSF | yes |
+| 3.3.0 | 3.2 | 2012 | PSF | yes |
+----------------+--------------+------------+------------+-----------------+
.. note::
diff --git a/Doc/packaging/builtdist.rst b/Doc/packaging/builtdist.rst
new file mode 100644
index 0000000..1d9a349
--- /dev/null
+++ b/Doc/packaging/builtdist.rst
@@ -0,0 +1,302 @@
+.. _packaging-built-dist:
+
+****************************
+Creating Built Distributions
+****************************
+
+A "built distribution" is what you're probably used to thinking of either as a
+"binary package" or an "installer" (depending on your background). It's not
+necessarily binary, though, because it might contain only Python source code
+and/or byte-code; and we don't call it a package, because that word is already
+spoken for in Python. (And "installer" is a term specific to the world of
+mainstream desktop systems.)
+
+A built distribution is how you make life as easy as possible for installers of
+your module distribution: for users of RPM-based Linux systems, it's a binary
+RPM; for Windows users, it's an executable installer; for Debian-based Linux
+users, it's a Debian package; and so forth. Obviously, no one person will be
+able to create built distributions for every platform under the sun, so the
+Distutils are designed to enable module developers to concentrate on their
+specialty---writing code and creating source distributions---while an
+intermediary species called *packagers* springs up to turn source distributions
+into built distributions for as many platforms as there are packagers.
+
+Of course, the module developer could be his own packager; or the packager could
+be a volunteer "out there" somewhere who has access to a platform which the
+original developer does not; or it could be software periodically grabbing new
+source distributions and turning them into built distributions for as many
+platforms as the software has access to. Regardless of who they are, a packager
+uses the setup script and the :command:`bdist` command family to generate built
+distributions.
+
+As a simple example, if I run the following command in the Distutils source
+tree::
+
+ python setup.py bdist
+
+then the Distutils builds my module distribution (the Distutils itself in this
+case), does a "fake" installation (also in the :file:`build` directory), and
+creates the default type of built distribution for my platform. The default
+format for built distributions is a "dumb" tar file on Unix, and a simple
+executable installer on Windows. (That tar file is considered "dumb" because it
+has to be unpacked in a specific location to work.)
+
+Thus, the above command on a Unix system creates
+:file:`Distutils-1.0.{plat}.tar.gz`; unpacking this tarball from the right place
+installs the Distutils just as though you had downloaded the source distribution
+and run ``python setup.py install``. (The "right place" is either the root of
+the filesystem or Python's :file:`{prefix}` directory, depending on the options
+given to the :command:`bdist_dumb` command; the default is to make dumb
+distributions relative to :file:`{prefix}`.)
+
+Obviously, for pure Python distributions, this isn't any simpler than just
+running ``python setup.py install``\ ---but for non-pure distributions, which
+include extensions that would need to be compiled, it can mean the difference
+between someone being able to use your extensions or not. And creating "smart"
+built distributions, such as an executable installer for
+Windows, is far more convenient for users even if your distribution doesn't
+include any extensions.
+
+The :command:`bdist` command has a :option:`--formats` option, similar to the
+:command:`sdist` command, which you can use to select the types of built
+distribution to generate: for example, ::
+
+ python setup.py bdist --format=zip
+
+would, when run on a Unix system, create :file:`Distutils-1.0.{plat}.zip`\
+---again, this archive would be unpacked from the root directory to install the
+Distutils.
+
+The available formats for built distributions are:
+
++-------------+------------------------------+---------+
+| Format | Description | Notes |
++=============+==============================+=========+
+| ``gztar`` | gzipped tar file | (1),(3) |
+| | (:file:`.tar.gz`) | |
++-------------+------------------------------+---------+
+| ``tar`` | tar file (:file:`.tar`) | \(3) |
++-------------+------------------------------+---------+
+| ``zip`` | zip file (:file:`.zip`) | (2),(4) |
++-------------+------------------------------+---------+
+| ``wininst`` | self-extracting ZIP file for | \(4) |
+| | Windows | |
++-------------+------------------------------+---------+
+| ``msi`` | Microsoft Installer. | |
++-------------+------------------------------+---------+
+
+
+Notes:
+
+(1)
+ default on Unix
+
+(2)
+ default on Windows
+
+(3)
+ requires external utilities: :program:`tar` and possibly one of :program:`gzip`
+ or :program:`bzip2`
+
+(4)
+ requires either external :program:`zip` utility or :mod:`zipfile` module (part
+ of the standard Python library since Python 1.6)
+
+You don't have to use the :command:`bdist` command with the :option:`--formats`
+option; you can also use the command that directly implements the format you're
+interested in. Some of these :command:`bdist` "sub-commands" actually generate
+several similar formats; for instance, the :command:`bdist_dumb` command
+generates all the "dumb" archive formats (``tar``, ``gztar``, and
+``zip``). The :command:`bdist` sub-commands, and the formats generated by
+each, are:
+
++--------------------------+-----------------------+
+| Command | Formats |
++==========================+=======================+
+| :command:`bdist_dumb` | tar, gztar, zip |
++--------------------------+-----------------------+
+| :command:`bdist_wininst` | wininst |
++--------------------------+-----------------------+
+| :command:`bdist_msi` | msi |
++--------------------------+-----------------------+
+
+The following sections give details on the individual :command:`bdist_\*`
+commands.
+
+
+.. _packaging-creating-dumb:
+
+Creating dumb built distributions
+=================================
+
+.. XXX Need to document absolute vs. prefix-relative packages here, but first
+ I have to implement it!
+
+
+.. _packaging-creating-wininst:
+
+Creating Windows Installers
+===========================
+
+Executable installers are the natural format for binary distributions on
+Windows. They display a nice graphical user interface, display some information
+about the module distribution to be installed taken from the metadata in the
+setup script, let the user select a few options, and start or cancel the
+installation.
+
+Since the metadata is taken from the setup script, creating Windows installers
+is usually as easy as running::
+
+ python setup.py bdist_wininst
+
+or the :command:`bdist` command with the :option:`--formats` option::
+
+ python setup.py bdist --formats=wininst
+
+If you have a pure module distribution (only containing pure Python modules and
+packages), the resulting installer will be version independent and have a name
+like :file:`foo-1.0.win32.exe`. These installers can even be created on Unix
+platforms or Mac OS X.
+
+If you have a non-pure distribution, the extensions can only be created on a
+Windows platform, and will be Python version dependent. The installer filename
+will reflect this and will have the form :file:`foo-1.0.win32-py2.0.exe`. You
+have to create a separate installer for every Python version you want to
+support.
+
+The installer will try to compile pure modules into :term:`bytecode` after installation
+on the target system in normal and optimizing mode. If you don't want this to
+happen for some reason, you can run the :command:`bdist_wininst` command with
+the :option:`--no-target-compile` and/or the :option:`--no-target-optimize`
+option.
+
+By default the installer will display the cool "Python Powered" logo when it is
+run, but you can also supply your own 152x261 bitmap which must be a Windows
+:file:`.bmp` file with the :option:`--bitmap` option.
+
+The installer will also display a large title on the desktop background window
+when it is run, which is constructed from the name of your distribution and the
+version number. This can be changed to another text by using the
+:option:`--title` option.
+
+The installer file will be written to the "distribution directory" --- normally
+:file:`dist/`, but customizable with the :option:`--dist-dir` option.
+
+.. _packaging-cross-compile-windows:
+
+Cross-compiling on Windows
+==========================
+
+Starting with Python 2.6, packaging is capable of cross-compiling between
+Windows platforms. In practice, this means that with the correct tools
+installed, you can use a 32bit version of Windows to create 64bit extensions
+and vice-versa.
+
+To build for an alternate platform, specify the :option:`--plat-name` option
+to the build command. Valid values are currently 'win32', 'win-amd64' and
+'win-ia64'. For example, on a 32bit version of Windows, you could execute::
+
+ python setup.py build --plat-name=win-amd64
+
+to build a 64bit version of your extension. The Windows Installers also
+support this option, so the command::
+
+ python setup.py build --plat-name=win-amd64 bdist_wininst
+
+would create a 64bit installation executable on your 32bit version of Windows.
+
+To cross-compile, you must download the Python source code and cross-compile
+Python itself for the platform you are targeting - it is not possible from a
+binary installation of Python (as the .lib etc. files for other platforms are
+not included.) In practice, this means the user of a 32-bit operating
+system will need to use Visual Studio 2008 to open the
+:file:`PCBuild/PCbuild.sln` solution in the Python source tree and build the
+"x64" configuration of the 'pythoncore' project before cross-compiling
+extensions is possible.
+
+Note that by default, Visual Studio 2008 does not install 64bit compilers or
+tools. You may need to reexecute the Visual Studio setup process and select
+these tools (using Control Panel->[Add/Remove] Programs is a convenient way to
+check or modify your existing install.)
+
+.. _packaging-postinstallation-script:
+
+The Postinstallation script
+---------------------------
+
+Starting with Python 2.3, a postinstallation script can be specified with the
+:option:`--install-script` option. The basename of the script must be
+specified, and the script filename must also be listed in the scripts argument
+to the setup function.
+
+This script will be run at installation time on the target system after all the
+files have been copied, with ``argv[1]`` set to :option:`-install`, and again at
+uninstallation time before the files are removed with ``argv[1]`` set to
+:option:`-remove`.
+
+The installation script runs embedded in the Windows installer; all output
+(``sys.stdout``, ``sys.stderr``) is redirected into a buffer and will be
+displayed in the GUI after the script has finished.
+
+Some functions especially useful in this context are available as additional
+built-in functions in the installation script.
+
+.. currentmodule:: bdist_wininst-postinst-script
+
+.. function:: directory_created(path)
+ file_created(path)
+
+ These functions should be called when a directory or file is created by the
+ postinstall script at installation time. They will register *path* with the
+ uninstaller, so that it will be removed when the distribution is uninstalled.
+ To be safe, directories are only removed if they are empty.
+
+
+.. function:: get_special_folder_path(csidl_string)
+
+ This function can be used to retrieve special folder locations on Windows like
+ the Start Menu or the Desktop. It returns the full path to the folder.
+ *csidl_string* must be one of the following strings::
+
+ "CSIDL_APPDATA"
+
+ "CSIDL_COMMON_STARTMENU"
+ "CSIDL_STARTMENU"
+
+ "CSIDL_COMMON_DESKTOPDIRECTORY"
+ "CSIDL_DESKTOPDIRECTORY"
+
+ "CSIDL_COMMON_STARTUP"
+ "CSIDL_STARTUP"
+
+ "CSIDL_COMMON_PROGRAMS"
+ "CSIDL_PROGRAMS"
+
+ "CSIDL_FONTS"
+
+ If the folder cannot be retrieved, :exc:`OSError` is raised.
+
+ Which folders are available depends on the exact Windows version, and probably
+ also the configuration. For details refer to Microsoft's documentation of the
+ :c:func:`SHGetSpecialFolderPath` function.
+
+
+.. function:: create_shortcut(target, description, filename[, arguments[, workdir[, iconpath[, iconindex]]]])
+
+ This function creates a shortcut. *target* is the path to the program to be
+ started by the shortcut. *description* is the description of the shortcut.
+ *filename* is the title of the shortcut that the user will see. *arguments*
+ specifies the command-line arguments, if any. *workdir* is the working directory
+ for the program. *iconpath* is the file containing the icon for the shortcut,
+ and *iconindex* is the index of the icon in the file *iconpath*. Again, for
+ details consult the Microsoft documentation for the :class:`IShellLink`
+ interface.
+
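+A short sketch of a postinstallation script tying these helpers together (the
+application name and paths are illustrative)::
+
+   import os
+   import sys
+
+   if sys.argv[1] == '-install':
+       # runs after the files have been copied to the target system
+       menu_dir = get_special_folder_path("CSIDL_STARTMENU")
+       shortcut = os.path.join(menu_dir, "Foo.lnk")
+       create_shortcut(os.path.join(sys.prefix, "python.exe"),
+                       "The Foo application", shortcut)
+       file_created(shortcut)   # register it for removal at uninstall time
+   elif sys.argv[1] == '-remove':
+       # runs before the files are removed; nothing extra to do here
+       pass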
+
+Vista User Access Control (UAC)
+===============================
+
+Starting with Python 2.6, bdist_wininst supports a :option:`--user-access-control`
+option. The default is 'none' (meaning no UAC handling is done), and other
+valid values are 'auto' (meaning prompt for UAC elevation if Python was
+installed for all users) and 'force' (meaning always prompt for elevation).
diff --git a/Doc/packaging/commandhooks.rst b/Doc/packaging/commandhooks.rst
new file mode 100644
index 0000000..b261d00
--- /dev/null
+++ b/Doc/packaging/commandhooks.rst
@@ -0,0 +1,47 @@
+.. TODO integrate this in commandref and configfile
+
+.. _packaging-command-hooks:
+
+=============
+Command hooks
+=============
+
+Packaging provides a way of extending its commands by the use of pre- and
+post-command hooks. Hooks are Python functions (or any callable object) that
+take a command object as argument. They're specified in :ref:`config files
+<packaging-config-filenames>` using their fully qualified names. After a
+command is finalized (its options are processed), the pre-command hooks are
+executed, then the command itself is run, and finally the post-command hooks are
+executed.
+
+See also global setup hooks in :ref:`setupcfg-spec`.
+
+
+.. _packaging-finding-hooks:
+
+Finding hooks
+=============
+
+As a hook is configured with a Python dotted name, it must either be defined in
+a module installed on the system, or in a module present in the project
+directory, where the :file:`setup.cfg` file lives::
+
+ # file: _setuphooks.py
+
+ def hook(install_cmd):
+ metadata = install_cmd.dist.metadata
+ print('Hooked while installing %r %s!' % (metadata['Name'],
+ metadata['Version']))
+
+Then you need to configure it in :file:`setup.cfg`::
+
+ [install_dist]
+ pre-hook.a = _setuphooks.hook
+
+Packaging will add the project directory to :data:`sys.path` and find the
+``_setuphooks`` module.
+
+Hooks defined in different config files (system-wide, user-wide and
+project-wide) do not override each other as long as they are specified with
+different aliases (additional names after the dot). The alias in the example
+above is ``a``.
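+
+For example, two pre-hooks registered under different aliases, both run
+before :command:`install_dist` (the module names are illustrative)::
+
+   [install_dist]
+   pre-hook.a = _setuphooks.hook
+   pre-hook.b = othermodule.hook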
diff --git a/Doc/packaging/commandref.rst b/Doc/packaging/commandref.rst
new file mode 100644
index 0000000..2165b56
--- /dev/null
+++ b/Doc/packaging/commandref.rst
@@ -0,0 +1,374 @@
+.. _packaging-command-reference:
+
+*****************
+Command Reference
+*****************
+
+This reference briefly documents all standard Packaging commands and some of
+their options.
+
+.. FIXME does not work: Use pysetup run --help-commands to list all
+ standard and extra commands availavble on your system, with their
+ description. Use pysetup run <command> --help to get help about the options
+ of one command.
+
+.. XXX sections from this document should be merged with other docs (e.g. check
+ and upload with uploading.rst, install_* with install/install.rst, etc.);
+ there is no value in partially duplicating information. this file could
+ however serve as an index, i.e. just a list of all commands with links to
+ every section that describes options or usage
+
+
+Preparing distributions
+=======================
+
+:command:`check`
+----------------
+
+Perform some tests on the metadata of a distribution.
+
+For example, it verifies that all required metadata fields are provided in the
+:file:`setup.cfg` file.
+
+.. TODO document reST checks
+
+
+:command:`test`
+---------------
+
+Run a test suite.
+
+When doing test-driven development, or running automated builds that need
+testing before they are installed for downloading or use, it's often useful to
+be able to run a project's unit tests without actually installing the project
+anywhere. The :command:`test` command runs a project's unit tests without
+actually installing it, by temporarily putting the project's source on
+:data:`sys.path`, after first running :command:`build_ext -i` to ensure that any
+C extensions are built.
+
+You can use this command in one of two ways: either by specifying a
+unittest-compatible test suite for your project (or any callable that returns
+it) or by passing a test runner function that will run your tests and display
+results in the console. Both options take a Python dotted name in the form
+``package.module.callable`` to specify the object to use.
+
+If none of these options are specified, Packaging will try to perform test
+discovery using either unittest (for Python 3.2 and higher) or unittest2 (for
+older versions, if installed).
+
+.. this is a pseudo-command name used to disambiguate the options in indexes and
+ links
+.. program:: packaging test
+
+.. cmdoption:: --suite=NAME, -s NAME
+
+ Specify the test suite (or module, class, or method) to be run. The default
+ for this option can be set in the project's :file:`setup.cfg` file:
+
+ .. code-block:: cfg
+
+ [test]
+ suite = mypackage.tests.get_all_tests
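+
+ The dotted name might resolve to a callable like this sketch (the package
+ name is illustrative)::
+
+    # mypackage/tests/__init__.py
+    import unittest
+
+    def get_all_tests():
+        # return a unittest.TestSuite collecting this project's tests
+        return unittest.defaultTestLoader.discover('mypackage')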
+
+.. cmdoption:: --runner=NAME, -r NAME
+
+ Specify the test runner to be called.
+
+
+:command:`config`
+-----------------
+
+Perform distribution configuration.
+
+
+The build step
+==============
+
+This step is mainly useful to compile C/C++ libraries or extension modules. The
+build commands can be run manually to check for syntax errors or packaging
+issues (for example if the addition of a new source file was forgotten in the
+:file:`setup.cfg` file), and are also run automatically by commands which need
+them. Packaging checks the mtime of source and built files to avoid re-building
+if it's not necessary.
+
+
+:command:`build`
+----------------
+
+Build all files of a distribution, delegating to the other :command:`build_*`
+commands to do the work.
+
+
+:command:`build_clib`
+---------------------
+
+Build C libraries.
+
+
+:command:`build_ext`
+--------------------
+
+Build C/C++ extension modules.
+
+
+:command:`build_py`
+-------------------
+
+Build the Python modules (just copy them to the build directory) and
+:term:`byte-compile <bytecode>` them to :file:`.pyc` and/or :file:`.pyo` files.
+
+The byte compilation is controlled by two sets of options:
+
+- ``--compile`` and ``--no-compile`` are used to control the creation of
+ :file:`.pyc` files; the default is ``--no-compile``.
+
+- ``--optimize N`` (or ``-ON``) is used to control the creation of :file:`.pyo`
+ files: ``-O1`` turns on basic optimizations, ``-O2`` also discards docstrings,
+ ``-O0`` does not create :file:`.pyo` files; the default is ``-O0``.
+
+You can mix and match these options: for example, ``--no-compile --optimize 2``
+will create :file:`.pyo` files but no :file:`.pyc` files.
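+
+For example, the full command line for that combination would be::
+
+   python setup.py build_py --no-compile --optimize 2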
+
+.. XXX these option roles do not work
+
+Calling Python with :option:`-O` or :option:`-B` does not control the creation
+of bytecode files, only the options described above do.
+
+
+:command:`build_scripts`
+------------------------
+
+Build the scripts (just copy them to the build directory and adjust their
+shebang if they're Python scripts).
+
+
+:command:`clean`
+----------------
+
+Clean the build tree of the release.
+
+.. program:: packaging clean
+
+.. cmdoption:: --all, -a
+
+ Remove build directories for modules, scripts, etc., not only temporary build
+ by-products.
+
+
+Creating source and built distributions
+=======================================
+
+:command:`sdist`
+----------------
+
+Build a source distribution for a release.
+
+It is recommended that you always build and upload a source distribution. Users
+of OSes with easy access to compilers and users of advanced packaging tools will
+prefer to compile from source rather than using pre-built distributions. For
+Windows users, providing a binary installer is also recommended practice.
+
+
+:command:`bdist`
+----------------
+
+Build a binary distribution for a release.
+
+This command will call other :command:`bdist_*` commands to create one or more
+distributions depending on the options given. The default is to create a
+.tar.gz archive on Unix and a zip archive on Windows or OS/2.
+
+.. program:: packaging bdist
+
+.. cmdoption:: --formats
+
+ Binary formats to build (comma-separated list).
+
+.. cmdoption:: --show-formats
+
+ Dump list of available formats.
+
+
+:command:`bdist_dumb`
+---------------------
+
+Build a "dumb" installer, a simple archive of files that could be unpacked under
+``$prefix`` or ``$exec_prefix``.
+
+
+:command:`bdist_wininst`
+------------------------
+
+Build a Windows installer.
+
+
+:command:`bdist_msi`
+--------------------
+
+Build a `Microsoft Installer`_ (.msi) file.
+
+.. _Microsoft Installer: http://msdn.microsoft.com/en-us/library/cc185688(VS.85).aspx
+
+In most cases, the :command:`bdist_msi` installer is a better choice than the
+:command:`bdist_wininst` installer, because it provides better support for Win64
+platforms, allows administrators to perform non-interactive installations, and
+allows installation through group policies.
+
+
+Publishing distributions
+========================
+
+:command:`register`
+-------------------
+
+This command registers the current release with the Python Package Index. This
+is described in more detail in :PEP:`301`.
+
+.. TODO explain user and project registration with the web UI
+
+
+:command:`upload`
+-----------------
+
+Upload source and/or binary distributions to PyPI.
+
+The distributions have to be built on the same command line as the
+:command:`upload` command; see :ref:`packaging-package-upload` for more info.
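+
+For example, a single invocation that builds a source distribution and then
+uploads it might look like this (a sketch)::
+
+   python setup.py sdist upload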
+
+.. program:: packaging upload
+
+.. cmdoption:: --sign, -s
+
+ Sign each uploaded file using GPG (GNU Privacy Guard). The ``gpg`` program
+ must be available for execution on the system ``PATH``.
+
+.. cmdoption:: --identity=NAME, -i NAME
+
+ Specify the identity or key name for GPG to use when signing. The value of
+ this option will be passed through the ``--local-user`` option of the
+ ``gpg`` program.
+
+.. cmdoption:: --show-response
+
+ Display the full response text from server; this is useful for debugging
+ PyPI problems.
+
+.. cmdoption:: --repository=URL, -r URL
+
+ The URL of the repository to upload to. Defaults to
+ http://pypi.python.org/pypi (i.e., the main PyPI installation).
+
+.. cmdoption:: --upload-docs
+
+ Also run :command:`upload_docs`. Mainly useful as a default value in
+ :file:`setup.cfg` (on the command line, it's shorter to just type both
+ commands).
+
+
+:command:`upload_docs`
+----------------------
+
+Upload HTML documentation to PyPI.
+
+PyPI now supports publishing project documentation at a URI of the form
+``http://packages.python.org/<project>``. :command:`upload_docs` will create
+the necessary zip file out of a documentation directory and will post to the
+repository.
+
+Note that to upload the documentation of a project, the corresponding version
+must already be registered with PyPI, using the :command:`register` command ---
+just like with :command:`upload`.
+
+Assuming there is an ``Example`` project with documentation in the subdirectory
+:file:`docs`, for example::
+
+ Example/
+ example.py
+ setup.cfg
+ docs/
+ build/
+ html/
+ index.html
+ tips_tricks.html
+ conf.py
+ index.txt
+ tips_tricks.txt
+
+You can simply specify the directory with the HTML files in your
+:file:`setup.cfg` file:
+
+.. code-block:: cfg
+
+ [upload_docs]
+ upload-dir = docs/build/html
+
+
+.. program:: packaging upload_docs
+
+.. cmdoption:: --upload-dir
+
+ The directory to be uploaded to the repository. By default documentation
+ is searched for in the ``docs`` (or ``doc``) directory in the project root.
+
+.. cmdoption:: --show-response
+
+ Display the full response text from server; this is useful for debugging
+ PyPI problems.
+
+.. cmdoption:: --repository=URL, -r URL
+
+ The URL of the repository to upload to. Defaults to
+ http://pypi.python.org/pypi (i.e., the main PyPI installation).
+
+
+The install step
+================
+
+These commands are used by end-users of a project using :program:`pysetup` or
+another compatible installer. Each command will run the corresponding
+:command:`build_*` command and then move the built files to their destination on
+the target system.
+
+
+:command:`install_dist`
+-----------------------
+
+Install a distribution, delegating to the other :command:`install_*` commands to
+do the work. See :ref:`packaging-how-install-works` for complete usage
+instructions.
+
+
+:command:`install_data`
+-----------------------
+
+Install data files.
+
+
+:command:`install_distinfo`
+---------------------------
+
+Install files recording details of the installation as specified in :PEP:`376`.
+
+
+:command:`install_headers`
+--------------------------
+
+Install C/C++ header files.
+
+
+:command:`install_lib`
+----------------------
+
+Install all modules (extensions and pure Python).
+
+.. XXX what about C libraries created with build_clib?
+
+Similarly to ``build_py``, there are options to control the compilation of
+Python code to :term:`bytecode` files (see above). By default, :file:`.pyc`
+files will be created (``--compile``) and :file:`.pyo` files will not
+(``--optimize 0``).
+
+
+:command:`install_scripts`
+--------------------------
+
+Install scripts.
diff --git a/Doc/packaging/configfile.rst b/Doc/packaging/configfile.rst
new file mode 100644
index 0000000..825b5cb
--- /dev/null
+++ b/Doc/packaging/configfile.rst
@@ -0,0 +1,125 @@
+.. _packaging-setup-config:
+
+************************************
+Writing the Setup Configuration File
+************************************
+
+Often, it's not possible to write down everything needed to build a distribution
+*a priori*: you may need to get some information from the user, or from the
+user's system, in order to proceed. As long as that information is fairly
+simple---a list of directories to search for C header files or libraries, for
+example---then providing a configuration file, :file:`setup.cfg`, for users to
+edit is a cheap and easy way to solicit it. Configuration files also let you
+provide default values for any command option, which the installer can then
+override either on the command line or by editing the config file.
+
+The setup configuration file is a useful middle-ground between the setup script
+---which, ideally, would be opaque to installers [#]_---and the command line to
+the setup script, which is outside of your control and entirely up to the
+installer. In fact, :file:`setup.cfg` (and any other Distutils configuration
+files present on the target system) are processed after the contents of the
+setup script, but before the command line. This has several useful
+consequences:
+
+.. If you have more advanced needs, such as determining which extensions to
+ build based on what capabilities are present on the target system, then you
+ need the Distutils auto-configuration facility. This started to appear in
+ Distutils 0.9 but, as of this writing, isn't mature or stable enough yet
+ for real-world use.
+
+* installers can override some of what you put in :file:`setup.py` by editing
+ :file:`setup.cfg`
+
+* you can provide non-standard defaults for options that are not easily set in
+ :file:`setup.py`
+
+* installers can override anything in :file:`setup.cfg` using the command-line
+ options to :file:`setup.py`
+
+The basic syntax of the configuration file is simple::
+
+ [command]
+ option = value
+ ...
+
+where *command* is one of the Distutils commands (e.g. :command:`build_py`,
+:command:`install_dist`), and *option* is one of the options that command supports.
+Any number of options can be supplied for each command, and any number of
+command sections can be included in the file. Blank lines are ignored, as are
+comments, which run from a ``'#'`` character until the end of the line. Long
+option values can be split across multiple lines simply by indenting the
+continuation lines.
+
+You can find out the list of options supported by a particular command with the
+universal :option:`--help` option, e.g. ::
+
+ > python setup.py --help build_ext
+ [...]
+ Options for 'build_ext' command:
+ --build-lib (-b) directory for compiled extension modules
+ --build-temp (-t) directory for temporary files (build by-products)
+ --inplace (-i) ignore build-lib and put compiled extensions into the
+ source directory alongside your pure Python modules
+ --include-dirs (-I) list of directories to search for header files
+ --define (-D) C preprocessor macros to define
+ --undef (-U) C preprocessor macros to undefine
+ --swig-opts list of SWIG command-line options
+ [...]
+
+.. XXX do we want to support ``setup.py --help metadata``?
+
+Note that an option spelled :option:`--foo-bar` on the command line is spelled
+:option:`foo_bar` in configuration files.
+
+For example, say you want your extensions to be built "in-place"---that is, you
+have an extension :mod:`pkg.ext`, and you want the compiled extension file
+(:file:`ext.so` on Unix, say) to be put in the same source directory as your
+pure Python modules :mod:`pkg.mod1` and :mod:`pkg.mod2`. You can always use the
+:option:`--inplace` option on the command line to ensure this::
+
+ python setup.py build_ext --inplace
+
+But this requires that you always specify the :command:`build_ext` command
+explicitly, and remember to provide :option:`--inplace`. An easier way is to
+"set and forget" this option, by encoding it in :file:`setup.cfg`, the
+configuration file for this distribution::
+
+ [build_ext]
+ inplace = 1
+
+This will affect all builds of this module distribution, whether or not you
+explicitly specify :command:`build_ext`. If you include :file:`setup.cfg` in
+your source distribution, it will also affect end-user builds---which is
+probably a bad idea for this option, since always building extensions in-place
+would break installation of the module distribution. In certain peculiar cases,
+though, modules are built right in their installation directory, so this is
+conceivably a useful ability. (Distributing extensions that expect to be built
+in their installation directory is almost always a bad idea, though.)
+
+Another example: certain commands take options that vary from project to
+project but do not depend on the installation system; for example,
+:command:`test` needs to know where your test suite is located and what test
+runner to use; likewise, :command:`upload_docs` can find HTML documentation in
+a :file:`doc` or :file:`docs` directory, but needs an option to find files in
+:file:`docs/build/html`. Instead of having to type out these options each
+time you want to run the command, you can put them in the project's
+:file:`setup.cfg`::
+
+ [test]
+ suite = packaging.tests
+
+ [upload_docs]
+ upload-dir = docs/build/html
+
+
+.. seealso::
+
+ :ref:`packaging-config-syntax` in "Installing Python Projects"
+ More information on the configuration files is available in the manual for
+ system administrators.
+
+
+.. rubric:: Footnotes
+
+.. [#] This ideal probably won't be achieved until auto-configuration is fully
+ supported by the Distutils.
diff --git a/Doc/packaging/examples.rst b/Doc/packaging/examples.rst
new file mode 100644
index 0000000..594ade0
--- /dev/null
+++ b/Doc/packaging/examples.rst
@@ -0,0 +1,334 @@
+.. _packaging-examples:
+
+********
+Examples
+********
+
+This chapter provides a number of basic examples to help get started with
+Packaging.
+
+
+.. _packaging-pure-mod:
+
+Pure Python distribution (by module)
+====================================
+
+If you're just distributing a couple of modules, especially if they don't live
+in a particular package, you can specify them individually using the
+:option:`py_modules` option in the setup script.
+
+In the simplest case, you'll have two files to worry about: a setup script and
+the single module you're distributing, :file:`foo.py` in this example::
+
+ <root>/
+ setup.py
+ foo.py
+
+(In all diagrams in this section, *<root>* will refer to the distribution root
+directory.) A minimal setup script to describe this situation would be::
+
+ from packaging.core import setup
+ setup(name='foo',
+ version='1.0',
+ py_modules=['foo'])
+
+Note that the name of the distribution is specified independently with the
+:option:`name` option, and there's no rule that says it has to be the same as
+the name of the sole module in the distribution (although that's probably a good
+convention to follow). However, the distribution name is used to generate
+filenames, so you should stick to letters, digits, underscores, and hyphens.
+
+Since :option:`py_modules` is a list, you can of course specify multiple
+modules, e.g. if you're distributing modules :mod:`foo` and :mod:`bar`, your
+setup might look like this::
+
+ <root>/
+ setup.py
+ foo.py
+ bar.py
+
+and the setup script might be ::
+
+ from packaging.core import setup
+ setup(name='foobar',
+ version='1.0',
+ py_modules=['foo', 'bar'])
+
+You can put module source files into another directory, but if you have enough
+modules to do that, it's probably easier to specify modules by package rather
+than listing them individually.
+
+
+.. _packaging-pure-pkg:
+
+Pure Python distribution (by package)
+=====================================
+
+If you have more than a couple of modules to distribute, especially if they are
+in multiple packages, it's probably easier to specify whole packages rather than
+individual modules. This works even if your modules are not in a package; you
+can just tell the Distutils to process modules from the root package, and that
+works the same as any other package (except that you don't have to have an
+:file:`__init__.py` file).
+
+The setup script from the last example could also be written as ::
+
+ from packaging.core import setup
+ setup(name='foobar',
+ version='1.0',
+ packages=[''])
+
+(The empty string stands for the root package.)
+
+If those two files are moved into a subdirectory, but remain in the root
+package, e.g.::
+
+ <root>/
+ setup.py
+ src/
+ foo.py
+ bar.py
+
+then you would still specify the root package, but you have to tell the
+Distutils where source files in the root package live::
+
+ from packaging.core import setup
+ setup(name='foobar',
+ version='1.0',
+ package_dir={'': 'src'},
+ packages=[''])
+
+More typically, though, you will want to distribute multiple modules in the same
+package (or in sub-packages). For example, if the :mod:`foo` and :mod:`bar`
+modules belong in package :mod:`foobar`, one way to lay out your source tree is
+
+::
+
+ <root>/
+ setup.py
+ foobar/
+ __init__.py
+ foo.py
+ bar.py
+
+This is in fact the default layout expected by the Distutils, and the one that
+requires the least work to describe in your setup script::
+
+ from packaging.core import setup
+ setup(name='foobar',
+ version='1.0',
+ packages=['foobar'])
+
+If you want to put modules in directories not named for their package, then you
+need to use the :option:`package_dir` option again. For example, if the
+:file:`src` directory holds modules in the :mod:`foobar` package::
+
+ <root>/
+ setup.py
+ src/
+ __init__.py
+ foo.py
+ bar.py
+
+an appropriate setup script would be ::
+
+ from packaging.core import setup
+ setup(name='foobar',
+ version='1.0',
+ package_dir={'foobar': 'src'},
+ packages=['foobar'])
+
+Or, you might put modules from your main package right in the distribution
+root::
+
+ <root>/
+ setup.py
+ __init__.py
+ foo.py
+ bar.py
+
+in which case your setup script would be ::
+
+ from packaging.core import setup
+ setup(name='foobar',
+ version='1.0',
+ package_dir={'foobar': ''},
+ packages=['foobar'])
+
+(The empty string also stands for the current directory.)
+
+If you have sub-packages, they must be explicitly listed in :option:`packages`,
+but any entries in :option:`package_dir` automatically extend to sub-packages.
+(In other words, the Distutils does *not* scan your source tree, trying to
+figure out which directories correspond to Python packages by looking for
+:file:`__init__.py` files.) Thus, if the default layout grows a sub-package::
+
+ <root>/
+ setup.py
+ foobar/
+ __init__.py
+ foo.py
+ bar.py
+ subfoo/
+ __init__.py
+ blah.py
+
+then the corresponding setup script would be ::
+
+ from packaging.core import setup
+ setup(name='foobar',
+ version='1.0',
+ packages=['foobar', 'foobar.subfoo'])
+
+(Again, the empty string in :option:`package_dir` stands for the current
+directory.)
+
+
+.. _packaging-single-ext:
+
+Single extension module
+=======================
+
+Extension modules are specified using the :option:`ext_modules` option.
+:option:`package_dir` has no effect on where extension source files are found;
+it only affects the source for pure Python modules. The simplest case, a
+single extension module in a single C source file, is::
+
+ <root>/
+ setup.py
+ foo.c
+
+If the :mod:`foo` extension belongs in the root package, the setup script for
+this could be ::
+
+ from packaging.core import setup, Extension
+ setup(name='foobar',
+ version='1.0',
+ ext_modules=[Extension('foo', ['foo.c'])])
+
+If the extension actually belongs in a package, say :mod:`foopkg`, then with
+exactly the same source tree layout, this extension can be put in the
+:mod:`foopkg` package simply by changing the name of the extension::
+
+ from packaging.core import setup, Extension
+ setup(name='foobar',
+ version='1.0',
+ packages=['foopkg'],
+ ext_modules=[Extension('foopkg.foo', ['foo.c'])])
+
+
+Checking metadata
+=================
+
+The ``check`` command allows you to verify if your project's metadata
+meets the minimum requirements to build a distribution.
+
+To run it, just call it using your :file:`setup.py` script. If something is
+missing, ``check`` will display a warning.
+
+Let's take an example with a simple script::
+
+ from packaging.core import setup
+
+ setup(name='foobar')
+
+.. TODO configure logging StreamHandler to match this output
+
+Running the ``check`` command will display some warnings::
+
+ $ python setup.py check
+ running check
+ warning: check: missing required metadata: version, home_page
+ warning: check: missing metadata: either (author and author_email) or
+ (maintainer and maintainer_email) must be supplied
+
+
+If you use the reStructuredText syntax in the ``long_description`` field and
+`Docutils <http://docutils.sourceforge.net/>`_ is installed, you can check if
+the syntax is fine with the ``check`` command, using the ``restructuredtext``
+option.
+
+For example, if the :file:`setup.py` script is changed like this::
+
+ from packaging.core import setup
+
+ desc = """\
+ Welcome to foobar!
+ ===============
+
+ This is the description of the ``foobar`` project.
+ """
+
+ setup(name='foobar',
+ version='1.0',
+ author=u'Tarek Ziadé',
+ author_email='tarek@ziade.org',
+ summary='Foobar utilities',
+ description=desc,
+ home_page='http://example.com')
+
+Since the long description is broken, ``check`` will be able to detect it
+by using the :mod:`docutils` parser::
+
+ $ python setup.py check --restructuredtext
+ running check
+ warning: check: Title underline too short. (line 2)
+ warning: check: Could not finish the parsing.
+
+
+.. _packaging-reading-metadata:
+
+Reading the metadata
+====================
+
+The :func:`packaging.core.setup` function provides a command-line interface
+that allows you to query the metadata fields of a project through the
+:file:`setup.py` script of a given project::
+
+ $ python setup.py --name
+ foobar
+
+This call reads the ``name`` metadata by running the
+:func:`packaging.core.setup` function. When a source or binary
+distribution is created with Distutils, the metadata fields are written
+in a static file called :file:`PKG-INFO`. When a Distutils-based project is
+installed in Python, the :file:`PKG-INFO` file is copied alongside the modules
+and packages of the distribution under :file:`NAME-VERSION-pyX.X.egg-info`,
+where ``NAME`` is the name of the project, ``VERSION`` its version as defined
+in the Metadata, and ``pyX.X`` the major and minor version of Python like
+``2.7`` or ``3.2``.
+
+You can read back this static file by using the
+:class:`packaging.metadata.Metadata` class and its
+:func:`read_pkg_file` method::
+
+ >>> from packaging.metadata import Metadata
+ >>> metadata = Metadata()
+ >>> metadata.read_pkg_file(open('distribute-0.6.8-py2.7.egg-info'))
+ >>> metadata.name
+ 'distribute'
+ >>> metadata.version
+ '0.6.8'
+ >>> metadata.description
+ 'Easily download, build, install, upgrade, and uninstall Python packages'
+
+Notice that the class can also be instantiated with a metadata file path to
+load its values::
+
+ >>> pkg_info_path = 'distribute-0.6.8-py2.7.egg-info'
+ >>> Metadata(pkg_info_path).name
+ 'distribute'
+
+
+.. XXX These comments have been here for at least ten years. Write the
+ sections or delete the comments (we can maybe ask Greg Ward about
+ the planned contents). (Unindent to make them section titles)
+
+ .. multiple-ext::
+
+ Multiple extension modules
+ ==========================
+
+ Putting it all together
+ =======================
diff --git a/Doc/packaging/extending.rst b/Doc/packaging/extending.rst
new file mode 100644
index 0000000..f2d3863
--- /dev/null
+++ b/Doc/packaging/extending.rst
@@ -0,0 +1,95 @@
+.. _extending-packaging:
+
+*******************
+Extending Distutils
+*******************
+
+Distutils can be extended in various ways. Most extensions take the form of new
+commands or replacements for existing commands. New commands may be written to
+support new types of platform-specific packaging, for example, while
+replacements for existing commands may be made to modify details of how the
+command operates on a package.
+
+Most extensions of Packaging are made within :file:`setup.py` scripts that
+want to modify existing commands; many simply add a few file extensions that
+should be copied into packages in addition to :file:`.py` files as a
+convenience.
+
+Most packaging command implementations are subclasses of the
+:class:`packaging.cmd.Command` class. New commands may directly inherit from
+:class:`Command`, while replacements often derive from :class:`Command`
+indirectly, directly subclassing the command they are replacing. Commands are
+required to derive from :class:`Command`.
+
+.. .. _extend-existing:
+ Extending existing commands
+ ===========================
+
+
+.. .. _new-commands:
+ Writing new commands
+ ====================
+
+
+Integrating new commands
+========================
+
+There are different ways to integrate new command implementations into
+packaging. The most difficult is to lobby for the inclusion of the new features
+in packaging itself, and wait for (and require) a version of Python that
+provides that support. This is really hard for many reasons.
+
+The most common, and possibly the most reasonable for most needs, is to include
+the new implementations with your :file:`setup.py` script, and cause the
+:func:`packaging.core.setup` function to use them::
+
+ from packaging.core import setup
+ from packaging.command.build_py import build_py as _build_py
+
+ class build_py(_build_py):
+ """Specialized Python source builder."""
+
+ # implement whatever needs to be different...
+
+ setup(..., cmdclass={'build_py': build_py})
+
+This approach is most valuable if the new implementations must be used in
+order to use a particular package, as everyone interested in the package will
+need to have the new command implementation.
+
+Beginning with Python 2.4, a third option is available, intended to allow new
+commands to be added which can support existing :file:`setup.py` scripts without
+requiring modifications to the Python installation. This is expected to allow
+third-party extensions to provide support for additional packaging systems, but
+the commands can be used for anything packaging commands can be used for. A new
+configuration option, :option:`command_packages` (command-line option
+:option:`--command-packages`), can be used to specify additional packages to be
+searched for modules implementing commands. Like all packaging options, this
+can be specified on the command line or in a configuration file. This option
+can only be set in the ``[global]`` section of a configuration file, or before
+any commands on the command line. If set in a configuration file, it can be
+overridden from the command line; setting it to an empty string on the command
+line causes the default to be used. This should never be set in a configuration
+file provided with a package.
+
+This new option can be used to add any number of packages to the list of
+packages searched for command implementations; multiple package names should be
+separated by commas. When not specified, the search is only performed in the
+:mod:`packaging.command` package. When :file:`setup.py` is run with the option
+``--command-packages distcmds,buildcmds``, however, the packages
+:mod:`packaging.command`, :mod:`distcmds`, and :mod:`buildcmds` will be searched
+in that order. New commands are expected to be implemented in modules of the
+same name as the command by classes sharing the same name. Given the example
+command-line option above, the command :command:`bdist_openpkg` could be
+implemented by the class :class:`distcmds.bdist_openpkg.bdist_openpkg` or
+:class:`buildcmds.bdist_openpkg.bdist_openpkg`.
+
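+For instance, a sketch of setting this option in a configuration file::
+
+   [global]
+   command_packages = distcmds,buildcmds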
+
+Adding new distribution types
+=============================
+
+Commands that create distributions (files in the :file:`dist/` directory) need
+to add ``(command, filename)`` pairs to ``self.distribution.dist_files`` so that
+:command:`upload` can upload it to PyPI. The *filename* in the pair contains no
+path information, only the name of the file itself. In dry-run mode, pairs
+should still be added to represent what would have been created.
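+
+A minimal sketch, inside a hypothetical command's :meth:`run` method::
+
+   def run(self):
+       # build the archive, then record it so ``upload`` can find it
+       filename = self._make_archive()   # hypothetical helper
+       self.distribution.dist_files.append(('bdist_openpkg', filename))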
diff --git a/Doc/packaging/index.rst b/Doc/packaging/index.rst
new file mode 100644
index 0000000..d3d0dec
--- /dev/null
+++ b/Doc/packaging/index.rst
@@ -0,0 +1,45 @@
+.. _packaging-index:
+
+##############################
+ Distributing Python Projects
+##############################
+
+:Authors: The Fellowship of the Packaging
+:Email: distutils-sig@python.org
+:Release: |version|
+:Date: |today|
+
+This document describes Packaging for Python authors, describing how to use the
+module to make Python applications, packages or modules easily available to a
+wider audience with very little overhead for build/release/install mechanics.
+
+.. toctree::
+ :maxdepth: 2
+ :numbered:
+
+ tutorial
+ setupcfg
+ introduction
+ setupscript
+ configfile
+ sourcedist
+ builtdist
+ packageindex
+ uploading
+ examples
+ extending
+ commandhooks
+ commandref
+
+
+.. seealso::
+
+ :ref:`packaging-install-index`
+ A user-centered manual which includes information on adding projects
+ into an existing Python installation. You do not need to be a Python
+ programmer to read this manual.
+
+ :mod:`packaging`
+ A library reference for developers of packaging tools wanting to use
+ standalone building blocks like :mod:`~packaging.version` or
+ :mod:`~packaging.metadata`, or extend Packaging itself.
diff --git a/Doc/packaging/introduction.rst b/Doc/packaging/introduction.rst
new file mode 100644
index 0000000..a757ffc
--- /dev/null
+++ b/Doc/packaging/introduction.rst
@@ -0,0 +1,193 @@
+.. _packaging-intro:
+
+*****************************
+An Introduction to Packaging
+*****************************
+
+This document covers using Packaging to distribute your Python modules,
+concentrating on the role of developer/distributor. If you're looking for
+information on installing Python modules you should refer to the
+:ref:`packaging-install-index` chapter.
+
+Throughout this documentation, the terms "Distutils", "the Distutils" and
+"Packaging" will be used interchangeably.
+
+.. _packaging-concepts:
+
+Concepts & Terminology
+======================
+
+Using Distutils is quite simple both for module developers and for
+users/administrators installing third-party modules. As a developer, your
+responsibilities (apart from writing solid, well-documented and well-tested
+code, of course!) are:
+
+* writing a setup script (:file:`setup.py` by convention)
+
+* (optional) writing a setup configuration file
+
+* creating a source distribution
+
+* (optional) creating one or more "built" (binary) distributions of your
+ project
+
+All of these tasks are covered in this document.
+
+Not all module developers have access to multiple platforms, so one cannot
+expect them to create built distributions for every platform. To remedy
+this, it is hoped that intermediaries called *packagers* will arise to address
+this need. Packagers take source distributions released by module developers,
+build them on one or more platforms and release the resulting built
+distributions. Thus, users on a greater range of platforms will be able to
+install the most popular Python modules in the most natural way for their
+platform without having to run a setup script or compile a single line of code.
+
+
+.. _packaging-simple-example:
+
+A Simple Example
+================
+
+A setup script is usually quite simple, although since it's written in Python
+there are no arbitrary limits to what you can do with it. You should, however,
+be careful about putting expensive operations in your setup script:
+unlike, say, Autoconf-style configure scripts, the setup script may be run
+multiple times in the course of building and installing a module
+distribution.
+
+If all you want to do is distribute a module called :mod:`foo`, contained in a
+file :file:`foo.py`, then your setup script can be as simple as::
+
+ from packaging.core import setup
+ setup(name='foo',
+ version='1.0',
+ py_modules=['foo'])
+
+Some observations:
+
+* most information that you supply to the Distutils is supplied as keyword
+ arguments to the :func:`setup` function
+
+* those keyword arguments fall into two categories: package metadata (name,
+ version number, etc.) and information about what's in the package (a list
+ of pure Python modules in this case)
+
+* modules are specified by module name, not filename (the same will hold true
+ for packages and extensions)
+
+* it's recommended that you supply a little more metadata than we have in the
+ example, in particular your name, email address and a URL for the
+ project if appropriate (see section :ref:`packaging-setup-script` for an example)
+
+To create a source distribution for this module you would create a setup
+script, :file:`setup.py`, containing the above code and run::
+
+ python setup.py sdist
+
+which will create an archive file (e.g., tarball on Unix, ZIP file on Windows)
+containing your setup script :file:`setup.py`, and your module :file:`foo.py`.
+The archive file will be named :file:`foo-1.0.tar.gz` (or :file:`.zip`), and
+will unpack into a directory :file:`foo-1.0`.
+
+If an end-user wishes to install your :mod:`foo` module all he has to do is
+download :file:`foo-1.0.tar.gz` (or :file:`.zip`), unpack it, and from the
+:file:`foo-1.0` directory run ::
+
+ python setup.py install
+
+which will copy :file:`foo.py` to the appropriate directory for
+third-party modules in their Python installation.
+
+This simple example demonstrates some fundamental concepts of Distutils.
+First, both developers and installers have the same basic user interface, i.e.
+the setup script. The difference is which Distutils *commands* they use: the
+:command:`sdist` command is almost exclusively for module developers, while
+:command:`install` is more often used by installers (although some developers
+will want to install their own code occasionally).
+
+If you want to make things really easy for your users, you can create one or
+more built distributions for them. For instance, if you are running on a
+Windows machine and want to make things easy for other Windows users, you can
+create an executable installer (the most appropriate type of built distribution
+for this platform) with the :command:`bdist_wininst` command. For example::
+
+ python setup.py bdist_wininst
+
+will create an executable installer, :file:`foo-1.0.win32.exe`, in the current
+directory. You can find out what distribution formats are available at any time
+by running ::
+
+ python setup.py bdist --help-formats
+
+
+.. _packaging-python-terms:
+
+General Python terminology
+==========================
+
+If you're reading this document, you probably have a good idea of what Python
+modules, extensions and so forth are. Nevertheless, just to be sure that
+everyone is on the same page, here's a quick overview of Python terms:
+
+module
+ The basic unit of code reusability in Python: a block of code imported by
+ some other code. Three types of modules are important to us here: pure
+ Python modules, extension modules and packages.
+
+pure Python module
+ A module written in Python and contained in a single :file:`.py` file (and
+ possibly associated :file:`.pyc` and/or :file:`.pyo` files). Sometimes
+ referred to as a "pure module."
+
+extension module
+ A module written in the low-level language of the Python implementation: C/C++
+ for Python, Java for Jython. Typically contained in a single dynamically
+ loaded pre-compiled file, e.g. a shared object (:file:`.so`) file for Python
+ extensions on Unix, a DLL (given the :file:`.pyd` extension) for Python
+ extensions on Windows, or a Java class file for Jython extensions. Note that
+ currently Distutils only handles C/C++ extensions for Python.
+
+package
+ A module that contains other modules, typically contained in a directory of
+ the filesystem and distinguished from other directories by the presence of a
+ file :file:`__init__.py`.
+
+root package
+ The root of the hierarchy of packages. (This isn't really a package,
+ since it doesn't have an :file:`__init__.py` file. But... we have to
+ call it something, right?) The vast majority of the standard library is
+ in the root package, as are many small standalone third-party modules that
+ don't belong to a larger module collection. Unlike regular packages,
+ modules in the root package can be found in many directories: in fact,
+ every directory listed in ``sys.path`` contributes modules to the root
+ package.
+
+
+.. _packaging-term:
+
+Distutils-specific terminology
+==============================
+
+The following terms apply more specifically to the domain of distributing Python
+modules using Distutils:
+
+module distribution
+ A collection of Python modules distributed together as a single downloadable
+ resource and meant to be installed all as one. Examples of some well-known
+ module distributions are NumPy, SciPy, PIL (the Python Imaging
+ Library) and mxBase. (A module distribution would be called a *package*,
+ except that term is already taken in the Python context: a single module
+ distribution may contain zero, one, or many Python packages.)
+
+pure module distribution
+ A module distribution that contains only pure Python modules and packages.
+ Sometimes referred to as a "pure distribution."
+
+non-pure module distribution
+ A module distribution that contains at least one extension module. Sometimes
+ referred to as a "non-pure distribution."
+
+distribution root
+ The top-level directory of your source tree (or source distribution). The
+ directory where :file:`setup.py` exists. Generally :file:`setup.py` will
+ be run from this directory.
diff --git a/Doc/packaging/packageindex.rst b/Doc/packaging/packageindex.rst
new file mode 100644
index 0000000..cd1d598
--- /dev/null
+++ b/Doc/packaging/packageindex.rst
@@ -0,0 +1,104 @@
+.. _packaging-package-index:
+
+**********************************
+Registering with the Package Index
+**********************************
+
+The Python Package Index (PyPI) holds metadata describing distributions
+packaged with Packaging. The Packaging command :command:`register` is used to
+submit your distribution's metadata to the index. It is invoked as follows::
+
+ python setup.py register
+
+Distutils will respond with the following prompt::
+
+ running register
+ We need to know who you are, so please choose either:
+ 1. use your existing login,
+ 2. register as a new user,
+ 3. have the server generate a new password for you (and email it to you), or
+ 4. quit
+ Your selection [default 1]:
+
+Note: if your username and password are saved locally, you will not see this
+menu.
+
+If you have not registered with PyPI, then you will need to do so now. You
+should choose option 2, and enter your details as required. Soon after
+submitting your details, you will receive an email which will be used to confirm
+your registration.
+
+Once you are registered, you may choose option 1 from the menu. You will be
+prompted for your PyPI username and password, and :command:`register` will then
+submit your metadata to the index.
+
+You may submit any number of versions of your distribution to the index. If you
+alter the metadata for a particular version, you may submit it again and the
+index will be updated.
+
+PyPI holds a record for each (name, version) combination submitted. The first
+user to submit information for a given name is designated the Owner of that
+name. They may submit changes through the :command:`register` command or through
+the web interface. They may also designate other users as Owners or Maintainers.
+Maintainers may edit the package information, but not designate other Owners or
+Maintainers.
+
+By default PyPI will list all versions of a given package. To hide certain
+versions, the Hidden property should be set to yes. This must be edited through
+the web interface.
+
+
+.. _packaging-pypirc:
+
+The .pypirc file
+================
+
+The format of the :file:`.pypirc` file is as follows::
+
+   [packaging]
+   index-servers =
+       pypi
+
+   [pypi]
+   repository: <repository-url>
+   username: <username>
+   password: <password>
+
+The *packaging* section defines an *index-servers* variable that lists the
+names of all sections describing a repository.
+
+Each section describing a repository defines three variables:
+
+- *repository*, which defines the URL of the PyPI server. Defaults to
+  ``http://www.python.org/pypi``.
+- *username*, which is the registered username on the PyPI server.
+- *password*, which will be used to authenticate. If omitted, the user
+  will be prompted to type it when needed.
+
+If you want to define another server, a new section can be created and
+listed in the *index-servers* variable::
+
+   [packaging]
+   index-servers =
+       pypi
+       other
+
+   [pypi]
+   repository: <repository-url>
+   username: <username>
+   password: <password>
+
+   [other]
+   repository: http://example.com/pypi
+   username: <username>
+   password: <password>
+
+:command:`register` can then be called with the -r option to point to the
+repository to work with::
+
+ python setup.py register -r http://example.com/pypi
+
+For convenience, the name of the section that describes the repository
+may also be used::
+
+ python setup.py register -r other
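+
+To see which repositories are configured, you can also read the file yourself.
+A minimal sketch, assuming :file:`.pypirc` lives in your home directory (its
+conventional location)::
+
+   import os
+   import configparser
+
+   config = configparser.RawConfigParser()
+   config.read(os.path.expanduser('~/.pypirc'))
+   # list each configured server and the repository URL it points to
+   for name in config.get('packaging', 'index-servers').split():
+       print(name, config.get(name, 'repository'))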
diff --git a/Doc/packaging/setupcfg.rst b/Doc/packaging/setupcfg.rst
new file mode 100644
index 0000000..a381017
--- /dev/null
+++ b/Doc/packaging/setupcfg.rst
@@ -0,0 +1,890 @@
+.. highlightlang:: cfg
+
+.. _setupcfg-spec:
+
+*******************************************
+Specification of the :file:`setup.cfg` file
+*******************************************
+
+:version: 0.9
+
+This document describes :file:`setup.cfg`, an ini-style configuration file
+used by Packaging to replace the :file:`setup.py` file used by Distutils.
+This specification is language-agnostic, and will therefore repeat some
+information that's already documented for Python in the
+:class:`configparser.RawConfigParser` documentation.
+
+.. contents::
+ :depth: 3
+ :local:
+
+
+.. _setupcfg-syntax:
+
+Syntax
+======
+
+The ini-style format used in the configuration file is a simple collection of
+sections that group sets of key-value fields separated by ``=`` or ``:`` and
+optional whitespace. Lines starting with ``#`` or ``;`` are comments and will
+be ignored. Empty lines are also ignored. Example::
+
+ [section1]
+ # comment
+ name = value
+ name2 = "other value"
+
+ [section2]
+ foo = bar
+
+
+Parsing values
+---------------
+
+The following rules are used to parse values:
+
+- If a value is quoted with ``"`` chars, it's a string. If a quote character is
+ present in the quoted value, it can be escaped as ``\"`` or left as-is.
+
+- If the value is ``true``, ``t``, ``yes``, ``y`` (case-insensitive) or ``1``,
+ it's converted to the language equivalent of a ``True`` value; if it's
+ ``false``, ``f``, ``no``, ``n`` (case-insensitive) or ``0``, it's converted to
+ the equivalent of ``False``.
+
+- A value can contain multiple lines. When read, lines are converted into a
+  sequence of values. Each line after the first must start with at least one
+  space or tab character; this leading indentation will be stripped.
+
+- All other values are considered strings.
+
+Examples::
+
+   [section]
+   foo = one
+         two
+         three
+
+   bar = false
+   baz = 1.3
+   boo = "ok"
+   beee = "wqdqw pojpj w\"ddq"
+
+
+Extending files
+---------------
+
+A configuration file can extend (i.e. include) other files. For this,
+a ``DEFAULT`` section must contain an ``extends`` key whose value points to one
+or more files which will be merged into the current file by adding new sections
+and fields. If a file loaded by ``extends`` contains sections or keys that
+already exist in the current file, they will not override the existing values.
+
+Contents of :file:`one.cfg`::
+
+ [section1]
+ name = value
+
+ [section2]
+ foo = foo from one.cfg
+
+Contents of :file:`two.cfg`::
+
+ [DEFAULT]
+ extends = one.cfg
+
+ [section2]
+ foo = foo from two.cfg
+ baz = baz from two.cfg
+
+The result of parsing :file:`two.cfg` is equivalent to this file::
+
+ [section1]
+ name = value
+
+ [section2]
+ foo = foo from two.cfg
+ baz = baz from two.cfg
+
+Example use of multi-line notation to include more than one file::
+
+   [DEFAULT]
+   extends = one.cfg
+             two.cfg
+
+When several files are provided, they are processed sequentially, following the
+precedence rules explained above. This means that the list of files should go
+from most specialized to most common.
+
+**Tools will need to provide a way to produce a merged version of the
+file**. This will be useful to let users publish a single file.
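+
+A hedged sketch of this merge in Python, using
+:class:`configparser.RawConfigParser` (the real processing is done by the
+Packaging tools, and this sketch ignores recursive ``extends``):
+
+.. code-block:: python
+
+   import configparser
+
+   def read_extended(path):
+       parser = configparser.RawConfigParser()
+       parser.read(path)
+       # merge each extended file in, never overriding existing values
+       for base in parser.defaults().get('extends', '').split():
+           extended = configparser.RawConfigParser()
+           extended.read(base)
+           for section in extended.sections():
+               if not parser.has_section(section):
+                   parser.add_section(section)
+               for name, value in extended.items(section):
+                   if not parser.has_option(section, name):
+                       parser.set(section, name, value)
+       return parser
+
+   config = read_extended('two.cfg')
+   print(config.get('section2', 'foo'))   # foo from two.cfg
+   print(config.get('section2', 'baz'))   # baz from two.cfg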
+
+
+.. _setupcfg-sections:
+
+Description of sections and fields
+==================================
+
+Each section contains a description of its options.
+
+- Options that are marked *multi* can have multiple values, one value per
+ line.
+- Options that are marked *optional* can be omitted.
+- Options that are marked *environ* can use environment markers, as described
+ in :PEP:`345`.
+
+
+The sections are:
+
+global
+ Global options not related to one command.
+
+metadata
+ Name, version and other information defined by :PEP:`345`.
+
+files
+ Modules, scripts, data, documentation and other files to include in the
+ distribution.
+
+extension sections
+ Options used to build extension modules.
+
+command sections
+ Options given for specific commands, identical to those that can be given
+ on the command line.
+
+
+.. _setupcfg-section-global:
+
+Global options
+--------------
+
+Contains global options for Packaging. This section is shared with Distutils.
+
+
+commands
+ Defines an additional Packaging command. A command is identified by its
+ fully qualified name. *optional*, *multi*
+
+ Examples::
+
+      [global]
+      commands =
+          package.setup.CustomSdistCommand
+          package.setup.BdistDeb
+
+compilers
+ Defines an additional Packaging compiler. A compiler is identified by its
+ fully qualified name. *optional*, *multi*
+
+ Example::
+
+      [global]
+      compilers =
+          hotcompiler.SmartCCompiler
+
+setup_hooks
+ Defines a list of callables to be called right after the :file:`setup.cfg`
+ file is read, before any other processing. Each value is a Python dotted
+ name to an object, which has to be defined in a module present in the project
+ directory alongside :file:`setup.cfg` or on Python's :data:`sys.path` (see
+ :ref:`packaging-finding-hooks`). The callables are executed in the
+ order they're found in the file; if one of them cannot be found, tools should
+ not stop, but for example produce a warning and continue with the next line.
+ Each callable receives the configuration as a dictionary (keys are
+ :file:`setup.cfg` sections, values are dictionaries of fields) and can make
+ any change to it. *optional*, *multi*
+
+ Example::
+
+      [global]
+      setup_hooks = _setuphooks.customize_config
+
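+ A hook module for the example above (a hypothetical :file:`_setuphooks.py`
+ next to :file:`setup.cfg`) could look like this sketch:
+
+ .. code-block:: python
+
+    def customize_config(config):
+        # *config* maps setup.cfg section names to dicts of fields;
+        # hooks may inspect or mutate it before other processing runs
+        metadata = config.setdefault('metadata', {})
+        metadata.setdefault('name', 'myproject')   # hypothetical default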
+
+
+.. _setupcfg-section-metadata:
+
+Metadata
+--------
+
+The metadata section contains the metadata for the project as described in
+:PEP:`345`. Field names are case-insensitive.
+
+Fields:
+
+name
+ Name of the project.
+
+version
+ Version of the project. Must comply with :PEP:`386`.
+
+platform
+ Platform specification describing an operating system
+ supported by the distribution which is not listed in the "Operating System"
+ Trove classifiers (:PEP:`301`). *optional*, *multi*
+
+supported-platform
+ Binary distributions containing a PKG-INFO file will
+ use the Supported-Platform field in their metadata to specify the OS and
+ CPU for which the binary distribution was compiled. The semantics of
+ the Supported-Platform field are free form. *optional*, *multi*
+
+summary
+ A one-line summary of what the distribution does.
+ (Used to be called *description* in Distutils1.)
+
+description
+ A longer description. (Used to be called *long_description*
+ in Distutils1.) A file can be provided in the *description-file* field.
+ *optional*
+
+keywords
+ A list of additional keywords to be used to assist searching
+ for the distribution in a larger catalog. Comma or space-separated.
+ *optional*
+
+home-page
+ The URL for the distribution's home page.
+
+download-url
+ The URL from which this version of the distribution
+ can be downloaded. *optional*
+
+author
+ Author's name. *optional*
+
+author-email
+ Author's e-mail. *optional*
+
+maintainer
+ Maintainer's name. *optional*
+
+maintainer-email
+ Maintainer's e-mail. *optional*
+
+license
+ A text indicating the terms of use, for cases where a Trove classifier
+ does not match. *optional*
+
+classifiers
+ Classification for the distribution, as described in :PEP:`301`.
+ *optional*, *multi*, *environ*
+
+requires-dist
+ Name of another packaging project required as a dependency.
+ The format is *name (version)* where version is an optional
+ version declaration, as described in :PEP:`345`. *optional*, *multi*,
+ *environ*
+
+provides-dist
+ Name of another packaging project contained within this
+ distribution. Same format as *requires-dist*. *optional*, *multi*,
+ *environ*
+
+obsoletes-dist
+ Name of another packaging project this version obsoletes.
+ Same format as *requires-dist*. *optional*, *multi*, *environ*
+
+requires-python
+ Specifies the Python version the distribution requires. The value is a
+ comma-separated list of version predicates, as described in :PEP:`345`.
+ *optional*, *environ*
+
+requires-externals
+ A dependency on an external system package or tool. This field is
+ free-form, and just a hint for downstream maintainers. *optional*,
+ *multi*, *environ*
+
+project-url
+ A label and a browsable URL for the project, given as "label, url".
+ The label is limited to 32 characters. *optional*, *multi*
+
+One extra field not present in PEP 345 is supported:
+
+description-file
+ Path to a text file that will be used to fill the ``description`` field.
+ Multiple values are accepted; they must be separated by whitespace.
+ ``description-file`` and ``description`` are mutually exclusive. *optional*
+
+
+
+Example::
+
+   [metadata]
+   name = pypi2rpm
+   version = 0.1
+   author = Tarek Ziadé
+   author-email = tarek@ziade.org
+   summary = Script that transforms an sdist archive into a RPM package
+   description-file = README
+   home-page = http://bitbucket.org/tarek/pypi2rpm/wiki/Home
+   project-url:
+       Repository, http://bitbucket.org/tarek/pypi2rpm/
+       RSS feed, https://bitbucket.org/tarek/pypi2rpm/rss
+   classifier =
+       Development Status :: 3 - Alpha
+       License :: OSI Approved :: Mozilla Public License 1.1 (MPL 1.1)
+
+You should not give any explicit value for metadata-version: it will be guessed
+from the fields present in the file.
+
+
+.. _setupcfg-section-files:
+
+Files
+-----
+
+This section describes the files included in the project.
+
+packages_root
+ the root directory containing all packages and modules
+ (default: current directory, i.e. the project's top-level
+ directory where :file:`setup.cfg` lives). *optional*
+
+packages
+ a list of packages the project includes *optional*, *multi*
+
+modules
+ a list of modules the project includes *optional*, *multi*
+
+scripts
+ a list of scripts the project includes *optional*, *multi*
+
+extra_files
+ a list of patterns for additional files to include in source distributions
+ (see :ref:`packaging-manifest`) *optional*, *multi*
+
+Example::
+
+   [files]
+   packages_root = src
+   packages =
+       pypi2rpm
+       pypi2rpm.command
+
+   scripts =
+       pypi2rpm/pypi2rpm.py
+
+   extra_files =
+       setup.py
+       README
+
+
+.. Note::
+ The :file:`setup.cfg` configuration file is included by default. Contrary to
+ Distutils, :file:`README` (or :file:`README.txt`) and :file:`setup.py` are
+ not included by default.
+
+
+Resources
+^^^^^^^^^
+
+This section describes the files used by the project which must not be
+installed in the same place as Python modules or libraries; they are called
+**resources**. Examples of resources are documentation files, script files,
+databases, etc.
+
+To declare resources, you must use this notation::
+
+ source = destination
+
+Data-files are declared in the **resources** field in the **files** section, for
+example::
+
+   [files]
+   resources =
+       source1 = destination1
+       source2 = destination2
+
+The **source** part of the declaration is a relative path to a resource file
+(using the Unix path separator **/**). For example, if you have this source
+tree::
+
+   foo/
+      doc/
+         doc.man
+      scripts/
+         foo.sh
+
+Your setup.cfg will look like::
+
+   [files]
+   resources =
+       doc/doc.man = destination_doc
+       scripts/foo.sh = destination_scripts
+
+The final paths where files will be placed are composed of **destination** +
+**source**. In the previous example, **doc/doc.man** will be placed in
+**destination_doc/doc/doc.man** and **scripts/foo.sh** will be placed in
+**destination_scripts/scripts/foo.sh**. (If you want more control over the
+final path, take a look at :ref:`setupcfg-resources-base-prefix`.)
+
+The **destination** part of a resources declaration is a path built from
+categories. It's generally a bad idea to give an absolute path, as it will
+not be portable across platforms. Instead, you should use resource categories
+in your **destination** declaration; categories will be replaced by their real
+paths at installation time. Using categories has only benefits: your
+declaration will be simpler and cross-platform, and it will allow packagers to
+place resource files where they want without breaking your code.
+
+Categories can be specified by using this syntax::
+
+ {category}
+
+Default categories are:
+
+* config
+* appdata
+* appdata.arch
+* appdata.persistent
+* appdata.disposable
+* help
+* icon
+* scripts
+* doc
+* info
+* man
+
+A special category, **{distribution.name}**, will be replaced by the name of
+the distribution; but since most of the default categories already use it,
+it's usually not necessary to add **{distribution.name}** to your destination.
+
+If you use categories in your declarations (and you are encouraged to), the
+final path will be::
+
+ source + destination_expanded
+
+.. _example_final_path:
+
+For example, if you have this setup.cfg::
+
+   [metadata]
+   name = foo
+
+   [files]
+   resources =
+       doc/doc.man = {doc}
+
+And if **{doc}** is replaced by **{datadir}/doc/{distribution.name}**, the
+final path will be::
+
+   {datadir}/doc/foo/doc/doc.man
+
+where the {datadir} category will be platform-dependent.
+
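+A hedged sketch of this expansion step in Python, using simple string
+substitution (the category paths here are invented for the example):
+
+.. code-block:: python
+
+   # illustrative only: expand {category} markers in a destination rule
+   categories = {'doc': '{datadir}/doc/{distribution.name}',
+                 'datadir': '/usr/share',           # hypothetical POSIX value
+                 'distribution.name': 'foo'}
+
+   def expand(rule):
+       while '{' in rule:
+           for name, value in categories.items():
+               rule = rule.replace('{%s}' % name, value)
+       return rule
+
+   destination = expand('{doc}')            # -> /usr/share/doc/foo
+   source = 'doc/doc.man'
+   print('%s/%s' % (destination, source))   # /usr/share/doc/foo/doc/doc.man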
+
+More control on source part
+"""""""""""""""""""""""""""
+
+Glob syntax
+'''''''''''
+
+When you declare a source file, you can use a glob-like syntax to match
+multiple files, for example::
+
+   scripts/* = {scripts}
+
+This will match all the files in the scripts directory and place them in the
+{scripts} category.
+
+Glob tokens are:
+
+ * ``*``: matches any number of characters.
+ * ``?``: matches a single character.
+ * ``**``: matches any level of tree recursion (even 0).
+ * ``{}``: matches any of the comma-separated alternatives it contains
+   (example: ``{sh,bat}``).
+
+.. TODO Add examples
+
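+For illustration, these glob tokens could be translated to a regular
+expression along these lines (a sketch, not the Packaging implementation):
+
+.. code-block:: python
+
+   import re
+
+   def glob_to_regex(pattern):
+       i, n, res = 0, len(pattern), []
+       while i < n:
+           c = pattern[i]
+           i += 1
+           if c == '*':
+               if i < n and pattern[i] == '*':
+                   i += 1
+                   res.append('.*')       # '**' crosses directory levels
+               else:
+                   res.append('[^/]*')    # '*' stays within one level
+           elif c == '?':
+               res.append('[^/]')         # any single character
+           elif c == '{':
+               j = pattern.index('}', i)
+               alternatives = pattern[i:j].split(',')
+               res.append('(%s)' % '|'.join(map(re.escape, alternatives)))
+               i = j + 1
+           else:
+               res.append(re.escape(c))
+       return re.compile(''.join(res) + r'\Z')
+
+   print(bool(glob_to_regex('scripts/*.{sh,bat}').match('scripts/foo.sh')))
+   # True
+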
+Order of declaration
+''''''''''''''''''''
+
+The order of declaration is important if one file matches multiple rules: the
+last rule matched by the file is used. This is useful if you have this source
+tree::
+
+   foo/
+      doc/
+         index.rst
+         setup.rst
+         documentation.txt
+         doc.tex
+         README
+
+Suppose you want all the files in the doc directory to be placed in the {doc}
+category, except README, which must be placed in the {help} category. Instead
+of listing all the files one by one, you can declare them in this way::
+
+   [files]
+   resources =
+       doc/* = {doc}
+       doc/README = {help}
+
+Exclude
+'''''''
+
+You can exclude files from the resources declaration by giving them no
+destination. This can be useful if you have a non-resource file in the same
+directory as resource files::
+
+   foo/
+      doc/
+         RELEASES
+         doc.tex
+         documentation.txt
+         docu.rst
+
+Your **files** section will be::
+
+   [files]
+   resources =
+       doc/* = {doc}
+       doc/RELEASES =
+
+More control on destination part
+""""""""""""""""""""""""""""""""
+
+.. _setupcfg-resources-base-prefix:
+
+Defining a base prefix
+''''''''''''''''''''''
+
+When you define your resources, you can have more control over how the final
+path is computed.
+
+By default, the final path is::
+
+ destination + source
+
+This can generate long paths, for example (example_final_path_)::
+
+ {datadir}/doc/foo/doc/doc.man
+
+When you declare your source, you can use whitespace to split the source into
+a **prefix** and a **suffix**. For example, if you have this source::
+
+   docs/ doc.man
+
+the **prefix** is "docs/" and the **suffix** is "doc.man".
+
+.. note::
+
+ The separator can be placed after a path separator or replace it, so these
+ two sources are equivalent::
+
+    docs/ doc.man
+    docs doc.man
+
+.. note::
+
+ Glob syntax works the same way with standard and split sources, so these
+ rules::
+
+    docs/*
+    docs/ *
+    docs *
+
+ will all match the files in the docs directory.
+
+When you use a split source, the final path is computed this way::
+
+   destination + suffix
+
+So for example, if you have this setup.cfg::
+
+   [metadata]
+   name = foo
+
+   [files]
+   resources =
+       doc/ doc.man = {doc}
+
+And if **{doc}** is replaced by **{datadir}/doc/{distribution.name}**, the
+final path will be::
+
+   {datadir}/doc/foo/doc.man
+
+
+Overwriting paths for categories
+""""""""""""""""""""""""""""""""
+
+This part is intended for system administrators or downstream OS packagers.
+
+The real paths of categories are registered in the *sysconfig.cfg* file
+installed in your Python installation. This file uses an ini format too.
+The content of the file is organized into several sections:
+
+* globals: paths of the standard categories.
+* posix_prefix: standard paths for categories and installation paths for
+  POSIX systems.
+* other ones XXX
+
+Standard category paths are platform-independent; they generally refer to
+other categories, which are platform-dependent. :mod:`sysconfig` will choose
+these categories from sections matching ``os.name``. For example::
+
+ doc = {datadir}/doc/{distribution.name}
+
+This refers to the datadir category, which can be different between platforms.
+On a POSIX system, it may be::
+
+ datadir = /usr/share
+
+So the final path will be::
+
+ doc = /usr/share/doc/{distribution.name}
+
+The platform-dependent categories are:
+
+* confdir
+* datadir
+* libdir
+* base
+
+
+Defining extra categories
+"""""""""""""""""""""""""
+
+.. TODO
+
+
+Examples
+""""""""
+
+These examples are incremental, but each also works on its own.
+
+Resources in root dir
+'''''''''''''''''''''
+
+Source tree::
+
+   babar-1.0/
+      README
+      babar.sh
+      launch.sh
+      babar.py
+
+:file:`setup.cfg`::
+
+   [files]
+   resources =
+       README = {doc}
+       *.sh = {scripts}
+
+So babar.sh and launch.sh will be placed in the {scripts} directory.
+
+Now let's move all the scripts into a scripts directory.
+
+Resources in sub-directory
+''''''''''''''''''''''''''
+
+Source tree::
+
+   babar-1.1/
+      README
+      scripts/
+         babar.sh
+         launch.sh
+         LAUNCH
+      babar.py
+
+:file:`setup.cfg`::
+
+   [files]
+   resources =
+       README = {doc}
+       scripts/ LAUNCH = {doc}
+       scripts/ *.sh = {scripts}
+
+It's important to use the separator after scripts/ to install all the shell
+scripts into {scripts} instead of {scripts}/scripts.
+
+Now let's add some docs.
+
+Resources in multiple sub-directories
+'''''''''''''''''''''''''''''''''''''
+
+Source tree::
+
+   babar-1.2/
+      README
+      scripts/
+         babar.sh
+         launch.sh
+         LAUNCH
+      doc/
+         api
+         man
+      babar.py
+
+:file:`setup.cfg`::
+
+   [files]
+   resources =
+       README = {doc}
+       scripts/ LAUNCH = {doc}
+       scripts/ *.sh = {scripts}
+       doc/ * = {doc}
+       doc/ man = {man}
+
+We want to place all the files in the doc directory into the {doc} category,
+except man, which must be placed into the {man} category. We use the order of
+declaration of the globs to choose the destination: the last glob that matches
+the file is used.
+
+Now let's add some scripts for windows users.
+
+Complete example
+''''''''''''''''
+
+Source tree::
+
+   babar-1.3/
+      README
+      doc/
+         api
+         man
+      scripts/
+         babar.sh
+         launch.sh
+         babar.bat
+         launch.bat
+         LAUNCH
+
+:file:`setup.cfg`::
+
+   [files]
+   resources =
+       README = {doc}
+       scripts/ LAUNCH = {doc}
+       scripts/ *.{sh,bat} = {scripts}
+       doc/ * = {doc}
+       doc/ man = {man}
+
+We use brace expansion syntax to place all the shell and batch scripts into
+{scripts} category.
+
+
+.. _setupcfg-section-extensions:
+
+Extension modules sections
+--------------------------
+
+If a project includes extension modules written in C or C++, each one of them
+needs to have its options defined in a dedicated section. Here's an example::
+
+   [files]
+   packages = coconut
+
+   [extension: coconut._fastcoconut]
+   language = cxx
+   sources = cxx_src/coconut_utils.cxx
+             cxx_src/python_module.cxx
+   include_dirs = /usr/include/gecode
+                  /usr/include/blitz
+   extra_compile_args =
+       -fPIC -O2
+       -DGECODE_VERSION=$(./gecode_version) -- sys.platform != 'win32'
+       /DGECODE_VERSION=win32 -- sys.platform == 'win32'
+
+The section name must start with ``extension:``; the right-hand part is used as
+the full name (including a parent package, if any) of the extension. Whitespace
+around the extension name is allowed. If the extension module is not standalone
+(e.g. ``_bisect``) but part of a package (e.g. ``thing._speedups``), the parent
+package must be listed in the ``packages`` field.
+Valid fields and their values are listed in the documentation of the
+:class:`packaging.compiler.extension.Extension` class; values documented as
+Python lists translate to multi-line values in the configuration file. In
+addition, multi-line values accept environment markers on each line, after a
+``--``.
+
+
+.. _setupcfg-section-commands:
+
+Commands sections
+-----------------
+
+To pass options to commands without having to type them on the command line
+for each invocation, you can write them in the :file:`setup.cfg` file, in a
+section named after the command. Example::
+
+ [sdist]
+ # special function to add custom files
+ manifest-builders = package.setup.list_extra_files
+
+ [build]
+ use-2to3 = True
+
+ [build_ext]
+ inplace = on
+
+ [check]
+ strict = on
+ all = on
+
+Option values given in the configuration file can be overridden on the command
+line. See :ref:`packaging-setup-config` for more information.
+
+These sections are also used to define :ref:`command hooks
+<packaging-command-hooks>`.
+
+
+.. _setupcfg-extensibility:
+
+Extensibility
+=============
+
+Every section can have fields that are not part of this specification. They are
+called **extensions**.
+
+An extension field starts with ``X-``. Example::
+
+ [metadata]
+ name = Distribute
+ X-Debian-Name = python-distribute
+
+
+.. _setupcfg-changes:
+
+Changes in the specification
+============================
+
+The versioning scheme for this specification is **MAJOR.MINOR**. Changes in the
+specification will cause the version number to be updated.
+
+Changes to the minor number reflect backwards-compatible changes:
+
+- New fields and sections (optional or mandatory) can be added.
+- Optional fields can be removed.
+
+The major number will be incremented for backwards-incompatible changes:
+
+- Mandatory fields or sections are removed.
+- Fields change their meaning.
+
+As a consequence, a tool written to consume 1.5 has these properties:
+
+- Can read 1.1, 1.2 and all versions < 1.5, since the tool knows what
+ optional fields weren't there.
+
+ .. XXX clarify
+
+- Can also read 1.6 and other 1.x versions: The tool will just ignore fields it
+ doesn't know about, even if they are mandatory in the new version. If
+ optional fields were removed, the tool will just consider them absent.
+
+- Cannot read 2.x and should refuse to interpret such files.
+
+A tool written to produce 1.x should have these properties:
+
+- Writes all mandatory fields.
+- May write optional fields.
+
+
+.. _setupcfg-acks:
+
+Acknowledgments
+===============
+
+This specification includes work and feedback from these people:
+
+- Tarek Ziadé
+- Julien Jehannet
+- Boris Feld
+- Éric Araujo
+
+(If your name is missing, please :ref:`let us know <reporting-bugs>`.)
diff --git a/Doc/packaging/setupscript.rst b/Doc/packaging/setupscript.rst
new file mode 100644
index 0000000..cafde20
--- /dev/null
+++ b/Doc/packaging/setupscript.rst
@@ -0,0 +1,693 @@
+.. _packaging-setup-script:
+
+************************
+Writing the Setup Script
+************************
+
+The setup script is the center of all activity in building, distributing, and
+installing modules using Distutils. The main purpose of the setup script is
+to describe your module distribution to Distutils, so that the various
+commands that operate on your modules do the right thing. As we saw in section
+:ref:`packaging-simple-example`, the setup script consists mainly of a call to
+:func:`setup` where most of the information is supplied as keyword arguments.
+
+Here's a slightly more involved example, which we'll follow for the next couple
+of sections: a setup script that could be used for Packaging itself::
+
+   #!/usr/bin/env python
+
+   from packaging.core import setup, find_packages
+
+   setup(name='Packaging',
+         version='1.0',
+         summary='Python Distribution Utilities',
+         keywords=['packaging'],
+         author=u'Tarek Ziadé',
+         author_email='tarek@ziade.org',
+         home_page='http://bitbucket.org/tarek/packaging/wiki/Home',
+         license='PSF',
+         packages=find_packages())
+
+
+There are only two differences between this and the trivial one-file
+distribution presented in section :ref:`packaging-simple-example`: more
+metadata and the specification of pure Python modules by package rather than
+by module. This is important since Packaging consists of a couple of dozen
+modules split into (so far) two packages; an explicit list of every module
+would be tedious to generate and difficult to maintain. For more information
+on the additional metadata, see section :ref:`packaging-metadata`.
+
+Note that any pathnames (files or directories) supplied in the setup script
+should be written using the Unix convention, i.e. slash-separated. The
+Distutils will take care of converting this platform-neutral representation into
+whatever is appropriate on your current platform before actually using the
+pathname. This makes your setup script portable across operating systems, which
+of course is one of the major goals of the Distutils. In this spirit, all
+pathnames in this document are slash-separated.
+
+This, of course, only applies to pathnames given to Distutils functions. If
+you, for example, use standard Python functions such as :func:`glob.glob` or
+:func:`os.listdir` to specify files, you should be careful to write portable
+code instead of hardcoding path separators::
+
+ glob.glob(os.path.join('mydir', 'subdir', '*.html'))
+ os.listdir(os.path.join('mydir', 'subdir'))
+
+
+.. _packaging-listing-packages:
+
+Listing whole packages
+======================
+
+The :option:`packages` option tells the Distutils to process (build, distribute,
+install, etc.) all pure Python modules found in each package mentioned in the
+:option:`packages` list. In order to do this, of course, there has to be a
+correspondence between package names and directories in the filesystem. The
+default correspondence is the most obvious one, i.e. package :mod:`packaging` is
+found in the directory :file:`packaging` relative to the distribution root.
+Thus, when you say ``packages = ['foo']`` in your setup script, you are
+promising that the Distutils will find a file :file:`foo/__init__.py` (which
+might be spelled differently on your system, but you get the idea) relative to
+the directory where your setup script lives. If you break this promise, the
+Distutils will issue a warning but still process the broken package anyway.
+
+If you use a different convention to lay out your source directory, that's no
+problem: you just have to supply the :option:`package_dir` option to tell the
+Distutils about your convention. For example, say you keep all Python source
+under :file:`lib`, so that modules in the "root package" (i.e., not in any
+package at all) are in :file:`lib`, modules in the :mod:`foo` package are in
+:file:`lib/foo`, and so forth. Then you would put ::
+
+ package_dir = {'': 'lib'}
+
+in your setup script. The keys to this dictionary are package names, and an
+empty package name stands for the root package. The values are directory names
+relative to your distribution root. In this case, when you say ``packages =
+['foo']``, you are promising that the file :file:`lib/foo/__init__.py` exists.
+
+Another possible convention is to put the :mod:`foo` package right in
+:file:`lib`, the :mod:`foo.bar` package in :file:`lib/bar`, etc. This would be
+written in the setup script as ::
+
+ package_dir = {'foo': 'lib'}
+
+A ``package: dir`` entry in the :option:`package_dir` dictionary implicitly
+applies to all packages below *package*, so the :mod:`foo.bar` case is
+automatically handled here. In this example, having ``packages = ['foo',
+'foo.bar']`` tells the Distutils to look for :file:`lib/__init__.py` and
+:file:`lib/bar/__init__.py`. (Keep in mind that although :option:`package_dir`
+applies recursively, you must explicitly list all packages in
+:option:`packages`: the Distutils will *not* recursively scan your source tree
+looking for any directory with an :file:`__init__.py` file.)
+
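+Putting these options together, a minimal sketch of a setup script for the
+:file:`lib` layout described above (the project name is invented)::
+
+   from packaging.core import setup
+
+   setup(name='foo',
+         version='1.0',
+         package_dir={'': 'lib'},
+         # promises lib/foo/__init__.py and lib/foo/bar/__init__.py exist
+         packages=['foo', 'foo.bar'])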
+
+.. _packaging-listing-modules:
+
+Listing individual modules
+==========================
+
+For a small module distribution, you might prefer to list all modules rather
+than listing packages---especially the case of a single module that goes in the
+"root package" (i.e., no package at all). This simplest case was shown in
+section :ref:`packaging-simple-example`; here is a slightly more involved
+example::
+
+ py_modules = ['mod1', 'pkg.mod2']
+
+This describes two modules, one of them in the "root" package, the other in the
+:mod:`pkg` package. Again, the default package/directory layout implies that
+these two modules can be found in :file:`mod1.py` and :file:`pkg/mod2.py`, and
+that :file:`pkg/__init__.py` exists as well. And again, you can override the
+package/directory correspondence using the :option:`package_dir` option.
+
+
+.. _packaging-describing-extensions:
+
+Describing extension modules
+============================
+
+Just as writing Python extension modules is a bit more complicated than writing
+pure Python modules, describing them to the Distutils is a bit more complicated.
+Unlike pure modules, it's not enough just to list modules or packages and expect
+the Distutils to go out and find the right files; you have to specify the
+extension name, source file(s), and any compile/link requirements (include
+directories, libraries to link with, etc.).
+
+.. XXX read over this section
+
+All of this is done through another keyword argument to :func:`setup`, the
+:option:`ext_modules` option. :option:`ext_modules` is just a list of
+:class:`Extension` instances, each of which describes a single extension module.
+Suppose your distribution includes a single extension, called :mod:`foo` and
+implemented by :file:`foo.c`. If no additional instructions to the
+compiler/linker are needed, describing this extension is quite simple::
+
+ Extension('foo', ['foo.c'])
+
+The :class:`Extension` class can be imported from :mod:`packaging.core` along
+with :func:`setup`. Thus, the setup script for a module distribution that
+contains only this one extension and nothing else might be::
+
+   from packaging.core import setup, Extension
+   setup(name='foo',
+         version='1.0',
+         ext_modules=[Extension('foo', ['foo.c'])])
+
+The :class:`Extension` class (actually, the underlying extension-building
+machinery implemented by the :command:`build_ext` command) supports a great deal
+of flexibility in describing Python extensions, which is explained in the
+following sections.
+
+
+Extension names and packages
+----------------------------
+
+The first argument to the :class:`Extension` constructor is always the name of
+the extension, including any package names. For example, ::
+
+ Extension('foo', ['src/foo1.c', 'src/foo2.c'])
+
+describes an extension that lives in the root package, while ::
+
+ Extension('pkg.foo', ['src/foo1.c', 'src/foo2.c'])
+
+describes the same extension in the :mod:`pkg` package. The source files and
+resulting object code are identical in both cases; the only difference is where
+in the filesystem (and therefore where in Python's namespace hierarchy) the
+resulting extension lives.
+
+If a package contains nothing but extension modules, you still need to create
+a :file:`{package}/__init__.py` file; otherwise Python won't be able to import
+anything.
+
+If you have a number of extensions all in the same package (or all under the
+same base package), use the :option:`ext_package` keyword argument to
+:func:`setup`. For example, ::
+
+   setup(...,
+         ext_package='pkg',
+         ext_modules=[Extension('foo', ['foo.c']),
+                      Extension('subpkg.bar', ['bar.c'])])
+
+will compile :file:`foo.c` to the extension :mod:`pkg.foo`, and :file:`bar.c` to
+:mod:`pkg.subpkg.bar`.
+
+
+Extension source files
+----------------------
+
+The second argument to the :class:`Extension` constructor is a list of source
+files. Since the Distutils currently only support C, C++, and Objective-C
+extensions, these are normally C/C++/Objective-C source files. (Be sure to use
+appropriate extensions to distinguish C++ source files: :file:`.cc` and
+:file:`.cpp` seem to be recognized by both Unix and Windows compilers.)
+
+However, you can also include SWIG interface (:file:`.i`) files in the list; the
+:command:`build_ext` command knows how to deal with SWIG extensions: it will run
+SWIG on the interface file and compile the resulting C/C++ file into your
+extension.
+
+.. XXX SWIG support is rough around the edges and largely untested!
+
+This warning notwithstanding, options to SWIG can currently be passed like
+this::
+
+   setup(...,
+         ext_modules=[Extension('_foo', ['foo.i'],
+                                swig_opts=['-modern', '-I../include'])],
+         py_modules=['foo'])
+
+Or on the command line like this::
+
+ > python setup.py build_ext --swig-opts="-modern -I../include"
+
+On some platforms, you can include non-source files that are processed by the
+compiler and included in your extension. Currently, this just means Windows
+message text (:file:`.mc`) files and resource definition (:file:`.rc`) files for
+Visual C++. These will be compiled to binary resource (:file:`.res`) files and
+linked into the executable.
+
+
+Preprocessor options
+--------------------
+
+Three optional arguments to :class:`Extension` will help if you need to specify
+include directories to search or preprocessor macros to define/undefine:
+``include_dirs``, ``define_macros``, and ``undef_macros``.
+
+For example, if your extension requires header files in the :file:`include`
+directory under your distribution root, use the ``include_dirs`` option::
+
+ Extension('foo', ['foo.c'], include_dirs=['include'])
+
+You can specify absolute directories there; if you know that your extension will
+only be built on Unix systems with X11R6 installed to :file:`/usr`, you can get
+away with ::
+
+ Extension('foo', ['foo.c'], include_dirs=['/usr/include/X11'])
+
+You should avoid this sort of non-portable usage if you plan to distribute your
+code: it's probably better to write C code like ::
+
+ #include <X11/Xlib.h>
+
+If you need to include header files from some other Python extension, you can
+take advantage of the fact that header files are installed in a consistent way
+by the Distutils :command:`install_header` command. For example, the Numerical
+Python header files are installed (on a standard Unix installation) to
+:file:`/usr/local/include/python1.5/Numerical`. (The exact location will differ
+according to your platform and Python installation.) Since the Python include
+directory---\ :file:`/usr/local/include/python1.5` in this case---is always
+included in the search path when building Python extensions, the best approach
+is to write C code like ::
+
+ #include <Numerical/arrayobject.h>
+
+.. TODO check if it's d2.sysconfig or the new sysconfig module now
+
+If you must put the :file:`Numerical` include directory right into your header
+search path, though, you can find that directory using the Distutils
+:mod:`packaging.sysconfig` module::
+
+   from packaging.sysconfig import get_python_inc
+   incdir = os.path.join(get_python_inc(plat_specific=1), 'Numerical')
+   setup(...,
+         Extension(..., include_dirs=[incdir]))
+
+Even though this is quite portable---it will work on any Python installation,
+regardless of platform---it's probably easier to just write your C code in the
+sensible way.
+
+You can define and undefine preprocessor macros with the ``define_macros`` and
+``undef_macros`` options. ``define_macros`` takes a list of ``(name, value)``
+tuples, where ``name`` is the name of the macro to define (a string) and
+``value`` is its value: either a string or ``None``. (Defining a macro ``FOO``
+to ``None`` is the equivalent of a bare ``#define FOO`` in your C source: with
+most compilers, this sets ``FOO`` to the string ``1``.) ``undef_macros`` is
+just a list of macros to undefine.
+
+For example::
+
+   Extension(...,
+             define_macros=[('NDEBUG', '1'),
+                            ('HAVE_STRFTIME', None)],
+             undef_macros=['HAVE_FOO', 'HAVE_BAR'])
+
+is the equivalent of having this at the top of every C source file::
+
+ #define NDEBUG 1
+ #define HAVE_STRFTIME
+ #undef HAVE_FOO
+ #undef HAVE_BAR
+
+
+Library options
+---------------
+
+You can also specify the libraries to link against when building your extension,
+and the directories to search for those libraries. The ``libraries`` option is
+a list of libraries to link against, ``library_dirs`` is a list of directories
+to search for libraries at link-time, and ``runtime_library_dirs`` is a list of
+directories to search for shared (dynamically loaded) libraries at run-time.
+
+For example, if you need to link against libraries known to be in the standard
+library search path on target systems ::
+
+   Extension(...,
+             libraries=['gdbm', 'readline'])
+
+If you need to link with libraries in a non-standard location, you'll have to
+include the location in ``library_dirs``::
+
+   Extension(...,
+             library_dirs=['/usr/X11R6/lib'],
+             libraries=['X11', 'Xt'])
+
+(Again, this sort of non-portable construct should be avoided if you intend to
+distribute your code.)
+
+.. XXX Should mention clib libraries here or somewhere else!
+
+
+Other options
+-------------
+
+There are still some other options which can be used to handle special cases.
+
+The :option:`optional` option is a boolean; if it is true,
+a build failure in the extension will not abort the build process; instead,
+the failing extension will simply not be installed.
+
+The :option:`extra_objects` option is a list of object files to be passed to the
+linker. These files must not have extensions, as the default extension for the
+compiler is used.
+
+:option:`extra_compile_args` and :option:`extra_link_args` can be used to
+specify additional command-line options for the respective compiler and linker
+command lines.
+
+:option:`export_symbols` is only useful on Windows. It can contain a list of
+symbols (functions or variables) to be exported. This option is not needed when
+building compiled extensions: Distutils will automatically add ``initmodule``
+to the list of exported symbols.
+
+The :option:`depends` option is a list of files that the extension depends on
+(for example header files). The build command will call the compiler on the
+sources to rebuild the extension if any of these files has been modified since
+the previous build.
+
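+For example, a hedged sketch (the header name is invented)::
+
+   Extension('foo', ['foo.c'],
+             depends=['foo.h'])   # recompile foo.c whenever foo.h changes
+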
+Relationships between Distributions and Packages
+================================================
+
+.. FIXME rewrite to update to PEP 345 (but without dist/release confusion)
+
+A distribution may relate to packages in three specific ways:
+
+#. It can require packages or modules.
+
+#. It can provide packages or modules.
+
+#. It can obsolete packages or modules.
+
+These relationships can be specified using keyword arguments to the
+:func:`packaging.core.setup` function.
+
+Dependencies on other Python modules and packages can be specified by supplying
+the *requires* keyword argument to :func:`setup`. The value must be a list of
+strings. Each string specifies a package that is required, and optionally what
+versions are sufficient.
+
+To specify that any version of a module or package is required, the string
+should consist entirely of the module or package name. Examples include
+``'mymodule'`` and ``'xml.parsers.expat'``.
+
+If specific versions are required, a sequence of qualifiers can be supplied in
+parentheses. Each qualifier may consist of a comparison operator and a version
+number. The accepted comparison operators are::
+
+ < > ==
+ <= >= !=
+
+These can be combined by using multiple qualifiers separated by commas (and
+optional whitespace). In this case, all of the qualifiers must be matched; a
+logical AND is used to combine the evaluations.
+
+Let's look at a bunch of examples:
+
++-------------------------+----------------------------------------------+
+| Requires Expression | Explanation |
++=========================+==============================================+
+| ``==1.0`` | Only version ``1.0`` is compatible |
++-------------------------+----------------------------------------------+
+| ``>1.0, !=1.5.1, <2.0`` | Any version after ``1.0`` and before ``2.0`` |
+| | is compatible, except ``1.5.1`` |
++-------------------------+----------------------------------------------+
+
+Now that we can specify dependencies, we also need to be able to specify what we
+provide that other distributions can require. This is done using the *provides*
+keyword argument to :func:`setup`. The value for this keyword is a list of
+strings, each of which names a Python module or package, and optionally
+identifies the version. If the version is not specified, it is assumed to match
+that of the distribution.
+
+Some examples:
+
++---------------------+----------------------------------------------+
+| Provides Expression | Explanation |
++=====================+==============================================+
+| ``mypkg`` | Provide ``mypkg``, using the distribution |
+| | version |
++---------------------+----------------------------------------------+
+| ``mypkg (1.1)`` | Provide ``mypkg`` version 1.1, regardless of |
+| | the distribution version |
++---------------------+----------------------------------------------+
+
+A package can declare that it obsoletes other packages using the *obsoletes*
+keyword argument. The value for this is similar to that of the *requires*
+keyword: a list of strings giving module or package specifiers. Each specifier
+consists of a module or package name optionally followed by one or more version
+qualifiers. Version qualifiers are given in parentheses after the module or
+package name.
+
+The versions identified by the qualifiers are those that are obsoleted by the
+distribution being described. If no qualifiers are given, all versions of the
+named module or package are understood to be obsoleted.
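+
+As a hedged illustration, these three keyword arguments might be combined in a
+single call (all names and versions here are invented)::
+
+   setup(name='mypkg',
+         version='1.1',
+         requires=['xml.parsers.expat', 'othermod (>1.0, !=1.5.1, <2.0)'],
+         provides=['mypkg (1.1)'],
+         obsoletes=['oldpkg', 'otherpkg (<1.0)'])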
+
+.. _packaging-installing-scripts:
+
+Installing Scripts
+==================
+
+So far we have been dealing with pure and non-pure Python modules, which are
+usually not run by themselves but imported by scripts.
+
+Scripts are files containing Python source code, intended to be started from
+the command line. Scripts don't require Distutils to do anything very
+complicated. The only clever feature is that if the first line of the script
+starts with ``#!`` and contains the word "python", the Distutils will adjust
+it to refer to the current interpreter location. The :option:`--executable`
+(or :option:`-e`) option allows the interpreter path to be explicitly
+overridden.
+
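+The adjustment itself amounts to something like the following sketch (an
+illustration, not the actual Packaging implementation)::
+
+   import sys
+
+   def adjust_first_line(line, executable=sys.executable):
+       # rewrite '#!/usr/bin/env python' (and similar) to point at the
+       # interpreter used for the installation
+       if line.startswith('#!') and 'python' in line:
+           return '#!%s\n' % executable
+       return line
+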
+The :option:`scripts` option simply is a list of files to be handled in this
+way. From the PyXML setup script::
+
+   setup(...,
+         scripts=['scripts/xmlproc_parse', 'scripts/xmlproc_val'])
+
+All the scripts will also be added to the ``MANIFEST`` file if no template is
+provided. See :ref:`packaging-manifest`.
+
+.. _packaging-installing-package-data:
+
+Installing Package Data
+=======================
+
+Often, additional files need to be installed into a package. These files are
+often data that's closely related to the package's implementation, or text files
+containing documentation that might be of interest to programmers using the
+package. These files are called :dfn:`package data`.
+
+Package data can be added to packages using the ``package_data`` keyword
+argument to the :func:`setup` function. The value must be a mapping from
+package name to a list of relative path names that should be copied into the
+package. The paths are interpreted as relative to the directory containing the
+package (information from the ``package_dir`` mapping is used if appropriate);
+that is, the files are expected to be part of the package in the source
+directories. They may contain glob patterns as well.
+
+The path names may contain directory portions; any necessary directories will be
+created in the installation.
+
+For example, if a package should contain a subdirectory with several data files,
+the files can be arranged like this in the source tree::
+
+   setup.py
+   src/
+       mypkg/
+           __init__.py
+           module.py
+           data/
+               tables.dat
+               spoons.dat
+               forks.dat
+
+The corresponding call to :func:`setup` might be::
+
+   setup(...,
+         packages=['mypkg'],
+         package_dir={'mypkg': 'src/mypkg'},
+         package_data={'mypkg': ['data/*.dat']})
+
+
+All the files that match ``package_data`` will be added to the ``MANIFEST``
+file if no template is provided. See :ref:`packaging-manifest`.
+
+
+.. _packaging-additional-files:
+
+Installing Additional Files
+===========================
+
+The :option:`data_files` option can be used to specify additional files needed
+by the module distribution: configuration files, message catalogs, data files,
+anything which doesn't fit in the previous categories.
+
+:option:`data_files` specifies a sequence of (*directory*, *files*) pairs in the
+following way::
+
+   setup(...,
+         data_files=[('bitmaps', ['bm/b1.gif', 'bm/b2.gif']),
+                     ('config', ['cfg/data.cfg']),
+                     ('/etc/init.d', ['init-script'])])
+
+Note that you can specify the directory names where the data files will be
+installed, but you cannot rename the data files themselves.
+
+Each (*directory*, *files*) pair in the sequence specifies the installation
+directory and the files to install there. If *directory* is a relative path, it
+is interpreted relative to the installation prefix (Python's ``sys.prefix`` for
+pure-Python packages, ``sys.exec_prefix`` for packages that contain extension
+modules). Each file name in *files* is interpreted relative to the
+:file:`setup.py` script at the top of the package source distribution. No
+directory information from *files* is used to determine the final location of
+the installed file; only the name of the file is used.
+
+You can specify the :option:`data_files` option as a simple sequence of files
+without specifying a target directory, but this is not recommended, and the
+:command:`install_dist` command will print a warning in this case. To install
+data files directly in the target directory, an empty string should be given
+as the directory.
+
+All the files that match ``data_files`` will be added to the ``MANIFEST`` file
+if no template is provided. See :ref:`packaging-manifest`.
+
+
+
+.. _packaging-metadata:
+
+Metadata reference
+==================
+
+The setup script may include additional metadata beyond the name and version.
+This table describes required and additional information:
+
+.. TODO synchronize with setupcfg; link to it (but don't remove it, it's a
+ useful summary)
+
++----------------------+---------------------------+-----------------+--------+
+| Meta-Data | Description | Value | Notes |
++======================+===========================+=================+========+
+| ``name`` | name of the project | short string | \(1) |
++----------------------+---------------------------+-----------------+--------+
+| ``version`` | version of this release | short string | (1)(2) |
++----------------------+---------------------------+-----------------+--------+
+| ``author`` | project author's name | short string | \(3) |
++----------------------+---------------------------+-----------------+--------+
+| ``author_email`` | email address of the | email address | \(3) |
+| | project author | | |
++----------------------+---------------------------+-----------------+--------+
+| ``maintainer`` | project maintainer's name | short string | \(3) |
++----------------------+---------------------------+-----------------+--------+
+| ``maintainer_email`` | email address of the | email address | \(3) |
+| | project maintainer | | |
++----------------------+---------------------------+-----------------+--------+
+| ``home_page`` | home page for the project | URL | \(1) |
++----------------------+---------------------------+-----------------+--------+
+| ``summary`` | short description of the | short string | |
+| | project | | |
++----------------------+---------------------------+-----------------+--------+
+| ``description`` | longer description of the | long string | \(5) |
+| | project | | |
++----------------------+---------------------------+-----------------+--------+
+| ``download_url`` | location where the | URL | |
+| | project may be downloaded | | |
++----------------------+---------------------------+-----------------+--------+
+| ``classifiers`` | a list of classifiers | list of strings | \(4) |
++----------------------+---------------------------+-----------------+--------+
+| ``platforms`` | a list of platforms | list of strings | |
++----------------------+---------------------------+-----------------+--------+
+| ``license`` | license for the release | short string | \(6) |
++----------------------+---------------------------+-----------------+--------+
+
+Notes:
+
+(1)
+ These fields are required.
+
+(2)
+ It is recommended that versions take the form *major.minor[.patch[.sub]]*.
+
+(3)
+ Either the author or the maintainer must be identified.
+
+(4)
+ The list of classifiers is available from the `PyPI website
+ <http://pypi.python.org/pypi>`_. See also :mod:`packaging.create`.
+
+(5)
+ The ``description`` field is used by PyPI when you are registering a
+ release, to build its PyPI page.
+
+(6)
+   The ``license`` field is free text indicating the license covering the
+   distribution, for cases where the license is not a selection from the
+   "License" Trove classifiers (see the ``Classifier`` field). Notice that
+   there's a ``licence`` distribution option which is deprecated but still
+   acts as an alias for ``license``.
+
+'short string'
+ A single line of text, not more than 200 characters.
+
+'long string'
+ Multiple lines of plain text in reStructuredText format (see
+ http://docutils.sf.net/).
+
+'list of strings'
+ See below.
+
+In Python 2.x, "string value" means a unicode object. If a byte string (str or
+bytes) is given, it has to be valid ASCII.
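+
+For example, a minimal sketch of a setup script carrying some of these fields
+(the values are illustrative)::
+
+    from packaging.core import setup
+
+    setup(name='foobar',
+          version='0.1',
+          author='A. N. Author',
+          author_email='author@example.org')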
+
+.. TODO move this section to the version document, keep a summary, add a link
+
+Encoding the version information is an art in itself. Python projects generally
+adhere to the version format *major.minor[.patch][sub]*. The major number is 0
+for initial, experimental releases of software. It is incremented for releases
+that represent major milestones in a project. The minor number is incremented
+when important new features are added to the project. The patch number
+increments when bug-fix releases are made. Additional trailing version
+information is sometimes used to indicate sub-releases. These are
+"a1,a2,...,aN" (for alpha releases, where functionality and API may change),
+"b1,b2,...,bN" (for beta releases, which only fix bugs) and "pr1,pr2,...,prN"
+(for final pre-release release testing). Some examples:
+
+0.1.0
+ the first, experimental release of a project
+
+1.0.1a2
+ the second alpha release of the first patch version of 1.0
+
+:option:`classifiers` are specified in a Python list::
+
+ setup(...,
+ classifiers=[
+ 'Development Status :: 4 - Beta',
+ 'Environment :: Console',
+ 'Environment :: Web Environment',
+ 'Intended Audience :: End Users/Desktop',
+ 'Intended Audience :: Developers',
+ 'Intended Audience :: System Administrators',
+ 'License :: OSI Approved :: Python Software Foundation License',
+ 'Operating System :: MacOS :: MacOS X',
+ 'Operating System :: Microsoft :: Windows',
+ 'Operating System :: POSIX',
+ 'Programming Language :: Python',
+ 'Topic :: Communications :: Email',
+ 'Topic :: Office/Business',
+ 'Topic :: Software Development :: Bug Tracking',
+ ])
+
+
+Debugging the setup script
+==========================
+
+Sometimes things go wrong, and the setup script doesn't do what the developer
+wants.
+
+Distutils catches any exceptions when running the setup script, and prints a
+simple error message before the script is terminated. The motivation for this
+behaviour is to not confuse administrators who don't know much about Python and
+are trying to install a project. If they get a big long traceback from deep
+inside the guts of Distutils, they may think the project or the Python
+installation is broken because they don't read all the way down to the bottom
+and see that it's a permission problem.
+
+.. FIXME DISTUTILS_DEBUG is dead, document logging/warnings here
+
+On the other hand, this doesn't help the developer find the cause of the
+failure. For this purpose, the :envvar:`DISTUTILS_DEBUG` environment variable
+can be set to anything except an empty string; Packaging will then print
+detailed information about what it is doing and a full traceback when an
+exception occurs.
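+
+For example, a hypothetical debugging run on a Unix shell::
+
+    $ DISTUTILS_DEBUG=1 python setup.py sdist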
diff --git a/Doc/packaging/sourcedist.rst b/Doc/packaging/sourcedist.rst
new file mode 100644
index 0000000..2cedc15
--- /dev/null
+++ b/Doc/packaging/sourcedist.rst
@@ -0,0 +1,266 @@
+.. _packaging-source-dist:
+
+******************************
+Creating a Source Distribution
+******************************
+
+As shown in section :ref:`packaging-simple-example`, you use the :command:`sdist` command
+to create a source distribution. In the simplest case, ::
+
+ python setup.py sdist
+
+(assuming you haven't specified any :command:`sdist` options in the setup script
+or config file), :command:`sdist` creates the archive of the default format for
+the current platform. The default format is a gzip'ed tar file
+(:file:`.tar.gz`) on Unix, and a ZIP file on Windows.
+
+You can specify as many formats as you like using the :option:`--formats`
+option, for example::
+
+ python setup.py sdist --formats=gztar,zip
+
+to create a gzipped tarball and a zip file. The available formats are:
+
++-----------+-------------------------+---------+
+| Format | Description | Notes |
++===========+=========================+=========+
+| ``zip`` | zip file (:file:`.zip`) | (1),(3) |
++-----------+-------------------------+---------+
+| ``gztar`` | gzip'ed tar file | \(2) |
+| | (:file:`.tar.gz`) | |
++-----------+-------------------------+---------+
+| ``bztar`` | bzip2'ed tar file | |
+| | (:file:`.tar.bz2`) | |
++-----------+-------------------------+---------+
+| ``tar`` | tar file (:file:`.tar`) | |
++-----------+-------------------------+---------+
+
+Notes:
+
+(1)
+ default on Windows
+
+(2)
+ default on Unix
+
+(3)
+   requires either the external :program:`zip` utility or the :mod:`zipfile`
+   module (part of the standard Python library since Python 1.6)
+
+When using any ``tar`` format (``gztar``, ``bztar`` or
+``tar``) under Unix, you can specify the ``owner`` and ``group`` names
+that will be set for each member of the archive.
+
+For example, if you want all files of the archive to be owned by root::
+
+ python setup.py sdist --owner=root --group=root
+
+
+.. _packaging-manifest:
+
+Specifying the files to distribute
+==================================
+
+If you don't supply an explicit list of files (or instructions on how to
+generate one), the :command:`sdist` command puts a minimal default set into the
+source distribution:
+
+* all Python source files implied by the :option:`py_modules` and
+ :option:`packages` options
+
+* all C source files mentioned in the :option:`ext_modules` or
+ :option:`libraries` options
+
+* scripts identified by the :option:`scripts` option.
+  See :ref:`packaging-installing-scripts`.
+
+* anything that looks like a test script: :file:`test/test\*.py` (currently,
+  Packaging doesn't do anything with test scripts except include them in
+  source distributions, but in the future there will be a standard for testing
+  Python module distributions)
+
+* the configuration file :file:`setup.cfg`
+
+* all files that match the ``package_data`` metadata.
+  See :ref:`packaging-installing-package-data`.
+
+* all files that match the ``data_files`` metadata.
+  See :ref:`packaging-additional-files`.
+
+Unlike with Distutils, :file:`README` (or :file:`README.txt`) and
+:file:`setup.py` are not included by default.
+
+Sometimes this is enough, but usually you will want to specify additional files
+to distribute. The typical way to do this is to write a *manifest template*,
+called :file:`MANIFEST.in` by default. The manifest template is just a list of
+instructions for how to generate your manifest file, :file:`MANIFEST`, which is
+the exact list of files to include in your source distribution. The
+:command:`sdist` command processes this template and generates a manifest based
+on its instructions and what it finds in the filesystem.
+
+If you prefer to roll your own manifest file, the format is simple: one filename
+per line, regular files (or symlinks to them) only. If you do supply your own
+:file:`MANIFEST`, you must specify everything: the default set of files
+described above does not apply in this case.
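+
+A minimal hand-written :file:`MANIFEST` might look like this (the file names
+are illustrative)::
+
+    README.txt
+    setup.py
+    foobar/__init__.py
+    foobar/core.py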
+
+:file:`MANIFEST` files start with a comment indicating they are generated.
+Files without this comment are not overwritten or removed.
+
+See the :ref:`packaging-manifest-template` section for a syntax reference.
+
+
+.. _packaging-manifest-options:
+
+Manifest-related options
+========================
+
+The normal course of operations for the :command:`sdist` command is as follows:
+
+* if the manifest file, :file:`MANIFEST` doesn't exist, read :file:`MANIFEST.in`
+ and create the manifest
+
+* if neither :file:`MANIFEST` nor :file:`MANIFEST.in` exist, create a manifest
+ with just the default file set
+
+* if either :file:`MANIFEST.in` or the setup script (:file:`setup.py`) are more
+ recent than :file:`MANIFEST`, recreate :file:`MANIFEST` by reading
+ :file:`MANIFEST.in`
+
+* use the list of files now in :file:`MANIFEST` (either just generated or read
+ in) to create the source distribution archive(s)
+
+There are a couple of options that modify this behaviour. First, use the
+:option:`--no-defaults` and :option:`--no-prune` options to disable the
+standard "include" and "exclude" sets.
+
+Second, you might just want to (re)generate the manifest, but not create a
+source distribution::
+
+ python setup.py sdist --manifest-only
+
+:option:`-o` is a shortcut for :option:`--manifest-only`.
+
+
+.. _packaging-manifest-template:
+
+The MANIFEST.in template
+========================
+
+A :file:`MANIFEST.in` file can be added in a project to define the list of
+files to include in the distribution built by the :command:`sdist` command.
+
+When :command:`sdist` is run, it will look for the :file:`MANIFEST.in` file
+and interpret it to generate the :file:`MANIFEST` file that contains the
+list of files that will be included in the package.
+
+This mechanism can be used when the default list of files is not enough.
+(See :ref:`packaging-manifest`).
+
+Principle
+---------
+
+The manifest template has one command per line, where each command specifies a
+set of files to include or exclude from the source distribution. For an
+example, let's look at Packaging's own manifest template::
+
+ include *.txt
+ recursive-include examples *.txt *.py
+ prune examples/sample?/build
+
+The meanings should be fairly clear: include all files in the distribution root
+matching :file:`\*.txt`, all files anywhere under the :file:`examples` directory
+matching :file:`\*.txt` or :file:`\*.py`, and exclude all directories matching
+:file:`examples/sample?/build`. All of this is done *after* the standard
+include set, so you can exclude files from the standard set with explicit
+instructions in the manifest template. (Or, you can use the
+:option:`--no-defaults` option to disable the standard set entirely.)
+
+The order of commands in the manifest template matters: initially, we have the
+list of default files as described above, and each command in the template adds
+to or removes from that list of files. Once we have fully processed the
+manifest template, we remove files that should not be included in the source
+distribution:
+
+* all files in the Packaging "build" tree (default :file:`build/`)
+
+* all files in directories named :file:`RCS`, :file:`CVS`, :file:`.svn`,
+ :file:`.hg`, :file:`.git`, :file:`.bzr` or :file:`_darcs`
+
+Now we have our complete list of files, which is written to the manifest for
+future reference, and then used to build the source distribution archive(s).
+
+You can disable the default set of included files with the
+:option:`--no-defaults` option, and you can disable the standard exclude set
+with :option:`--no-prune`.
+
+Following Packaging's own manifest template, let's trace how the
+:command:`sdist` command builds the list of files to include in the Packaging
+source distribution:
+
+#. include all Python source files in the :file:`packaging` and
+ :file:`packaging/command` subdirectories (because packages corresponding to
+ those two directories were mentioned in the :option:`packages` option in the
+ setup script---see section :ref:`packaging-setup-script`)
+
+#. include :file:`README.txt`, :file:`setup.py`, and :file:`setup.cfg` (standard
+ files)
+
+#. include :file:`test/test\*.py` (standard files)
+
+#. include :file:`\*.txt` in the distribution root (this will find
+ :file:`README.txt` a second time, but such redundancies are weeded out later)
+
+#. include anything matching :file:`\*.txt` or :file:`\*.py` in the sub-tree
+ under :file:`examples`,
+
+#. exclude all files in the sub-trees starting at directories matching
+ :file:`examples/sample?/build`\ ---this may exclude files included by the
+ previous two steps, so it's important that the ``prune`` command in the manifest
+ template comes after the ``recursive-include`` command
+
+#. exclude the entire :file:`build` tree, and any :file:`RCS`, :file:`CVS`,
+ :file:`.svn`, :file:`.hg`, :file:`.git`, :file:`.bzr` and :file:`_darcs`
+ directories
+
+Just like in the setup script, file and directory names in the manifest template
+should always be slash-separated; Packaging will take care of converting
+them to the standard representation on your platform. That way, the manifest
+template is portable across operating systems.
+
+Commands
+--------
+
+The manifest template commands are:
+
++-------------------------------------------+-----------------------------------------------+
+| Command | Description |
++===========================================+===============================================+
+| :command:`include pat1 pat2 ...` | include all files matching any of the listed |
+| | patterns |
++-------------------------------------------+-----------------------------------------------+
+| :command:`exclude pat1 pat2 ...` | exclude all files matching any of the listed |
+| | patterns |
++-------------------------------------------+-----------------------------------------------+
+| :command:`recursive-include dir pat1 pat2 | include all files under *dir* matching any of |
+| ...` | the listed patterns |
++-------------------------------------------+-----------------------------------------------+
+| :command:`recursive-exclude dir pat1 pat2 | exclude all files under *dir* matching any of |
+| ...` | the listed patterns |
++-------------------------------------------+-----------------------------------------------+
+| :command:`global-include pat1 pat2 ...`   | include all files anywhere in the source tree |
+|                                           | matching any of the listed patterns           |
++-------------------------------------------+-----------------------------------------------+
+| :command:`global-exclude pat1 pat2 ...`   | exclude all files anywhere in the source tree |
+|                                           | matching any of the listed patterns           |
++-------------------------------------------+-----------------------------------------------+
+| :command:`prune dir` | exclude all files under *dir* |
++-------------------------------------------+-----------------------------------------------+
+| :command:`graft dir` | include all files under *dir* |
++-------------------------------------------+-----------------------------------------------+
+
+The patterns here are Unix-style "glob" patterns: ``*`` matches any sequence of
+regular filename characters, ``?`` matches any single regular filename
+character, and ``[range]`` matches any of the characters in *range* (e.g.,
+``a-z``, ``a-zA-Z``, ``a-f0-9_.``). The definition of "regular filename
+character" is platform-specific: on Unix it is anything except slash; on Windows
+anything except backslash or colon.
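+
+For example, a template combining several of these commands (the directory and
+file names are illustrative)::
+
+    include README.txt
+    recursive-include docs *.txt
+    graft examples
+    global-exclude *.pyc *.tmp
+    prune docs/build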
diff --git a/Doc/packaging/tutorial.rst b/Doc/packaging/tutorial.rst
new file mode 100644
index 0000000..04f41e5
--- /dev/null
+++ b/Doc/packaging/tutorial.rst
@@ -0,0 +1,112 @@
+==================
+Packaging tutorial
+==================
+
+Welcome to the Packaging tutorial! This tutorial shows you how to use
+Packaging to package your project.
+
+.. TODO merge with introduction.rst
+
+
+Getting started
+---------------
+
+Packaging works with the *setup.cfg* file. It contains all the metadata for
+your project, as defined in PEP 345, and also declares what your project
+contains.
+
+Let's say you have a project called *CLVault* containing one package called
+*clvault*, and a few scripts inside. You can use the *pysetup* script to create
+a *setup.cfg* file for the project. The script will ask you a few questions::
+
+ $ mkdir CLVault
+ $ cd CLVault
+ $ pysetup create
+ Project name [CLVault]:
+ Current version number: 0.1
+ Package description:
+ >Command-line utility to store and retrieve passwords
+ Author name: Tarek Ziade
+ Author e-mail address: tarek@ziade.org
+ Project Home Page: http://bitbucket.org/tarek/clvault
+ Do you want to add a package ? (y/n): y
+ Package name: clvault
+ Do you want to add a package ? (y/n): n
+ Do you want to set Trove classifiers? (y/n): y
+ Please select the project status:
+
+ 1 - Planning
+ 2 - Pre-Alpha
+ 3 - Alpha
+ 4 - Beta
+ 5 - Production/Stable
+ 6 - Mature
+ 7 - Inactive
+
+ Status: 3
+ What license do you use: GPL
+ Matching licenses:
+
+ 1) License :: OSI Approved :: GNU General Public License (GPL)
+ 2) License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)
+
+ Type the number of the license you wish to use or ? to try again:: 1
+ Do you want to set other trove identifiers (y/n) [n]: n
+ Wrote "setup.cfg".
+
+
+A setup.cfg file is created, containing the metadata of your project and the
+list of the packages it contains::
+
+ $ cat setup.cfg
+ [metadata]
+ name = CLVault
+ version = 0.1
+ author = Tarek Ziade
+ author_email = tarek@ziade.org
+ description = Command-line utility to store and retrieve passwords
+ home_page = http://bitbucket.org/tarek/clvault
+
+ classifier = Development Status :: 3 - Alpha
+ License :: OSI Approved :: GNU General Public License (GPL)
+
+ [files]
+ packages = clvault
+
+
+Our project will depend on the *keyring* project. Let's add it to the
+[metadata] section::
+
+ [metadata]
+ ...
+ requires_dist =
+ keyring
+
+
+Running commands
+----------------
+
+You can run useful commands on your project once the setup.cfg file is ready:
+
+- sdist: creates a source distribution
+- register: registers your project with PyPI
+- upload: uploads the distribution to PyPI
+- install_dist: installs it
+
+All commands are run using *pysetup run*::
+
+ $ pysetup run install_dist
+ $ pysetup run sdist
+ $ pysetup run upload
+
+If you want to push a source distribution of your project to PyPI, do::
+
+ $ pysetup run sdist register upload
+
+
+Installing the project
+----------------------
+
+The project can be installed by manually running the packaging install command::
+
+ $ pysetup run install_dist
diff --git a/Doc/packaging/uploading.rst b/Doc/packaging/uploading.rst
new file mode 100644
index 0000000..297518b
--- /dev/null
+++ b/Doc/packaging/uploading.rst
@@ -0,0 +1,80 @@
+.. _packaging-package-upload:
+
+***************************************
+Uploading Packages to the Package Index
+***************************************
+
+The Python Package Index (PyPI) not only stores the package info, but also the
+package data, if the package author wishes it to. The packaging command
+:command:`upload` pushes the distribution files to PyPI.
+
+The command is invoked immediately after building one or more distribution
+files. For example, the command ::
+
+ python setup.py sdist bdist_wininst upload
+
+will cause the source distribution and the Windows installer to be uploaded to
+PyPI. Note that these will be uploaded even if they were built by an earlier
+invocation of :file:`setup.py`, but only distributions named on the command
+line of the invocation that includes :command:`upload` are uploaded.
+
+The :command:`upload` command uses the username, password, and repository URL
+from the :file:`$HOME/.pypirc` file (see section :ref:`packaging-pypirc` for more on this
+file). If a :command:`register` command was previously called in the same
+invocation, and if the password was entered at the prompt, :command:`upload` will
+reuse the entered password. This is useful if you do not want to store a clear
+text password in the :file:`$HOME/.pypirc` file.
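+
+A minimal sketch of such a file (the values are placeholders; see
+:ref:`packaging-pypirc` for the full format)::
+
+    [distutils]
+    index-servers =
+        pypi
+
+    [pypi]
+    username: <username>
+    password: <password>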
+
+You can specify another PyPI server with the :option:`--repository=*url*`
+option::
+
+ python setup.py sdist bdist_wininst upload -r http://example.com/pypi
+
+See section :ref:`packaging-pypirc` for more on defining several servers.
+
+You can use the :option:`--sign` option to tell :command:`upload` to sign each
+uploaded file using GPG (GNU Privacy Guard). The :program:`gpg` program must
+be available for execution on the system :envvar:`PATH`. You can also specify
+which key to use for signing using the :option:`--identity=*name*` option.
+
+Other :command:`upload` options include :option:`--repository=<url>` or
+:option:`--repository=<section>` where *url* is the url of the server and
+*section* the name of the section in :file:`$HOME/.pypirc`, and
+:option:`--show-response` (which displays the full response text from the PyPI
+server for help in debugging upload problems).
+
+PyPI package display
+====================
+
+The ``description`` field plays a special role on PyPI. It is used by
+the server to display a home page for the registered package.
+
+If you use the `reStructuredText <http://docutils.sourceforge.net/rst.html>`_
+syntax for this field, PyPI will parse it and render the result as HTML on
+the package home page.
+
+The ``description`` field can be filled from a text file located in the
+project::
+
+ from packaging.core import setup
+
+ fp = open('README.txt')
+ try:
+ description = fp.read()
+ finally:
+ fp.close()
+
+ setup(name='Packaging',
+ description=description)
+
+In that case, :file:`README.txt` is a regular reStructuredText text file located
+in the root of the package beside :file:`setup.py`.
+
+To prevent registering broken reStructuredText content, you can use the
+:program:`rst2html` program that is provided by the :mod:`docutils` package
+and check the ``description`` from the command line::
+
+ $ python setup.py --description | rst2html.py > output.html
+
+:mod:`docutils` will display a warning if there's something wrong with your
+syntax.
diff --git a/Doc/reference/datamodel.rst b/Doc/reference/datamodel.rst
index 7dcd459..1f1a660 100644
--- a/Doc/reference/datamodel.rst
+++ b/Doc/reference/datamodel.rst
@@ -276,16 +276,16 @@ Sequences
single: integer
single: Unicode
- The items of a string object are Unicode code units. A Unicode code
- unit is represented by a string object of one item and can hold either
- a 16-bit or 32-bit value representing a Unicode ordinal (the maximum
- value for the ordinal is given in ``sys.maxunicode``, and depends on
- how Python is configured at compile time). Surrogate pairs may be
- present in the Unicode object, and will be reported as two separate
- items. The built-in functions :func:`chr` and :func:`ord` convert
- between code units and nonnegative integers representing the Unicode
- ordinals as defined in the Unicode Standard 3.0. Conversion from and to
- other encodings are possible through the string method :meth:`encode`.
+ A string is a sequence of values that represent Unicode codepoints.
+ All the codepoints in the range ``U+0000 - U+10FFFF`` can be represented
+ in a string. Python doesn't have a :c:type:`char` type, and
+ every character in the string is represented as a string object
+ with length ``1``. The built-in function :func:`ord` converts a
+ character to its codepoint (as an integer); :func:`chr` converts
+ an integer in the range ``0 - 0x10FFFF`` to the corresponding character.
+ :meth:`str.encode` can be used to convert a :class:`str` to
+ :class:`bytes` using the given encoding, and :meth:`bytes.decode` can
+ be used to achieve the opposite.
Tuples
.. index::
@@ -448,6 +448,11 @@ Callable types
+-------------------------+-------------------------------+-----------+
| :attr:`__name__` | The function's name | Writable |
+-------------------------+-------------------------------+-----------+
+ | :attr:`__qualname__` | The function's | Writable |
+ | | :term:`qualified name` | |
+ | | | |
+ | | .. versionadded:: 3.3 | |
+ +-------------------------+-------------------------------+-----------+
| :attr:`__module__` | The name of the module the | Writable |
| | function was defined in, or | |
| | ``None`` if unavailable. | |
@@ -1272,7 +1277,27 @@ Basic customization
inheritance of :meth:`__hash__` will be blocked, just as if :attr:`__hash__`
had been explicitly set to :const:`None`.
- See also the :option:`-R` command-line option.
+
+ .. note::
+
+      Note that, by default, the :meth:`__hash__` values of str, bytes and
+      datetime objects are "salted" with an unpredictable random value.
+      Although they remain constant within an individual Python process, they
+      are not predictable between repeated invocations of Python.
+
+      This is intended to provide protection against a denial-of-service
+      attack caused by carefully-chosen inputs that exploit the worst-case
+      performance of dict insertion, which has O(n^2) complexity.  See
+      http://www.ocert.org/advisories/ocert-2011-003.html for details.
+
+      Changing hash values affects the order in which keys are retrieved from
+      a dict.  Note that Python has never made guarantees about this ordering
+      (and it typically varies between 32-bit and 64-bit builds).
+
+ See also :envvar:`PYTHONHASHSEED`.
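+
+      For example, hash randomization can be disabled for a single
+      reproducible run by setting :envvar:`PYTHONHASHSEED` to ``0``
+      (``myscript.py`` is an illustrative name)::
+
+         $ PYTHONHASHSEED=0 python myscript.py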
+
+ .. versionchanged:: 3.3
+ Hash randomization is enabled by default.
.. method:: object.__bool__(self)
@@ -1353,7 +1378,8 @@ access (use of, assignment to, or deletion of ``x.name``) for class instances.
.. method:: object.__dir__(self)
- Called when :func:`dir` is called on the object. A list must be returned.
+ Called when :func:`dir` is called on the object. A sequence must be
+ returned. :func:`dir` converts the returned sequence to a list and sorts it.
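+
+   A short sketch (``Plugin`` is a hypothetical class)::
+
+      class Plugin:
+          def __dir__(self):
+              # any sequence may be returned; dir() sorts it into a list
+              return ('load', 'unload')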
.. _descriptors:
diff --git a/Doc/reference/expressions.rst b/Doc/reference/expressions.rst
index 5b68468..9624b60 100644
--- a/Doc/reference/expressions.rst
+++ b/Doc/reference/expressions.rst
@@ -318,7 +318,7 @@ Yield expressions
.. productionlist::
yield_atom: "(" `yield_expression` ")"
- yield_expression: "yield" [`expression_list`]
+ yield_expression: "yield" [`expression_list` | "from" `expression`]
The :keyword:`yield` expression is only used when defining a generator function,
and can only be used in the body of a function definition. Using a
@@ -336,7 +336,10 @@ the internal evaluation stack. When the execution is resumed by calling one of
the generator's methods, the function can proceed exactly as if the
:keyword:`yield` expression was just another external call. The value of the
:keyword:`yield` expression after resuming depends on the method which resumed
-the execution.
+the execution. If :meth:`__next__` is used (typically via either a
+:keyword:`for` loop or the :func:`next` builtin) then the result is
+:const:`None`; otherwise, if :meth:`send` is used, then the result will be
+the value passed in to that method.
.. index:: single: coroutine
@@ -346,12 +349,32 @@ suspended. The only difference is that a generator function cannot control
where should the execution continue after it yields; the control is always
transferred to the generator's caller.
-The :keyword:`yield` statement is allowed in the :keyword:`try` clause of a
+:keyword:`yield` expressions are allowed in the :keyword:`try` clause of a
:keyword:`try` ... :keyword:`finally` construct. If the generator is not
resumed before it is finalized (by reaching a zero reference count or by being
garbage collected), the generator-iterator's :meth:`close` method will be
called, allowing any pending :keyword:`finally` clauses to execute.
+When ``yield from <expr>`` is used, it treats the supplied expression as
+a subiterator. All values produced by that subiterator are passed directly
+to the caller of the current generator's methods. Any values passed in with
+:meth:`send` and any exceptions passed in with :meth:`throw` are passed to
+the underlying iterator if it has the appropriate methods. If this is not the
+case, then :meth:`send` will raise :exc:`AttributeError` or :exc:`TypeError`,
+while :meth:`throw` will just raise the passed-in exception immediately.
+
+When the underlying iterator is complete, the :attr:`~StopIteration.value`
+attribute of the raised :exc:`StopIteration` instance becomes the value of
+the yield expression. It can be either set explicitly when raising
+:exc:`StopIteration`, or automatically when the sub-iterator is a generator
+(by returning a value from the sub-generator).
+
+ .. versionchanged:: 3.3
+ Added ``yield from <expr>`` to delegate control flow to a subiterator
+
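+A short sketch of this delegation (``subgen`` and ``delegator`` are
+illustrative names)::
+
+    def subgen():
+        yield 1
+        return 42                     # becomes StopIteration.value
+
+    def delegator():
+        result = yield from subgen()  # yields 1 through, then result == 42
+        yield result
+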
+The parentheses can be omitted when the :keyword:`yield` expression is the
+sole expression on the right hand side of an assignment statement.
+
.. index:: object: generator
The following generator's methods can be used to control the execution of a
@@ -444,6 +467,10 @@ generator functions::
The proposal to enhance the API and syntax of generators, making them
usable as simple coroutines.
+ :pep:`0380` - Syntax for Delegating to a Subgenerator
+ The proposal to introduce the :token:`yield_from` syntax, making delegation
+ to sub-generators easy.
+
.. _primaries:
diff --git a/Doc/reference/lexical_analysis.rst b/Doc/reference/lexical_analysis.rst
index 4b49738..c94a47f 100644
--- a/Doc/reference/lexical_analysis.rst
+++ b/Doc/reference/lexical_analysis.rst
@@ -401,7 +401,7 @@ String literals are described by the following lexical definitions:
.. productionlist::
stringliteral: [`stringprefix`](`shortstring` | `longstring`)
- stringprefix: "r" | "R"
+ stringprefix: "r" | "u" | "ur" | "R" | "U" | "UR" | "Ur" | "uR"
shortstring: "'" `shortstringitem`* "'" | '"' `shortstringitem`* '"'
longstring: "'''" `longstringitem`* "'''" | '"""' `longstringitem`* '"""'
shortstringitem: `shortstringchar` | `stringescapeseq`
@@ -412,7 +412,7 @@ String literals are described by the following lexical definitions:
.. productionlist::
bytesliteral: `bytesprefix`(`shortbytes` | `longbytes`)
- bytesprefix: "b" | "B" | "br" | "Br" | "bR" | "BR"
+ bytesprefix: "b" | "B" | "br" | "Br" | "bR" | "BR" | "rb" | "rB" | "Rb" | "RB"
shortbytes: "'" `shortbytesitem`* "'" | '"' `shortbytesitem`* '"'
longbytes: "'''" `longbytesitem`* "'''" | '"""' `longbytesitem`* '"""'
shortbytesitem: `shortbyteschar` | `bytesescapeseq`
@@ -441,11 +441,23 @@ instance of the :class:`bytes` type instead of the :class:`str` type. They
may only contain ASCII characters; bytes with a numeric value of 128 or greater
must be expressed with escapes.
+As of Python 3.3 it is again possible to prefix string literals with
+``u`` to simplify maintenance of dual 2.x and 3.x codebases.
+
Both string and bytes literals may optionally be prefixed with a letter ``'r'``
or ``'R'``; such strings are called :dfn:`raw strings` and treat backslashes as
literal characters. As a result, in string literals, ``'\U'`` and ``'\u'``
escapes in raw strings are not treated specially.
+ .. versionadded:: 3.3
+ The ``'rb'`` prefix of raw bytes literals has been added as a synonym
+ of ``'br'``.
+
+ .. versionadded:: 3.3
+ Support for the unicode legacy literal (``u'value'``) and other
+ versions were reintroduced to simplify the maintenance of dual
+ Python 2.x and 3.x codebases. See :pep:`414` for more information.
+
In triple-quoted strings, unescaped newlines and quotes are allowed (and are
retained), except that three unescaped quotes in a row terminate the string. (A
"quote" is the character used to open the string, i.e. either ``'`` or ``"``.)
@@ -492,13 +504,13 @@ Escape sequences only recognized in string literals are:
+-----------------+---------------------------------+-------+
| Escape Sequence | Meaning | Notes |
+=================+=================================+=======+
-| ``\N{name}`` | Character named *name* in the | |
+| ``\N{name}`` | Character named *name* in the | \(4) |
| | Unicode database | |
+-----------------+---------------------------------+-------+
-| ``\uxxxx`` | Character with 16-bit hex value | \(4) |
+| ``\uxxxx`` | Character with 16-bit hex value | \(5) |
| | *xxxx* | |
+-----------------+---------------------------------+-------+
-| ``\Uxxxxxxxx`` | Character with 32-bit hex value | \(5) |
+| ``\Uxxxxxxxx`` | Character with 32-bit hex value | \(6) |
| | *xxxxxxxx* | |
+-----------------+---------------------------------+-------+
@@ -516,10 +528,14 @@ Notes:
with the given value.
(4)
+ .. versionchanged:: 3.3
+ Support for name aliases [#]_ has been added.
+
+(5)
Individual code units which form parts of a surrogate pair can be encoded using
this escape sequence. Exactly four hex digits are required.
-(5)
+(6)
Any Unicode character can be encoded this way, but characters outside the Basic
Multilingual Plane (BMP) will be encoded using a surrogate pair if Python is
compiled to use 16-bit code units (the default). Exactly eight hex digits
@@ -706,3 +722,8 @@ The following printing ASCII characters are not used in Python. Their
occurrence outside string literals and comments is an unconditional error::
$ ? `
+
+
+.. rubric:: Footnotes
+
+.. [#] http://www.unicode.org/Public/6.1.0/ucd/NameAliases.txt
diff --git a/Doc/reference/simple_stmts.rst b/Doc/reference/simple_stmts.rst
index 34ed92f..3bd0894 100644
--- a/Doc/reference/simple_stmts.rst
+++ b/Doc/reference/simple_stmts.rst
@@ -425,10 +425,10 @@ When :keyword:`return` passes control out of a :keyword:`try` statement with a
:keyword:`finally` clause, that :keyword:`finally` clause is executed before
really leaving the function.
-In a generator function, the :keyword:`return` statement is not allowed to
-include an :token:`expression_list`. In that context, a bare :keyword:`return`
-indicates that the generator is done and will cause :exc:`StopIteration` to be
-raised.
+In a generator function, the :keyword:`return` statement indicates that the
+generator is done and will cause :exc:`StopIteration` to be raised. The returned
+value (if any) is used as an argument to construct :exc:`StopIteration` and
+becomes the :attr:`StopIteration.value` attribute.
.. _yield:
@@ -450,6 +450,7 @@ The :keyword:`yield` statement is only used when defining a generator function,
and is only used in the body of the generator function. Using a :keyword:`yield`
statement in a function definition is sufficient to cause that definition to
create a generator function instead of a normal function.
+
When a generator function is called, it returns an iterator known as a generator
iterator, or more commonly, a generator. The body of the generator function is
executed by calling the :func:`next` function on the generator repeatedly until
@@ -469,14 +470,28 @@ resumed before it is finalized (by reaching a zero reference count or by being
garbage collected), the generator-iterator's :meth:`close` method will be
called, allowing any pending :keyword:`finally` clauses to execute.
+When ``yield from <expr>`` is used, it treats the supplied expression as
+a subiterator, producing values from it until the underlying iterator is
+exhausted.
+
+ .. versionchanged:: 3.3
+ Added ``yield from <expr>`` to delegate control flow to a subiterator
+
+For full details of :keyword:`yield` semantics, refer to the :ref:`yieldexpr`
+section.
+
.. seealso::
:pep:`0255` - Simple Generators
The proposal for adding generators and the :keyword:`yield` statement to Python.
:pep:`0342` - Coroutines via Enhanced Generators
- The proposal that, among other generator enhancements, proposed allowing
- :keyword:`yield` to appear inside a :keyword:`try` ... :keyword:`finally` block.
+ The proposal to enhance the API and syntax of generators, making them
+ usable as simple coroutines.
+
+ :pep:`0380` - Syntax for Delegating to a Subgenerator
+ The proposal to introduce the :token:`yield_from` syntax, making delegation
+ to sub-generators easy.
.. _raise:
diff --git a/Doc/tools/sphinxext/indexcontent.html b/Doc/tools/sphinxext/indexcontent.html
index 7f85470..abe17f3 100644
--- a/Doc/tools/sphinxext/indexcontent.html
+++ b/Doc/tools/sphinxext/indexcontent.html
@@ -20,10 +20,10 @@
<span class="linkdescr">tutorial for C/C++ programmers</span></p>
<p class="biglink"><a class="biglink" href="{{ pathto("c-api/index") }}">Python/C API</a><br/>
<span class="linkdescr">reference for C/C++ programmers</span></p>
- <p class="biglink"><a class="biglink" href="{{ pathto("install/index") }}">Installing Python Modules</a><br/>
- <span class="linkdescr">information for installers &amp; sys-admins</span></p>
- <p class="biglink"><a class="biglink" href="{{ pathto("distutils/index") }}">Distributing Python Modules</a><br/>
- <span class="linkdescr">sharing modules with others</span></p>
+ <p class="biglink"><a class="biglink" href="{{ pathto("install/index") }}">Installing Python Projects</a><br/>
+ <span class="linkdescr">finding and installing modules and applications</span></p>
+ <p class="biglink"><a class="biglink" href="{{ pathto("packaging/index") }}">Distributing Python Projects</a><br/>
+ <span class="linkdescr">packaging and distributing modules and applications</span></p>
<p class="biglink"><a class="biglink" href="{{ pathto("faq/index") }}">FAQs</a><br/>
<span class="linkdescr">frequently asked questions (with answers!)</span></p>
</td></tr>
diff --git a/Doc/tools/sphinxext/indexsidebar.html b/Doc/tools/sphinxext/indexsidebar.html
index 672492e..3ad24f9 100644
--- a/Doc/tools/sphinxext/indexsidebar.html
+++ b/Doc/tools/sphinxext/indexsidebar.html
@@ -3,7 +3,7 @@
<h3>Docs for other versions</h3>
<ul>
<li><a href="http://docs.python.org/2.7/">Python 2.7 (stable)</a></li>
- <li><a href="http://docs.python.org/3.1/">Python 3.1 (stable)</a></li>
+ <li><a href="http://docs.python.org/3.2/">Python 3.2 (stable)</a></li>
<li><a href="http://www.python.org/doc/versions/">Old versions</a></li>
</ul>
diff --git a/Doc/tools/sphinxext/pyspecific.py b/Doc/tools/sphinxext/pyspecific.py
index 4329281..f359530 100644
--- a/Doc/tools/sphinxext/pyspecific.py
+++ b/Doc/tools/sphinxext/pyspecific.py
@@ -5,12 +5,12 @@
Sphinx extension with Python doc-specific markup.
- :copyright: 2008, 2009, 2010 by Georg Brandl.
+ :copyright: 2008, 2009, 2010, 2011, 2012 by Georg Brandl.
:license: Python license.
"""
ISSUE_URI = 'http://bugs.python.org/issue%s'
-SOURCE_URI = 'http://hg.python.org/cpython/file/3.2/%s'
+SOURCE_URI = 'http://hg.python.org/cpython/file/default/%s'
from docutils import nodes, utils
from sphinx.util.nodes import split_explicit_title
@@ -201,11 +201,12 @@ class PydocTopicsBuilder(Builder):
document.append(doctree.ids[labelid])
destination = StringOutput(encoding='utf-8')
writer.write(document, destination)
- self.topics[label] = str(writer.output)
+ self.topics[label] = writer.output.encode('utf-8')
def finish(self):
f = open(path.join(self.outdir, 'topics.py'), 'w')
try:
+ f.write('# -*- coding: utf-8 -*-\n')
f.write('# Autogenerated by Sphinx on %s\n' % asctime())
f.write('topics = ' + pformat(self.topics) + '\n')
finally:
diff --git a/Doc/tools/sphinxext/susp-ignored.csv b/Doc/tools/sphinxext/susp-ignored.csv
index 5076aed..a3d8d0b 100644
--- a/Doc/tools/sphinxext/susp-ignored.csv
+++ b/Doc/tools/sphinxext/susp-ignored.csv
@@ -1,16 +1,24 @@
c-api/arg,,:ref,"PyArg_ParseTuple(args, ""O|O:ref"", &object, &callback)"
c-api/list,,:high,list[low:high]
c-api/list,,:high,list[low:high] = itemlist
+c-api/sequence,,:i2,del o[i1:i2]
c-api/sequence,,:i2,o[i1:i2]
c-api/sequence,,:i2,o[i1:i2] = v
-c-api/sequence,,:i2,del o[i1:i2]
c-api/unicode,,:end,str[start:end]
+c-api/unicode,,:start,unicode[start:start+length]
+distutils/examples,267,`,This is the description of the ``foobar`` package.
distutils/setupscript,,::,
extending/embedding,,:numargs,"if(!PyArg_ParseTuple(args, "":numargs""))"
-extending/extending,,:set,"if (PyArg_ParseTuple(args, ""O:set_callback"", &temp)) {"
extending/extending,,:myfunction,"PyArg_ParseTuple(args, ""D:myfunction"", &c);"
+extending/extending,,:set,"if (PyArg_ParseTuple(args, ""O:set_callback"", &temp)) {"
extending/newtypes,,:call,"if (!PyArg_ParseTuple(args, ""sss:call"", &arg1, &arg2, &arg3)) {"
extending/windows,,:initspam,/export:initspam
+faq/programming,,:chr,">=4.0) or 1+f(xc,yc,x*x-y*y+xc,2.0*x*y+yc,k-1,f):f(xc,yc,x,y,k,f):chr("
+faq/programming,,::,for x in sequence[::-1]:
+faq/programming,,:reduce,"print((lambda Ru,Ro,Iu,Io,IM,Sx,Sy:reduce(lambda x,y:x+y,map(lambda y,"
+faq/programming,,:reduce,"Sx=Sx,Sy=Sy:reduce(lambda x,y:x+y,map(lambda x,xc=Ru,yc=yc,Ru=Ru,Ro=Ro,"
+faq/windows,229,:EOF,@setlocal enableextensions & python -x %~f0 %* & goto :EOF
+faq/windows,393,:REG,.py :REG_SZ: c:\<path to python>\python.exe -u %s %s
howto/cporting,,:add,"if (!PyArg_ParseTuple(args, ""ii:add_ints"", &one, &two))"
howto/cporting,,:encode,"if (!PyArg_ParseTuple(args, ""O:encode_object"", &myobj))"
howto/cporting,,:say,"if (!PyArg_ParseTuple(args, ""U:say_hello"", &name))"
@@ -22,19 +30,53 @@ howto/curses,,:magenta,"They are: 0:black, 1:red, 2:green, 3:yellow, 4:blue, 5:m
howto/curses,,:red,"They are: 0:black, 1:red, 2:green, 3:yellow, 4:blue, 5:magenta, 6:cyan, and"
howto/curses,,:white,"7:white."
howto/curses,,:yellow,"They are: 0:black, 1:red, 2:green, 3:yellow, 4:blue, 5:magenta, 6:cyan, and"
+howto/logging,,:And,"WARNING:And this, too"
+howto/logging,,:And,"WARNING:root:And this, too"
+howto/logging,,:Doing,INFO:root:Doing something
+howto/logging,,:Finished,INFO:root:Finished
+howto/logging,,:logger,severity:logger name:message
+howto/logging,,:Look,WARNING:root:Look before you leap!
+howto/logging,,:message,severity:logger name:message
+howto/logging,,:root,DEBUG:root:This message should go to the log file
+howto/logging,,:root,INFO:root:Doing something
+howto/logging,,:root,INFO:root:Finished
+howto/logging,,:root,INFO:root:So should this
+howto/logging,,:root,INFO:root:Started
+howto/logging,,:root,"WARNING:root:And this, too"
+howto/logging,,:root,WARNING:root:Look before you leap!
+howto/logging,,:root,WARNING:root:Watch out!
+howto/logging,,:So,INFO:root:So should this
+howto/logging,,:So,INFO:So should this
+howto/logging,,:Started,INFO:root:Started
+howto/logging,,:This,DEBUG:root:This message should go to the log file
+howto/logging,,:This,DEBUG:This message should appear on the console
+howto/logging,,:Watch,WARNING:root:Watch out!
+howto/pyporting,75,::,# make sure to use :: Python *and* :: Python :: 3 so
+howto/pyporting,75,::,"'Programming Language :: Python',"
+howto/pyporting,75,::,'Programming Language :: Python :: 3'
howto/regex,,::,
howto/regex,,:foo,(?:foo)
howto/urllib2,,:example,"for example ""joe@password:example.com"""
howto/webservers,,.. image:,.. image:: http.png
library/audioop,,:ipos,"# factor = audioop.findfactor(in_test[ipos*2:ipos*2+len(out_test)],"
+library/bisect,32,:hi,all(val >= x for val in a[i:hi])
+library/bisect,42,:hi,all(val > x for val in a[i:hi])
+library/configparser,,:home,my_dir: ${Common:home_dir}/twosheds
+library/configparser,,:option,${section:option}
+library/configparser,,:path,python_dir: ${Frameworks:path}/Python/Versions/${Frameworks:Python}
+library/configparser,,:Python,python_dir: ${Frameworks:path}/Python/Versions/${Frameworks:Python}
+library/configparser,,`,# Set the optional `raw` argument of get() to True if you wish to disable
+library/configparser,,:system,path: ${Common:system_dir}/Library/Frameworks/
+library/configparser,,`,# The optional `fallback` argument can be used to provide a fallback value
+library/configparser,,`,# The optional `vars` argument is a dict with members that will take
library/datetime,,:MM,
library/datetime,,:SS,
library/decimal,,:optional,"trailneg:optional trailing minus indicator"
library/difflib,,:ahi,a[alo:ahi]
library/difflib,,:bhi,b[blo:bhi]
+library/difflib,,:i1,
library/difflib,,:i2,
library/difflib,,:j2,
-library/difflib,,:i1,
library/dis,,:TOS,
library/dis,,`,TOS = `TOS`
library/doctest,,`,``factorial`` from the ``example`` module:
@@ -44,96 +86,164 @@ library/functions,,:step,a[start:stop:step]
library/functions,,:stop,"a[start:stop, i]"
library/functions,,:stop,a[start:stop:step]
library/hotshot,,:lineno,"ncalls tottime percall cumtime percall filename:lineno(function)"
+library/http.client,52,:port,host:port
library/httplib,,:port,host:port
-library/imaplib,,:MM,"""DD-Mmm-YYYY HH:MM:SS +HHMM"""
-library/imaplib,,:SS,"""DD-Mmm-YYYY HH:MM:SS +HHMM"""
-library/itertools,,:stop,elements from seq[start:stop:step]
+library/imaplib,,:MM,"""DD-Mmm-YYYY HH:MM:SS"
+library/imaplib,,:SS,"""DD-Mmm-YYYY HH:MM:SS"
library/itertools,,:step,elements from seq[start:stop:step]
+library/itertools,,:stop,elements from seq[start:stop:step]
library/linecache,,:sys,"sys:x:3:3:sys:/dev:/bin/sh"
library/logging,,:And,
+library/logging,,:Doing,INFO:root:Doing something
+library/logging,,:Finished,INFO:root:Finished
+library/logging,,:logger,severity:logger name:message
+library/logging,,:Look,WARNING:root:Look before you leap!
+library/logging,,:message,severity:logger name:message
library/logging,,:package1,
library/logging,,:package2,
+library/logging,,:port,host:port
library/logging,,:root,
+library/logging,,:So,INFO:root:So should this
+library/logging,,:So,INFO:So should this
+library/logging,,:Started,INFO:root:Started
library/logging,,:This,
-library/logging,,:port,host:port
+library/logging,,:Watch,WARNING:root:Watch out!
+library/logging.handlers,,:port,host:port
library/mmap,,:i2,obj[i1:i2]
-library/multiprocessing,,:queue,">>> QueueManager.register('get_queue', callable=lambda:queue)"
-library/multiprocessing,,`,">>> l._callmethod('__getitem__', (20,)) # equiv to `l[20]`"
-library/multiprocessing,,`,">>> l._callmethod('__getslice__', (2, 7)) # equiv to `l[2:7]`"
-library/multiprocessing,,`,# `BaseManager`.
-library/multiprocessing,,`,# `Pool.imap()` (which will save on the amount of code needed anyway).
+library/multiprocessing,,`,# Add more tasks using `put()`
library/multiprocessing,,`,# A test file for the `multiprocessing` package
library/multiprocessing,,`,# A test of `multiprocessing.Pool` class
-library/multiprocessing,,`,# Add more tasks using `put()`
+library/multiprocessing,,`,# `BaseManager`.
+library/multiprocessing,,`,`Cluster` is a subclass of `SyncManager` so it allows creation of
library/multiprocessing,,`,# create server for a `HostManager` object
library/multiprocessing,,`,# Depends on `multiprocessing` package -- tested with `processing-0.60`
+library/multiprocessing,,`,`hostname` gives the name of the host. If hostname is not
library/multiprocessing,,`,# in the original order then consider using `Pool.map()` or
+library/multiprocessing,,`,">>> l._callmethod('__getitem__', (20,)) # equiv to `l[20]`"
+library/multiprocessing,,`,">>> l._callmethod('__getslice__', (2, 7)) # equiv to `l[2:7]`"
library/multiprocessing,,`,# Not sure if we should synchronize access to `socket.accept()` method by
library/multiprocessing,,`,# object. (We import `multiprocessing.reduction` to enable this pickling.)
+library/multiprocessing,,`,# `Pool.imap()` (which will save on the amount of code needed anyway).
+library/multiprocessing,,:queue,">>> QueueManager.register('get_queue', callable=lambda:queue)"
library/multiprocessing,,`,# register the Foo class; make `f()` and `g()` accessible via proxy
library/multiprocessing,,`,# register the Foo class; make `g()` and `_h()` accessible via proxy
library/multiprocessing,,`,# register the generator function baz; use `GeneratorProxy` to make proxies
-library/multiprocessing,,`,`Cluster` is a subclass of `SyncManager` so it allows creation of
-library/multiprocessing,,`,`hostname` gives the name of the host. If hostname is not
library/multiprocessing,,`,`slots` is used to specify the number of slots for processes on
+library/nntplib,,:bytes,:bytes
+library/nntplib,,:bytes,"['xref', 'from', ':lines', ':bytes', 'references', 'date', 'message-id', 'subject']"
+library/nntplib,,:lines,:lines
+library/nntplib,,:lines,"['xref', 'from', ':lines', ':bytes', 'references', 'date', 'message-id', 'subject']"
library/optparse,,:len,"del parser.rargs[:len(value)]"
library/os.path,,:foo,c:foo
library/parser,,`,"""Make a function that raises an argument to the exponent `exp`."""
+library/pdb,,:lineno,filename:lineno
+library/pdb,,:lineno,[filename:lineno | bpnumber [bpnumber ...]]
+library/pickle,,:memory,"conn = sqlite3.connect("":memory:"")"
library/posix,,`,"CFLAGS=""`getconf LFS_CFLAGS`"" OPT=""-g -O2 $CFLAGS"""
-library/profile,,:lineno,ncalls tottime percall cumtime percall filename:lineno(function)
+library/pprint,209,::,"'classifiers': ['Development Status :: 4 - Beta',"
+library/pprint,209,::,"'Intended Audience :: Developers',"
+library/pprint,209,::,"'License :: OSI Approved :: MIT License',"
+library/pprint,209,::,"'Natural Language :: English',"
+library/pprint,209,::,"'Operating System :: OS Independent',"
+library/pprint,209,::,"'Programming Language :: Python',"
+library/pprint,209,::,"'Programming Language :: Python :: 2',"
+library/pprint,209,::,"'Programming Language :: Python :: 2.6',"
+library/pprint,209,::,"'Programming Language :: Python :: 2.7',"
+library/pprint,209,::,"'Topic :: Software Development :: Libraries',"
+library/pprint,209,::,"'Topic :: Software Development :: Libraries :: Python Modules'],"
library/profile,,:lineno,filename:lineno(function)
+library/profile,,:lineno,ncalls tottime percall cumtime percall filename:lineno(function)
+library/profile,,:lineno,"(sort by filename:lineno),"
library/pyexpat,,:elem1,<py:elem1 />
library/pyexpat,,:py,"xmlns:py = ""http://www.python.org/ns/"">"
library/repr,,`,"return `obj`"
library/smtplib,,:port,"as well as a regular host:port server."
+library/smtplib,,:port,method must support that as well as a regular host:port
+library/socket,,::,"(10, 1, 6, '', ('2001:888:2000:d::a2', 80, 0, 0))]"
library/socket,,::,'5aef:2b::8'
-library/sqlite3,,:memory,
+library/socket,,:can,"return (can_id, can_dlc, data[:can_dlc])"
+library/socket,,:len,fds.fromstring(cmsg_data[:len(cmsg_data) - (len(cmsg_data) % fds.itemsize)])
+library/sqlite3,,:age,"cur.execute(""select * from people where name_last=:who and age=:age"", {""who"": who, ""age"": age})"
library/sqlite3,,:age,"select name_last, age from people where name_last=:who and age=:age"
-library/sqlite3,,:who,"select name_last, age from people where name_last=:who and age=:age"
-library/ssl,,:My,"Organization Name (eg, company) [Internet Widgits Pty Ltd]:My Organization, Inc."
+library/sqlite3,,:memory,
+library/sqlite3,,:who,"cur.execute(""select * from people where name_last=:who and age=:age"", {""who"": who, ""age"": age})"
library/ssl,,:My,"Organizational Unit Name (eg, section) []:My Group"
+library/ssl,,:My,"Organization Name (eg, company) [Internet Widgits Pty Ltd]:My Organization, Inc."
library/ssl,,:myserver,"Common Name (eg, YOUR name) []:myserver.mygroup.myorganization.com"
library/ssl,,:MyState,State or Province Name (full name) [Some-State]:MyState
library/ssl,,:ops,Email Address []:ops@myserver.mygroup.myorganization.com
library/ssl,,:Some,"Locality Name (eg, city) []:Some City"
library/ssl,,:US,Country Name (2 letter code) [AU]:US
+library/stdtypes,,::,>>> a[::-1].tolist()
+library/stdtypes,,::,>>> a[::2].tolist()
+library/stdtypes,,:end,s[start:end]
+library/stdtypes,,::,>>> hash(v[::-2]) == hash(b'abcefg'[::-2])
library/stdtypes,,:len,s[len(s):len(s)]
-library/stdtypes,,:len,s[len(s):len(s)]
+library/stdtypes,,::,>>> y = m[::2]
library/string,,:end,s[start:end]
-library/string,,:end,s[start:end]
-library/subprocess,,`,"output=`mycmd myarg`"
library/subprocess,,`,"output=`dmesg | grep hda`"
+library/subprocess,,`,"output=`mycmd myarg`"
+library/tarfile,,:bz2,
library/tarfile,,:compression,filemode[:compression]
library/tarfile,,:gz,
-library/tarfile,,:bz2,
+library/tarfile,,:xz,'a:xz'
+library/tarfile,,:xz,'r:xz'
+library/tarfile,,:xz,'w:xz'
library/time,,:mm,
library/time,,:ss,
library/turtle,,::,Example::
-library/urllib,,:port,:port
library/urllib2,,:password,"""joe:password@python.org"""
+library/urllib,,:port,:port
+library/urllib.request,,:close,Connection:close
+library/urllib.request,,:lang,"xmlns=""http://www.w3.org/1999/xhtml"" xml:lang=""en"" lang=""en"">\n\n<head>\n"
+library/urllib.request,,:password,"""joe:password@python.org"""
library/uuid,,:uuid,urn:uuid:12345678-1234-5678-1234-567812345678
-library/xmlrpclib,,:pass,http://user:pass@host:port/path
-library/xmlrpclib,,:pass,user:pass
-library/xmlrpclib,,:port,http://user:pass@host:port/path
+library/xmlrpc.client,,:pass,http://user:pass@host:port/path
+library/xmlrpc.client,,:pass,user:pass
+library/xmlrpc.client,,:port,http://user:pass@host:port/path
+license,,`,"``Software''), to deal in the Software without restriction, including"
+license,,`,"THE SOFTWARE IS PROVIDED ``AS IS'', WITHOUT WARRANTY OF ANY KIND,"
+license,,`,* THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND
+license,,`,THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
+license,,`,* THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
license,,`,THIS SOFTWARE IS PROVIDED BY THE PROJECT AND CONTRIBUTORS ``AS IS'' AND
license,,:zooko,mailto:zooko@zooko.com
-license,,`,THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
-reference/datamodel,,:step,a[i:j:step]
+packaging/examples,,`,This is the description of the ``foobar`` project.
+packaging/setupcfg,,::,Development Status :: 3 - Alpha
+packaging/setupcfg,,::,License :: OSI Approved :: Mozilla Public License 1.1 (MPL 1.1)
+packaging/setupscript,,::,"'Development Status :: 4 - Beta',"
+packaging/setupscript,,::,"'Environment :: Console',"
+packaging/setupscript,,::,"'Environment :: Web Environment',"
+packaging/setupscript,,::,"'Intended Audience :: Developers',"
+packaging/setupscript,,::,"'Intended Audience :: End Users/Desktop',"
+packaging/setupscript,,::,"'Intended Audience :: System Administrators',"
+packaging/setupscript,,::,"'License :: OSI Approved :: Python Software Foundation License',"
+packaging/setupscript,,::,"'Operating System :: MacOS :: MacOS X',"
+packaging/setupscript,,::,"'Operating System :: Microsoft :: Windows',"
+packaging/setupscript,,::,"'Operating System :: POSIX',"
+packaging/setupscript,,::,"'Programming Language :: Python',"
+packaging/setupscript,,::,"'Topic :: Communications :: Email',"
+packaging/setupscript,,::,"'Topic :: Office/Business',"
+packaging/setupscript,,::,"'Topic :: Software Development :: Bug Tracking',"
+packaging/tutorial,,::,1) License :: OSI Approved :: GNU General Public License (GPL)
+packaging/tutorial,,::,2) License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)
+packaging/tutorial,,::,classifier = Development Status :: 3 - Alpha
+packaging/tutorial,,::,License :: OSI Approved :: GNU General Public License (GPL)
+packaging/tutorial,,::,Type the number of the license you wish to use or ? to try again:: 1
reference/datamodel,,:max,
-reference/expressions,,:index,x[index:index]
+reference/datamodel,,:step,a[i:j:step]
reference/expressions,,:datum,{key:datum...}
reference/expressions,,`,`expressions...`
+reference/expressions,,:index,x[index:index]
reference/grammar,,:output,#diagram:output
reference/grammar,,:rules,#diagram:rules
-reference/grammar,,:token,#diagram:token
reference/grammar,,`,'`' testlist1 '`'
-reference/lexical_analysis,,:fileencoding,# vim:fileencoding=<encoding-name>
+reference/grammar,,:token,#diagram:token
reference/lexical_analysis,,`,", : . ` = ;"
-tutorial/datastructures,,:value,key:value pairs within the braces adds initial key:value pairs
+reference/lexical_analysis,,`,$ ? `
+reference/lexical_analysis,,:fileencoding,# vim:fileencoding=<encoding-name>
tutorial/datastructures,,:value,It is also possible to delete a key:value
-tutorial/stdlib2,,:start,"fields = struct.unpack('<IIIHH', data[start:start+16])"
-tutorial/stdlib2,,:start,extra = data[start:start+extra_size]
-tutorial/stdlib2,,:start,filename = data[start:start+filenamesize]
+tutorial/datastructures,,:value,key:value pairs within the braces adds initial key:value pairs
tutorial/stdlib2,,:config,"logging.warning('Warning:config file %s not found', 'server.conf')"
tutorial/stdlib2,,:config,WARNING:root:Warning:config file server.conf not found
tutorial/stdlib2,,:Critical,CRITICAL:root:Critical error -- shutting down
@@ -141,15 +251,16 @@ tutorial/stdlib2,,:Error,ERROR:root:Error occurred
tutorial/stdlib2,,:root,CRITICAL:root:Critical error -- shutting down
tutorial/stdlib2,,:root,ERROR:root:Error occurred
tutorial/stdlib2,,:root,WARNING:root:Warning:config file server.conf not found
+tutorial/stdlib2,,:start,extra = data[start:start+extra_size]
+tutorial/stdlib2,,:start,"fields = struct.unpack('<IIIHH', data[start:start+16])"
+tutorial/stdlib2,,:start,filename = data[start:start+filenamesize]
tutorial/stdlib2,,:Warning,WARNING:root:Warning:config file server.conf not found
-using/cmdline,,:line,file:line: category: message
using/cmdline,,:category,action:message:category:module:line
+using/cmdline,,:errorhandler,:errorhandler
using/cmdline,,:line,action:message:category:module:line
+using/cmdline,,:line,file:line: category: message
using/cmdline,,:message,action:message:category:module:line
using/cmdline,,:module,action:message:category:module:line
-using/cmdline,,:errorhandler,:errorhandler
-using/windows,162,`,`` this fixes syntax highlighting errors in some editors due to the \\\\ hackery
-using/windows,170,`,``
whatsnew/2.0,418,:len,
whatsnew/2.3,,::,
whatsnew/2.3,,:config,
@@ -163,113 +274,26 @@ whatsnew/2.4,,:System,
whatsnew/2.5,,:memory,:memory:
whatsnew/2.5,,:step,[start:stop:step]
whatsnew/2.5,,:stop,[start:stop:step]
-distutils/examples,267,`,This is the description of the ``foobar`` package.
-faq/programming,,:reduce,"print((lambda Ru,Ro,Iu,Io,IM,Sx,Sy:reduce(lambda x,y:x+y,map(lambda y,"
-faq/programming,,:reduce,"Sx=Sx,Sy=Sy:reduce(lambda x,y:x+y,map(lambda x,xc=Ru,yc=yc,Ru=Ru,Ro=Ro,"
-faq/programming,,:chr,">=4.0) or 1+f(xc,yc,x*x-y*y+xc,2.0*x*y+yc,k-1,f):f(xc,yc,x,y,k,f):chr("
-faq/programming,,::,for x in sequence[::-1]:
-faq/windows,229,:EOF,@setlocal enableextensions & python -x %~f0 %* & goto :EOF
-faq/windows,393,:REG,.py :REG_SZ: c:\<path to python>\python.exe -u %s %s
-library/bisect,32,:hi,all(val >= x for val in a[i:hi])
-library/bisect,42,:hi,all(val > x for val in a[i:hi])
-library/http.client,52,:port,host:port
-library/nntplib,,:bytes,:bytes
-library/nntplib,,:lines,:lines
-library/nntplib,,:lines,"['xref', 'from', ':lines', ':bytes', 'references', 'date', 'message-id', 'subject']"
-library/nntplib,,:bytes,"['xref', 'from', ':lines', ':bytes', 'references', 'date', 'message-id', 'subject']"
-library/pickle,,:memory,"conn = sqlite3.connect("":memory:"")"
-library/profile,,:lineno,"(sort by filename:lineno),"
-library/socket,,::,"(10, 1, 6, '', ('2001:888:2000:d::a2', 80, 0, 0))]"
-library/stdtypes,,:end,s[start:end]
-library/stdtypes,,:end,s[start:end]
-library/urllib.request,,:close,Connection:close
-library/urllib.request,,:password,"""joe:password@python.org"""
-library/urllib.request,,:lang,"xmlns=""http://www.w3.org/1999/xhtml"" xml:lang=""en"" lang=""en"">\n\n<head>\n"
-library/xmlrpc.client,103,:pass,http://user:pass@host:port/path
-library/xmlrpc.client,103,:port,http://user:pass@host:port/path
-library/xmlrpc.client,103,:pass,user:pass
-license,,`,* THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
-license,,`,* THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND
-license,,`,"``Software''), to deal in the Software without restriction, including"
-license,,`,"THE SOFTWARE IS PROVIDED ``AS IS'', WITHOUT WARRANTY OF ANY KIND,"
-reference/lexical_analysis,704,`,$ ? `
+whatsnew/2.7,1619,::,"ParseResult(scheme='http', netloc='[1080::8:800:200C:417A]',"
+whatsnew/2.7,1619,::,>>> urlparse.urlparse('http://[1080::8:800:200C:417A]/foo')
whatsnew/2.7,735,:Sunday,'2009:4:Sunday'
-whatsnew/2.7,862,::,"export PYTHONWARNINGS=all,error:::Cookie:0"
whatsnew/2.7,862,:Cookie,"export PYTHONWARNINGS=all,error:::Cookie:0"
-whatsnew/2.7,1619,::,>>> urlparse.urlparse('http://[1080::8:800:200C:417A]/foo')
-whatsnew/2.7,1619,::,"ParseResult(scheme='http', netloc='[1080::8:800:200C:417A]',"
-library/configparser,,`,# Set the optional `raw` argument of get() to True if you wish to disable
-library/configparser,,`,# The optional `vars` argument is a dict with members that will take
-library/configparser,,`,# The optional `fallback` argument can be used to provide a fallback value
-library/configparser,,:option,${section:option}
-library/configparser,,:system,path: ${Common:system_dir}/Library/Frameworks/
-library/configparser,,:home,my_dir: ${Common:home_dir}/twosheds
-library/configparser,,:path,python_dir: ${Frameworks:path}/Python/Versions/${Frameworks:Python}
-library/configparser,,:Python,python_dir: ${Frameworks:path}/Python/Versions/${Frameworks:Python}
-library/pdb,,:lineno,[filename:lineno | bpnumber [bpnumber ...]]
-library/pdb,,:lineno,filename:lineno
-library/logging,,:Watch,WARNING:root:Watch out!
-library/logging,,:So,INFO:root:So should this
-library/logging,,:Started,INFO:root:Started
-library/logging,,:Doing,INFO:root:Doing something
-library/logging,,:Finished,INFO:root:Finished
-library/logging,,:Look,WARNING:root:Look before you leap!
-library/logging,,:So,INFO:So should this
-library/logging,,:logger,severity:logger name:message
-library/logging,,:message,severity:logger name:message
-whatsnew/3.2,,:directory,... ${buildout:directory}/downloads/dist
-whatsnew/3.2,,:location,... zope9-location = ${zope9:location}
-whatsnew/3.2,,:prefix,... zope-conf = ${custom:prefix}/etc/zope.conf
-howto/logging,,:root,WARNING:root:Watch out!
-howto/logging,,:Watch,WARNING:root:Watch out!
-howto/logging,,:root,DEBUG:root:This message should go to the log file
-howto/logging,,:This,DEBUG:root:This message should go to the log file
-howto/logging,,:root,INFO:root:So should this
-howto/logging,,:So,INFO:root:So should this
-howto/logging,,:root,"WARNING:root:And this, too"
-howto/logging,,:And,"WARNING:root:And this, too"
-howto/logging,,:root,INFO:root:Started
-howto/logging,,:Started,INFO:root:Started
-howto/logging,,:root,INFO:root:Doing something
-howto/logging,,:Doing,INFO:root:Doing something
-howto/logging,,:root,INFO:root:Finished
-howto/logging,,:Finished,INFO:root:Finished
-howto/logging,,:root,WARNING:root:Look before you leap!
-howto/logging,,:Look,WARNING:root:Look before you leap!
-howto/logging,,:This,DEBUG:This message should appear on the console
-howto/logging,,:So,INFO:So should this
-howto/logging,,:And,"WARNING:And this, too"
-howto/logging,,:logger,severity:logger name:message
-howto/logging,,:message,severity:logger name:message
-library/logging.handlers,,:port,host:port
-library/imaplib,116,:MM,"""DD-Mmm-YYYY HH:MM:SS"
-library/imaplib,116,:SS,"""DD-Mmm-YYYY HH:MM:SS"
-whatsnew/3.2,,::,"$ export PYTHONWARNINGS='ignore::RuntimeWarning::,once::UnicodeWarning::'"
-howto/pyporting,75,::,# make sure to use :: Python *and* :: Python :: 3 so
-howto/pyporting,75,::,"'Programming Language :: Python',"
-howto/pyporting,75,::,'Programming Language :: Python :: 3'
-whatsnew/3.2,,:gz,">>> with tarfile.open(name='myarchive.tar.gz', mode='w:gz') as tf:"
-whatsnew/3.2,,:directory,${buildout:directory}/downloads/dist
-whatsnew/3.2,,:location,zope9-location = ${zope9:location}
-whatsnew/3.2,,:prefix,zope-conf = ${custom:prefix}/etc/zope.conf
-whatsnew/3.2,,:beef,>>> urllib.parse.urlparse('http://[dead:beef:cafe:5417:affe:8FA3:deaf:feed]/foo/')
-whatsnew/3.2,,:cafe,>>> urllib.parse.urlparse('http://[dead:beef:cafe:5417:affe:8FA3:deaf:feed]/foo/')
+whatsnew/2.7,862,::,"export PYTHONWARNINGS=all,error:::Cookie:0"
+whatsnew/3.2,,:affe,"netloc='[dead:beef:cafe:5417:affe:8FA3:deaf:feed]',"
whatsnew/3.2,,:affe,>>> urllib.parse.urlparse('http://[dead:beef:cafe:5417:affe:8FA3:deaf:feed]/foo/')
-whatsnew/3.2,,:deaf,>>> urllib.parse.urlparse('http://[dead:beef:cafe:5417:affe:8FA3:deaf:feed]/foo/')
-whatsnew/3.2,,:feed,>>> urllib.parse.urlparse('http://[dead:beef:cafe:5417:affe:8FA3:deaf:feed]/foo/')
whatsnew/3.2,,:beef,"netloc='[dead:beef:cafe:5417:affe:8FA3:deaf:feed]',"
+whatsnew/3.2,,:beef,>>> urllib.parse.urlparse('http://[dead:beef:cafe:5417:affe:8FA3:deaf:feed]/foo/')
whatsnew/3.2,,:cafe,"netloc='[dead:beef:cafe:5417:affe:8FA3:deaf:feed]',"
-whatsnew/3.2,,:affe,"netloc='[dead:beef:cafe:5417:affe:8FA3:deaf:feed]',"
+whatsnew/3.2,,:cafe,>>> urllib.parse.urlparse('http://[dead:beef:cafe:5417:affe:8FA3:deaf:feed]/foo/')
whatsnew/3.2,,:deaf,"netloc='[dead:beef:cafe:5417:affe:8FA3:deaf:feed]',"
+whatsnew/3.2,,:deaf,>>> urllib.parse.urlparse('http://[dead:beef:cafe:5417:affe:8FA3:deaf:feed]/foo/')
+whatsnew/3.2,,:directory,... ${buildout:directory}/downloads/dist
+whatsnew/3.2,,:directory,${buildout:directory}/downloads/dist
+whatsnew/3.2,,::,"$ export PYTHONWARNINGS='ignore::RuntimeWarning::,once::UnicodeWarning::'"
whatsnew/3.2,,:feed,"netloc='[dead:beef:cafe:5417:affe:8FA3:deaf:feed]',"
-library/pprint,209,::,"'classifiers': ['Development Status :: 4 - Beta',"
-library/pprint,209,::,"'Intended Audience :: Developers',"
-library/pprint,209,::,"'License :: OSI Approved :: MIT License',"
-library/pprint,209,::,"'Natural Language :: English',"
-library/pprint,209,::,"'Operating System :: OS Independent',"
-library/pprint,209,::,"'Programming Language :: Python',"
-library/pprint,209,::,"'Programming Language :: Python :: 2',"
-library/pprint,209,::,"'Programming Language :: Python :: 2.6',"
-library/pprint,209,::,"'Programming Language :: Python :: 2.7',"
-library/pprint,209,::,"'Topic :: Software Development :: Libraries',"
-library/pprint,209,::,"'Topic :: Software Development :: Libraries :: Python Modules'],"
+whatsnew/3.2,,:feed,>>> urllib.parse.urlparse('http://[dead:beef:cafe:5417:affe:8FA3:deaf:feed]/foo/')
+whatsnew/3.2,,:gz,">>> with tarfile.open(name='myarchive.tar.gz', mode='w:gz') as tf:"
+whatsnew/3.2,,:location,... zope9-location = ${zope9:location}
+whatsnew/3.2,,:location,zope9-location = ${zope9:location}
+whatsnew/3.2,,:prefix,... zope-conf = ${custom:prefix}/etc/zope.conf
+whatsnew/3.2,,:prefix,zope-conf = ${custom:prefix}/etc/zope.conf
diff --git a/Doc/tutorial/classes.rst b/Doc/tutorial/classes.rst
index 5ce3669..b4f09c2 100644
--- a/Doc/tutorial/classes.rst
+++ b/Doc/tutorial/classes.rst
@@ -184,7 +184,6 @@ The output of the example code is:
.. code-block:: none
-
After local assignment: test spam
After nonlocal assignment: nonlocal spam
After global assignment: nonlocal spam
@@ -698,9 +697,9 @@ example, the following code will print B, C, D in that order::
class D(C):
pass
- for c in [B, C, D]:
+ for cls in [B, C, D]:
try:
- raise c()
+ raise cls()
except D:
print("D")
except C:
diff --git a/Doc/tutorial/datastructures.rst b/Doc/tutorial/datastructures.rst
index 5fb72fd..83b3012 100644
--- a/Doc/tutorial/datastructures.rst
+++ b/Doc/tutorial/datastructures.rst
@@ -19,13 +19,13 @@ objects:
.. method:: list.append(x)
:noindex:
- Add an item to the end of the list; equivalent to ``a[len(a):] = [x]``.
+ Add an item to the end of the list. Equivalent to ``a[len(a):] = [x]``.
.. method:: list.extend(L)
:noindex:
- Extend the list by appending all the items in the given list; equivalent to
+ Extend the list by appending all the items in the given list. Equivalent to
``a[len(a):] = L``.
@@ -40,8 +40,8 @@ objects:
.. method:: list.remove(x)
:noindex:
- Remove the first item from the list whose value is *x*. It is an error if there
- is no such item.
+ Remove the first item from the list whose value is *x*. It is an error if
+ there is no such item.
.. method:: list.pop([i])
@@ -70,13 +70,14 @@ objects:
.. method:: list.sort()
:noindex:
- Sort the items of the list, in place.
+ Sort the items of the list in place.
.. method:: list.reverse()
:noindex:
- Reverse the elements of the list, in place.
+ Reverse the elements of the list in place.
+
An example that uses most of the list methods::
@@ -99,6 +100,10 @@ An example that uses most of the list methods::
>>> a
[-1, 1, 66.25, 333, 333, 1234.5]
+You might have noticed that methods like ``insert``, ``remove`` or ``sort`` that
+modify the list have no return value printed -- they return ``None``. [1]_ This
+is a design principle for all mutable data structures in Python.
+
.. _tut-lists-as-stacks:
@@ -468,7 +473,7 @@ using a non-existent key.
Performing ``list(d.keys())`` on a dictionary returns a list of all the keys
used in the dictionary, in arbitrary order (if you want it sorted, just use
-``sorted(d.keys())`` instead). [1]_ To check whether a single key is in the
+``sorted(d.keys())`` instead). [2]_ To check whether a single key is in the
dictionary, use the :keyword:`in` keyword.
Here is a small example using a dictionary::
@@ -652,6 +657,9 @@ interpreter will raise a :exc:`TypeError` exception.
.. rubric:: Footnotes
-.. [1] Calling ``d.keys()`` will return a :dfn:`dictionary view` object. It
+.. [1] Other languages may return the mutated object, which allows method
+ chaining, such as ``d->insert("a")->remove("b")->sort();``.
+
+.. [2] Calling ``d.keys()`` will return a :dfn:`dictionary view` object. It
supports operations like membership test and iteration, but its contents
are not independent of the original dictionary -- it is only a *view*.
diff --git a/Doc/tutorial/interpreter.rst b/Doc/tutorial/interpreter.rst
index 2338465..8f08cd8 100644
--- a/Doc/tutorial/interpreter.rst
+++ b/Doc/tutorial/interpreter.rst
@@ -10,11 +10,11 @@ Using the Python Interpreter
Invoking the Interpreter
========================
-The Python interpreter is usually installed as :file:`/usr/local/bin/python3.2`
+The Python interpreter is usually installed as :file:`/usr/local/bin/python3.3`
on those machines where it is available; putting :file:`/usr/local/bin` in your
Unix shell's search path makes it possible to start it by typing the command ::
- python3.2
+ python3.3
to the shell. [#]_ Since the choice of the directory where the interpreter lives
is an installation option, other places are possible; check with your local
@@ -22,11 +22,11 @@ Python guru or system administrator. (E.g., :file:`/usr/local/python` is a
popular alternative location.)
On Windows machines, the Python installation is usually placed in
-:file:`C:\\Python32`, though you can change this when you're running the
+:file:`C:\\Python33`, though you can change this when you're running the
installer. To add this directory to your path, you can type the following
command into the command prompt in a DOS box::
- set path=%path%;C:\python32
+ set path=%path%;C:\python33
Typing an end-of-file character (:kbd:`Control-D` on Unix, :kbd:`Control-Z` on
Windows) at the primary prompt causes the interpreter to exit with a zero exit
@@ -93,8 +93,8 @@ with the *secondary prompt*, by default three dots (``...``). The interpreter
prints a welcome message stating its version number and a copyright notice
before printing the first prompt::
- $ python3.2
- Python 3.2 (py3k, Sep 12 2007, 12:21:02)
+ $ python3.3
+ Python 3.3 (py3k, Sep 12 2007, 12:21:02)
[GCC 3.4.6 20060404 (Red Hat 3.4.6-8)] on linux2
Type "help", "copyright", "credits" or "license" for more information.
>>>
@@ -147,7 +147,7 @@ Executable Python Scripts
On BSD'ish Unix systems, Python scripts can be made directly executable, like
shell scripts, by putting the line ::
- #! /usr/bin/env python3.2
+ #! /usr/bin/env python3.3
(assuming that the interpreter is on the user's :envvar:`PATH`) at the beginning
of the script and giving the file an executable mode. The ``#!`` must be the
diff --git a/Doc/tutorial/stdlib.rst b/Doc/tutorial/stdlib.rst
index 9729743..500ca7f 100644
--- a/Doc/tutorial/stdlib.rst
+++ b/Doc/tutorial/stdlib.rst
@@ -15,7 +15,7 @@ operating system::
>>> import os
>>> os.getcwd() # Return the current working directory
- 'C:\\Python31'
+ 'C:\\Python33'
>>> os.chdir('/server/accesslogs') # Change current working directory
>>> os.system('mkdir today') # Run the command mkdir in the system shell
0
diff --git a/Doc/tutorial/stdlib2.rst b/Doc/tutorial/stdlib2.rst
index fe7f027..85c88dc 100644
--- a/Doc/tutorial/stdlib2.rst
+++ b/Doc/tutorial/stdlib2.rst
@@ -141,7 +141,9 @@ standard size and in little-endian byte order::
import struct
- data = open('myfile.zip', 'rb').read()
+ with open('myfile.zip', 'rb') as f:
+ data = f.read()
+
start = 0
for i in range(3): # show the first 3 file headers
start += 14
@@ -271,7 +273,7 @@ applications include caching objects that are expensive to create::
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
d['primary'] # entry was automatically removed
- File "C:/python31/lib/weakref.py", line 46, in __getitem__
+ File "C:/python33/lib/weakref.py", line 46, in __getitem__
o = self.data[key]()
KeyError: 'primary'
diff --git a/Doc/using/cmdline.rst b/Doc/using/cmdline.rst
index 0d12126..40e850e 100644
--- a/Doc/using/cmdline.rst
+++ b/Doc/using/cmdline.rst
@@ -24,7 +24,7 @@ Command line
When invoking Python, you may specify any of these options::
- python [-bBdEhiORqsSuvVWx?] [-c command | -m module-name | script | - ] [args]
+ python [-bBdEhiOqsSuvVWx?] [-c command | -m module-name | script | - ] [args]
The most common use case is, of course, a simple invocation of a script::
@@ -263,7 +263,9 @@ Miscellaneous options
.. cmdoption:: -S
Disable the import of the module :mod:`site` and the site-dependent
- manipulations of :data:`sys.path` that it entails.
+ manipulations of :data:`sys.path` that it entails. Also disable these
+ manipulations if :mod:`site` is explicitly imported later (call
+ :func:`site.main` if you want them to be triggered).
.. cmdoption:: -u
@@ -472,7 +474,7 @@ These environment variables influence Python's behavior.
.. envvar:: PYTHONCASEOK
If this is set, Python ignores case in :keyword:`import` statements. This
- only works on Windows, OS X, and OS/2.
+ only works on Windows and OS X.
.. envvar:: PYTHONDONTWRITEBYTECODE
@@ -484,9 +486,8 @@ These environment variables influence Python's behavior.
.. envvar:: PYTHONHASHSEED
- If this variable is set to ``random``, the effect is the same as specifying
- the :option:`-R` option: a random value is used to seed the hashes of str,
- bytes and datetime objects.
+ If this variable is set to ``random``, a random value is used to seed the
+ hashes of str, bytes and datetime objects.
If :envvar:`PYTHONHASHSEED` is set to an integer value, it is used as a fixed
seed for generating the hash() of the types covered by the hash
@@ -497,8 +498,7 @@ These environment variables influence Python's behavior.
values.
The integer must be a decimal number in the range [0,4294967295]. Specifying
- the value 0 will lead to the same hash values as when hash randomization is
- disabled.
+ the value 0 will disable hash randomization.
.. versionadded:: 3.2.3
@@ -528,8 +528,8 @@ These environment variables influence Python's behavior.
Defines the :data:`user base directory <site.USER_BASE>`, which is used to
compute the path of the :data:`user site-packages directory <site.USER_SITE>`
- and :ref:`Distutils installation paths <inst-alt-install-user>` for ``python
- setup.py install --user``.
+ and :ref:`Packaging installation paths <packaging-alt-install-user>` for
+ ``pysetup run install_dist --user``.
.. seealso::
@@ -548,6 +548,14 @@ These environment variables influence Python's behavior.
separated string, it is equivalent to specifying :option:`-W` multiple
times.
+.. envvar:: PYTHONFAULTHANDLER
+
+   If this environment variable is set, :func:`faulthandler.enable` is called
+   at startup, installing a handler for the :const:`SIGSEGV`, :const:`SIGFPE`,
+   :const:`SIGABRT`, :const:`SIGBUS` and :const:`SIGILL` signals to dump the
+   Python traceback. This is equivalent to the :option:`-X` ``faulthandler``
+   option.
+
Debug-mode variables
~~~~~~~~~~~~~~~~~~~~
diff --git a/Doc/whatsnew/3.2.rst b/Doc/whatsnew/3.2.rst
index 0553ec3..99827ff 100644
--- a/Doc/whatsnew/3.2.rst
+++ b/Doc/whatsnew/3.2.rst
@@ -270,7 +270,7 @@ launch of four parallel threads for copying files::
e.submit(shutil.copy, 'src1.txt', 'dest1.txt')
e.submit(shutil.copy, 'src2.txt', 'dest2.txt')
e.submit(shutil.copy, 'src3.txt', 'dest3.txt')
- e.submit(shutil.copy, 'src4.txt', 'dest4.txt')
+ e.submit(shutil.copy, 'src3.txt', 'dest4.txt')
.. seealso::
@@ -2354,7 +2354,7 @@ A number of small performance enhancements have been added:
(Contributed by Antoine Pitrou; :issue:`3001`.)
* The fast-search algorithm in stringlib is now used by the :meth:`split`,
- :meth:`splitlines` and :meth:`replace` methods on
+ :meth:`rsplit`, :meth:`splitlines` and :meth:`replace` methods on
:class:`bytes`, :class:`bytearray` and :class:`str` objects. Likewise, the
algorithm is also used by :meth:`rfind`, :meth:`rindex`, :meth:`rsplit` and
:meth:`rpartition`.
@@ -2471,14 +2471,14 @@ Code Repository
In addition to the existing Subversion code repository at http://svn.python.org
there is now a `Mercurial <http://mercurial.selenic.com/>`_ repository at
-http://hg.python.org/\.
+http://hg.python.org/ .
After the 3.2 release, there are plans to switch to Mercurial as the primary
repository. This distributed version control system should make it easier for
members of the community to create and share external changesets. See
:pep:`385` for details.
-To learn the new version control system, see the `tutorial by Joel
+To learn to use the new version control system, see the `tutorial by Joel
Spolsky <http://hginit.com>`_ or the `Guide to Mercurial Workflows
<http://mercurial.selenic.com/guide/>`_.
diff --git a/Doc/whatsnew/3.3.rst b/Doc/whatsnew/3.3.rst
new file mode 100644
index 0000000..ae7b3af
--- /dev/null
+++ b/Doc/whatsnew/3.3.rst
@@ -0,0 +1,1182 @@
+****************************
+ What's New In Python 3.3
+****************************
+
+:Author: Raymond Hettinger
+:Release: |release|
+:Date: |today|
+
+.. Rules for maintenance:
+
+ * Anyone can add text to this document. Do not spend very much time
+ on the wording of your changes, because your text will probably
+ get rewritten to some degree.
+
+ * The maintainer will go through Misc/NEWS periodically and add
+ changes; it's therefore more important to add your changes to
+ Misc/NEWS than to this file.
+
+ * This is not a complete list of every single change; completeness
+ is the purpose of Misc/NEWS. Some changes I consider too small
+ or esoteric to include. If such a change is added to the text,
+ I'll just remove it. (This is another reason you shouldn't spend
+ too much time on writing your addition.)
+
+ * If you want to draw your new text to the attention of the
+ maintainer, add 'XXX' to the beginning of the paragraph or
+ section.
+
+ * It's OK to just add a fragmentary note about a change. For
+ example: "XXX Describe the transmogrify() function added to the
+ socket module." The maintainer will research the change and
+ write the necessary text.
+
+ * You can comment out your additions if you like, but it's not
+ necessary (especially when a final release is some months away).
+
+ * Credit the author of a patch or bugfix. Just the name is
+ sufficient; the e-mail address isn't necessary.
+
+ * It's helpful to add the bug/patch number as a comment:
+
+ XXX Describe the transmogrify() function added to the socket
+ module.
+ (Contributed by P.Y. Developer in :issue:`12345`.)
+
+ This saves the maintainer the effort of going through the Mercurial log
+ when researching a change.
+
+This article explains the new features in Python 3.3, compared to 3.2.
+
+
+.. _pep-3118-update:
+
+PEP 3118: New memoryview implementation and buffer protocol documentation
+=========================================================================
+
+:issue:`10181` - memoryview bug fixes and features.
+ Written by Stefan Krah.
+
+The new memoryview implementation comprehensively fixes all ownership and
+lifetime issues of dynamically allocated fields in the Py_buffer struct
+that led to multiple crash reports. Additionally, several functions that
+crashed or returned incorrect results for non-contiguous or multi-dimensional
+input have been fixed.
+
+The memoryview object now has a PEP-3118 compliant getbufferproc()
+that checks the consumer's request type. Many new features have been
+added, and most of them work in full generality for non-contiguous arrays
+and arrays with suboffsets.
+
+The documentation has been updated, clearly spelling out responsibilities
+for both exporters and consumers. Buffer request flags are grouped into
+basic and compound flags. The memory layout of non-contiguous and
+multi-dimensional NumPy-style arrays is explained.
+
+Features
+--------
+
+* All native single character format specifiers in struct module syntax
+ (optionally prefixed with '@') are now supported.
+
+* With some restrictions, the cast() method allows changing the format and
+  shape of C-contiguous arrays.
+
+* Multi-dimensional list representations are supported for any array type.
+
+* Multi-dimensional comparisons are supported for any array type.
+
+* All array types are hashable if the exporting object is hashable
+ and the view is read-only. (Contributed by Antoine Pitrou in
+ :issue:`13411`)
+
+* Arbitrary slicing of any 1-D array type is supported. For example, it
+  is now possible to reverse a memoryview in O(1) by using a negative step.
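+
+  A tiny illustrative sketch of the negative-step case::
+
+      >>> m = memoryview(b'abcde')
+      >>> m[::-1].tobytes()
+      b'edcba'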
+
+API changes
+-----------
+
+* The maximum number of dimensions is officially limited to 64.
+
+* The representation of empty shape, strides and suboffsets is now
+ an empty tuple instead of None.
+
+* Accessing a memoryview element with format 'B' (unsigned bytes)
+  now returns an integer (in accordance with the struct module syntax).
+  To return a bytes object, the view must be cast to 'c' first.
+
+* For further changes see `Build and C API Changes`_ and `Porting C code`_.
+
+.. _pep-393:
+
+PEP 393: Flexible String Representation
+=======================================
+
+The Unicode string type is changed to support multiple internal
+representations, depending on the character with the largest Unicode ordinal
+(1, 2, or 4 bytes) in the represented string. This allows a space-efficient
+representation in common cases, but gives access to full UCS-4 on all
+systems. For compatibility with existing APIs, several representations may
+exist in parallel; over time, this compatibility should be phased out.
+
+On the Python side, there should be no downside to this change.
+
+On the C API side, PEP 393 is fully backward compatible. The legacy API
+should remain available for at least five years. Applications using the
+legacy API will not fully benefit from the memory reduction, or, worse, may
+use a bit more memory, because Python may have to maintain two versions of
+each string (in the legacy format and in the new efficient storage).
+
+Functionality
+-------------
+
+Changes introduced by :pep:`393` are the following:
+
+* Python now always supports the full range of Unicode codepoints, including
+ non-BMP ones (i.e. from ``U+0000`` to ``U+10FFFF``). The distinction between
+ narrow and wide builds no longer exists and Python now behaves like a wide
+ build, even under Windows.
+
+* With the death of narrow builds, the problems specific to narrow builds have
+ also been fixed, for example:
+
+ * :func:`len` now always returns 1 for non-BMP characters,
+ so ``len('\U0010FFFF') == 1``;
+
+ * surrogate pairs are not recombined in string literals,
+ so ``'\uDBFF\uDFFF' != '\U0010FFFF'``;
+
+ * indexing or slicing non-BMP characters returns the expected value,
+ so ``'\U0010FFFF'[0]`` now returns ``'\U0010FFFF'`` and not ``'\uDBFF'``;
+
+ * all other functions in the standard library now correctly handle
+ non-BMP codepoints.
+
+* The value of :data:`sys.maxunicode` is now always ``1114111`` (``0x10FFFF``
+ in hexadecimal). The :c:func:`PyUnicode_GetMax` function still returns
+ either ``0xFFFF`` or ``0x10FFFF`` for backward compatibility, and it should
+ not be used with the new Unicode API (see :issue:`13054`).
+
+* The :file:`./configure` flag ``--with-wide-unicode`` has been removed.
+
+Performance and resource usage
+------------------------------
+
+The storage of Unicode strings now depends on the highest codepoint in the string:
+
+* pure ASCII and Latin1 strings (``U+0000-U+00FF``) use 1 byte per codepoint;
+
+* BMP strings (``U+0000-U+FFFF``) use 2 bytes per codepoint;
+
+* non-BMP strings (``U+10000-U+10FFFF``) use 4 bytes per codepoint.
+
+The net effect is that for most applications, memory usage of string
+storage should decrease significantly, especially compared to former
+wide Unicode builds, since in many cases strings will be pure ASCII
+even in international contexts (because many strings store non-human
+language data, such as XML fragments, HTTP headers, JSON-encoded data,
+etc.). We also hope that it will, for the same reasons, increase CPU
+cache efficiency on non-trivial applications. The memory usage of
+Python 3.3 is two to three times smaller than Python 3.2, and a little
+bit better than Python 2.7, on a Django benchmark (see the PEP for
+details).
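+
+A rough way to observe the new layout from Python (a sketch; exact byte
+counts vary across platforms and builds, only the ordering matters)::
+
+    >>> import sys
+    >>> sys.getsizeof('a' * 100) < sys.getsizeof('\u0153' * 100)
+    True
+    >>> sys.getsizeof('\u0153' * 100) < sys.getsizeof('\U00010400' * 100)
+    True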
+
+
+PEP 3151: Reworking the OS and IO exception hierarchy
+=====================================================
+
+:pep:`3151` - Reworking the OS and IO exception hierarchy
+ PEP written and implemented by Antoine Pitrou.
+
+The hierarchy of exceptions raised by operating system errors is now both
+simplified and finer-grained.
+
+You no longer have to worry about choosing the appropriate exception
+type between :exc:`OSError`, :exc:`IOError`, :exc:`EnvironmentError`,
+:exc:`WindowsError`, :exc:`mmap.error`, :exc:`socket.error` or
+:exc:`select.error`. All these exception types are now a single one:
+:exc:`OSError`. The other names are kept as aliases for compatibility
+reasons.
+
+Also, it is now easier to catch a specific error condition. Instead of
+inspecting the ``errno`` attribute (or ``args[0]``) for a particular
+constant from the :mod:`errno` module, you can catch the appropriate
+:exc:`OSError` subclass. The available subclasses are the following:
+
+* :exc:`BlockingIOError`
+* :exc:`ChildProcessError`
+* :exc:`ConnectionError`
+* :exc:`FileExistsError`
+* :exc:`FileNotFoundError`
+* :exc:`InterruptedError`
+* :exc:`IsADirectoryError`
+* :exc:`NotADirectoryError`
+* :exc:`PermissionError`
+* :exc:`ProcessLookupError`
+* :exc:`TimeoutError`
+
+And the :exc:`ConnectionError` itself has finer-grained subclasses:
+
+* :exc:`BrokenPipeError`
+* :exc:`ConnectionAbortedError`
+* :exc:`ConnectionRefusedError`
+* :exc:`ConnectionResetError`
+
+Thanks to the new exceptions, common usages of the :mod:`errno` module can
+now be avoided. For example, the following code written for Python 3.2::
+
+ from errno import ENOENT, EACCES, EPERM
+
+ try:
+ with open("document.txt") as f:
+ content = f.read()
+ except IOError as err:
+ if err.errno == ENOENT:
+ print("document.txt file is missing")
+ elif err.errno in (EACCES, EPERM):
+ print("You are not allowed to read document.txt")
+ else:
+ raise
+
+can now be written without the :mod:`errno` import and without manual
+inspection of exception attributes::
+
+ try:
+ with open("document.txt") as f:
+ content = f.read()
+ except FileNotFoundError:
+ print("document.txt file is missing")
+ except PermissionError:
+ print("You are not allowed to read document.txt")
+
+
+PEP 380: Syntax for Delegating to a Subgenerator
+================================================
+
+:pep:`380` - Syntax for Delegating to a Subgenerator
+ PEP written by Greg Ewing.
+
+PEP 380 adds the ``yield from`` expression, allowing a generator to delegate
+part of its operations to another generator. This allows a section of code
+containing 'yield' to be factored out and placed in another generator.
+Additionally, the subgenerator is allowed to return with a value, and the
+value is made available to the delegating generator.
+
+While designed primarily for use in delegating to a subgenerator, the ``yield
+from`` expression actually allows delegation to arbitrary subiterators.
+
+For simple iterators, ``yield from iterable`` is essentially just a shortened
+form of ``for item in iterable: yield item``::
+
+ >>> def g(x):
+ ... yield from range(x, 0, -1)
+ ... yield from range(x)
+ ...
+ >>> list(g(5))
+ [5, 4, 3, 2, 1, 0, 1, 2, 3, 4]
+
+However, unlike an ordinary loop, ``yield from`` allows subgenerators to
+receive sent and thrown values directly from the calling scope, and
+return a final value to the outer generator::
+
+ >>> def accumulate(start=0):
+ ... tally = start
+ ... while 1:
+ ... next = yield
+ ... if next is None:
+ ... return tally
+ ... tally += next
+ ...
+ >>> def gather_tallies(tallies, start=0):
+ ... while 1:
+ ... tally = yield from accumulate()
+ ... tallies.append(tally)
+ ...
+ >>> tallies = []
+ >>> acc = gather_tallies(tallies)
+ >>> next(acc) # Ensure the accumulator is ready to accept values
+ >>> for i in range(10):
+ ... acc.send(i)
+ ...
+ >>> acc.send(None) # Finish the first tally
+ >>> for i in range(5):
+ ... acc.send(i)
+ ...
+ >>> acc.send(None) # Finish the second tally
+ >>> tallies
+ [45, 10]
+
+The main principle driving this change is to allow even generators that are
+designed to be used with the ``send`` and ``throw`` methods to be split into
+multiple subgenerators as easily as a single large function can be split into
+multiple subfunctions.
+
+(Implementation by Greg Ewing, integrated into 3.3 by Renaud Blanch, Ryan
+Kelly and Nick Coghlan, documentation by Zbigniew Jędrzejewski-Szmek and
+Nick Coghlan)
+
+
+PEP 409: Suppressing exception context
+======================================
+
+:pep:`409` - Suppressing exception context
+ PEP written by Ethan Furman, implemented by Ethan Furman and Nick Coghlan.
+
+PEP 409 introduces new syntax that allows the display of the chained
+exception context to be disabled. This allows cleaner error messages in
+applications that convert between exception types::
+
+ >>> class D:
+ ... def __init__(self, extra):
+ ... self._extra_attributes = extra
+ ... def __getattr__(self, attr):
+ ... try:
+ ... return self._extra_attributes[attr]
+ ... except KeyError:
+ ... raise AttributeError(attr) from None
+ ...
+ >>> D({}).x
+ Traceback (most recent call last):
+ File "<stdin>", line 1, in <module>
+ File "<stdin>", line 8, in __getattr__
+ AttributeError: x
+
+Without the ``from None`` suffix to suppress the cause, the original
+exception would be displayed by default::
+
+ >>> class C:
+ ... def __init__(self, extra):
+ ... self._extra_attributes = extra
+ ... def __getattr__(self, attr):
+ ... try:
+ ... return self._extra_attributes[attr]
+ ... except KeyError:
+ ... raise AttributeError(attr)
+ ...
+ >>> C({}).x
+ Traceback (most recent call last):
+ File "<stdin>", line 6, in __getattr__
+ KeyError: 'x'
+
+ During handling of the above exception, another exception occurred:
+
+ Traceback (most recent call last):
+ File "<stdin>", line 1, in <module>
+ File "<stdin>", line 8, in __getattr__
+ AttributeError: x
+
+No debugging capability is lost, as the original exception context remains
+available if needed (for example, if an intervening library has incorrectly
+suppressed valuable underlying details)::
+
+ >>> try:
+ ... D({}).x
+ ... except AttributeError as exc:
+ ... print(repr(exc.__context__))
+ ...
+ KeyError('x',)
+
+
+PEP 414: Explicit Unicode literals
+======================================
+
+:pep:`414` - Explicit Unicode literals
+ PEP written by Armin Ronacher.
+
+To ease the transition from Python 2 for Unicode-aware Python applications
+that make heavy use of Unicode literals, Python 3.3 once again supports the
+"``u``" prefix for string literals. This prefix has no semantic significance
+in Python 3; it is provided solely to reduce the number of purely mechanical
+changes in migrating to Python 3, making it easier for developers to focus on
+the more significant semantic changes (such as the stricter default
+separation of binary and text data).
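+
+A minimal demonstration (purely illustrative; the prefix is a no-op)::
+
+    >>> u'text' == 'text'
+    True
+    >>> type(u'text') is str
+    True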
+
+
+PEP 3155: Qualified name for classes and functions
+==================================================
+
+:pep:`3155` - Qualified name for classes and functions
+ PEP written and implemented by Antoine Pitrou.
+
+Functions and class objects have a new ``__qualname__`` attribute representing
+the "path" from the module top-level to their definition. For global functions
+and classes, this is the same as ``__name__``. For other functions and classes,
+it provides better information about where they were actually defined, and
+how they might be accessible from the global scope.
+
+Example with (non-bound) methods::
+
+ >>> class C:
+ ... def meth(self):
+ ... pass
+ >>> C.meth.__name__
+ 'meth'
+ >>> C.meth.__qualname__
+ 'C.meth'
+
+Example with nested classes::
+
+ >>> class C:
+ ... class D:
+ ... def meth(self):
+ ... pass
+ ...
+ >>> C.D.__name__
+ 'D'
+ >>> C.D.__qualname__
+ 'C.D'
+ >>> C.D.meth.__name__
+ 'meth'
+ >>> C.D.meth.__qualname__
+ 'C.D.meth'
+
+Example with nested functions::
+
+ >>> def outer():
+ ... def inner():
+ ... pass
+ ... return inner
+ ...
+ >>> outer().__name__
+ 'inner'
+ >>> outer().__qualname__
+ 'outer.<locals>.inner'
+
+The string representation of those objects is also changed to include the
+new, more precise information::
+
+ >>> str(C.D)
+ "<class '__main__.C.D'>"
+ >>> str(C.D.meth)
+ '<function C.D.meth at 0x7f46b9fe31e0>'
+
+
+Other Language Changes
+======================
+
+Some smaller changes made to the core Python language are:
+
+* Added support for Unicode name aliases and named sequences.
+ Both :func:`unicodedata.lookup()` and ``'\N{...}'`` now resolve name aliases,
+ and :func:`unicodedata.lookup()` resolves named sequences too.
+
+ (Contributed by Ezio Melotti in :issue:`12753`)
+
+* Equality comparisons on :func:`range` objects now return a result reflecting
+  the equality of the underlying sequences generated by those range objects
+  (see the sketch after this list).
+
+  (:issue:`13201`)
+
+* The ``count()``, ``find()``, ``rfind()``, ``index()`` and ``rindex()``
+ methods of :class:`bytes` and :class:`bytearray` objects now accept an
+ integer between 0 and 255 as their first argument.
+
+ (:issue:`12170`)
+
+* A dict lookup now raises a :exc:`RuntimeError` if the dict is modified during
+ the lookup. If you implement your own comparison function for objects used
+ as dict keys and the dict is shared by multiple threads, access to the dict
+ should be protected by a lock.
+
+ (:issue:`14205`)
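+
+A small sketch of the first two changes above (illustrative only)::
+
+    >>> range(0, 6, 2) == range(0, 5, 2)   # both generate 0, 2, 4
+    True
+    >>> b'abcabc'.find(98)                 # 98 == ord('b')
+    1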
+
+
+New and Improved Modules
+========================
+
+abc
+---
+
+Improved support for abstract base classes containing descriptors composed with
+abstract methods. The recommended approach to declaring abstract descriptors is
+now to provide :attr:`__isabstractmethod__` as a dynamically updated
+property. The built-in descriptors have been updated accordingly.
+
+ * :class:`abc.abstractproperty` has been deprecated, use :class:`property`
+ with :func:`abc.abstractmethod` instead.
+ * :class:`abc.abstractclassmethod` has been deprecated, use
+ :class:`classmethod` with :func:`abc.abstractmethod` instead.
+ * :class:`abc.abstractstaticmethod` has been deprecated, use
+ :class:`staticmethod` with :func:`abc.abstractmethod` instead.
+
+(Contributed by Darren Dale in :issue:`11610`)
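+
+A minimal sketch of the recommended replacement for
+:class:`abc.abstractproperty` (the class names are hypothetical)::
+
+    from abc import ABCMeta, abstractmethod
+
+    class Shape(metaclass=ABCMeta):
+        @property
+        @abstractmethod
+        def area(self):
+            """Concrete subclasses must provide this property."""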
+
+array
+-----
+
+The :mod:`array` module now supports the :c:type:`long long` type using the
+``q`` and ``Q`` type codes.
+
+(Contributed by Oren Tirosh and Hirokazu Yamamoto in :issue:`1172711`)
+
+
+bz2
+---
+
+The :mod:`bz2` module has been rewritten from scratch. In the process, several
+new features have been added:
+
+* :class:`bz2.BZ2File` can now read from and write to arbitrary file-like
+ objects, by means of its constructor's *fileobj* argument.
+
+ (Contributed by Nadeem Vawda in :issue:`5863`)
+
+* :class:`bz2.BZ2File` and :func:`bz2.decompress` can now decompress
+ multi-stream inputs (such as those produced by the :program:`pbzip2` tool).
+ :class:`bz2.BZ2File` can now also be used to create this type of file, using
+ the ``'a'`` (append) mode.
+
+ (Contributed by Nir Aides in :issue:`1625`)
+
+* :class:`bz2.BZ2File` now implements all of the :class:`io.BufferedIOBase` API,
+ except for the :meth:`detach` and :meth:`truncate` methods.
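+
+For instance, assuming the *fileobj* argument described above, compressed data
+can be written straight to an in-memory buffer (a sketch)::
+
+    import bz2
+    import io
+
+    buf = io.BytesIO()
+    with bz2.BZ2File(fileobj=buf, mode='w') as f:
+        f.write(b'some data worth compressing')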
+
+
+codecs
+------
+
+The :mod:`~encodings.mbcs` codec has been rewritten to correctly handle the
+``replace`` and ``ignore`` error handlers on all Windows versions. The
+:mod:`~encodings.mbcs` codec now supports all error handlers, instead of only
+``replace`` for encoding and ``ignore`` for decoding.
+
+A new Windows-only codec has been added: ``cp65001`` (:issue:`13216`). It is
+Windows code page 65001 (Windows UTF-8, ``CP_UTF8``). For example, it is used
+by ``sys.stdout`` if the console output code page is set to cp65001 (e.g., by
+using the ``chcp 65001`` command).
+
+Multibyte CJK decoders now resynchronize faster: they ignore only the first
+byte of an invalid byte sequence. For example, ``b'\xff\n'.decode('gb2312',
+'replace')`` now returns a ``\n`` after the replacement character.
+
+(:issue:`12016`)
+
+Incremental CJK codec encoders are no longer reset at each call to their
+encode() methods. For example::
+
+ $ ./python -q
+ >>> import codecs
+ >>> encoder = codecs.getincrementalencoder('hz')('strict')
+ >>> b''.join(encoder.encode(x) for x in '\u52ff\u65bd\u65bc\u4eba\u3002 Bye.')
+ b'~{NpJ)l6HK!#~} Bye.'
+
+This example gives ``b'~{Np~}~{J)~}~{l6~}~{HK~}~{!#~} Bye.'`` with older Python
+versions.
+
+(:issue:`12100`)
+
+The ``unicode_internal`` codec has been deprecated.
+
+crypt
+-----
+
+Addition of salt and modular crypt format and the :func:`~crypt.mksalt`
+function to the :mod:`crypt` module.
+
+(:issue:`10924`)
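+
+A short sketch (Unix-only; the salt, and therefore the hash, is random)::
+
+    import crypt
+
+    salt = crypt.mksalt(crypt.METHOD_SHA512)   # '$6$' plus random characters
+    hashed = crypt.crypt('password', salt)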
+
+curses
+------
+
+ * If the :mod:`curses` module is linked to the ncursesw library, use Unicode
+   functions when Unicode strings or characters are passed (e.g.
+   :c:func:`waddwstr`), and bytes functions otherwise (e.g. :c:func:`waddstr`).
+ * Use the locale encoding instead of ``utf-8`` to encode Unicode strings.
+ * :class:`curses.window` has a new :attr:`curses.window.encoding` attribute.
+ * The :class:`curses.window` class has a new :meth:`~curses.window.get_wch`
+   method to get a wide character.
+ * The :mod:`curses` module has a new :meth:`~curses.unget_wch` function to
+   push a wide character so that the next :meth:`~curses.window.get_wch` will
+   return it.
+
+(Contributed by Iñigo Serna in :issue:`6755`)
+
+faulthandler
+------------
+
+New module: :mod:`faulthandler`, which dumps the Python traceback when a
+fatal signal is received. It can be enabled with:
+
+ * the :envvar:`PYTHONFAULTHANDLER` environment variable
+ * the :option:`-X` ``faulthandler`` command-line option
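+
+It can also be enabled programmatically (a minimal sketch)::
+
+    import faulthandler
+
+    faulthandler.enable()   # install handlers for SIGSEGV, SIGFPE, SIGABRT, ...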
+
+ftplib
+------
+
+The :class:`~ftplib.FTP_TLS` class now provides a new
+:func:`~ftplib.FTP_TLS.ccc` function to revert the control channel back to
+plaintext. This can be useful to take advantage of firewalls that know how
+to handle NAT with non-secure FTP without opening fixed ports.
+
+(Contributed by Giampaolo Rodolà in :issue:`12139`)
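+
+A hedged sketch of the intended usage (the server name is hypothetical)::
+
+    from ftplib import FTP_TLS
+
+    ftps = FTP_TLS('ftp.example.org')
+    ftps.login()    # the control channel is encrypted for authentication
+    ftps.prot_p()   # keep the data connection secure
+    ftps.ccc()      # then drop control-channel encryption for NAT-friendliness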
+
+
+imaplib
+-------
+
+The :class:`~imaplib.IMAP4_SSL` constructor now accepts an SSLContext
+parameter to control parameters of the secure channel.
+
+(Contributed by Sijin Joseph in :issue:`8808`)
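+
+A brief sketch (the host name is hypothetical; any :class:`ssl.SSLContext`
+configuration applies)::
+
+    import imaplib
+    import ssl
+
+    ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
+    conn = imaplib.IMAP4_SSL('imap.example.org', ssl_context=ctx)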
+
+
+io
+--
+
+The :func:`~io.open` function has a new ``'x'`` mode that can be used to
+create a new file exclusively, raising a :exc:`FileExistsError` if the file
+already exists. It is based on the C11 ``'x'`` mode for :c:func:`fopen`.
+
+(Contributed by David Townshend in :issue:`12760`)
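+
+For example (the file name is hypothetical; the second call fails because the
+file now exists)::
+
+    >>> f = open('newfile.txt', 'x')
+    >>> f.close()
+    >>> open('newfile.txt', 'x')
+    Traceback (most recent call last):
+      ...
+    FileExistsError: [Errno 17] File exists: 'newfile.txt'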
+
+
+lzma
+----
+
+The newly-added :mod:`lzma` module provides data compression and decompression
+using the LZMA algorithm, including support for the ``.xz`` and ``.lzma``
+file formats.
+
+(Contributed by Nadeem Vawda and Per Øyvind Karlsen in :issue:`6715`)
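+
+A minimal round-trip sketch using the module-level helpers::
+
+    import lzma
+
+    data = b'repetitive data ' * 100
+    compressed = lzma.compress(data)   # .xz container format by default
+    assert lzma.decompress(compressed) == data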
+
+
+math
+----
+
+The :mod:`math` module has a new function:
+
+ * :func:`~math.log2`: returns the base-2 logarithm of *x*.
+   (Written by Mark Dickinson in :issue:`11888`.)
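+
+   For example::
+
+       >>> import math
+       >>> math.log2(1024)
+       10.0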
+
+
+nntplib
+-------
+
+The :class:`nntplib.NNTP` class now supports the context manager protocol to
+unconditionally consume :exc:`socket.error` exceptions and to close the NNTP
+connection when done::
+
+ >>> from nntplib import NNTP
+ >>> with NNTP('news.gmane.org') as n:
+ ... n.group('gmane.comp.python.committers')
+ ...
+ ('211 1755 1 1755 gmane.comp.python.committers', 1755, 1, 1755, 'gmane.comp.python.committers')
+ >>>
+
+(Contributed by Giampaolo Rodolà in :issue:`9795`)
+
+
+os
+--
+
+* The :mod:`os` module has a new :func:`~os.pipe2` function that makes it
+ possible to create a pipe with :data:`~os.O_CLOEXEC` or
+ :data:`~os.O_NONBLOCK` flags set atomically. This is especially useful to
+ avoid race conditions in multi-threaded programs.
+
+* The :mod:`os` module has a new :func:`~os.sendfile` function which provides
+  an efficient "zero-copy" way of copying data from one file (or socket)
+  descriptor to another. The phrase "zero-copy" refers to the fact that all of
+  the copying of data between the two descriptors is done entirely by the
+  kernel, with no copying of data into userspace buffers. :func:`~os.sendfile`
+  can be used to efficiently copy data from a file on disk to a network socket,
+  e.g. for downloading a file (see the sketch at the end of this section).
+
+ (Patch submitted by Ross Lagerwall and Giampaolo Rodolà in :issue:`10882`.)
+
+* The :mod:`os` module has two new functions: :func:`~os.getpriority` and
+ :func:`~os.setpriority`. They can be used to get or set process
+ niceness/priority in a fashion similar to :func:`os.nice` but extended to all
+ processes instead of just the current one.
+
+ (Patch submitted by Giampaolo Rodolà in :issue:`10784`.)
+
+* The :mod:`os` module has a new :func:`~os.fwalk` function similar to
+ :func:`~os.walk` except that it also yields file descriptors referring to the
+ directories visited. This is especially useful to avoid symlink races.
+
+* "at" functions (:issue:`4761`):
+
+ * :func:`~os.faccessat`
+ * :func:`~os.fchmodat`
+ * :func:`~os.fchownat`
+ * :func:`~os.fstatat`
+ * :func:`~os.futimesat`
+ * :func:`~os.linkat`
+ * :func:`~os.mkdirat`
+ * :func:`~os.mkfifoat`
+ * :func:`~os.mknodat`
+ * :func:`~os.openat`
+ * :func:`~os.readlinkat`
+ * :func:`~os.renameat`
+ * :func:`~os.symlinkat`
+ * :func:`~os.unlinkat`
+ * :func:`~os.utimensat`
+
+* extended attributes (:issue:`12720`):
+
+ * :func:`~os.fgetxattr`
+ * :func:`~os.flistxattr`
+ * :func:`~os.fremovexattr`
+ * :func:`~os.fsetxattr`
+ * :func:`~os.getxattr`
+ * :func:`~os.lgetxattr`
+ * :func:`~os.listxattr`
+ * :func:`~os.llistxattr`
+ * :func:`~os.lremovexattr`
+ * :func:`~os.lsetxattr`
+ * :func:`~os.removexattr`
+ * :func:`~os.setxattr`
+
+* Scheduler functions (:issue:`12655`):
+
+ * :func:`~os.sched_get_priority_max`
+ * :func:`~os.sched_get_priority_min`
+ * :func:`~os.sched_getaffinity`
+ * :func:`~os.sched_getparam`
+ * :func:`~os.sched_getscheduler`
+ * :func:`~os.sched_rr_get_interval`
+ * :func:`~os.sched_setaffinity`
+ * :func:`~os.sched_setparam`
+ * :func:`~os.sched_setscheduler`
+ * :func:`~os.sched_yield`
+
+* Extra POSIX functions (:issue:`10812`):
+
+ * :func:`~os.fexecve`
+ * :func:`~os.futimens`
+ * :func:`~os.futimes`
+ * :func:`~os.lockf`
+ * :func:`~os.lutimes`
+ * :func:`~os.posix_fadvise`
+ * :func:`~os.posix_fallocate`
+ * :func:`~os.pread`
+ * :func:`~os.pwrite`
+ * :func:`~os.readv`
+ * :func:`~os.sync`
+ * :func:`~os.truncate`
+ * :func:`~os.waitid`
+ * :func:`~os.writev`
+
+* Other new functions:
+
+ * :func:`~os.flistdir` (:issue:`10755`)
+ * :func:`~os.getgrouplist` (:issue:`9344`)
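+
+As a minimal sketch of the "zero-copy" idiom mentioned above (POSIX-only;
+on some platforms the destination must be a socket; the file name and peer
+address are hypothetical)::
+
+    import os
+    import socket
+
+    sock = socket.create_connection(('example.org', 80))
+    with open('payload.bin', 'rb') as src:
+        # the kernel moves the bytes directly; no userspace buffer is involved
+        os.sendfile(sock.fileno(), src.fileno(), 0, 4096)
+    sock.close()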
+
+
+packaging
+---------
+
+:mod:`distutils` has undergone additions and refactoring under a new name,
+:mod:`packaging`; the rename gave developers room to break backward
+compatibility where necessary. :mod:`distutils` is still provided in the
+standard library, but users are encouraged to transition to :mod:`packaging`.
+For older versions of Python, a backport compatible with 2.4+ and 3.1+ will be
+made available on PyPI under the name :mod:`distutils2`.
+
+.. TODO add examples and howto to the packaging docs and link to them
+
+
+pdb
+---
+
+* Tab-completion is now available not only for command names, but also their
+ arguments. For example, for the ``break`` command, function and file names
+ are completed. (Contributed by Georg Brandl in :issue:`14210`)
+
+
+pydoc
+-----
+
+The Tk GUI and the :func:`~pydoc.serve` function have been removed from the
+:mod:`pydoc` module: ``pydoc -g`` and :func:`~pydoc.serve` had been deprecated
+in Python 3.2.
+
+
+sched
+-----
+
+* :meth:`~sched.scheduler.run` now accepts a *blocking* parameter which, when
+  set to ``False``, makes the method execute the scheduled events due to
+  expire soonest (if any) and then return immediately. This is useful when you
+  want to use the :class:`~sched.scheduler` in non-blocking applications.
+  (Contributed by Giampaolo Rodolà in :issue:`13449`)
+
+* The :class:`~sched.scheduler` class can now be safely used in multi-threaded
+  environments. (Contributed by Josiah Carlson and Giampaolo Rodolà in
+  :issue:`8684`)
+
+* The *timefunc* and *delayfunc* parameters of the :class:`~sched.scheduler`
+  constructor are now optional and default to :func:`time.time` and
+  :func:`time.sleep` respectively. (Contributed by Chris Clark in
+  :issue:`13245`)
+
+* The *argument* parameter of :meth:`~sched.scheduler.enter` and
+  :meth:`~sched.scheduler.enterabs` is now optional. (Contributed by Chris
+  Clark in :issue:`13245`)
+
+* :meth:`~sched.scheduler.enter` and :meth:`~sched.scheduler.enterabs`
+ now accept a *kwargs* parameter. (Contributed by Chris Clark in
+ :issue:`13245`)
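+
+A short sketch combining the new defaults and the new keyword parameters
+(illustrative only)::
+
+    import sched
+
+    s = sched.scheduler()       # timefunc/delayfunc default to time.time/time.sleep
+    s.enter(2, 1, print, argument=('tick',))
+    s.run(blocking=False)       # run events already due, then return immediately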
+
+
+shutil
+------
+
+* The :mod:`shutil` module has these new functions:
+
+  * :func:`~shutil.disk_usage`: provides total, used and free disk space
+    statistics. (Contributed by Giampaolo Rodolà in :issue:`12442`)
+  * :func:`~shutil.chown`: allows changing the user and/or group of the given
+    path, accepting user/group names as well as numeric ids. (Contributed by
+    Sandro Tosi in :issue:`12191`)
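+
+For example (the figures are illustrative)::
+
+    >>> import shutil
+    >>> shutil.disk_usage('/')
+    usage(total=21378641920, used=4239622144, free=16055779328)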
+
+
+signal
+------
+
+* The :mod:`signal` module has new functions:
+
+  * :func:`~signal.pthread_sigmask`: fetch and/or change the signal mask of the
+    calling thread (Contributed by Jean-Paul Calderone in :issue:`8407`);
+  * :func:`~signal.pthread_kill`: send a signal to a thread;
+  * :func:`~signal.sigpending`: examine pending signals;
+  * :func:`~signal.sigwait`: wait for a signal;
+  * :func:`~signal.sigwaitinfo`: wait for a signal, returning detailed
+    information about it;
+  * :func:`~signal.sigtimedwait`: like :func:`~signal.sigwaitinfo` but with a
+    timeout.
+
+* The signal handler writes the signal number as a single byte instead of
+  a nul byte into the wakeup file descriptor, so it is possible to wait for
+  more than one signal and know which signals were raised.
+
+* :func:`signal.signal` and :func:`signal.siginterrupt` now raise an
+  :exc:`OSError` instead of a :exc:`RuntimeError`; :exc:`OSError` has an
+  ``errno`` attribute.
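+
+A sketch of masking a signal around a critical section (POSIX-only)::
+
+    import signal
+
+    old_mask = signal.pthread_sigmask(signal.SIG_BLOCK, {signal.SIGINT})
+    try:
+        ...   # critical section: SIGINT is left pending here
+    finally:
+        signal.pthread_sigmask(signal.SIG_SETMASK, old_mask)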
+
+smtplib
+-------
+
+The :class:`~smtplib.SMTP_SSL` constructor and the :meth:`~smtplib.SMTP.starttls`
+method now accept an SSLContext parameter to control parameters of the secure
+channel.
+
+(Contributed by Kasun Herath in :issue:`8809`)
+
+
+socket
+------
+
+* The :class:`~socket.socket` class now exposes additional methods to process
+ ancillary data when supported by the underlying platform:
+
+ * :func:`~socket.socket.sendmsg`
+ * :func:`~socket.socket.recvmsg`
+ * :func:`~socket.socket.recvmsg_into`
+
+ (Contributed by David Watson in :issue:`6560`, based on an earlier patch by
+ Heiko Wundram)
+
+* The :class:`~socket.socket` class now supports the PF_CAN protocol family
+ (http://en.wikipedia.org/wiki/Socketcan), on Linux
+ (http://lwn.net/Articles/253425).
+
+ (Contributed by Matthias Fuchs, updated by Tiago Gonçalves in :issue:`10141`)
+
+* The :class:`~socket.socket` class now supports the PF_RDS protocol family
+ (http://en.wikipedia.org/wiki/Reliable_Datagram_Sockets and
+ http://oss.oracle.com/projects/rds/).
+
+
+ssl
+---
+
+* The :mod:`ssl` module has two new random generation functions:
+
+ * :func:`~ssl.RAND_bytes`: generate cryptographically strong
+ pseudo-random bytes.
+ * :func:`~ssl.RAND_pseudo_bytes`: generate pseudo-random bytes.
+
+ (Contributed by Victor Stinner in :issue:`12049`)
+
+* The :mod:`ssl` module now exposes a finer-grained exception hierarchy
+ in order to make it easier to inspect the various kinds of errors.
+
+ (Contributed by Antoine Pitrou in :issue:`11183`)
+
+* :meth:`~ssl.SSLContext.load_cert_chain` now accepts a *password* argument
+ to be used if the private key is encrypted.
+
+ (Contributed by Adam Simpkins in :issue:`12803`)
+
+* Diffie-Hellman key exchange, both regular and Elliptic Curve-based, is
+ now supported through the :meth:`~ssl.SSLContext.load_dh_params` and
+ :meth:`~ssl.SSLContext.set_ecdh_curve` methods.
+
+ (Contributed by Antoine Pitrou in :issue:`13626` and :issue:`13627`)
+
+* SSL sockets have a new :meth:`~ssl.SSLSocket.get_channel_binding` method
+ allowing the implementation of certain authentication mechanisms such as
+ SCRAM-SHA-1-PLUS.
+
+ (Contributed by Jacek Konieczny in :issue:`12551`)
+
+* You can query the SSL compression algorithm used by an SSL socket, thanks
+ to its new :meth:`~ssl.SSLSocket.compression` method.
+
+ (Contributed by Antoine Pitrou in :issue:`13634`)
+
+
+sys
+---
+
+* The :mod:`sys` module has a new :data:`~sys.thread_info` :term:`struct
+  sequence` holding information about the thread implementation.
+
+ (:issue:`11223`)
+
+
+time
+----
+
+The :mod:`time` module has new functions:
+
+* :func:`~time.clock_getres` and :func:`~time.clock_gettime`, along with
+  ``CLOCK_xxx`` constants.
+* :func:`~time.steady`.
+
+(Contributed by Victor Stinner in :issue:`10278`)
+
+
+urllib
+------
+
+The :class:`~urllib.request.Request` class now accepts a *method* argument
+used by :meth:`~urllib.request.Request.get_method` to determine which HTTP
+method should be used. For example, this will send a ``'HEAD'`` request::
+
+ >>> urlopen(Request('http://www.python.org', method='HEAD'))
+
+(:issue:`1673007`)
+
+
+Optimizations
+=============
+
+Major performance enhancements have been added:
+
+* Thanks to :pep:`393`, some operations on Unicode strings have been optimized:
+
+  * the memory footprint is divided by two to four, depending on the text;
+  * encoding an ASCII string to UTF-8 no longer needs to encode characters,
+    since the UTF-8 representation is shared with the ASCII representation;
+  * the UTF-8 encoder has been optimized;
+  * repeating a single ASCII letter and getting a substring of an ASCII
+    string are four times faster.
+
+
+Build and C API Changes
+=======================
+
+Changes to Python's build process and to the C API include:
+
+* New :pep:`3118` related function:
+
+ * :c:func:`PyMemoryView_FromMemory`
+
+* :pep:`393` added new Unicode types, macros and functions:
+
+ * High-level API:
+
+ * :c:func:`PyUnicode_CopyCharacters`
+ * :c:func:`PyUnicode_FindChar`
+ * :c:func:`PyUnicode_GetLength`, :c:macro:`PyUnicode_GET_LENGTH`
+ * :c:func:`PyUnicode_New`
+ * :c:func:`PyUnicode_Substring`
+ * :c:func:`PyUnicode_ReadChar`, :c:func:`PyUnicode_WriteChar`
+
+ * Low-level API:
+
+ * :c:type:`Py_UCS1`, :c:type:`Py_UCS2`, :c:type:`Py_UCS4` types
+ * :c:type:`PyASCIIObject` and :c:type:`PyCompactUnicodeObject` structures
+ * :c:macro:`PyUnicode_READY`
+ * :c:func:`PyUnicode_FromKindAndData`
+ * :c:func:`PyUnicode_AsUCS4`, :c:func:`PyUnicode_AsUCS4Copy`
+ * :c:macro:`PyUnicode_DATA`, :c:macro:`PyUnicode_1BYTE_DATA`,
+ :c:macro:`PyUnicode_2BYTE_DATA`, :c:macro:`PyUnicode_4BYTE_DATA`
+ * :c:macro:`PyUnicode_KIND` with :c:type:`PyUnicode_Kind` enum:
+ :c:data:`PyUnicode_WCHAR_KIND`, :c:data:`PyUnicode_1BYTE_KIND`,
+ :c:data:`PyUnicode_2BYTE_KIND`, :c:data:`PyUnicode_4BYTE_KIND`
+ * :c:macro:`PyUnicode_READ`, :c:macro:`PyUnicode_READ_CHAR`, :c:macro:`PyUnicode_WRITE`
+ * :c:macro:`PyUnicode_MAX_CHAR_VALUE`
+
+
+Deprecated
+==========
+
+Unsupported Operating Systems
+-----------------------------
+
+OS/2 and VMS are no longer supported due to the lack of a maintainer.
+
+Windows 2000 and Windows platforms which set ``COMSPEC`` to ``command.com``
+are no longer supported due to maintenance burden.
+
+
+Deprecated Python modules, functions and methods
+------------------------------------------------
+
+* The :mod:`packaging` module replaces the :mod:`distutils` module
+* The ``unicode_internal`` codec has been deprecated because of :pep:`393`;
+  use UTF-8, UTF-16 (``utf-16-le`` or ``utf-16-be``), or UTF-32
+  (``utf-32-le`` or ``utf-32-be``) instead
+* :meth:`ftplib.FTP.nlst` and :meth:`ftplib.FTP.dir`: use
+ :meth:`ftplib.FTP.mlsd`
+* :func:`platform.popen`: use the :mod:`subprocess` module. Check especially
+ the :ref:`subprocess-replacements` section.
+* :issue:`13374`: The Windows bytes API has been deprecated in the :mod:`os`
+  module. Use Unicode filenames instead of bytes filenames so as not to depend
+  on the ANSI code page and to support any filename.
+* :issue:`13988`: The :mod:`xml.etree.cElementTree` module is deprecated. The
+ accelerator is used automatically whenever available.
+
+
+Deprecated functions and types of the C API
+-------------------------------------------
+
+The :c:type:`Py_UNICODE` type has been deprecated by :pep:`393` and will be
+removed in Python 4. All functions using this type are deprecated.
+
+Unicode functions and methods using :c:type:`Py_UNICODE` and
+:c:type:`Py_UNICODE*` types:
+
+ * :c:macro:`PyUnicode_FromUnicode`: use :c:func:`PyUnicode_FromWideChar` or
+ :c:func:`PyUnicode_FromKindAndData`
+ * :c:macro:`PyUnicode_AS_UNICODE`, :c:func:`PyUnicode_AsUnicode`,
+ :c:func:`PyUnicode_AsUnicodeAndSize`: use :c:func:`PyUnicode_AsWideCharString`
+ * :c:macro:`PyUnicode_AS_DATA`: use :c:macro:`PyUnicode_DATA` with
+ :c:macro:`PyUnicode_READ` and :c:macro:`PyUnicode_WRITE`
+ * :c:macro:`PyUnicode_GET_SIZE`, :c:func:`PyUnicode_GetSize`: use
+ :c:macro:`PyUnicode_GET_LENGTH` or :c:func:`PyUnicode_GetLength`
+ * :c:macro:`PyUnicode_GET_DATA_SIZE`: use
+   ``PyUnicode_GET_LENGTH(str) * PyUnicode_KIND(str)`` (only works on ready
+   strings)
+ * :c:func:`PyUnicode_AsUnicodeCopy`: use :c:func:`PyUnicode_AsUCS4Copy` or
+ :c:func:`PyUnicode_AsWideCharString`
+ * :c:func:`PyUnicode_GetMax`
+
+
+Functions and macros manipulating Py_UNICODE* strings:
+
+ * :c:macro:`Py_UNICODE_strlen`: use :c:func:`PyUnicode_GetLength` or
+ :c:macro:`PyUnicode_GET_LENGTH`
+ * :c:macro:`Py_UNICODE_strcat`: use :c:func:`PyUnicode_CopyCharacters` or
+ :c:func:`PyUnicode_FromFormat`
+ * :c:macro:`Py_UNICODE_strcpy`, :c:macro:`Py_UNICODE_strncpy`,
+ :c:macro:`Py_UNICODE_COPY`: use :c:func:`PyUnicode_CopyCharacters` or
+ :c:func:`PyUnicode_Substring`
+ * :c:macro:`Py_UNICODE_strcmp`: use :c:func:`PyUnicode_Compare`
+ * :c:macro:`Py_UNICODE_strncmp`: use :c:func:`PyUnicode_Tailmatch`
+ * :c:macro:`Py_UNICODE_strchr`, :c:macro:`Py_UNICODE_strrchr`: use
+ :c:func:`PyUnicode_FindChar`
+ * :c:macro:`Py_UNICODE_FILL`: use :c:func:`PyUnicode_Fill`
+ * :c:macro:`Py_UNICODE_MATCH`
+
+Encoders:
+
+ * :c:func:`PyUnicode_Encode`: use :c:func:`PyUnicode_AsEncodedObject`
+ * :c:func:`PyUnicode_EncodeUTF7`
+ * :c:func:`PyUnicode_EncodeUTF8`: use :c:func:`PyUnicode_AsUTF8` or
+ :c:func:`PyUnicode_AsUTF8String`
+ * :c:func:`PyUnicode_EncodeUTF32`
+ * :c:func:`PyUnicode_EncodeUTF16`
+ * :c:func:`PyUnicode_EncodeUnicodeEscape`: use
+   :c:func:`PyUnicode_AsUnicodeEscapeString`
+ * :c:func:`PyUnicode_EncodeRawUnicodeEscape`: use
+   :c:func:`PyUnicode_AsRawUnicodeEscapeString`
+ * :c:func:`PyUnicode_EncodeLatin1`: use :c:func:`PyUnicode_AsLatin1String`
+ * :c:func:`PyUnicode_EncodeASCII`: use :c:func:`PyUnicode_AsASCIIString`
+ * :c:func:`PyUnicode_EncodeCharmap`
+ * :c:func:`PyUnicode_TranslateCharmap`
+ * :c:func:`PyUnicode_EncodeMBCS`: use :c:func:`PyUnicode_AsMBCSString` or
+ :c:func:`PyUnicode_EncodeCodePage` (with the ``code_page`` argument set
+ to ``CP_ACP``)
+ * :c:func:`PyUnicode_EncodeDecimal`,
+ :c:func:`PyUnicode_TransformDecimalToASCII`
+
+
+Porting to Python 3.3
+=====================
+
+This section lists previously described changes and other bugfixes
+that may require changes to your code.
+
+Porting Python code
+-------------------
+
+.. XXX add a point about hash randomization and that it's always on in 3.3
+
+* :issue:`14205`: A dict lookup now raises a :exc:`RuntimeError` if the dict is
+ modified during the lookup. If you implement your own comparison function for
+ objects used as dict keys and the dict is shared by multiple threads, access
+ to the dict should be protected by a lock.
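+
+ A minimal sketch of such locking (the module-level dict, lock and helper
+ are illustrative assumptions)::
+
+     import threading
+
+     _lock = threading.Lock()
+     _shared = {}
+
+     def lookup(key):
+         # Hold the lock so no other thread can mutate _shared while
+         # the (possibly custom) __eq__ of a key is running.
+         with _lock:
+             return _shared.get(key)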
+
+* :issue:`12326`: On Linux, ``sys.platform`` no longer contains the major
+ version: it is now always ``'linux'``, instead of ``'linux2'`` or
+ ``'linux3'`` depending on the Linux version used to build Python. Replace
+ ``sys.platform == 'linux2'`` with ``sys.platform.startswith('linux')``, or
+ directly with ``sys.platform == 'linux'`` if you don't need to support
+ older Python versions, as in the sketch below.
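+
+ A version-agnostic check (a trivial sketch)::
+
+     import sys
+
+     # True for 'linux' (Python 3.3+) and 'linux2'/'linux3' (older versions)
+     on_linux = sys.platform.startswith('linux')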
+
+* :issue:`13847`, :issue:`14180`: :mod:`time` and :mod:`datetime`:
+ :exc:`OverflowError` is now raised instead of :exc:`ValueError` if a
+ timestamp is out of range. :exc:`OSError` is now raised if the C functions
+ :c:func:`gmtime` or :c:func:`localtime` fail.
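+
+ A sketch of the new behaviour (the timestamp is deliberately out of
+ range)::
+
+     import time
+
+     try:
+         time.localtime(2 ** 64)    # far beyond the range of time_t
+     except OverflowError:
+         pass                       # Python 3.2 raised ValueError here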
+
+Porting C code
+--------------
+
+* In the course of changes to the buffer API, the undocumented
+ :c:member:`~Py_buffer.smalltable` member of the
+ :c:type:`Py_buffer` structure has been removed, and the
+ layout of the :c:type:`PyMemoryViewObject` has changed.
+
+ All extensions relying on the relevant parts in ``memoryobject.h``
+ or ``object.h`` must be rebuilt.
+
+* Due to :ref:`PEP 393 <pep-393>`, the :c:type:`Py_UNICODE` type and all
+ functions using this type are deprecated (but will stay available for
+ at least five years). If you were using low-level Unicode APIs to
+ construct and access unicode objects and you want to benefit from the
+ memory footprint reduction provided by PEP 393, you will have to convert
+ your code to the new :doc:`Unicode API <../c-api/unicode>`.
+
+ However, if you have only been using high-level functions such as
+ :c:func:`PyUnicode_Concat`, :c:func:`PyUnicode_Join` or
+ :c:func:`PyUnicode_FromFormat`, your code will automatically take
+ advantage of the new unicode representations.
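+
+ The effect of the new representation is also visible from pure Python
+ code, e.g. with :func:`sys.getsizeof` (exact sizes vary across platforms
+ and builds)::
+
+     import sys
+
+     sys.getsizeof('abc')              # 1 byte per character (Latin-1)
+     sys.getsizeof('abc\u20ac')        # 2 bytes per character (BMP)
+     sys.getsizeof('abc\U00010000')    # 4 bytes per character (non-BMP)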
+
+Building C extensions
+---------------------
+
+* The range of possible file names for C extensions has been narrowed.
+ Very rarely used spellings have been suppressed: under POSIX, files
+ named ``xxxmodule.so``, ``xxxmodule.abi3.so`` and
+ ``xxxmodule.cpython-*.so`` are no longer recognized as implementing
+ the ``xxx`` module. If you had been generating such files, you have
+ to switch to the other spellings (i.e., remove the ``module`` string
+ from the file names).
+
+ (Implemented in :issue:`14040`.)
+
+
+Other issues
+------------
+
+.. Issue #11591: When :program:`python` was started with :option:`-S`,
+ ``import site`` will not add site-specific paths to the module search
+ paths. In previous versions, it did. See changeset for doc changes in
+ various files. Contributed by Carl Meyer with editions by Éric Araujo.
+
+.. Issue #10998: the -Q command-line flag and related artifacts have been
+ removed. Code checking sys.flags.division_warning will need updating.
+ Contributed by Éric Araujo.
diff --git a/Doc/whatsnew/index.rst b/Doc/whatsnew/index.rst
index 8220bd2..c60818a 100644
--- a/Doc/whatsnew/index.rst
+++ b/Doc/whatsnew/index.rst
@@ -11,6 +11,7 @@ anyone wishing to stay up-to-date after a new release.
.. toctree::
:maxdepth: 2
+ 3.3.rst
3.2.rst
3.1.rst
3.0.rst
diff --git a/Grammar/Grammar b/Grammar/Grammar
index cea68de..d7aaffd 100644
--- a/Grammar/Grammar
+++ b/Grammar/Grammar
@@ -13,7 +13,7 @@
# Start symbols for the grammar:
# single_input is a single interactive statement;
# file_input is a module or sequence of commands read from an input file;
-# eval_input is the input for the eval() and input() functions.
+# eval_input is the input for the eval() functions.
# NB: compound_stmt in single_input is followed by extra NEWLINE!
single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE
file_input: (NEWLINE | stmt)* ENDMARKER
@@ -129,4 +129,5 @@ comp_if: 'if' test_nocond [comp_iter]
# not used in grammar, but may appear in "node" passed from Parser to Compiler
encoding_decl: NAME
-yield_expr: 'yield' [testlist]
+yield_expr: 'yield' [yield_arg]
+yield_arg: 'from' test | testlist
diff --git a/Include/Python-ast.h b/Include/Python-ast.h
index 0ad788b..7ad6cb3 100644
--- a/Include/Python-ast.h
+++ b/Include/Python-ast.h
@@ -36,6 +36,8 @@ typedef struct _keyword *keyword_ty;
typedef struct _alias *alias_ty;
+typedef struct _withitem *withitem_ty;
+
enum _mod_kind {Module_kind=1, Interactive_kind=2, Expression_kind=3,
Suite_kind=4};
@@ -64,10 +66,9 @@ struct _mod {
enum _stmt_kind {FunctionDef_kind=1, ClassDef_kind=2, Return_kind=3,
Delete_kind=4, Assign_kind=5, AugAssign_kind=6, For_kind=7,
While_kind=8, If_kind=9, With_kind=10, Raise_kind=11,
- TryExcept_kind=12, TryFinally_kind=13, Assert_kind=14,
- Import_kind=15, ImportFrom_kind=16, Global_kind=17,
- Nonlocal_kind=18, Expr_kind=19, Pass_kind=20, Break_kind=21,
- Continue_kind=22};
+ Try_kind=12, Assert_kind=13, Import_kind=14,
+ ImportFrom_kind=15, Global_kind=16, Nonlocal_kind=17,
+ Expr_kind=18, Pass_kind=19, Break_kind=20, Continue_kind=21};
struct _stmt {
enum _stmt_kind kind;
union {
@@ -128,8 +129,7 @@ struct _stmt {
} If;
struct {
- expr_ty context_expr;
- expr_ty optional_vars;
+ asdl_seq *items;
asdl_seq *body;
} With;
@@ -142,12 +142,8 @@ struct _stmt {
asdl_seq *body;
asdl_seq *handlers;
asdl_seq *orelse;
- } TryExcept;
-
- struct {
- asdl_seq *body;
asdl_seq *finalbody;
- } TryFinally;
+ } Try;
struct {
expr_ty test;
@@ -184,10 +180,10 @@ struct _stmt {
enum _expr_kind {BoolOp_kind=1, BinOp_kind=2, UnaryOp_kind=3, Lambda_kind=4,
IfExp_kind=5, Dict_kind=6, Set_kind=7, ListComp_kind=8,
SetComp_kind=9, DictComp_kind=10, GeneratorExp_kind=11,
- Yield_kind=12, Compare_kind=13, Call_kind=14, Num_kind=15,
- Str_kind=16, Bytes_kind=17, Ellipsis_kind=18,
- Attribute_kind=19, Subscript_kind=20, Starred_kind=21,
- Name_kind=22, List_kind=23, Tuple_kind=24};
+ Yield_kind=12, YieldFrom_kind=13, Compare_kind=14,
+ Call_kind=15, Num_kind=16, Str_kind=17, Bytes_kind=18,
+ Ellipsis_kind=19, Attribute_kind=20, Subscript_kind=21,
+ Starred_kind=22, Name_kind=23, List_kind=24, Tuple_kind=25};
struct _expr {
enum _expr_kind kind;
union {
@@ -253,6 +249,10 @@ struct _expr {
} Yield;
struct {
+ expr_ty value;
+ } YieldFrom;
+
+ struct {
expr_ty left;
asdl_int_seq *ops;
asdl_seq *comparators;
@@ -275,7 +275,7 @@ struct _expr {
} Str;
struct {
- string s;
+ bytes s;
} Bytes;
struct {
@@ -383,6 +383,11 @@ struct _alias {
identifier asname;
};
+struct _withitem {
+ expr_ty context_expr;
+ expr_ty optional_vars;
+};
+
#define Module(a0, a1) _Py_Module(a0, a1)
mod_ty _Py_Module(asdl_seq * body, PyArena *arena);
@@ -421,18 +426,16 @@ stmt_ty _Py_While(expr_ty test, asdl_seq * body, asdl_seq * orelse, int lineno,
#define If(a0, a1, a2, a3, a4, a5) _Py_If(a0, a1, a2, a3, a4, a5)
stmt_ty _Py_If(expr_ty test, asdl_seq * body, asdl_seq * orelse, int lineno,
int col_offset, PyArena *arena);
-#define With(a0, a1, a2, a3, a4, a5) _Py_With(a0, a1, a2, a3, a4, a5)
-stmt_ty _Py_With(expr_ty context_expr, expr_ty optional_vars, asdl_seq * body,
- int lineno, int col_offset, PyArena *arena);
+#define With(a0, a1, a2, a3, a4) _Py_With(a0, a1, a2, a3, a4)
+stmt_ty _Py_With(asdl_seq * items, asdl_seq * body, int lineno, int col_offset,
+ PyArena *arena);
#define Raise(a0, a1, a2, a3, a4) _Py_Raise(a0, a1, a2, a3, a4)
stmt_ty _Py_Raise(expr_ty exc, expr_ty cause, int lineno, int col_offset,
PyArena *arena);
-#define TryExcept(a0, a1, a2, a3, a4, a5) _Py_TryExcept(a0, a1, a2, a3, a4, a5)
-stmt_ty _Py_TryExcept(asdl_seq * body, asdl_seq * handlers, asdl_seq * orelse,
- int lineno, int col_offset, PyArena *arena);
-#define TryFinally(a0, a1, a2, a3, a4) _Py_TryFinally(a0, a1, a2, a3, a4)
-stmt_ty _Py_TryFinally(asdl_seq * body, asdl_seq * finalbody, int lineno, int
- col_offset, PyArena *arena);
+#define Try(a0, a1, a2, a3, a4, a5, a6) _Py_Try(a0, a1, a2, a3, a4, a5, a6)
+stmt_ty _Py_Try(asdl_seq * body, asdl_seq * handlers, asdl_seq * orelse,
+ asdl_seq * finalbody, int lineno, int col_offset, PyArena
+ *arena);
#define Assert(a0, a1, a2, a3, a4) _Py_Assert(a0, a1, a2, a3, a4)
stmt_ty _Py_Assert(expr_ty test, expr_ty msg, int lineno, int col_offset,
PyArena *arena);
@@ -490,6 +493,9 @@ expr_ty _Py_GeneratorExp(expr_ty elt, asdl_seq * generators, int lineno, int
col_offset, PyArena *arena);
#define Yield(a0, a1, a2, a3) _Py_Yield(a0, a1, a2, a3)
expr_ty _Py_Yield(expr_ty value, int lineno, int col_offset, PyArena *arena);
+#define YieldFrom(a0, a1, a2, a3) _Py_YieldFrom(a0, a1, a2, a3)
+expr_ty _Py_YieldFrom(expr_ty value, int lineno, int col_offset, PyArena
+ *arena);
#define Compare(a0, a1, a2, a3, a4, a5) _Py_Compare(a0, a1, a2, a3, a4, a5)
expr_ty _Py_Compare(expr_ty left, asdl_int_seq * ops, asdl_seq * comparators,
int lineno, int col_offset, PyArena *arena);
@@ -502,7 +508,7 @@ expr_ty _Py_Num(object n, int lineno, int col_offset, PyArena *arena);
#define Str(a0, a1, a2, a3) _Py_Str(a0, a1, a2, a3)
expr_ty _Py_Str(string s, int lineno, int col_offset, PyArena *arena);
#define Bytes(a0, a1, a2, a3) _Py_Bytes(a0, a1, a2, a3)
-expr_ty _Py_Bytes(string s, int lineno, int col_offset, PyArena *arena);
+expr_ty _Py_Bytes(bytes s, int lineno, int col_offset, PyArena *arena);
#define Ellipsis(a0, a1, a2) _Py_Ellipsis(a0, a1, a2)
expr_ty _Py_Ellipsis(int lineno, int col_offset, PyArena *arena);
#define Attribute(a0, a1, a2, a3, a4, a5) _Py_Attribute(a0, a1, a2, a3, a4, a5)
@@ -547,6 +553,9 @@ arg_ty _Py_arg(identifier arg, expr_ty annotation, PyArena *arena);
keyword_ty _Py_keyword(identifier arg, expr_ty value, PyArena *arena);
#define alias(a0, a1, a2) _Py_alias(a0, a1, a2)
alias_ty _Py_alias(identifier name, identifier asname, PyArena *arena);
+#define withitem(a0, a1, a2) _Py_withitem(a0, a1, a2)
+withitem_ty _Py_withitem(expr_ty context_expr, expr_ty optional_vars, PyArena
+ *arena);
PyObject* PyAST_mod2obj(mod_ty t);
mod_ty PyAST_obj2mod(PyObject* ast, PyArena* arena, int mode);
diff --git a/Include/Python.h b/Include/Python.h
index 5972ffa..01b98f9 100644
--- a/Include/Python.h
+++ b/Include/Python.h
@@ -48,6 +48,7 @@
#include <assert.h>
#include "pyport.h"
+#include "pymacro.h"
#include "pyatomic.h"
@@ -126,43 +127,6 @@
#include "pystrcmp.h"
#include "dtoa.h"
#include "fileutils.h"
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-/* _Py_Mangle is defined in compile.c */
-#ifndef Py_LIMITED_API
-PyAPI_FUNC(PyObject*) _Py_Mangle(PyObject *p, PyObject *name);
-#endif
-
-#ifdef __cplusplus
-}
-#endif
-
-/* Argument must be a char or an int in [-128, 127] or [0, 255]. */
-#define Py_CHARMASK(c) ((unsigned char)((c) & 0xff))
-
#include "pyfpe.h"
-/* These definitions must match corresponding definitions in graminit.h.
- There's code in compile.c that checks that they are the same. */
-#define Py_single_input 256
-#define Py_file_input 257
-#define Py_eval_input 258
-
-#ifdef HAVE_PTH
-/* GNU pth user-space thread support */
-#include <pth.h>
-#endif
-
-/* Define macros for inline documentation. */
-#define PyDoc_VAR(name) static char name[]
-#define PyDoc_STRVAR(name,str) PyDoc_VAR(name) = PyDoc_STR(str)
-#ifdef WITH_DOC_STRINGS
-#define PyDoc_STR(str) str
-#else
-#define PyDoc_STR(str) ""
-#endif
-
#endif /* !Py_PYTHON_H */
diff --git a/Include/abstract.h b/Include/abstract.h
index 0fe0956..3a99c4e 100644
--- a/Include/abstract.h
+++ b/Include/abstract.h
@@ -7,6 +7,7 @@ extern "C" {
#ifdef PY_SSIZE_T_CLEAN
#define PyObject_CallFunction _PyObject_CallFunction_SizeT
#define PyObject_CallMethod _PyObject_CallMethod_SizeT
+#define _PyObject_CallMethodId _PyObject_CallMethodId_SizeT
#endif
/* Abstract Object Interface (many thanks to Jim Fulton) */
@@ -307,11 +308,22 @@ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx*/
Python expression: o.method(args).
*/
+ PyAPI_FUNC(PyObject *) _PyObject_CallMethodId(PyObject *o, _Py_Identifier *method,
+ char *format, ...);
+
+ /*
+ Like PyObject_CallMethod, but expect a _Py_Identifier* as the
+ method name.
+ */
+
PyAPI_FUNC(PyObject *) _PyObject_CallFunction_SizeT(PyObject *callable,
char *format, ...);
PyAPI_FUNC(PyObject *) _PyObject_CallMethod_SizeT(PyObject *o,
char *name,
char *format, ...);
+ PyAPI_FUNC(PyObject *) _PyObject_CallMethodId_SizeT(PyObject *o,
+ _Py_Identifier *name,
+ char *format, ...);
PyAPI_FUNC(PyObject *) PyObject_CallFunctionObjArgs(PyObject *callable,
...);
@@ -547,7 +559,7 @@ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx*/
/* Copy the data from the src buffer to the buffer of destination
*/
- PyAPI_FUNC(int) PyBuffer_IsContiguous(Py_buffer *view, char fort);
+ PyAPI_FUNC(int) PyBuffer_IsContiguous(const Py_buffer *view, char fort);
PyAPI_FUNC(void) PyBuffer_FillContiguousStrides(int ndims,
@@ -1014,7 +1026,7 @@ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx*/
PyAPI_FUNC(PyObject *) PySequence_Fast(PyObject *o, const char* m);
/*
- Returns the sequence, o, as a tuple, unless it's already a
+ Returns the sequence, o, as a list, unless it's already a
tuple or list. Use PySequence_Fast_GET_ITEM to access the
members of this list, and PySequence_Fast_GET_SIZE to get its length.
diff --git a/Include/asdl.h b/Include/asdl.h
index 9bb0697..42bbbf8 100644
--- a/Include/asdl.h
+++ b/Include/asdl.h
@@ -3,6 +3,7 @@
typedef PyObject * identifier;
typedef PyObject * string;
+typedef PyObject * bytes;
typedef PyObject * object;
/* It would be nice if the code generated by asdl_c.py was completely
diff --git a/Include/ast.h b/Include/ast.h
index a015336..055e8dc 100644
--- a/Include/ast.h
+++ b/Include/ast.h
@@ -4,6 +4,7 @@
extern "C" {
#endif
+PyAPI_FUNC(int) PyAST_Validate(mod_ty);
PyAPI_FUNC(mod_ty) PyAST_FromNode(
const node *n,
PyCompilerFlags *flags,
diff --git a/Include/code.h b/Include/code.h
index e773b6a..7c7e5bf 100644
--- a/Include/code.h
+++ b/Include/code.h
@@ -22,6 +22,7 @@ typedef struct {
PyObject *co_freevars; /* tuple of strings (free variable names) */
PyObject *co_cellvars; /* tuple of strings (cell variable names) */
/* The rest doesn't count for hash or comparisons */
+ unsigned char *co_cell2arg; /* Maps cell vars which are arguments. */
PyObject *co_filename; /* unicode (where it was loaded from) */
PyObject *co_name; /* unicode (name, for reference) */
int co_firstlineno; /* first source line number */
@@ -57,6 +58,11 @@ typedef struct {
#define CO_FUTURE_BARRY_AS_BDFL 0x40000
+/* This value is found in the co_cell2arg array when the associated cell
+ variable does not correspond to an argument. The maximum number of
+ arguments is 255 (indexed up to 254), so 255 work as a special flag.*/
+#define CO_CELL_NOT_AN_ARG 255
+
/* This should be defined if a future statement modifies the syntax.
For example, when a keyword is added.
*/
diff --git a/Include/codecs.h b/Include/codecs.h
index dff09e7..0d9e9b4 100644
--- a/Include/codecs.h
+++ b/Include/codecs.h
@@ -174,6 +174,8 @@ PyAPI_FUNC(PyObject *) PyCodec_XMLCharRefReplaceErrors(PyObject *exc);
/* replace the unicode encode error with backslash escapes (\x, \u and \U) */
PyAPI_FUNC(PyObject *) PyCodec_BackslashReplaceErrors(PyObject *exc);
+PyAPI_DATA(const char *) Py_hexdigits;
+
#ifdef __cplusplus
}
#endif
diff --git a/Include/compile.h b/Include/compile.h
index bc53b39..ac2636d 100644
--- a/Include/compile.h
+++ b/Include/compile.h
@@ -1,7 +1,7 @@
-#ifndef Py_LIMITED_API
#ifndef Py_COMPILE_H
#define Py_COMPILE_H
+#ifndef Py_LIMITED_API
#include "code.h"
#ifdef __cplusplus
@@ -38,9 +38,19 @@ PyAPI_FUNC(PyCodeObject *) PyAST_CompileEx(
PyArena *arena);
PyAPI_FUNC(PyFutureFeatures *) PyFuture_FromAST(struct _mod *, const char *);
+/* _Py_Mangle is defined in compile.c */
+PyAPI_FUNC(PyObject*) _Py_Mangle(PyObject *p, PyObject *name);
#ifdef __cplusplus
}
#endif
-#endif /* !Py_COMPILE_H */
+
#endif /* !Py_LIMITED_API */
+
+/* These definitions must match corresponding definitions in graminit.h.
+ There's code in compile.c that checks that they are the same. */
+#define Py_single_input 256
+#define Py_file_input 257
+#define Py_eval_input 258
+
+#endif /* !Py_COMPILE_H */
diff --git a/Include/complexobject.h b/Include/complexobject.h
index c379b08..3e4ecff 100644
--- a/Include/complexobject.h
+++ b/Include/complexobject.h
@@ -64,8 +64,9 @@ PyAPI_FUNC(Py_complex) PyComplex_AsCComplex(PyObject *op);
(Advanced String Formatting). */
#ifndef Py_LIMITED_API
PyAPI_FUNC(PyObject *) _PyComplex_FormatAdvanced(PyObject *obj,
- Py_UNICODE *format_spec,
- Py_ssize_t format_spec_len);
+ PyObject *format_spec,
+ Py_ssize_t start,
+ Py_ssize_t end);
#endif
#ifdef __cplusplus
diff --git a/Include/datetime.h b/Include/datetime.h
index db57a18..41e3bcf 100644
--- a/Include/datetime.h
+++ b/Include/datetime.h
@@ -135,6 +135,12 @@ typedef struct
(((PyDateTime_Time*)o)->data[4] << 8) | \
((PyDateTime_Time*)o)->data[5])
+/* Apply for time delta instances */
+#define PyDateTime_DELTA_GET_DAYS(o) (((PyDateTime_Delta*)o)->days)
+#define PyDateTime_DELTA_GET_SECONDS(o) (((PyDateTime_Delta*)o)->seconds)
+#define PyDateTime_DELTA_GET_MICROSECONDS(o) \
+ (((PyDateTime_Delta*)o)->microseconds)
+
/* Define structure for C API. */
typedef struct {
diff --git a/Include/descrobject.h b/Include/descrobject.h
index 646b3cc..e2ba97f 100644
--- a/Include/descrobject.h
+++ b/Include/descrobject.h
@@ -42,6 +42,7 @@ typedef struct {
PyObject_HEAD
PyTypeObject *d_type;
PyObject *d_name;
+ PyObject *d_qualname;
} PyDescrObject;
#define PyDescr_COMMON PyDescrObject d_common
diff --git a/Include/dictobject.h b/Include/dictobject.h
index b026785..ed44e20 100644
--- a/Include/dictobject.h
+++ b/Include/dictobject.h
@@ -129,6 +129,8 @@ PyAPI_FUNC(int) _PyDict_Contains(PyObject *mp, PyObject *key, Py_hash_t hash);
PyAPI_FUNC(PyObject *) _PyDict_NewPresized(Py_ssize_t minused);
PyAPI_FUNC(void) _PyDict_MaybeUntrack(PyObject *mp);
PyAPI_FUNC(int) _PyDict_HasOnlyStringKeys(PyObject *mp);
+
+PyAPI_FUNC(int) PyDict_ClearFreeList(void);
#endif
/* PyDict_Update(mp, other) is equivalent to PyDict_Merge(mp, other, 1). */
diff --git a/Include/errcode.h b/Include/errcode.h
index 6bb3cc1..5946686 100644
--- a/Include/errcode.h
+++ b/Include/errcode.h
@@ -30,6 +30,7 @@ extern "C" {
#define E_EOLS 24 /* EOL in single-quoted string */
#define E_LINECONT 25 /* Unexpected characters after a line continuation */
#define E_IDENTIFIER 26 /* Invalid characters in identifier */
+#define E_BADSINGLE 27 /* Ill-formed single statement input */
#ifdef __cplusplus
}
diff --git a/Include/fileutils.h b/Include/fileutils.h
index 2fade9b..7c18cf2 100644
--- a/Include/fileutils.h
+++ b/Include/fileutils.h
@@ -5,6 +5,8 @@
extern "C" {
#endif
+PyAPI_FUNC(PyObject *) _Py_device_encoding(int);
+
PyAPI_FUNC(wchar_t *) _Py_char2wchar(
const char *arg,
size_t *size);
diff --git a/Include/floatobject.h b/Include/floatobject.h
index 90f0a45..0ca4881 100644
--- a/Include/floatobject.h
+++ b/Include/floatobject.h
@@ -27,12 +27,12 @@ PyAPI_DATA(PyTypeObject) PyFloat_Type;
#define Py_RETURN_NAN return PyFloat_FromDouble(Py_NAN)
#endif
-#define Py_RETURN_INF(sign) do \
- if (copysign(1., sign) == 1.) { \
- return PyFloat_FromDouble(Py_HUGE_VAL); \
- } else { \
- return PyFloat_FromDouble(-Py_HUGE_VAL); \
- } while(0)
+#define Py_RETURN_INF(sign) do \
+ if (copysign(1., sign) == 1.) { \
+ return PyFloat_FromDouble(Py_HUGE_VAL); \
+ } else { \
+ return PyFloat_FromDouble(-Py_HUGE_VAL); \
+ } while(0)
PyAPI_FUNC(double) PyFloat_GetMax(void);
PyAPI_FUNC(double) PyFloat_GetMin(void);
@@ -113,8 +113,9 @@ PyAPI_FUNC(int) PyFloat_ClearFreeList(void);
/* Format the object based on the format_spec, as defined in PEP 3101
(Advanced String Formatting). */
PyAPI_FUNC(PyObject *) _PyFloat_FormatAdvanced(PyObject *obj,
- Py_UNICODE *format_spec,
- Py_ssize_t format_spec_len);
+ PyObject *format_spec,
+ Py_ssize_t start,
+ Py_ssize_t end);
#endif /* Py_LIMITED_API */
#ifdef __cplusplus
diff --git a/Include/frameobject.h b/Include/frameobject.h
index 1fb64bb..a8df445 100644
--- a/Include/frameobject.h
+++ b/Include/frameobject.h
@@ -9,45 +9,45 @@ extern "C" {
#endif
typedef struct {
- int b_type; /* what kind of block this is */
- int b_handler; /* where to jump to find handler */
- int b_level; /* value stack level to pop to */
+ int b_type; /* what kind of block this is */
+ int b_handler; /* where to jump to find handler */
+ int b_level; /* value stack level to pop to */
} PyTryBlock;
typedef struct _frame {
PyObject_VAR_HEAD
- struct _frame *f_back; /* previous frame, or NULL */
- PyCodeObject *f_code; /* code segment */
- PyObject *f_builtins; /* builtin symbol table (PyDictObject) */
- PyObject *f_globals; /* global symbol table (PyDictObject) */
- PyObject *f_locals; /* local symbol table (any mapping) */
- PyObject **f_valuestack; /* points after the last local */
+ struct _frame *f_back; /* previous frame, or NULL */
+ PyCodeObject *f_code; /* code segment */
+ PyObject *f_builtins; /* builtin symbol table (PyDictObject) */
+ PyObject *f_globals; /* global symbol table (PyDictObject) */
+ PyObject *f_locals; /* local symbol table (any mapping) */
+ PyObject **f_valuestack; /* points after the last local */
/* Next free slot in f_valuestack. Frame creation sets to f_valuestack.
Frame evaluation usually NULLs it, but a frame that yields sets it
to the current stack top. */
PyObject **f_stacktop;
- PyObject *f_trace; /* Trace function */
-
- /* In a generator, we need to be able to swap between the exception
- state inside the generator and the exception state of the calling
- frame (which shouldn't be impacted when the generator "yields"
- from an except handler).
- These three fields exist exactly for that, and are unused for
- non-generator frames. See the SAVE_EXC_STATE and SWAP_EXC_STATE
- macros in ceval.c for details of their use. */
+ PyObject *f_trace; /* Trace function */
+
+ /* In a generator, we need to be able to swap between the exception
+ state inside the generator and the exception state of the calling
+ frame (which shouldn't be impacted when the generator "yields"
+ from an except handler).
+ These three fields exist exactly for that, and are unused for
+ non-generator frames. See the SAVE_EXC_STATE and SWAP_EXC_STATE
+ macros in ceval.c for details of their use. */
PyObject *f_exc_type, *f_exc_value, *f_exc_traceback;
PyThreadState *f_tstate;
- int f_lasti; /* Last instruction if called */
+ int f_lasti; /* Last instruction if called */
/* Call PyFrame_GetLineNumber() instead of reading this field
directly. As of 2.3 f_lineno is only valid when tracing is
active (i.e. when f_trace is set). At other times we use
PyCode_Addr2Line to calculate the line from the current
bytecode index. */
- int f_lineno; /* Current line number */
- int f_iblock; /* index in f_blockstack */
+ int f_lineno; /* Current line number */
+ int f_iblock; /* index in f_blockstack */
PyTryBlock f_blockstack[CO_MAXBLOCKS]; /* for try and loop blocks */
- PyObject *f_localsplus[1]; /* locals+stack, dynamically sized */
+ PyObject *f_localsplus[1]; /* locals+stack, dynamically sized */
} PyFrameObject;
diff --git a/Include/funcobject.h b/Include/funcobject.h
index 521d87b..cc1426c 100644
--- a/Include/funcobject.h
+++ b/Include/funcobject.h
@@ -31,6 +31,7 @@ typedef struct {
PyObject *func_weakreflist; /* List of weak references */
PyObject *func_module; /* The __module__ attribute, can be anything */
PyObject *func_annotations; /* Annotations, a dict or NULL */
+ PyObject *func_qualname; /* The qualified name */
/* Invariant:
* func_closure contains the bindings for func_code->co_freevars, so
@@ -44,6 +45,7 @@ PyAPI_DATA(PyTypeObject) PyFunction_Type;
#define PyFunction_Check(op) (Py_TYPE(op) == &PyFunction_Type)
PyAPI_FUNC(PyObject *) PyFunction_New(PyObject *, PyObject *);
+PyAPI_FUNC(PyObject *) PyFunction_NewWithQualName(PyObject *, PyObject *, PyObject *);
PyAPI_FUNC(PyObject *) PyFunction_GetCode(PyObject *);
PyAPI_FUNC(PyObject *) PyFunction_GetGlobals(PyObject *);
PyAPI_FUNC(PyObject *) PyFunction_GetModule(PyObject *);
diff --git a/Include/genobject.h b/Include/genobject.h
index d29fb1e..25f6c33 100644
--- a/Include/genobject.h
+++ b/Include/genobject.h
@@ -11,20 +11,20 @@ extern "C" {
struct _frame; /* Avoid including frameobject.h */
typedef struct {
- PyObject_HEAD
- /* The gi_ prefix is intended to remind of generator-iterator. */
+ PyObject_HEAD
+ /* The gi_ prefix is intended to remind of generator-iterator. */
- /* Note: gi_frame can be NULL if the generator is "finished" */
- struct _frame *gi_frame;
+ /* Note: gi_frame can be NULL if the generator is "finished" */
+ struct _frame *gi_frame;
- /* True if generator is being executed. */
- int gi_running;
-
- /* The code object backing the generator */
- PyObject *gi_code;
+ /* True if generator is being executed. */
+ char gi_running;
- /* List of weak reference. */
- PyObject *gi_weakreflist;
+ /* The code object backing the generator */
+ PyObject *gi_code;
+
+ /* List of weak reference. */
+ PyObject *gi_weakreflist;
} PyGenObject;
PyAPI_DATA(PyTypeObject) PyGen_Type;
@@ -34,6 +34,8 @@ PyAPI_DATA(PyTypeObject) PyGen_Type;
PyAPI_FUNC(PyObject *) PyGen_New(struct _frame *);
PyAPI_FUNC(int) PyGen_NeedsFinalizing(PyGenObject *);
+PyAPI_FUNC(int) PyGen_FetchStopIterationValue(PyObject **);
+PyObject *_PyGen_Send(PyGenObject *, PyObject *);
#ifdef __cplusplus
}
diff --git a/Include/graminit.h b/Include/graminit.h
index e0e27f9..3ec949a 100644
--- a/Include/graminit.h
+++ b/Include/graminit.h
@@ -81,3 +81,4 @@
#define comp_if 334
#define encoding_decl 335
#define yield_expr 336
+#define yield_arg 337
diff --git a/Include/import.h b/Include/import.h
index 400e97c..45544111 100644
--- a/Include/import.h
+++ b/Include/import.h
@@ -24,7 +24,16 @@ PyAPI_FUNC(PyObject *) PyImport_ExecCodeModuleWithPathnames(
char *pathname, /* decoded from the filesystem encoding */
char *cpathname /* decoded from the filesystem encoding */
);
+PyAPI_FUNC(PyObject *) PyImport_ExecCodeModuleObject(
+ PyObject *name,
+ PyObject *co,
+ PyObject *pathname,
+ PyObject *cpathname
+ );
PyAPI_FUNC(PyObject *) PyImport_GetModuleDict(void);
+PyAPI_FUNC(PyObject *) PyImport_AddModuleObject(
+ PyObject *name
+ );
PyAPI_FUNC(PyObject *) PyImport_AddModule(
const char *name /* UTF-8 encoded string */
);
@@ -35,7 +44,14 @@ PyAPI_FUNC(PyObject *) PyImport_ImportModuleNoBlock(
const char *name /* UTF-8 encoded string */
);
PyAPI_FUNC(PyObject *) PyImport_ImportModuleLevel(
- char *name, /* UTF-8 encoded string */
+ const char *name, /* UTF-8 encoded string */
+ PyObject *globals,
+ PyObject *locals,
+ PyObject *fromlist,
+ int level
+ );
+PyAPI_FUNC(PyObject *) PyImport_ImportModuleLevelObject(
+ PyObject *name,
PyObject *globals,
PyObject *locals,
PyObject *fromlist,
@@ -49,6 +65,9 @@ PyAPI_FUNC(PyObject *) PyImport_GetImporter(PyObject *path);
PyAPI_FUNC(PyObject *) PyImport_Import(PyObject *name);
PyAPI_FUNC(PyObject *) PyImport_ReloadModule(PyObject *m);
PyAPI_FUNC(void) PyImport_Cleanup(void);
+PyAPI_FUNC(int) PyImport_ImportFrozenModuleObject(
+ PyObject *name
+ );
PyAPI_FUNC(int) PyImport_ImportFrozenModule(
char *name /* UTF-8 encoded string */
);
@@ -65,17 +84,17 @@ PyAPI_FUNC(int) _PyImport_ReleaseLock(void);
PyAPI_FUNC(void) _PyImport_ReInitLock(void);
PyAPI_FUNC(PyObject *)_PyImport_FindBuiltin(
- char *name /* UTF-8 encoded string */
+ const char *name /* UTF-8 encoded string */
);
-PyAPI_FUNC(PyObject *)_PyImport_FindExtensionUnicode(char *, PyObject *);
+PyAPI_FUNC(PyObject *)_PyImport_FindExtensionObject(PyObject *, PyObject *);
PyAPI_FUNC(int)_PyImport_FixupBuiltin(
PyObject *mod,
char *name /* UTF-8 encoded string */
);
-PyAPI_FUNC(int)_PyImport_FixupExtensionUnicode(PyObject*, char *, PyObject *);
+PyAPI_FUNC(int)_PyImport_FixupExtensionObject(PyObject*, PyObject *, PyObject *);
struct _inittab {
- char *name;
+ char *name; /* ASCII encoded string */
PyObject* (*initfunc)(void);
};
PyAPI_DATA(struct _inittab *) PyImport_Inittab;
diff --git a/Include/intrcheck.h b/Include/intrcheck.h
index 3b67ed0..f53fee1 100644
--- a/Include/intrcheck.h
+++ b/Include/intrcheck.h
@@ -8,6 +8,12 @@ extern "C" {
PyAPI_FUNC(int) PyOS_InterruptOccurred(void);
PyAPI_FUNC(void) PyOS_InitInterrupts(void);
PyAPI_FUNC(void) PyOS_AfterFork(void);
+PyAPI_FUNC(int) _PyOS_IsMainThread(void);
+
+#ifdef MS_WINDOWS
+/* windows.h is not included by Python.h so use void* instead of HANDLE */
+PyAPI_FUNC(void*) _PyOS_SigintEvent(void);
+#endif
#ifdef __cplusplus
}
diff --git a/Include/listobject.h b/Include/listobject.h
index 949b1a3..6fd374b 100644
--- a/Include/listobject.h
+++ b/Include/listobject.h
@@ -62,6 +62,8 @@ PyAPI_FUNC(int) PyList_Reverse(PyObject *);
PyAPI_FUNC(PyObject *) PyList_AsTuple(PyObject *);
#ifndef Py_LIMITED_API
PyAPI_FUNC(PyObject *) _PyList_Extend(PyListObject *, PyObject *);
+
+PyAPI_FUNC(int) PyList_ClearFreeList(void);
#endif
/* Macro, trading safety for speed */
diff --git a/Include/longobject.h b/Include/longobject.h
index c09565a..c58ddf4 100644
--- a/Include/longobject.h
+++ b/Include/longobject.h
@@ -12,7 +12,7 @@ typedef struct _longobject PyLongObject; /* Revealed in longintrepr.h */
PyAPI_DATA(PyTypeObject) PyLong_Type;
#define PyLong_Check(op) \
- PyType_FastSubclass(Py_TYPE(op), Py_TPFLAGS_LONG_SUBCLASS)
+ PyType_FastSubclass(Py_TYPE(op), Py_TPFLAGS_LONG_SUBCLASS)
#define PyLong_CheckExact(op) (Py_TYPE(op) == &PyLong_Type)
PyAPI_FUNC(PyObject *) PyLong_FromLong(long);
@@ -80,6 +80,7 @@ PyAPI_FUNC(PY_LONG_LONG) PyLong_AsLongLongAndOverflow(PyObject *, int *);
PyAPI_FUNC(PyObject *) PyLong_FromString(char *, char **, int);
#ifndef Py_LIMITED_API
PyAPI_FUNC(PyObject *) PyLong_FromUnicode(Py_UNICODE*, Py_ssize_t, int);
+PyAPI_FUNC(PyObject *) PyLong_FromUnicodeObject(PyObject *u, int base);
#endif
#ifndef Py_LIMITED_API
@@ -121,8 +122,8 @@ PyAPI_FUNC(PyObject *) _PyLong_DivmodNear(PyObject *, PyObject *);
enough memory to create the Python long.
*/
PyAPI_FUNC(PyObject *) _PyLong_FromByteArray(
- const unsigned char* bytes, size_t n,
- int little_endian, int is_signed);
+ const unsigned char* bytes, size_t n,
+ int little_endian, int is_signed);
/* _PyLong_AsByteArray: Convert the least-significant 8*n bits of long
v to a base-256 integer, stored in array bytes. Normally return 0,
@@ -144,8 +145,8 @@ PyAPI_FUNC(PyObject *) _PyLong_FromByteArray(
case, but bytes holds the least-signficant n bytes of the true value.
*/
PyAPI_FUNC(int) _PyLong_AsByteArray(PyLongObject* v,
- unsigned char* bytes, size_t n,
- int little_endian, int is_signed);
+ unsigned char* bytes, size_t n,
+ int little_endian, int is_signed);
/* _PyLong_Format: Convert the long to a string object with given base,
@@ -155,8 +156,9 @@ PyAPI_FUNC(PyObject *) _PyLong_Format(PyObject *aa, int base);
/* Format the object based on the format_spec, as defined in PEP 3101
(Advanced String Formatting). */
PyAPI_FUNC(PyObject *) _PyLong_FormatAdvanced(PyObject *obj,
- Py_UNICODE *format_spec,
- Py_ssize_t format_spec_len);
+ PyObject *format_spec,
+ Py_ssize_t start,
+ Py_ssize_t end);
#endif /* Py_LIMITED_API */
/* These aren't really part of the long object, but they're handy. The
diff --git a/Include/memoryobject.h b/Include/memoryobject.h
index 62ecbd6..4ac6f65 100644
--- a/Include/memoryobject.h
+++ b/Include/memoryobject.h
@@ -6,69 +6,64 @@
extern "C" {
#endif
+#ifndef Py_LIMITED_API
+PyAPI_DATA(PyTypeObject) _PyManagedBuffer_Type;
+#endif
PyAPI_DATA(PyTypeObject) PyMemoryView_Type;
#define PyMemoryView_Check(op) (Py_TYPE(op) == &PyMemoryView_Type)
#ifndef Py_LIMITED_API
-/* Get a pointer to the underlying Py_buffer of a memoryview object. */
+/* Get a pointer to the memoryview's private copy of the exporter's buffer. */
#define PyMemoryView_GET_BUFFER(op) (&((PyMemoryViewObject *)(op))->view)
-/* Get a pointer to the PyObject from which originates a memoryview object. */
+/* Get a pointer to the exporting object (this may be NULL!). */
#define PyMemoryView_GET_BASE(op) (((PyMemoryViewObject *)(op))->view.obj)
#endif
-
-PyAPI_FUNC(PyObject *) PyMemoryView_GetContiguous(PyObject *base,
- int buffertype,
- char fort);
-
- /* Return a contiguous chunk of memory representing the buffer
- from an object in a memory view object. If a copy is made then the
- base object for the memory view will be a *new* bytes object.
-
- Otherwise, the base-object will be the object itself and no
- data-copying will be done.
-
- The buffertype argument can be PyBUF_READ, PyBUF_WRITE,
- PyBUF_SHADOW to determine whether the returned buffer
- should be READONLY, WRITABLE, or set to update the
- original buffer if a copy must be made. If buffertype is
- PyBUF_WRITE and the buffer is not contiguous an error will
- be raised. In this circumstance, the user can use
- PyBUF_SHADOW to ensure that a a writable temporary
- contiguous buffer is returned. The contents of this
- contiguous buffer will be copied back into the original
- object after the memoryview object is deleted as long as
- the original object is writable and allows setting an
- exclusive write lock. If this is not allowed by the
- original object, then a BufferError is raised.
-
- If the object is multi-dimensional and if fortran is 'F',
- the first dimension of the underlying array will vary the
- fastest in the buffer. If fortran is 'C', then the last
- dimension will vary the fastest (C-style contiguous). If
- fortran is 'A', then it does not matter and you will get
- whatever the object decides is more efficient.
-
- A new reference is returned that must be DECREF'd when finished.
- */
-
PyAPI_FUNC(PyObject *) PyMemoryView_FromObject(PyObject *base);
-
+PyAPI_FUNC(PyObject *) PyMemoryView_FromMemory(char *mem, Py_ssize_t size,
+ int flags);
#ifndef Py_LIMITED_API
PyAPI_FUNC(PyObject *) PyMemoryView_FromBuffer(Py_buffer *info);
- /* create new if bufptr is NULL
- will be a new bytesobject in base */
#endif
+PyAPI_FUNC(PyObject *) PyMemoryView_GetContiguous(PyObject *base,
+ int buffertype,
+ char order);
-/* The struct is declared here so that macros can work, but it shouldn't
- be considered public. Don't access those fields directly, use the macros
+/* The structs are declared here so that macros can work, but they shouldn't
+ be considered public. Don't access their fields directly, use the macros
and functions instead! */
#ifndef Py_LIMITED_API
+#define _Py_MANAGED_BUFFER_RELEASED 0x001 /* access to exporter blocked */
+#define _Py_MANAGED_BUFFER_FREE_FORMAT 0x002 /* free format */
typedef struct {
PyObject_HEAD
- Py_buffer view;
+ int flags; /* state flags */
+ Py_ssize_t exports; /* number of direct memoryview exports */
+ Py_buffer master; /* snapshot buffer obtained from the original exporter */
+} _PyManagedBufferObject;
+
+
+/* static storage used for casting between formats */
+#define _Py_MEMORYVIEW_MAX_FORMAT 3 /* must be >= 3 */
+
+/* memoryview state flags */
+#define _Py_MEMORYVIEW_RELEASED 0x001 /* access to master buffer blocked */
+#define _Py_MEMORYVIEW_C 0x002 /* C-contiguous layout */
+#define _Py_MEMORYVIEW_FORTRAN 0x004 /* Fortran contiguous layout */
+#define _Py_MEMORYVIEW_SCALAR 0x008 /* scalar: ndim = 0 */
+#define _Py_MEMORYVIEW_PIL 0x010 /* PIL-style layout */
+
+typedef struct {
+ PyObject_VAR_HEAD
+ _PyManagedBufferObject *mbuf; /* managed buffer */
+ Py_hash_t hash; /* hash value for read-only views */
+ int flags; /* state flags */
+ Py_ssize_t exports; /* number of buffer re-exports */
+ Py_buffer view; /* private copy of the exporter's view */
+ char format[_Py_MEMORYVIEW_MAX_FORMAT]; /* used for casting */
+ Py_ssize_t ob_array[1]; /* shape, strides, suboffsets */
} PyMemoryViewObject;
#endif
diff --git a/Include/methodobject.h b/Include/methodobject.h
index 7e67c0b..d798d13 100644
--- a/Include/methodobject.h
+++ b/Include/methodobject.h
@@ -30,7 +30,8 @@ PyAPI_FUNC(int) PyCFunction_GetFlags(PyObject *);
#define PyCFunction_GET_FUNCTION(func) \
(((PyCFunctionObject *)func) -> m_ml -> ml_meth)
#define PyCFunction_GET_SELF(func) \
- (((PyCFunctionObject *)func) -> m_self)
+ (((PyCFunctionObject *)func) -> m_ml -> ml_flags & METH_STATIC ? \
+ NULL : ((PyCFunctionObject *)func) -> m_self)
#define PyCFunction_GET_FLAGS(func) \
(((PyCFunctionObject *)func) -> m_ml -> ml_flags)
#endif
diff --git a/Include/moduleobject.h b/Include/moduleobject.h
index 7b2bf1c..8013dd9 100644
--- a/Include/moduleobject.h
+++ b/Include/moduleobject.h
@@ -12,10 +12,14 @@ PyAPI_DATA(PyTypeObject) PyModule_Type;
#define PyModule_Check(op) PyObject_TypeCheck(op, &PyModule_Type)
#define PyModule_CheckExact(op) (Py_TYPE(op) == &PyModule_Type)
+PyAPI_FUNC(PyObject *) PyModule_NewObject(
+ PyObject *name
+ );
PyAPI_FUNC(PyObject *) PyModule_New(
const char *name /* UTF-8 encoded string */
);
PyAPI_FUNC(PyObject *) PyModule_GetDict(PyObject *);
+PyAPI_FUNC(PyObject *) PyModule_GetNameObject(PyObject *);
PyAPI_FUNC(const char *) PyModule_GetName(PyObject *);
PyAPI_FUNC(const char *) PyModule_GetFilename(PyObject *);
PyAPI_FUNC(PyObject *) PyModule_GetFilenameObject(PyObject *);
diff --git a/Include/node.h b/Include/node.h
index e23e709..d161195 100644
--- a/Include/node.h
+++ b/Include/node.h
@@ -28,6 +28,7 @@ PyAPI_FUNC(void) PyNode_Free(node *n);
#define RCHILD(n, i) (CHILD(n, NCH(n) + i))
#define TYPE(n) ((n)->n_type)
#define STR(n) ((n)->n_str)
+#define LINENO(n) ((n)->n_lineno)
/* Assert that the type of a node is what we expect */
#define REQ(n, type) assert(TYPE(n) == (type))
diff --git a/Include/object.h b/Include/object.h
index 315766c..9b3055d 100644
--- a/Include/object.h
+++ b/Include/object.h
@@ -117,6 +117,35 @@ typedef struct {
#define Py_TYPE(ob) (((PyObject*)(ob))->ob_type)
#define Py_SIZE(ob) (((PyVarObject*)(ob))->ob_size)
+/********************* String Literals ****************************************/
+/* This structure helps managing static strings. The basic usage goes like this:
+ Instead of doing
+
+ r = PyObject_CallMethod(o, "foo", "args", ...);
+
+ do
+
+ _Py_IDENTIFIER(foo);
+ ...
+ r = _PyObject_CallMethodId(o, &PyId_foo, "args", ...);
+
+ PyId_foo is a static variable, either on block level or file level. On first
+ usage, the string "foo" is interned, and the structures are linked. On interpreter
+ shutdown, all strings are released (through _PyUnicode_ClearStaticStrings).
+
+ Alternatively, _Py_static_string allows to choose the variable name.
+ _PyUnicode_FromId returns a borrowed reference to the interned string.
+ _PyObject_{Get,Set,Has}AttrId are __getattr__ versions using _Py_Identifier*.
+*/
+typedef struct _Py_Identifier {
+ struct _Py_Identifier *next;
+ const char* string;
+ PyObject *object;
+} _Py_Identifier;
+
+#define _Py_static_string(varname, value) static _Py_Identifier varname = { 0, value, 0 }
+#define _Py_IDENTIFIER(varname) _Py_static_string(PyId_##varname, #varname)
+
/*
Type objects contain a string containing the type name (to help somewhat
in debugging), the allocation parameters (see PyObject_New() and
@@ -157,15 +186,16 @@ typedef struct bufferinfo {
Py_ssize_t *shape;
Py_ssize_t *strides;
Py_ssize_t *suboffsets;
- Py_ssize_t smalltable[2]; /* static store for shape and strides of
- mono-dimensional buffers. */
void *internal;
} Py_buffer;
typedef int (*getbufferproc)(PyObject *, Py_buffer *, int);
typedef void (*releasebufferproc)(PyObject *, Py_buffer *);
- /* Flags for getting buffers */
+/* Maximum number of dimensions */
+#define PyBUF_MAX_NDIM 64
+
+/* Flags for getting buffers */
#define PyBUF_SIMPLE 0
#define PyBUF_WRITABLE 0x0001
/* we used to include an E, backwards compatible alias */
@@ -418,7 +448,7 @@ typedef struct _heaptypeobject {
a given operator (e.g. __getitem__).
see add_operators() in typeobject.c . */
PyBufferProcs as_buffer;
- PyObject *ht_name, *ht_slots;
+ PyObject *ht_name, *ht_slots, *ht_qualname;
/* here are optional user slots, followed by the members. */
} PyHeapTypeObject;
@@ -448,13 +478,14 @@ PyAPI_FUNC(PyObject *) PyType_GenericNew(PyTypeObject *,
PyObject *, PyObject *);
#ifndef Py_LIMITED_API
PyAPI_FUNC(PyObject *) _PyType_Lookup(PyTypeObject *, PyObject *);
-PyAPI_FUNC(PyObject *) _PyObject_LookupSpecial(PyObject *, char *, PyObject **);
+PyAPI_FUNC(PyObject *) _PyObject_LookupSpecial(PyObject *, _Py_Identifier *);
PyAPI_FUNC(PyTypeObject *) _PyType_CalculateMetaclass(PyTypeObject *, PyObject *);
#endif
PyAPI_FUNC(unsigned int) PyType_ClearCache(void);
PyAPI_FUNC(void) PyType_Modified(PyTypeObject *);
/* Generic operations on objects */
+struct _Py_Identifier;
#ifndef Py_LIMITED_API
PyAPI_FUNC(int) PyObject_Print(PyObject *, FILE *, int);
PyAPI_FUNC(void) _Py_BreakPoint(void);
@@ -472,6 +503,10 @@ PyAPI_FUNC(int) PyObject_HasAttrString(PyObject *, const char *);
PyAPI_FUNC(PyObject *) PyObject_GetAttr(PyObject *, PyObject *);
PyAPI_FUNC(int) PyObject_SetAttr(PyObject *, PyObject *, PyObject *);
PyAPI_FUNC(int) PyObject_HasAttr(PyObject *, PyObject *);
+PyAPI_FUNC(int) _PyObject_IsAbstract(PyObject *);
+PyAPI_FUNC(PyObject *) _PyObject_GetAttrId(PyObject *, struct _Py_Identifier *);
+PyAPI_FUNC(int) _PyObject_SetAttrId(PyObject *, struct _Py_Identifier *, PyObject *);
+PyAPI_FUNC(int) _PyObject_HasAttrId(PyObject *, struct _Py_Identifier *);
#ifndef Py_LIMITED_API
PyAPI_FUNC(PyObject **) _PyObject_GetDictPtr(PyObject *);
#endif
@@ -482,6 +517,8 @@ PyAPI_FUNC(PyObject *) _PyObject_NextNotImplemented(PyObject *);
PyAPI_FUNC(PyObject *) PyObject_GenericGetAttr(PyObject *, PyObject *);
PyAPI_FUNC(int) PyObject_GenericSetAttr(PyObject *,
PyObject *, PyObject *);
+PyAPI_FUNC(PyObject *) PyObject_GenericGetDict(PyObject *, void *);
+PyAPI_FUNC(int) PyObject_GenericSetDict(PyObject *, PyObject *, void *);
PyAPI_FUNC(Py_hash_t) PyObject_Hash(PyObject *);
PyAPI_FUNC(Py_hash_t) PyObject_HashNotImplemented(PyObject *);
PyAPI_FUNC(int) PyObject_IsTrue(PyObject *);
@@ -515,6 +552,7 @@ PyAPI_FUNC(void) Py_ReprLeave(PyObject *);
#ifndef Py_LIMITED_API
PyAPI_FUNC(Py_hash_t) _Py_HashDouble(double);
PyAPI_FUNC(Py_hash_t) _Py_HashPointer(void*);
+PyAPI_FUNC(Py_hash_t) _Py_HashBytes(unsigned char*, Py_ssize_t);
#endif
typedef struct {
@@ -803,6 +841,10 @@ not implemented for a given type combination.
PyAPI_DATA(PyObject) _Py_NotImplementedStruct; /* Don't use this directly */
#define Py_NotImplemented (&_Py_NotImplementedStruct)
+/* Macro for returning Py_NotImplemented from a function */
+#define Py_RETURN_NOTIMPLEMENTED \
+ return Py_INCREF(Py_NotImplemented), Py_NotImplemented
+
/* Rich comparison opcodes */
#define Py_LT 0
#define Py_LE 1
diff --git a/Include/opcode.h b/Include/opcode.h
index 6b10944..a90184d 100644
--- a/Include/opcode.h
+++ b/Include/opcode.h
@@ -7,117 +7,117 @@ extern "C" {
/* Instruction opcodes for compiled code */
-#define STOP_CODE 0
-#define POP_TOP 1
-#define ROT_TWO 2
-#define ROT_THREE 3
-#define DUP_TOP 4
+#define POP_TOP 1
+#define ROT_TWO 2
+#define ROT_THREE 3
+#define DUP_TOP 4
#define DUP_TOP_TWO 5
-#define NOP 9
+#define NOP 9
-#define UNARY_POSITIVE 10
-#define UNARY_NEGATIVE 11
-#define UNARY_NOT 12
+#define UNARY_POSITIVE 10
+#define UNARY_NEGATIVE 11
+#define UNARY_NOT 12
-#define UNARY_INVERT 15
+#define UNARY_INVERT 15
-#define BINARY_POWER 19
+#define BINARY_POWER 19
-#define BINARY_MULTIPLY 20
+#define BINARY_MULTIPLY 20
-#define BINARY_MODULO 22
-#define BINARY_ADD 23
-#define BINARY_SUBTRACT 24
-#define BINARY_SUBSCR 25
+#define BINARY_MODULO 22
+#define BINARY_ADD 23
+#define BINARY_SUBTRACT 24
+#define BINARY_SUBSCR 25
#define BINARY_FLOOR_DIVIDE 26
#define BINARY_TRUE_DIVIDE 27
#define INPLACE_FLOOR_DIVIDE 28
#define INPLACE_TRUE_DIVIDE 29
-#define STORE_MAP 54
-#define INPLACE_ADD 55
-#define INPLACE_SUBTRACT 56
-#define INPLACE_MULTIPLY 57
-
-#define INPLACE_MODULO 59
-#define STORE_SUBSCR 60
-#define DELETE_SUBSCR 61
-
-#define BINARY_LSHIFT 62
-#define BINARY_RSHIFT 63
-#define BINARY_AND 64
-#define BINARY_XOR 65
-#define BINARY_OR 66
-#define INPLACE_POWER 67
-#define GET_ITER 68
-#define STORE_LOCALS 69
-#define PRINT_EXPR 70
+#define STORE_MAP 54
+#define INPLACE_ADD 55
+#define INPLACE_SUBTRACT 56
+#define INPLACE_MULTIPLY 57
+
+#define INPLACE_MODULO 59
+#define STORE_SUBSCR 60
+#define DELETE_SUBSCR 61
+
+#define BINARY_LSHIFT 62
+#define BINARY_RSHIFT 63
+#define BINARY_AND 64
+#define BINARY_XOR 65
+#define BINARY_OR 66
+#define INPLACE_POWER 67
+#define GET_ITER 68
+#define STORE_LOCALS 69
+#define PRINT_EXPR 70
#define LOAD_BUILD_CLASS 71
-
-#define INPLACE_LSHIFT 75
-#define INPLACE_RSHIFT 76
-#define INPLACE_AND 77
-#define INPLACE_XOR 78
-#define INPLACE_OR 79
-#define BREAK_LOOP 80
+#define YIELD_FROM 72
+
+#define INPLACE_LSHIFT 75
+#define INPLACE_RSHIFT 76
+#define INPLACE_AND 77
+#define INPLACE_XOR 78
+#define INPLACE_OR 79
+#define BREAK_LOOP 80
#define WITH_CLEANUP 81
-#define RETURN_VALUE 83
-#define IMPORT_STAR 84
+#define RETURN_VALUE 83
+#define IMPORT_STAR 84
-#define YIELD_VALUE 86
-#define POP_BLOCK 87
-#define END_FINALLY 88
-#define POP_EXCEPT 89
+#define YIELD_VALUE 86
+#define POP_BLOCK 87
+#define END_FINALLY 88
+#define POP_EXCEPT 89
-#define HAVE_ARGUMENT 90 /* Opcodes from here have an argument: */
+#define HAVE_ARGUMENT 90 /* Opcodes from here have an argument: */
-#define STORE_NAME 90 /* Index in name list */
-#define DELETE_NAME 91 /* "" */
-#define UNPACK_SEQUENCE 92 /* Number of sequence items */
-#define FOR_ITER 93
+#define STORE_NAME 90 /* Index in name list */
+#define DELETE_NAME 91 /* "" */
+#define UNPACK_SEQUENCE 92 /* Number of sequence items */
+#define FOR_ITER 93
#define UNPACK_EX 94 /* Num items before variable part +
(Num items after variable part << 8) */
-#define STORE_ATTR 95 /* Index in name list */
-#define DELETE_ATTR 96 /* "" */
-#define STORE_GLOBAL 97 /* "" */
-#define DELETE_GLOBAL 98 /* "" */
-
-#define LOAD_CONST 100 /* Index in const list */
-#define LOAD_NAME 101 /* Index in name list */
-#define BUILD_TUPLE 102 /* Number of tuple items */
-#define BUILD_LIST 103 /* Number of list items */
-#define BUILD_SET 104 /* Number of set items */
-#define BUILD_MAP 105 /* Always zero for now */
-#define LOAD_ATTR 106 /* Index in name list */
-#define COMPARE_OP 107 /* Comparison operator */
-#define IMPORT_NAME 108 /* Index in name list */
-#define IMPORT_FROM 109 /* Index in name list */
-
-#define JUMP_FORWARD 110 /* Number of bytes to skip */
-#define JUMP_IF_FALSE_OR_POP 111 /* Target byte offset from beginning of code */
-#define JUMP_IF_TRUE_OR_POP 112 /* "" */
-#define JUMP_ABSOLUTE 113 /* "" */
-#define POP_JUMP_IF_FALSE 114 /* "" */
-#define POP_JUMP_IF_TRUE 115 /* "" */
-
-#define LOAD_GLOBAL 116 /* Index in name list */
-
-#define CONTINUE_LOOP 119 /* Start of loop (absolute) */
-#define SETUP_LOOP 120 /* Target address (relative) */
-#define SETUP_EXCEPT 121 /* "" */
-#define SETUP_FINALLY 122 /* "" */
-
-#define LOAD_FAST 124 /* Local variable number */
-#define STORE_FAST 125 /* Local variable number */
-#define DELETE_FAST 126 /* Local variable number */
-
-#define RAISE_VARARGS 130 /* Number of raise arguments (1, 2 or 3) */
+#define STORE_ATTR 95 /* Index in name list */
+#define DELETE_ATTR 96 /* "" */
+#define STORE_GLOBAL 97 /* "" */
+#define DELETE_GLOBAL 98 /* "" */
+
+#define LOAD_CONST 100 /* Index in const list */
+#define LOAD_NAME 101 /* Index in name list */
+#define BUILD_TUPLE 102 /* Number of tuple items */
+#define BUILD_LIST 103 /* Number of list items */
+#define BUILD_SET 104 /* Number of set items */
+#define BUILD_MAP 105 /* Always zero for now */
+#define LOAD_ATTR 106 /* Index in name list */
+#define COMPARE_OP 107 /* Comparison operator */
+#define IMPORT_NAME 108 /* Index in name list */
+#define IMPORT_FROM 109 /* Index in name list */
+
+#define JUMP_FORWARD 110 /* Number of bytes to skip */
+#define JUMP_IF_FALSE_OR_POP 111 /* Target byte offset from beginning of code */
+#define JUMP_IF_TRUE_OR_POP 112 /* "" */
+#define JUMP_ABSOLUTE 113 /* "" */
+#define POP_JUMP_IF_FALSE 114 /* "" */
+#define POP_JUMP_IF_TRUE 115 /* "" */
+
+#define LOAD_GLOBAL 116 /* Index in name list */
+
+#define CONTINUE_LOOP 119 /* Start of loop (absolute) */
+#define SETUP_LOOP 120 /* Target address (relative) */
+#define SETUP_EXCEPT 121 /* "" */
+#define SETUP_FINALLY 122 /* "" */
+
+#define LOAD_FAST 124 /* Local variable number */
+#define STORE_FAST 125 /* Local variable number */
+#define DELETE_FAST 126 /* Local variable number */
+
+#define RAISE_VARARGS 130 /* Number of raise arguments (1, 2 or 3) */
/* CALL_FUNCTION_XXX opcodes defined below depend on this definition */
-#define CALL_FUNCTION 131 /* #args + (#kwargs<<8) */
-#define MAKE_FUNCTION 132 /* #defaults + #kwdefaults<<8 + #annotations<<16 */
-#define BUILD_SLICE 133 /* Number of items */
+#define CALL_FUNCTION 131 /* #args + (#kwargs<<8) */
+#define MAKE_FUNCTION 132 /* #defaults + #kwdefaults<<8 + #annotations<<16 */
+#define BUILD_SLICE 133 /* Number of items */
#define MAKE_CLOSURE 134 /* same as MAKE_FUNCTION */
#define LOAD_CLOSURE 135 /* Load free variable from closure */
@@ -127,9 +127,9 @@ extern "C" {
/* The next 3 opcodes must be contiguous and satisfy
(CALL_FUNCTION_VAR - CALL_FUNCTION) & 3 == 1 */
-#define CALL_FUNCTION_VAR 140 /* #args + (#kwargs<<8) */
-#define CALL_FUNCTION_KW 141 /* #args + (#kwargs<<8) */
-#define CALL_FUNCTION_VAR_KW 142 /* #args + (#kwargs<<8) */
+#define CALL_FUNCTION_VAR 140 /* #args + (#kwargs<<8) */
+#define CALL_FUNCTION_KW 141 /* #args + (#kwargs<<8) */
+#define CALL_FUNCTION_VAR_KW 142 /* #args + (#kwargs<<8) */
#define SETUP_WITH 143
@@ -149,7 +149,7 @@ extern "C" {
enum cmp_op {PyCmp_LT=Py_LT, PyCmp_LE=Py_LE, PyCmp_EQ=Py_EQ, PyCmp_NE=Py_NE, PyCmp_GT=Py_GT, PyCmp_GE=Py_GE,
- PyCmp_IN, PyCmp_NOT_IN, PyCmp_IS, PyCmp_IS_NOT, PyCmp_EXC_MATCH, PyCmp_BAD};
+ PyCmp_IN, PyCmp_NOT_IN, PyCmp_IS, PyCmp_IS_NOT, PyCmp_EXC_MATCH, PyCmp_BAD};
#define HAS_ARG(op) ((op) >= HAVE_ARGUMENT)
diff --git a/Include/parsetok.h b/Include/parsetok.h
index 4b7694f..911dfc1 100644
--- a/Include/parsetok.h
+++ b/Include/parsetok.h
@@ -9,7 +9,10 @@ extern "C" {
typedef struct {
int error;
- const char *filename; /* decoded from the filesystem encoding */
+#ifndef PGEN
+ /* The filename is useless for pgen, see comment in tok_state structure */
+ PyObject *filename;
+#endif
int lineno;
int offset;
char *text; /* UTF-8-encoded string */
@@ -66,8 +69,10 @@ PyAPI_FUNC(node *) PyParser_ParseStringFlagsFilenameEx(
perrdetail *err_ret,
int *flags);
-/* Note that he following function is defined in pythonrun.c not parsetok.c. */
+/* Note that the following functions are defined in pythonrun.c,
+ not in parsetok.c */
PyAPI_FUNC(void) PyParser_SetError(perrdetail *);
+PyAPI_FUNC(void) PyParser_ClearError(perrdetail *);
#ifdef __cplusplus
}
diff --git a/Include/patchlevel.h b/Include/patchlevel.h
index 83165a2..7350882 100644
--- a/Include/patchlevel.h
+++ b/Include/patchlevel.h
@@ -17,19 +17,15 @@
/* Version parsed out into numeric values */
/*--start constants--*/
#define PY_MAJOR_VERSION 3
-#define PY_MINOR_VERSION 2
-#define PY_MICRO_VERSION 3
-#define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_GAMMA
+#define PY_MINOR_VERSION 3
+#define PY_MICRO_VERSION 0
+#define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_ALPHA
#define PY_RELEASE_SERIAL 1
/* Version as a string */
-#define PY_VERSION "3.2.3rc1"
+#define PY_VERSION "3.3.0a1+"
/*--end constants--*/
-/* Subversion Revision number of this file (not of the repository). Empty
- since Mercurial migration. */
-#define PY_PATCHLEVEL_REVISION ""
-
/* Version as a single 4-byte hex number, e.g. 0x010502B2 == 1.5.2b2.
Use this for numeric comparisons, e.g. #if PY_VERSION_HEX >= ... */
#define PY_VERSION_HEX ((PY_MAJOR_VERSION << 24) | \
diff --git a/Include/py_curses.h b/Include/py_curses.h
index a891c42..f2c08f6 100644
--- a/Include/py_curses.h
+++ b/Include/py_curses.h
@@ -76,6 +76,7 @@ extern "C" {
typedef struct {
PyObject_HEAD
WINDOW *win;
+ char *encoding;
} PyCursesWindowObject;
#define PyCursesWindow_Check(v) (Py_TYPE(v) == &PyCursesWindow_Type)
diff --git a/Include/pydebug.h b/Include/pydebug.h
index e23cbdc..97c2f8c 100644
--- a/Include/pydebug.h
+++ b/Include/pydebug.h
@@ -16,7 +16,6 @@ PyAPI_DATA(int) Py_BytesWarningFlag;
PyAPI_DATA(int) Py_UseClassExceptionsFlag;
PyAPI_DATA(int) Py_FrozenFlag;
PyAPI_DATA(int) Py_IgnoreEnvironmentFlag;
-PyAPI_DATA(int) Py_DivisionWarningFlag;
PyAPI_DATA(int) Py_DontWriteBytecodeFlag;
PyAPI_DATA(int) Py_NoUserSiteDirectory;
PyAPI_DATA(int) Py_UnbufferedStdioFlag;
@@ -27,8 +26,6 @@ PyAPI_DATA(int) Py_HashRandomizationFlag;
PYTHONPATH and PYTHONHOME from the environment */
#define Py_GETENV(s) (Py_IgnoreEnvironmentFlag ? NULL : getenv(s))
-PyAPI_FUNC(void) Py_FatalError(const char *message);
-
#ifdef __cplusplus
}
#endif
diff --git a/Include/pyerrors.h b/Include/pyerrors.h
index 4bb3c01..1e42ebb 100644
--- a/Include/pyerrors.h
+++ b/Include/pyerrors.h
@@ -45,18 +45,23 @@ typedef struct {
PyObject *myerrno;
PyObject *strerror;
PyObject *filename;
-} PyEnvironmentErrorObject;
-
#ifdef MS_WINDOWS
-typedef struct {
- PyException_HEAD
- PyObject *myerrno;
- PyObject *strerror;
- PyObject *filename;
PyObject *winerror;
-} PyWindowsErrorObject;
#endif
+ Py_ssize_t written; /* only for BlockingIOError, -1 otherwise */
+} PyOSErrorObject;
+
+typedef struct {
+ PyException_HEAD
+ PyObject *value;
+} PyStopIterationObject;
+
+/* Compatibility typedefs */
+typedef PyOSErrorObject PyEnvironmentErrorObject;
+#ifdef MS_WINDOWS
+typedef PyOSErrorObject PyWindowsErrorObject;
#endif
+#endif /* !Py_LIMITED_API */
/* Error handling definitions */
@@ -70,7 +75,17 @@ PyAPI_FUNC(PyObject *) PyErr_Occurred(void);
PyAPI_FUNC(void) PyErr_Clear(void);
PyAPI_FUNC(void) PyErr_Fetch(PyObject **, PyObject **, PyObject **);
PyAPI_FUNC(void) PyErr_Restore(PyObject *, PyObject *, PyObject *);
-PyAPI_FUNC(void) Py_FatalError(const char *message);
+
+#if defined(__clang__) || \
+ (defined(__GNUC__) && \
+ ((__GNUC_MAJOR__ >= 3) || \
+ (__GNUC_MAJOR__ == 2) && (__GNUC_MINOR__ >= 5)))
+#define _Py_NO_RETURN __attribute__((__noreturn__))
+#else
+#define _Py_NO_RETURN
+#endif
+
+PyAPI_FUNC(void) Py_FatalError(const char *message) _Py_NO_RETURN;
#if defined(Py_DEBUG) || defined(Py_LIMITED_API)
#define _PyErr_OCCURRED() PyErr_Occurred()
@@ -90,6 +105,7 @@ PyAPI_FUNC(PyObject *) PyException_GetTraceback(PyObject *);
/* Cause manipulation (PEP 3134) */
PyAPI_FUNC(PyObject *) PyException_GetCause(PyObject *);
PyAPI_FUNC(void) PyException_SetCause(PyObject *, PyObject *);
+PyAPI_FUNC(int) _PyException_SetCauseChecked(PyObject *, PyObject *);
/* Context manipulation (PEP 3134) */
PyAPI_FUNC(PyObject *) PyException_GetContext(PyObject *);
@@ -122,10 +138,9 @@ PyAPI_DATA(PyObject *) PyExc_LookupError;
PyAPI_DATA(PyObject *) PyExc_AssertionError;
PyAPI_DATA(PyObject *) PyExc_AttributeError;
+PyAPI_DATA(PyObject *) PyExc_BufferError;
PyAPI_DATA(PyObject *) PyExc_EOFError;
PyAPI_DATA(PyObject *) PyExc_FloatingPointError;
-PyAPI_DATA(PyObject *) PyExc_EnvironmentError;
-PyAPI_DATA(PyObject *) PyExc_IOError;
PyAPI_DATA(PyObject *) PyExc_OSError;
PyAPI_DATA(PyObject *) PyExc_ImportError;
PyAPI_DATA(PyObject *) PyExc_IndexError;
@@ -150,6 +165,27 @@ PyAPI_DATA(PyObject *) PyExc_UnicodeDecodeError;
PyAPI_DATA(PyObject *) PyExc_UnicodeTranslateError;
PyAPI_DATA(PyObject *) PyExc_ValueError;
PyAPI_DATA(PyObject *) PyExc_ZeroDivisionError;
+
+PyAPI_DATA(PyObject *) PyExc_BlockingIOError;
+PyAPI_DATA(PyObject *) PyExc_BrokenPipeError;
+PyAPI_DATA(PyObject *) PyExc_ChildProcessError;
+PyAPI_DATA(PyObject *) PyExc_ConnectionError;
+PyAPI_DATA(PyObject *) PyExc_ConnectionAbortedError;
+PyAPI_DATA(PyObject *) PyExc_ConnectionRefusedError;
+PyAPI_DATA(PyObject *) PyExc_ConnectionResetError;
+PyAPI_DATA(PyObject *) PyExc_FileExistsError;
+PyAPI_DATA(PyObject *) PyExc_FileNotFoundError;
+PyAPI_DATA(PyObject *) PyExc_InterruptedError;
+PyAPI_DATA(PyObject *) PyExc_IsADirectoryError;
+PyAPI_DATA(PyObject *) PyExc_NotADirectoryError;
+PyAPI_DATA(PyObject *) PyExc_PermissionError;
+PyAPI_DATA(PyObject *) PyExc_ProcessLookupError;
+PyAPI_DATA(PyObject *) PyExc_TimeoutError;
+
+
+/* Compatibility aliases */
+PyAPI_DATA(PyObject *) PyExc_EnvironmentError;
+PyAPI_DATA(PyObject *) PyExc_IOError;
#ifdef MS_WINDOWS
PyAPI_DATA(PyObject *) PyExc_WindowsError;
#endif
@@ -157,8 +193,6 @@ PyAPI_DATA(PyObject *) PyExc_WindowsError;
PyAPI_DATA(PyObject *) PyExc_VMSError;
#endif
-PyAPI_DATA(PyObject *) PyExc_BufferError;
-
PyAPI_DATA(PyObject *) PyExc_RecursionErrorInst;
/* Predefined warning categories */
@@ -198,8 +232,6 @@ PyAPI_FUNC(PyObject *) PyErr_Format(
);
#ifdef MS_WINDOWS
-PyAPI_FUNC(PyObject *) PyErr_SetFromWindowsErrWithFilenameObject(
- int, const char *);
PyAPI_FUNC(PyObject *) PyErr_SetFromWindowsErrWithFilename(
int ierr,
const char *filename /* decoded from the filesystem encoding */
@@ -293,6 +325,12 @@ PyAPI_FUNC(PyObject *) PyUnicodeTranslateError_Create(
Py_ssize_t end,
const char *reason /* UTF-8 encoded string */
);
+PyAPI_FUNC(PyObject *) _PyUnicodeTranslateError_Create(
+ PyObject *object,
+ Py_ssize_t start,
+ Py_ssize_t end,
+ const char *reason /* UTF-8 encoded string */
+ );
#endif
/* get the encoding attribute */
@@ -348,6 +386,8 @@ PyAPI_FUNC(int) PyUnicodeTranslateError_SetReason(
const char *reason /* UTF-8 encoded string */
);
+/* create a StopIteration exception with the given value */
+PyAPI_FUNC(PyObject *) PyStopIteration_Create(PyObject *);
/* These APIs aren't really part of the error implementation, but
often needed to format error messages; the native C lib APIs are
diff --git a/Include/pymacro.h b/Include/pymacro.h
new file mode 100644
index 0000000..1dc0c61
--- /dev/null
+++ b/Include/pymacro.h
@@ -0,0 +1,55 @@
+#ifndef Py_PYMACRO_H
+#define Py_PYMACRO_H
+
+#define Py_MIN(x, y) (((x) > (y)) ? (y) : (x))
+#define Py_MAX(x, y) (((x) > (y)) ? (x) : (y))
+
+/* Argument must be a char or an int in [-128, 127] or [0, 255]. */
+#define Py_CHARMASK(c) ((unsigned char)((c) & 0xff))
+
+
+/* Assert a build-time dependency, as an expression.
+
+ Your compile will fail if the condition isn't true, or can't be evaluated
+ by the compiler. This can be used in an expression: its value is 0.
+
+ Example:
+
+ #define foo_to_char(foo) \
+ ((char *)(foo) \
+ + Py_BUILD_ASSERT_EXPR(offsetof(struct foo, string) == 0))
+
+ Written by Rusty Russell, public domain, http://ccodearchive.net/ */
+#define Py_BUILD_ASSERT_EXPR(cond) \
+ (sizeof(char [1 - 2*!(cond)]) - 1)
+
+/* Get the number of elements in a visible array
+
+ This does not work on pointers, or arrays declared as [], or function
+ parameters. With correct compiler support, such usage will cause a build
+ error (see Py_BUILD_ASSERT_EXPR).
+
+ Written by Rusty Russell, public domain, http://ccodearchive.net/ */
+#if defined(__GNUC__)
+/* Two gcc extensions.
+ &a[0] degrades to a pointer: a different type from an array */
+#define Py_ARRAY_LENGTH(array) \
+ (sizeof(array) / sizeof((array)[0]) \
+ + Py_BUILD_ASSERT_EXPR(!__builtin_types_compatible_p(typeof(array), \
+ typeof(&(array)[0]))))
+#else
+#define Py_ARRAY_LENGTH(array) \
+ (sizeof(array) / sizeof((array)[0]))
+#endif
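
A usage sketch for Py_ARRAY_LENGTH (assumes <stdio.h>; the table is
hypothetical): it only works on a visible array, not on a pointer.

    static const char *const kind_names[] = {"wchar", "1byte", "2byte", "4byte"};

    static void
    print_kind_names(void)
    {
        size_t i;
        for (i = 0; i < Py_ARRAY_LENGTH(kind_names); i++)  /* == 4 */
            printf("%s\n", kind_names[i]);
    }
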
+
+
+/* Define macros for inline documentation. */
+#define PyDoc_VAR(name) static char name[]
+#define PyDoc_STRVAR(name,str) PyDoc_VAR(name) = PyDoc_STR(str)
+#ifdef WITH_DOC_STRINGS
+#define PyDoc_STR(str) str
+#else
+#define PyDoc_STR(str) ""
+#endif
+
+#endif /* Py_PYMACRO_H */
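
A sketch of PyDoc_STRVAR in a method table (spam_impl is a hypothetical
METH_NOARGS function): the docstring collapses to "" when Python is built
without WITH_DOC_STRINGS, saving space on constrained builds.

    static PyObject *spam_impl(PyObject *self, PyObject *ignored);  /* hypothetical */

    PyDoc_STRVAR(spam_doc, "spam() -> None\n\nDo nothing, pleasantly.");

    static PyMethodDef spam_methods[] = {
        {"spam", spam_impl, METH_NOARGS, spam_doc},
        {NULL, NULL, 0, NULL}   /* sentinel */
    };
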
diff --git a/Include/pymath.h b/Include/pymath.h
index b4eda66..62a6c42 100644
--- a/Include/pymath.h
+++ b/Include/pymath.h
@@ -37,12 +37,6 @@ extern double pow(double, double);
#endif /* __STDC__ */
#endif /* _MSC_VER */
-#ifdef _OSF_SOURCE
-/* OSF1 5.1 doesn't make these available with XOPEN_SOURCE_EXTENDED defined */
-extern int finite(double);
-extern double copysign(double, double);
-#endif
-
/* High precision definition of pi and e (Euler)
* The values are taken from libc6's math.h.
*/
diff --git a/Include/pyport.h b/Include/pyport.h
index 269ba68..1fd4dcc 100644
--- a/Include/pyport.h
+++ b/Include/pyport.h
@@ -135,7 +135,7 @@ Used in: PY_LONG_LONG
#define _PyHASH_MULTIPLIER 1000003 /* 0xf4243 */
/* Parameters used for the numeric hash implementation. See notes for
- _PyHash_Double in Objects/object.c. Numeric hashes are based on
+ _Py_HashDouble in Objects/object.c. Numeric hashes are based on
reduction modulo the prime 2**_PyHASH_BITS - 1. */
#if SIZEOF_VOID_P >= 8
diff --git a/Include/pystate.h b/Include/pystate.h
index b5fe1ad..1bbb4e2 100644
--- a/Include/pystate.h
+++ b/Include/pystate.h
@@ -74,9 +74,9 @@ typedef struct _ts {
struct _frame *frame;
int recursion_depth;
char overflowed; /* The stack has overflowed. Allow 50 more calls
- to handle the runtime error. */
- char recursion_critical; /* The current calls must not cause
- a stack overflow. */
+ to handle the runtime error. */
+ char recursion_critical; /* The current calls must not cause
+ a stack overflow. */
/* 'tracing' keeps track of the execution depth when tracing/profiling.
This is to prevent the actual trace/profile code from being recorded in
the trace/profile. */
@@ -160,6 +160,8 @@ typedef
enum {PyGILState_LOCKED, PyGILState_UNLOCKED}
PyGILState_STATE;
+#ifdef WITH_THREAD
+
/* Ensure that the current thread is ready to call the Python
C API, regardless of the current state of Python, or of its
thread lock. This may be called as many times as desired
@@ -201,6 +203,8 @@ PyAPI_FUNC(void) PyGILState_Release(PyGILState_STATE);
*/
PyAPI_FUNC(PyThreadState *) PyGILState_GetThisThreadState(void);
+#endif /* #ifdef WITH_THREAD */
+
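A sketch of the documented PyGILState pattern this block guards: a thread
that Python did not create must take the GIL before touching the C API.

    static void
    callback_from_foreign_thread(void)
    {
        PyGILState_STATE gstate = PyGILState_Ensure();

        PyRun_SimpleString("print('hello from a foreign thread')");

        /* Every Ensure() must be paired with exactly one Release(). */
        PyGILState_Release(gstate);
    }
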
/* The implementation of sys._current_frames() Returns a dict mapping
thread id to that thread's current frame.
*/
diff --git a/Include/pythonrun.h b/Include/pythonrun.h
index e244ce7..74ab986 100644
--- a/Include/pythonrun.h
+++ b/Include/pythonrun.h
@@ -179,9 +179,6 @@ PyAPI_FUNC(const char *) Py_GetCopyright(void);
PyAPI_FUNC(const char *) Py_GetCompiler(void);
PyAPI_FUNC(const char *) Py_GetBuildInfo(void);
#ifndef Py_LIMITED_API
-PyAPI_FUNC(const char *) _Py_svnversion(void);
-PyAPI_FUNC(const char *) Py_SubversionRevision(void);
-PyAPI_FUNC(const char *) Py_SubversionShortBranch(void);
PyAPI_FUNC(const char *) _Py_hgidentifier(void);
PyAPI_FUNC(const char *) _Py_hgversion(void);
#endif
@@ -215,6 +212,7 @@ PyAPI_FUNC(void) PyByteArray_Fini(void);
PyAPI_FUNC(void) PyFloat_Fini(void);
PyAPI_FUNC(void) PyOS_FiniInterrupts(void);
PyAPI_FUNC(void) _PyGC_Fini(void);
+PyAPI_FUNC(void) PySlice_Fini(void);
PyAPI_DATA(PyThreadState *) _Py_Finalizing;
#endif
diff --git a/Include/pythread.h b/Include/pythread.h
index 9806c61..6e9f303 100644
--- a/Include/pythread.h
+++ b/Include/pythread.h
@@ -32,7 +32,7 @@ PyAPI_FUNC(int) PyThread_acquire_lock(PyThread_type_lock, int);
on a lock (see PyThread_acquire_lock_timed() below).
PY_TIMEOUT_MAX is the highest usable value (in microseconds) of that
type, and depends on the system threading API.
-
+
NOTE: this isn't the same value as `_thread.TIMEOUT_MAX`. The _thread
module exposes a higher-level API, with timeouts expressed in seconds
and floating-point numbers allowed.
@@ -74,6 +74,8 @@ PyAPI_FUNC(void) PyThread_release_lock(PyThread_type_lock);
PyAPI_FUNC(size_t) PyThread_get_stacksize(void);
PyAPI_FUNC(int) PyThread_set_stacksize(size_t);
+PyAPI_FUNC(PyObject*) PyThread_GetInfo(void);
+
/* Thread Local Storage (TLS) API */
PyAPI_FUNC(int) PyThread_create_key(void);
PyAPI_FUNC(void) PyThread_delete_key(int);
diff --git a/Include/pytime.h b/Include/pytime.h
index d707bdb..0473dc7 100644
--- a/Include/pytime.h
+++ b/Include/pytime.h
@@ -3,6 +3,7 @@
#define Py_PYTIME_H
#include "pyconfig.h" /* include for defines */
+#include "object.h"
/**************************************************************************
Symbols and macros to supply platform-independent interfaces to time related
@@ -37,6 +38,29 @@ do { \
((tv_end.tv_sec - tv_start.tv_sec) + \
(tv_end.tv_usec - tv_start.tv_usec) * 0.000001)
+#ifndef Py_LIMITED_API
+/* Convert a number of seconds, int or float, to time_t. */
+PyAPI_FUNC(int) _PyTime_ObjectToTime_t(
+ PyObject *obj,
+ time_t *sec);
+
+/* Convert a number of seconds, int or float, to a timeval structure.
+ usec is in the range [0; 999999] and rounded towards zero.
+ For example, -1.2 is converted to (-2, 800000). */
+PyAPI_FUNC(int) _PyTime_ObjectToTimeval(
+ PyObject *obj,
+ time_t *sec,
+ long *usec);
+
+/* Convert a number of seconds, int or float, to a timespec structure.
+ nsec is in the range [0; 999999999] and rounded towards zero.
+ For example, -1.2 is converted to (-2, 800000000). */
+PyAPI_FUNC(int) _PyTime_ObjectToTimespec(
+ PyObject *obj,
+ time_t *sec,
+ long *nsec);
+#endif
+
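A conversion sketch using the private helper above (assumes <sys/time.h>;
timeout_from_object is hypothetical): accept an int or float number of
seconds and fill a struct timeval.

    static int
    timeout_from_object(PyObject *obj, struct timeval *tv)
    {
        time_t sec;
        long usec;
        if (_PyTime_ObjectToTimeval(obj, &sec, &usec) < 0)
            return -1;          /* exception already set */
        tv->tv_sec = sec;
        tv->tv_usec = usec;     /* always in [0; 999999] */
        return 0;
    }
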
/* Dummy to force linking. */
PyAPI_FUNC(void) _PyTime_Init(void);
diff --git a/Include/setobject.h b/Include/setobject.h
index 6234111..00e5344 100644
--- a/Include/setobject.h
+++ b/Include/setobject.h
@@ -99,6 +99,8 @@ PyAPI_FUNC(int) _PySet_NextEntry(PyObject *set, Py_ssize_t *pos, PyObject **key,
PyAPI_FUNC(PyObject *) PySet_Pop(PyObject *set);
#ifndef Py_LIMITED_API
PyAPI_FUNC(int) _PySet_Update(PyObject *set, PyObject *iterable);
+
+PyAPI_FUNC(int) PySet_ClearFreeList(void);
#endif
#ifdef __cplusplus
diff --git a/Include/structmember.h b/Include/structmember.h
index 0b85b2a..40c04e9 100644
--- a/Include/structmember.h
+++ b/Include/structmember.h
@@ -9,16 +9,6 @@ extern "C" {
#include <stddef.h> /* For offsetof */
-/* The offsetof() macro calculates the offset of a structure member
- in its structure. Unfortunately this cannot be written down
- portably, hence it is provided by a Standard C header file.
- For pre-Standard C compilers, here is a version that usually works
- (but watch out!): */
-
-#ifndef offsetof
-#define offsetof(type, member) ( (int) & ((type*)0) -> member )
-#endif
-
/* An array of PyMemberDef structures defines the name, type and offset
of selected members of a C structure. These can be read by
PyMember_GetOne() and set by PyMember_SetOne() (except if their READONLY
diff --git a/Include/symtable.h b/Include/symtable.h
index fd7de04..82f6269 100644
--- a/Include/symtable.h
+++ b/Include/symtable.h
@@ -23,10 +23,13 @@ struct symtable {
PyObject *st_blocks; /* dict: map AST node addresses
* to symbol table entries */
PyObject *st_stack; /* list: stack of namespace info */
- PyObject *st_global; /* borrowed ref to st_top->st_symbols */
- int st_nblocks; /* number of blocks used */
+ PyObject *st_global; /* borrowed ref to st_top->ste_symbols */
+ int st_nblocks; /* number of blocks used. kept for
+ consistency with the corresponding
+ compiler structure */
PyObject *st_private; /* name of current class or NULL */
- PyFutureFeatures *st_future; /* module's future features */
+ PyFutureFeatures *st_future; /* module's future features that affect
+ the symbol table */
};
typedef struct _symtable_entry {
@@ -34,7 +37,7 @@ typedef struct _symtable_entry {
PyObject *ste_id; /* int: key in ste_table->st_blocks */
PyObject *ste_symbols; /* dict: variable names to flags */
PyObject *ste_name; /* string: name of current block */
- PyObject *ste_varnames; /* list of variable names */
+ PyObject *ste_varnames; /* list of function parameters */
PyObject *ste_children; /* list of child blocks */
_Py_block_ty ste_type; /* module, class, or function */
int ste_unoptimized; /* false if namespace is optimized */
diff --git a/Include/timefuncs.h b/Include/timefuncs.h
deleted file mode 100644
index 3c43575..0000000
--- a/Include/timefuncs.h
+++ /dev/null
@@ -1,25 +0,0 @@
-/* timefuncs.h
- */
-
-/* Utility function related to timemodule.c. */
-
-#ifndef TIMEFUNCS_H
-#define TIMEFUNCS_H
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-
-/* Cast double x to time_t, but raise ValueError if x is too large
- * to fit in a time_t. ValueError is set on return iff the return
- * value is (time_t)-1 and PyErr_Occurred().
- */
-#ifndef Py_LIMITED_API
-PyAPI_FUNC(time_t) _PyTime_DoubleToTimet(double x);
-#endif
-
-
-#ifdef __cplusplus
-}
-#endif
-#endif /* TIMEFUNCS_H */
diff --git a/Include/traceback.h b/Include/traceback.h
index 69e3d05..7734707 100644
--- a/Include/traceback.h
+++ b/Include/traceback.h
@@ -5,6 +5,8 @@
extern "C" {
#endif
+#include "pystate.h"
+
struct _frame;
/* Traceback interface */
@@ -28,6 +30,42 @@ PyAPI_FUNC(int) _Py_DisplaySourceLine(PyObject *, PyObject *, int, int);
PyAPI_DATA(PyTypeObject) PyTraceBack_Type;
#define PyTraceBack_Check(v) (Py_TYPE(v) == &PyTraceBack_Type)
+/* Write the Python traceback into the file 'fd'. For example:
+
+ Traceback (most recent call first):
+ File "xxx", line xxx in <xxx>
+ File "xxx", line xxx in <xxx>
+ ...
+ File "xxx", line xxx in <xxx>
+
+   This function is written for debugging purposes only, to dump the
+   traceback in the worst case: after a segmentation fault, at fatal error,
+   etc. That's why it is very limited. Strings are truncated to 100
+   characters and encoded to ASCII with backslashreplace. It doesn't write
+   the source code, only the function name, filename and line number of each
+   frame. Only the first 100 frames are written: if the traceback is
+   truncated, the line " ..." is written.
+
+ This function is signal safe. */
+
+PyAPI_FUNC(void) _Py_DumpTraceback(
+ int fd,
+ PyThreadState *tstate);
+
+/* Write the traceback of all threads into the file 'fd'. current_thread can be
+ NULL. Return NULL on success, or an error message on error.
+
+   This function is written for debugging purposes only. It calls
+   _Py_DumpTraceback() for each thread, and so has the same limitations. It
+   only writes the traceback of the first 100 threads: "..." is written if
+   there are more threads.
+
+ This function is signal safe. */
+
+PyAPI_FUNC(const char*) _Py_DumpTracebackThreads(
+ int fd, PyInterpreterState *interp,
+ PyThreadState *current_thread);
+
+
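A faulthandler-style sketch (assumes <unistd.h> and <string.h>; the handler
itself is hypothetical): dump every thread's traceback to stderr from a
fatal signal handler, using only signal-safe calls.

    static void
    fatal_signal_handler(int signum)
    {
        const char *errmsg;
        (void)signum;
        errmsg = _Py_DumpTracebackThreads(2 /* stderr */,
                                          PyInterpreterState_Head(),
                                          PyGILState_GetThisThreadState());
        if (errmsg != NULL)
            write(2, errmsg, strlen(errmsg));   /* printf() is not signal safe */
    }
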
#ifdef __cplusplus
}
#endif
diff --git a/Include/ucnhash.h b/Include/ucnhash.h
index 70fdf13..8de9ba0 100644
--- a/Include/ucnhash.h
+++ b/Include/ucnhash.h
@@ -19,11 +19,13 @@ typedef struct {
success, zero if not. Does not set Python exceptions.
If self is NULL, data come from the default version of the database.
If it is not NULL, it should be a unicodedata.ucd_X_Y_Z object */
- int (*getname)(PyObject *self, Py_UCS4 code, char* buffer, int buflen);
+ int (*getname)(PyObject *self, Py_UCS4 code, char* buffer, int buflen,
+ int with_alias_and_seq);
/* Get character code for a given name. Same error handling
as for getname. */
- int (*getcode)(PyObject *self, const char* name, int namelen, Py_UCS4* code);
+ int (*getcode)(PyObject *self, const char* name, int namelen, Py_UCS4* code,
+ int with_named_seq);
} _PyUnicode_Name_CAPI;
diff --git a/Include/unicodeobject.h b/Include/unicodeobject.h
index 379a90c..8f74995 100644
--- a/Include/unicodeobject.h
+++ b/Include/unicodeobject.h
@@ -64,16 +64,15 @@ Copyright (c) Corporation for National Research Initiatives.
/* Python 3.x requires unicode */
#define Py_USING_UNICODE
-/* FIXME: MvL's new implementation assumes that Py_UNICODE_SIZE is
- properly set, but the default rules below doesn't set it. I'll
- sort this out some other day -- fredrik@pythonware.com */
-
-#ifndef Py_UNICODE_SIZE
-#error Must define Py_UNICODE_SIZE
+#ifndef SIZEOF_WCHAR_T
+#error Must define SIZEOF_WCHAR_T
#endif
-/* Setting Py_UNICODE_WIDE enables UCS-4 storage. Otherwise, Unicode
- strings are stored as UCS-2 (with limited support for UTF-16) */
+#define Py_UNICODE_SIZE SIZEOF_WCHAR_T
+
+/* If wchar_t can be used for UCS-4 storage, set Py_UNICODE_WIDE.
+ Otherwise, Unicode strings are stored as UCS-2 (with limited support
+ for UTF-16) */
#if Py_UNICODE_SIZE >= 4
#define Py_UNICODE_WIDE
@@ -84,19 +83,14 @@ Copyright (c) Corporation for National Research Initiatives.
/* #define HAVE_WCHAR_H */
/* #define HAVE_USABLE_WCHAR_T */
-/* Defaults for various platforms */
-#ifndef PY_UNICODE_TYPE
-
-/* Windows has a usable wchar_t type (unless we're using UCS-4) */
-# if defined(MS_WIN32) && Py_UNICODE_SIZE == 2
-# define HAVE_USABLE_WCHAR_T
-# define PY_UNICODE_TYPE wchar_t
-# endif
-
-# if defined(Py_UNICODE_WIDE)
-# define PY_UNICODE_TYPE Py_UCS4
-# endif
+/* Py_UNICODE was the native Unicode storage format (code unit) used by
+ Python and represents a single Unicode element in the Unicode type.
+ With PEP 393, Py_UNICODE is deprecated and replaced with a
+ typedef to wchar_t. */
+#ifndef Py_LIMITED_API
+#define PY_UNICODE_TYPE wchar_t
+typedef wchar_t Py_UNICODE;
#endif
/* If the compiler provides a wchar_t type we try to support it
@@ -109,6 +103,10 @@ Copyright (c) Corporation for National Research Initiatives.
# endif
#endif
+#if defined(MS_WINDOWS)
+# define HAVE_MBCS
+#endif
+
#ifdef HAVE_WCHAR_H
/* Work around a cosmetic bug in BSDI 4.x wchar.h; thanks to Thomas Wouters */
# ifdef _HAVE_BSDI
@@ -117,201 +115,24 @@ Copyright (c) Corporation for National Research Initiatives.
# include <wchar.h>
#endif
-/*
- * Use this typedef when you need to represent a UTF-16 surrogate pair
- * as single unsigned integer.
- */
-#if SIZEOF_INT >= 4
+/* Py_UCS4 and Py_UCS2 are typedefs for the respective
+ unicode representations. */
+#if SIZEOF_INT == 4
typedef unsigned int Py_UCS4;
-#elif SIZEOF_LONG >= 4
+#elif SIZEOF_LONG == 4
typedef unsigned long Py_UCS4;
+#else
+#error "Could not find a proper typedef for Py_UCS4"
#endif
-/* Py_UNICODE is the native Unicode storage format (code unit) used by
- Python and represents a single Unicode element in the Unicode
- type. */
-
-#ifndef Py_LIMITED_API
-typedef PY_UNICODE_TYPE Py_UNICODE;
-#endif
-
-/* --- UCS-2/UCS-4 Name Mangling ------------------------------------------ */
-
-/* Unicode API names are mangled to assure that UCS-2 and UCS-4 builds
- produce different external names and thus cause import errors in
- case Python interpreters and extensions with mixed compiled in
- Unicode width assumptions are combined. */
-
-#ifndef Py_UNICODE_WIDE
-
-# define PyUnicode_AsASCIIString PyUnicodeUCS2_AsASCIIString
-# define PyUnicode_AsCharmapString PyUnicodeUCS2_AsCharmapString
-# define PyUnicode_AsDecodedObject PyUnicodeUCS2_AsDecodedObject
-# define PyUnicode_AsDecodedUnicode PyUnicodeUCS2_AsDecodedUnicode
-# define PyUnicode_AsEncodedObject PyUnicodeUCS2_AsEncodedObject
-# define PyUnicode_AsEncodedString PyUnicodeUCS2_AsEncodedString
-# define PyUnicode_AsEncodedUnicode PyUnicodeUCS2_AsEncodedUnicode
-# define PyUnicode_AsLatin1String PyUnicodeUCS2_AsLatin1String
-# define PyUnicode_AsRawUnicodeEscapeString PyUnicodeUCS2_AsRawUnicodeEscapeString
-# define PyUnicode_AsUTF32String PyUnicodeUCS2_AsUTF32String
-# define PyUnicode_AsUTF16String PyUnicodeUCS2_AsUTF16String
-# define PyUnicode_AsUTF8String PyUnicodeUCS2_AsUTF8String
-# define PyUnicode_AsUnicode PyUnicodeUCS2_AsUnicode
-# define PyUnicode_AsUnicodeEscapeString PyUnicodeUCS2_AsUnicodeEscapeString
-# define PyUnicode_AsWideChar PyUnicodeUCS2_AsWideChar
-# define PyUnicode_AsWideCharString PyUnicodeUCS2_AsWideCharString
-# define PyUnicode_ClearFreeList PyUnicodeUCS2_ClearFreelist
-# define PyUnicode_Compare PyUnicodeUCS2_Compare
-# define PyUnicode_CompareWithASCIIString PyUnicodeUCS2_CompareWithASCIIString
-# define PyUnicode_Concat PyUnicodeUCS2_Concat
-# define PyUnicode_Append PyUnicodeUCS2_Append
-# define PyUnicode_AppendAndDel PyUnicodeUCS2_AppendAndDel
-# define PyUnicode_Contains PyUnicodeUCS2_Contains
-# define PyUnicode_Count PyUnicodeUCS2_Count
-# define PyUnicode_Decode PyUnicodeUCS2_Decode
-# define PyUnicode_DecodeASCII PyUnicodeUCS2_DecodeASCII
-# define PyUnicode_DecodeCharmap PyUnicodeUCS2_DecodeCharmap
-# define PyUnicode_DecodeLatin1 PyUnicodeUCS2_DecodeLatin1
-# define PyUnicode_DecodeFSDefault PyUnicodeUCS2_DecodeFSDefault
-# define PyUnicode_DecodeFSDefaultAndSize PyUnicodeUCS2_DecodeFSDefaultAndSize
-# define PyUnicode_DecodeRawUnicodeEscape PyUnicodeUCS2_DecodeRawUnicodeEscape
-# define PyUnicode_DecodeUTF32 PyUnicodeUCS2_DecodeUTF32
-# define PyUnicode_DecodeUTF32Stateful PyUnicodeUCS2_DecodeUTF32Stateful
-# define PyUnicode_DecodeUTF16 PyUnicodeUCS2_DecodeUTF16
-# define PyUnicode_DecodeUTF16Stateful PyUnicodeUCS2_DecodeUTF16Stateful
-# define PyUnicode_DecodeUTF8 PyUnicodeUCS2_DecodeUTF8
-# define PyUnicode_DecodeUTF8Stateful PyUnicodeUCS2_DecodeUTF8Stateful
-# define PyUnicode_DecodeUnicodeEscape PyUnicodeUCS2_DecodeUnicodeEscape
-# define PyUnicode_Encode PyUnicodeUCS2_Encode
-# define PyUnicode_EncodeASCII PyUnicodeUCS2_EncodeASCII
-# define PyUnicode_EncodeCharmap PyUnicodeUCS2_EncodeCharmap
-# define PyUnicode_EncodeDecimal PyUnicodeUCS2_EncodeDecimal
-# define PyUnicode_EncodeLatin1 PyUnicodeUCS2_EncodeLatin1
-# define PyUnicode_EncodeRawUnicodeEscape PyUnicodeUCS2_EncodeRawUnicodeEscape
-# define PyUnicode_EncodeUTF32 PyUnicodeUCS2_EncodeUTF32
-# define PyUnicode_EncodeUTF16 PyUnicodeUCS2_EncodeUTF16
-# define PyUnicode_EncodeUTF8 PyUnicodeUCS2_EncodeUTF8
-# define PyUnicode_EncodeUnicodeEscape PyUnicodeUCS2_EncodeUnicodeEscape
-# define PyUnicode_Find PyUnicodeUCS2_Find
-# define PyUnicode_Format PyUnicodeUCS2_Format
-# define PyUnicode_FromEncodedObject PyUnicodeUCS2_FromEncodedObject
-# define PyUnicode_FromFormat PyUnicodeUCS2_FromFormat
-# define PyUnicode_FromFormatV PyUnicodeUCS2_FromFormatV
-# define PyUnicode_FromObject PyUnicodeUCS2_FromObject
-# define PyUnicode_FromOrdinal PyUnicodeUCS2_FromOrdinal
-# define PyUnicode_FromString PyUnicodeUCS2_FromString
-# define PyUnicode_FromStringAndSize PyUnicodeUCS2_FromStringAndSize
-# define PyUnicode_FromUnicode PyUnicodeUCS2_FromUnicode
-# define PyUnicode_FromWideChar PyUnicodeUCS2_FromWideChar
-# define PyUnicode_FSConverter PyUnicodeUCS2_FSConverter
-# define PyUnicode_FSDecoder PyUnicodeUCS2_FSDecoder
-# define PyUnicode_GetDefaultEncoding PyUnicodeUCS2_GetDefaultEncoding
-# define PyUnicode_GetMax PyUnicodeUCS2_GetMax
-# define PyUnicode_GetSize PyUnicodeUCS2_GetSize
-# define PyUnicode_IsIdentifier PyUnicodeUCS2_IsIdentifier
-# define PyUnicode_Join PyUnicodeUCS2_Join
-# define PyUnicode_Partition PyUnicodeUCS2_Partition
-# define PyUnicode_RPartition PyUnicodeUCS2_RPartition
-# define PyUnicode_RSplit PyUnicodeUCS2_RSplit
-# define PyUnicode_Replace PyUnicodeUCS2_Replace
-# define PyUnicode_Resize PyUnicodeUCS2_Resize
-# define PyUnicode_RichCompare PyUnicodeUCS2_RichCompare
-# define PyUnicode_Split PyUnicodeUCS2_Split
-# define PyUnicode_Splitlines PyUnicodeUCS2_Splitlines
-# define PyUnicode_Tailmatch PyUnicodeUCS2_Tailmatch
-# define PyUnicode_Translate PyUnicodeUCS2_Translate
-# define PyUnicode_TranslateCharmap PyUnicodeUCS2_TranslateCharmap
-# define _PyUnicode_AsDefaultEncodedString _PyUnicodeUCS2_AsDefaultEncodedString
-# define _PyUnicode_Fini _PyUnicodeUCS2_Fini
-# define _PyUnicode_Init _PyUnicodeUCS2_Init
-# define PyUnicode_strdup PyUnicodeUCS2_strdup
-
+#if SIZEOF_SHORT == 2
+typedef unsigned short Py_UCS2;
#else
-
-# define PyUnicode_AsASCIIString PyUnicodeUCS4_AsASCIIString
-# define PyUnicode_AsCharmapString PyUnicodeUCS4_AsCharmapString
-# define PyUnicode_AsDecodedObject PyUnicodeUCS4_AsDecodedObject
-# define PyUnicode_AsDecodedUnicode PyUnicodeUCS4_AsDecodedUnicode
-# define PyUnicode_AsEncodedObject PyUnicodeUCS4_AsEncodedObject
-# define PyUnicode_AsEncodedString PyUnicodeUCS4_AsEncodedString
-# define PyUnicode_AsEncodedUnicode PyUnicodeUCS4_AsEncodedUnicode
-# define PyUnicode_AsLatin1String PyUnicodeUCS4_AsLatin1String
-# define PyUnicode_AsRawUnicodeEscapeString PyUnicodeUCS4_AsRawUnicodeEscapeString
-# define PyUnicode_AsUTF32String PyUnicodeUCS4_AsUTF32String
-# define PyUnicode_AsUTF16String PyUnicodeUCS4_AsUTF16String
-# define PyUnicode_AsUTF8String PyUnicodeUCS4_AsUTF8String
-# define PyUnicode_AsUnicode PyUnicodeUCS4_AsUnicode
-# define PyUnicode_AsUnicodeEscapeString PyUnicodeUCS4_AsUnicodeEscapeString
-# define PyUnicode_AsWideChar PyUnicodeUCS4_AsWideChar
-# define PyUnicode_AsWideCharString PyUnicodeUCS4_AsWideCharString
-# define PyUnicode_ClearFreeList PyUnicodeUCS4_ClearFreelist
-# define PyUnicode_Compare PyUnicodeUCS4_Compare
-# define PyUnicode_CompareWithASCIIString PyUnicodeUCS4_CompareWithASCIIString
-# define PyUnicode_Concat PyUnicodeUCS4_Concat
-# define PyUnicode_Append PyUnicodeUCS4_Append
-# define PyUnicode_AppendAndDel PyUnicodeUCS4_AppendAndDel
-# define PyUnicode_Contains PyUnicodeUCS4_Contains
-# define PyUnicode_Count PyUnicodeUCS4_Count
-# define PyUnicode_Decode PyUnicodeUCS4_Decode
-# define PyUnicode_DecodeASCII PyUnicodeUCS4_DecodeASCII
-# define PyUnicode_DecodeCharmap PyUnicodeUCS4_DecodeCharmap
-# define PyUnicode_DecodeLatin1 PyUnicodeUCS4_DecodeLatin1
-# define PyUnicode_DecodeFSDefault PyUnicodeUCS4_DecodeFSDefault
-# define PyUnicode_DecodeFSDefaultAndSize PyUnicodeUCS4_DecodeFSDefaultAndSize
-# define PyUnicode_DecodeRawUnicodeEscape PyUnicodeUCS4_DecodeRawUnicodeEscape
-# define PyUnicode_DecodeUTF32 PyUnicodeUCS4_DecodeUTF32
-# define PyUnicode_DecodeUTF32Stateful PyUnicodeUCS4_DecodeUTF32Stateful
-# define PyUnicode_DecodeUTF16 PyUnicodeUCS4_DecodeUTF16
-# define PyUnicode_DecodeUTF16Stateful PyUnicodeUCS4_DecodeUTF16Stateful
-# define PyUnicode_DecodeUTF8 PyUnicodeUCS4_DecodeUTF8
-# define PyUnicode_DecodeUTF8Stateful PyUnicodeUCS4_DecodeUTF8Stateful
-# define PyUnicode_DecodeUnicodeEscape PyUnicodeUCS4_DecodeUnicodeEscape
-# define PyUnicode_Encode PyUnicodeUCS4_Encode
-# define PyUnicode_EncodeASCII PyUnicodeUCS4_EncodeASCII
-# define PyUnicode_EncodeCharmap PyUnicodeUCS4_EncodeCharmap
-# define PyUnicode_EncodeDecimal PyUnicodeUCS4_EncodeDecimal
-# define PyUnicode_EncodeLatin1 PyUnicodeUCS4_EncodeLatin1
-# define PyUnicode_EncodeRawUnicodeEscape PyUnicodeUCS4_EncodeRawUnicodeEscape
-# define PyUnicode_EncodeUTF32 PyUnicodeUCS4_EncodeUTF32
-# define PyUnicode_EncodeUTF16 PyUnicodeUCS4_EncodeUTF16
-# define PyUnicode_EncodeUTF8 PyUnicodeUCS4_EncodeUTF8
-# define PyUnicode_EncodeUnicodeEscape PyUnicodeUCS4_EncodeUnicodeEscape
-# define PyUnicode_Find PyUnicodeUCS4_Find
-# define PyUnicode_Format PyUnicodeUCS4_Format
-# define PyUnicode_FromEncodedObject PyUnicodeUCS4_FromEncodedObject
-# define PyUnicode_FromFormat PyUnicodeUCS4_FromFormat
-# define PyUnicode_FromFormatV PyUnicodeUCS4_FromFormatV
-# define PyUnicode_FromObject PyUnicodeUCS4_FromObject
-# define PyUnicode_FromOrdinal PyUnicodeUCS4_FromOrdinal
-# define PyUnicode_FromString PyUnicodeUCS4_FromString
-# define PyUnicode_FromStringAndSize PyUnicodeUCS4_FromStringAndSize
-# define PyUnicode_FromUnicode PyUnicodeUCS4_FromUnicode
-# define PyUnicode_FromWideChar PyUnicodeUCS4_FromWideChar
-# define PyUnicode_FSConverter PyUnicodeUCS4_FSConverter
-# define PyUnicode_FSDecoder PyUnicodeUCS4_FSDecoder
-# define PyUnicode_GetDefaultEncoding PyUnicodeUCS4_GetDefaultEncoding
-# define PyUnicode_GetMax PyUnicodeUCS4_GetMax
-# define PyUnicode_GetSize PyUnicodeUCS4_GetSize
-# define PyUnicode_IsIdentifier PyUnicodeUCS4_IsIdentifier
-# define PyUnicode_Join PyUnicodeUCS4_Join
-# define PyUnicode_Partition PyUnicodeUCS4_Partition
-# define PyUnicode_RPartition PyUnicodeUCS4_RPartition
-# define PyUnicode_RSplit PyUnicodeUCS4_RSplit
-# define PyUnicode_Replace PyUnicodeUCS4_Replace
-# define PyUnicode_Resize PyUnicodeUCS4_Resize
-# define PyUnicode_RichCompare PyUnicodeUCS4_RichCompare
-# define PyUnicode_Split PyUnicodeUCS4_Split
-# define PyUnicode_Splitlines PyUnicodeUCS4_Splitlines
-# define PyUnicode_Tailmatch PyUnicodeUCS4_Tailmatch
-# define PyUnicode_Translate PyUnicodeUCS4_Translate
-# define PyUnicode_TranslateCharmap PyUnicodeUCS4_TranslateCharmap
-# define _PyUnicode_AsDefaultEncodedString _PyUnicodeUCS4_AsDefaultEncodedString
-# define _PyUnicode_Fini _PyUnicodeUCS4_Fini
-# define _PyUnicode_Init _PyUnicodeUCS4_Init
-# define PyUnicode_strdup PyUnicodeUCS4_strdup
-
+#error "Could not find a proper typedef for Py_UCS2"
#endif
+typedef unsigned char Py_UCS1;
+
/* --- Internal Unicode Operations ---------------------------------------- */
/* Since splitting on whitespace is an important use case, and
@@ -350,7 +171,7 @@ typedef PY_UNICODE_TYPE Py_UNICODE;
Py_UNICODE_ISDIGIT(ch) || \
Py_UNICODE_ISNUMERIC(ch))
-#define Py_UNICODE_COPY(target, source, length) \
+#define Py_UNICODE_COPY(target, source, length) \
Py_MEMCPY((target), (source), (length)*sizeof(Py_UNICODE))
#define Py_UNICODE_FILL(target, value, length) \
@@ -358,13 +179,27 @@ typedef PY_UNICODE_TYPE Py_UNICODE;
for (i_ = 0; i_ < (length); i_++) t_[i_] = v_;\
} while (0)
+/* macros to work with surrogates */
+#define Py_UNICODE_IS_SURROGATE(ch) (0xD800 <= (ch) && (ch) <= 0xDFFF)
+#define Py_UNICODE_IS_HIGH_SURROGATE(ch) (0xD800 <= (ch) && (ch) <= 0xDBFF)
+#define Py_UNICODE_IS_LOW_SURROGATE(ch) (0xDC00 <= (ch) && (ch) <= 0xDFFF)
+/* Join two surrogate characters and return a single Py_UCS4 value. */
+#define Py_UNICODE_JOIN_SURROGATES(high, low) \
+ (((((Py_UCS4)(high) & 0x03FF) << 10) | \
+ ((Py_UCS4)(low) & 0x03FF)) + 0x10000)
+/* high surrogate = top 10 bits added to D800 */
+#define Py_UNICODE_HIGH_SURROGATE(ch) (0xD800 | (((ch) - 0x10000) >> 10))
+/* low surrogate = bottom 10 bits added to DC00 */
+#define Py_UNICODE_LOW_SURROGATE(ch) (0xDC00 | (((ch) - 0x10000) & 0x3FF))
+
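A round-trip sketch for the surrogate macros (assumes <assert.h>): split a
non-BMP code point into a UTF-16 pair and join it back.

    static void
    surrogate_demo(void)
    {
        Py_UCS4 ch = 0x10437;                        /* a non-BMP code point */
        Py_UCS2 hi = Py_UNICODE_HIGH_SURROGATE(ch);  /* 0xD801 */
        Py_UCS2 lo = Py_UNICODE_LOW_SURROGATE(ch);   /* 0xDC37 */
        assert(Py_UNICODE_IS_HIGH_SURROGATE(hi));
        assert(Py_UNICODE_IS_LOW_SURROGATE(lo));
        assert(Py_UNICODE_JOIN_SURROGATES(hi, lo) == ch);
    }
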
/* Check if substring matches at given offset. The offset must be
valid, and the substring must not be empty. */
#define Py_UNICODE_MATCH(string, offset, substring) \
- ((*((string)->str + (offset)) == *((substring)->str)) && \
- ((*((string)->str + (offset) + (substring)->length-1) == *((substring)->str + (substring)->length-1))) && \
- !memcmp((string)->str + (offset), (substring)->str, (substring)->length*sizeof(Py_UNICODE)))
+ ((*((string)->wstr + (offset)) == *((substring)->wstr)) && \
+ ((*((string)->wstr + (offset) + (substring)->wstr_length-1) == *((substring)->wstr + (substring)->wstr_length-1))) && \
+ !memcmp((string)->wstr + (offset), (substring)->wstr, (substring)->wstr_length*sizeof(Py_UNICODE)))
+
#endif /* Py_LIMITED_API */
#ifdef __cplusplus
@@ -374,41 +209,374 @@ extern "C" {
/* --- Unicode Type ------------------------------------------------------- */
#ifndef Py_LIMITED_API
+
+/* ASCII-only strings created through PyUnicode_New use the PyASCIIObject
+ structure. state.ascii and state.compact are set, and the data
+ immediately follow the structure. utf8_length and wstr_length can be found
+ in the length field; the utf8 pointer is equal to the data pointer. */
typedef struct {
+ /* There are 4 forms of Unicode strings:
+
+ - compact ascii:
+
+ * structure = PyASCIIObject
+ * test: PyUnicode_IS_COMPACT_ASCII(op)
+ * kind = PyUnicode_1BYTE_KIND
+ * compact = 1
+ * ascii = 1
+ * ready = 1
+ * (length is the length of the utf8 and wstr strings)
+ * (data starts just after the structure)
+     * (since ASCII is decoded from UTF-8, the utf8 string is the data)
+
+ - compact:
+
+ * structure = PyCompactUnicodeObject
+ * test: PyUnicode_IS_COMPACT(op) && !PyUnicode_IS_ASCII(op)
+ * kind = PyUnicode_1BYTE_KIND, PyUnicode_2BYTE_KIND or
+ PyUnicode_4BYTE_KIND
+ * compact = 1
+ * ready = 1
+ * ascii = 0
+ * utf8 is not shared with data
+ * utf8_length = 0 if utf8 is NULL
+ * wstr is shared with data and wstr_length=length
+ if kind=PyUnicode_2BYTE_KIND and sizeof(wchar_t)=2
+ or if kind=PyUnicode_4BYTE_KIND and sizeof(wchar_t)=4
+ * wstr_length = 0 if wstr is NULL
+ * (data starts just after the structure)
+
+ - legacy string, not ready:
+
+ * structure = PyUnicodeObject
+ * test: kind == PyUnicode_WCHAR_KIND
+ * length = 0 (use wstr_length)
+ * hash = -1
+ * kind = PyUnicode_WCHAR_KIND
+ * compact = 0
+ * ascii = 0
+ * ready = 0
+ * interned = SSTATE_NOT_INTERNED
+ * wstr is not NULL
+ * data.any is NULL
+ * utf8 is NULL
+ * utf8_length = 0
+
+ - legacy string, ready:
+
+ * structure = PyUnicodeObject structure
+ * test: !PyUnicode_IS_COMPACT(op) && kind != PyUnicode_WCHAR_KIND
+ * kind = PyUnicode_1BYTE_KIND, PyUnicode_2BYTE_KIND or
+ PyUnicode_4BYTE_KIND
+ * compact = 0
+ * ready = 1
+ * data.any is not NULL
+ * utf8 is shared and utf8_length = length with data.any if ascii = 1
+ * utf8_length = 0 if utf8 is NULL
+ * wstr is shared with data.any and wstr_length = length
+ if kind=PyUnicode_2BYTE_KIND and sizeof(wchar_t)=2
+         or if kind=PyUnicode_4BYTE_KIND and sizeof(wchar_t)=4
+ * wstr_length = 0 if wstr is NULL
+
+ Compact strings use only one memory block (structure + characters),
+ whereas legacy strings use one block for the structure and one block
+ for characters.
+
+ Legacy strings are created by PyUnicode_FromUnicode() and
+ PyUnicode_FromStringAndSize(NULL, size) functions. They become ready
+ when PyUnicode_READY() is called.
+
+ See also _PyUnicode_CheckConsistency().
+ */
PyObject_HEAD
- Py_ssize_t length; /* Length of raw Unicode data in buffer */
- Py_UNICODE *str; /* Raw Unicode buffer */
+ Py_ssize_t length; /* Number of code points in the string */
Py_hash_t hash; /* Hash value; -1 if not set */
- int state; /* != 0 if interned. In this case the two
- * references from the dictionary to this object
- * are *not* counted in ob_refcnt. */
- PyObject *defenc; /* (Default) Encoded version as Python
- string, or NULL; this is used for
- implementing the buffer protocol */
+ struct {
+ /*
+ SSTATE_NOT_INTERNED (0)
+ SSTATE_INTERNED_MORTAL (1)
+ SSTATE_INTERNED_IMMORTAL (2)
+
+ If interned != SSTATE_NOT_INTERNED, the two references from the
+ dictionary to this object are *not* counted in ob_refcnt.
+ */
+ unsigned int interned:2;
+ /* Character size:
+
+ - PyUnicode_WCHAR_KIND (0):
+
+ * character type = wchar_t (16 or 32 bits, depending on the
+ platform)
+
+ - PyUnicode_1BYTE_KIND (1):
+
+ * character type = Py_UCS1 (8 bits, unsigned)
+ * all characters are in the range U+0000-U+00FF (latin1)
+ * if ascii is set, all characters are in the range U+0000-U+007F
+ (ASCII), otherwise at least one character is in the range
+ U+0080-U+00FF
+
+ - PyUnicode_2BYTE_KIND (2):
+
+ * character type = Py_UCS2 (16 bits, unsigned)
+ * all characters are in the range U+0000-U+FFFF (BMP)
+ * at least one character is in the range U+0100-U+FFFF
+
+ - PyUnicode_4BYTE_KIND (4):
+
+ * character type = Py_UCS4 (32 bits, unsigned)
+ * all characters are in the range U+0000-U+10FFFF
+ * at least one character is in the range U+10000-U+10FFFF
+ */
+ unsigned int kind:3;
+ /* Compact is with respect to the allocation scheme. Compact unicode
+ objects only require one memory block while non-compact objects use
+ one block for the PyUnicodeObject struct and another for its data
+ buffer. */
+ unsigned int compact:1;
+ /* The string only contains characters in the range U+0000-U+007F (ASCII)
+ and the kind is PyUnicode_1BYTE_KIND. If ascii is set and compact is
+ set, use the PyASCIIObject structure. */
+ unsigned int ascii:1;
+ /* The ready flag indicates whether the object layout is initialized
+ completely. This means that this is either a compact object, or
+ the data pointer is filled out. The bit is redundant, and helps
+ to minimize the test in PyUnicode_IS_READY(). */
+ unsigned int ready:1;
+ } state;
+ wchar_t *wstr; /* wchar_t representation (null-terminated) */
+} PyASCIIObject;
+
+/* Non-ASCII strings allocated through PyUnicode_New use the
+ PyCompactUnicodeObject structure. state.compact is set, and the data
+ immediately follow the structure. */
+typedef struct {
+ PyASCIIObject _base;
+ Py_ssize_t utf8_length; /* Number of bytes in utf8, excluding the
+ * terminating \0. */
+ char *utf8; /* UTF-8 representation (null-terminated) */
+ Py_ssize_t wstr_length; /* Number of code points in wstr, possible
+ * surrogates count as two code points. */
+} PyCompactUnicodeObject;
+
+/* Strings allocated through PyUnicode_FromUnicode(NULL, len) use the
+ PyUnicodeObject structure. The actual string data is initially in the wstr
+ block, and copied into the data block using _PyUnicode_Ready. */
+typedef struct {
+ PyCompactUnicodeObject _base;
+ union {
+ void *any;
+ Py_UCS1 *latin1;
+ Py_UCS2 *ucs2;
+ Py_UCS4 *ucs4;
+ } data; /* Canonical, smallest-form Unicode buffer */
} PyUnicodeObject;
#endif
PyAPI_DATA(PyTypeObject) PyUnicode_Type;
PyAPI_DATA(PyTypeObject) PyUnicodeIter_Type;
-#define SSTATE_NOT_INTERNED 0
-#define SSTATE_INTERNED_MORTAL 1
-#define SSTATE_INTERNED_IMMORTAL 2
-
#define PyUnicode_Check(op) \
PyType_FastSubclass(Py_TYPE(op), Py_TPFLAGS_UNICODE_SUBCLASS)
#define PyUnicode_CheckExact(op) (Py_TYPE(op) == &PyUnicode_Type)
/* Fast access macros */
#ifndef Py_LIMITED_API
-#define PyUnicode_GET_SIZE(op) \
- (assert(PyUnicode_Check(op)),(((PyUnicodeObject *)(op))->length))
+
+#define PyUnicode_WSTR_LENGTH(op) \
+ (PyUnicode_IS_COMPACT_ASCII(op) ? \
+ ((PyASCIIObject*)op)->length : \
+ ((PyCompactUnicodeObject*)op)->wstr_length)
+
+/* Returns the deprecated Py_UNICODE representation's size in code units
+ (this includes surrogate pairs as 2 units).
+ If the Py_UNICODE representation is not available, it will be computed
+ on request. Use PyUnicode_GET_LENGTH() for the length in code points. */
+
+#define PyUnicode_GET_SIZE(op) \
+ (assert(PyUnicode_Check(op)), \
+ (((PyASCIIObject *)(op))->wstr) ? \
+ PyUnicode_WSTR_LENGTH(op) : \
+ ((void)PyUnicode_AsUnicode((PyObject *)(op)), \
+ assert(((PyASCIIObject *)(op))->wstr), \
+ PyUnicode_WSTR_LENGTH(op)))
+
#define PyUnicode_GET_DATA_SIZE(op) \
- (assert(PyUnicode_Check(op)),(((PyUnicodeObject *)(op))->length * sizeof(Py_UNICODE)))
+ (PyUnicode_GET_SIZE(op) * Py_UNICODE_SIZE)
+
+/* Alias for PyUnicode_AsUnicode(). This will create a wchar_t/Py_UNICODE
+ representation on demand. Using this macro is very inefficient now,
+ try to port your code to use the new PyUnicode_*BYTE_DATA() macros or
+ use PyUnicode_WRITE() and PyUnicode_READ(). */
+
#define PyUnicode_AS_UNICODE(op) \
- (assert(PyUnicode_Check(op)),(((PyUnicodeObject *)(op))->str))
+ (assert(PyUnicode_Check(op)), \
+ (((PyASCIIObject *)(op))->wstr) ? (((PyASCIIObject *)(op))->wstr) : \
+ PyUnicode_AsUnicode((PyObject *)(op)))
+
#define PyUnicode_AS_DATA(op) \
- (assert(PyUnicode_Check(op)),((const char *)((PyUnicodeObject *)(op))->str))
+ ((const char *)(PyUnicode_AS_UNICODE(op)))
+
+
+/* --- Flexible String Representation Helper Macros (PEP 393) -------------- */
+
+/* Values for PyASCIIObject.state: */
+
+/* Interning state. */
+#define SSTATE_NOT_INTERNED 0
+#define SSTATE_INTERNED_MORTAL 1
+#define SSTATE_INTERNED_IMMORTAL 2
+
+/* Return true if the string contains only ASCII characters, or 0 if not. The
+ string may be compact (PyUnicode_IS_COMPACT_ASCII) or not, but must be
+ ready. */
+#define PyUnicode_IS_ASCII(op) \
+ (assert(PyUnicode_Check(op)), \
+ assert(PyUnicode_IS_READY(op)), \
+ ((PyASCIIObject*)op)->state.ascii)
+
+/* Return true if the string is compact or 0 if not.
+ No type checks or Ready calls are performed. */
+#define PyUnicode_IS_COMPACT(op) \
+ (((PyASCIIObject*)(op))->state.compact)
+
+/* Return true if the string is a compact ASCII string (use PyASCIIObject
+ structure), or 0 if not. No type checks or Ready calls are performed. */
+#define PyUnicode_IS_COMPACT_ASCII(op) \
+ (((PyASCIIObject*)op)->state.ascii && PyUnicode_IS_COMPACT(op))
+
+enum PyUnicode_Kind {
+/* String contains only wstr byte characters. This is only possible
+ when the string was created with a legacy API and _PyUnicode_Ready()
+ has not been called yet. */
+ PyUnicode_WCHAR_KIND = 0,
+/* Return values of the PyUnicode_KIND() macro: */
+ PyUnicode_1BYTE_KIND = 1,
+ PyUnicode_2BYTE_KIND = 2,
+ PyUnicode_4BYTE_KIND = 4
+};
+
+/* Return pointers to the canonical representation cast to unsigned char,
+ Py_UCS2, or Py_UCS4 for direct character access.
+ No checks are performed, use PyUnicode_KIND() before to ensure
+ these will work correctly. */
+
+#define PyUnicode_1BYTE_DATA(op) ((Py_UCS1*)PyUnicode_DATA(op))
+#define PyUnicode_2BYTE_DATA(op) ((Py_UCS2*)PyUnicode_DATA(op))
+#define PyUnicode_4BYTE_DATA(op) ((Py_UCS4*)PyUnicode_DATA(op))
+
+/* Return one of the PyUnicode_*_KIND values defined above. */
+#define PyUnicode_KIND(op) \
+ (assert(PyUnicode_Check(op)), \
+ assert(PyUnicode_IS_READY(op)), \
+ ((PyASCIIObject *)(op))->state.kind)
+
+/* Return a void pointer to the raw unicode buffer. */
+#define _PyUnicode_COMPACT_DATA(op) \
+ (PyUnicode_IS_ASCII(op) ? \
+ ((void*)((PyASCIIObject*)(op) + 1)) : \
+ ((void*)((PyCompactUnicodeObject*)(op) + 1)))
+
+#define _PyUnicode_NONCOMPACT_DATA(op) \
+ (assert(((PyUnicodeObject*)(op))->data.any), \
+ ((((PyUnicodeObject *)(op))->data.any)))
+
+#define PyUnicode_DATA(op) \
+ (assert(PyUnicode_Check(op)), \
+ PyUnicode_IS_COMPACT(op) ? _PyUnicode_COMPACT_DATA(op) : \
+ _PyUnicode_NONCOMPACT_DATA(op))
+
+/* In the access macros below, "kind" may be evaluated more than once.
+ All other macro parameters are evaluated exactly once, so it is safe
+ to put side effects into them (such as increasing the index). */
+
+/* Write into the canonical representation, this macro does not do any sanity
+ checks and is intended for usage in loops. The caller should cache the
+ kind and data pointers obtained from other macro calls.
+ index is the index in the string (starts at 0) and value is the new
+ code point value which should be written to that location. */
+#define PyUnicode_WRITE(kind, data, index, value) \
+ do { \
+ switch ((kind)) { \
+ case PyUnicode_1BYTE_KIND: { \
+ ((Py_UCS1 *)(data))[(index)] = (Py_UCS1)(value); \
+ break; \
+ } \
+ case PyUnicode_2BYTE_KIND: { \
+ ((Py_UCS2 *)(data))[(index)] = (Py_UCS2)(value); \
+ break; \
+ } \
+ default: { \
+ assert((kind) == PyUnicode_4BYTE_KIND); \
+ ((Py_UCS4 *)(data))[(index)] = (Py_UCS4)(value); \
+ } \
+ } \
+ } while (0)
+
+/* Read a code point from the string's canonical representation. No checks
+ or ready calls are performed. */
+#define PyUnicode_READ(kind, data, index) \
+ ((Py_UCS4) \
+ ((kind) == PyUnicode_1BYTE_KIND ? \
+ ((const Py_UCS1 *)(data))[(index)] : \
+ ((kind) == PyUnicode_2BYTE_KIND ? \
+ ((const Py_UCS2 *)(data))[(index)] : \
+ ((const Py_UCS4 *)(data))[(index)] \
+ ) \
+ ))
+
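A loop sketch for the macros above (count_ascii_letters is hypothetical):
ready the string once, cache kind and data outside the loop, then use
PyUnicode_READ for each character.

    static Py_ssize_t
    count_ascii_letters(PyObject *str)
    {
        int kind;
        void *data;
        Py_ssize_t i, n, count = 0;

        if (PyUnicode_READY(str) < 0)
            return -1;                  /* exception set */
        kind = PyUnicode_KIND(str);
        data = PyUnicode_DATA(str);
        n = PyUnicode_GET_LENGTH(str);
        for (i = 0; i < n; i++) {
            Py_UCS4 ch = PyUnicode_READ(kind, data, i);
            if ((ch >= 'A' && ch <= 'Z') || (ch >= 'a' && ch <= 'z'))
                count++;
        }
        return count;
    }
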
+/* PyUnicode_READ_CHAR() is less efficient than PyUnicode_READ() because it
+ calls PyUnicode_KIND() and might call it twice. For single reads, use
+   PyUnicode_READ_CHAR; for multiple consecutive reads, callers should
+ cache kind and use PyUnicode_READ instead. */
+#define PyUnicode_READ_CHAR(unicode, index) \
+ (assert(PyUnicode_Check(unicode)), \
+ assert(PyUnicode_IS_READY(unicode)), \
+ (Py_UCS4) \
+ (PyUnicode_KIND((unicode)) == PyUnicode_1BYTE_KIND ? \
+ ((const Py_UCS1 *)(PyUnicode_DATA((unicode))))[(index)] : \
+ (PyUnicode_KIND((unicode)) == PyUnicode_2BYTE_KIND ? \
+ ((const Py_UCS2 *)(PyUnicode_DATA((unicode))))[(index)] : \
+ ((const Py_UCS4 *)(PyUnicode_DATA((unicode))))[(index)] \
+ ) \
+ ))
+
+/* Returns the length of the unicode string. The caller has to make sure that
+   the string has its canonical representation set before calling
+ this macro. Call PyUnicode_(FAST_)Ready to ensure that. */
+#define PyUnicode_GET_LENGTH(op) \
+ (assert(PyUnicode_Check(op)), \
+ assert(PyUnicode_IS_READY(op)), \
+ ((PyASCIIObject *)(op))->length)
+
+
+/* Fast check to determine whether an object is ready. Equivalent to
+   (PyUnicode_IS_COMPACT(op) || ((PyUnicodeObject*)(op))->data.any) */
+
+#define PyUnicode_IS_READY(op) (((PyASCIIObject*)op)->state.ready)
+
+/* PyUnicode_READY() does less work than _PyUnicode_Ready() in the best
+ case. If the canonical representation is not yet set, it will still call
+ _PyUnicode_Ready().
+ Returns 0 on success and -1 on errors. */
+#define PyUnicode_READY(op) \
+ (assert(PyUnicode_Check(op)), \
+ (PyUnicode_IS_READY(op) ? \
+ 0 : _PyUnicode_Ready((PyObject *)(op))))
+
+/* Return a maximum character value which is suitable for creating another
+ string based on op. This is always an approximation but more efficient
+ than iterating over the string. */
+#define PyUnicode_MAX_CHAR_VALUE(op) \
+ (assert(PyUnicode_IS_READY(op)), \
+ (PyUnicode_IS_ASCII(op) ? \
+ (0x7f) : \
+ (PyUnicode_KIND(op) == PyUnicode_1BYTE_KIND ? \
+ (0xffU) : \
+ (PyUnicode_KIND(op) == PyUnicode_2BYTE_KIND ? \
+ (0xffffU) : \
+ (0x10ffffU)))))
+
#endif
/* --- Constants ---------------------------------------------------------- */
@@ -418,12 +586,87 @@ PyAPI_DATA(PyTypeObject) PyUnicodeIter_Type;
Unicode character U+FFFD is the official REPLACEMENT CHARACTER in
Unicode 3.0. */
-#define Py_UNICODE_REPLACEMENT_CHARACTER ((Py_UNICODE) 0xFFFD)
+#define Py_UNICODE_REPLACEMENT_CHARACTER ((Py_UCS4) 0xFFFD)
/* === Public API ========================================================= */
/* --- Plain Py_UNICODE --------------------------------------------------- */
+/* With PEP 393, this is the recommended way to allocate a new unicode object.
+ This function will allocate the object and its buffer in a single memory
+ block. Objects created using this function are not resizable. */
+#ifndef Py_LIMITED_API
+PyAPI_FUNC(PyObject*) PyUnicode_New(
+ Py_ssize_t size, /* Number of code points in the new string */
+ Py_UCS4 maxchar /* maximum code point value in the string */
+ );
+#endif
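
A construction sketch (make_abc is hypothetical): with PEP 393, allocate
with the known length and maximum character, then fill the buffer with
PyUnicode_WRITE.

    static PyObject *
    make_abc(void)
    {
        PyObject *str = PyUnicode_New(3, 127);  /* 3 code points, ASCII max */
        if (str == NULL)
            return NULL;
        PyUnicode_WRITE(PyUnicode_1BYTE_KIND, PyUnicode_DATA(str), 0, 'a');
        PyUnicode_WRITE(PyUnicode_1BYTE_KIND, PyUnicode_DATA(str), 1, 'b');
        PyUnicode_WRITE(PyUnicode_1BYTE_KIND, PyUnicode_DATA(str), 2, 'c');
        return str;
    }
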
+
+/* Initializes the canonical string representation from the deprecated
+ wstr/Py_UNICODE representation. This function is used to convert Unicode
+ objects which were created using the old API to the new flexible format
+ introduced with PEP 393.
+
+ Don't call this function directly, use the public PyUnicode_READY() macro
+ instead. */
+#ifndef Py_LIMITED_API
+PyAPI_FUNC(int) _PyUnicode_Ready(
+ PyObject *unicode /* Unicode object */
+ );
+#endif
+
+/* Get a copy of a Unicode string. */
+#ifndef Py_LIMITED_API
+PyAPI_FUNC(PyObject*) _PyUnicode_Copy(
+ PyObject *unicode
+ );
+#endif
+
+/* Copy characters from one unicode object into another. This function performs
+   character conversion when necessary and falls back to memcpy() if possible.
+
+   Fail if *to* is too small (smaller than *how_many* or smaller than
+   len(from)-from_start), or if kind(from[from_start:from_start+how_many]) >
+   kind(to), or if *to* has more than 1 reference.
+
+   Return the number of characters written, or return -1 and raise an exception
+ on error.
+
+ Pseudo-code:
+
+ how_many = min(how_many, len(from) - from_start)
+ to[to_start:to_start+how_many] = from[from_start:from_start+how_many]
+ return how_many
+
+ Note: The function doesn't write a terminating null character.
+ */
+#ifndef Py_LIMITED_API
+PyAPI_FUNC(Py_ssize_t) PyUnicode_CopyCharacters(
+ PyObject *to,
+ Py_ssize_t to_start,
+ PyObject *from,
+ Py_ssize_t from_start,
+ Py_ssize_t how_many
+ );
+#endif
+
+/* Fill a string with a character: write fill_char into
+ unicode[start:start+length].
+
+   Fail if fill_char is bigger than the string's maximum character, or if the
+   string has more than 1 reference.
+
+   Return the number of characters written, or return -1 and raise an exception
+ on error. */
+#ifndef Py_LIMITED_API
+PyAPI_FUNC(Py_ssize_t) PyUnicode_Fill(
+ PyObject *unicode,
+ Py_ssize_t start,
+ Py_ssize_t length,
+ Py_UCS4 fill_char
+ );
+#endif
+
/* Create a Unicode Object from the Py_UNICODE buffer u of the given
size.
@@ -448,13 +691,45 @@ PyAPI_FUNC(PyObject*) PyUnicode_FromStringAndSize(
);
/* Similar to PyUnicode_FromUnicode(), but u points to null-terminated
- UTF-8 encoded bytes */
+ UTF-8 encoded bytes. The size is determined with strlen(). */
PyAPI_FUNC(PyObject*) PyUnicode_FromString(
const char *u /* UTF-8 encoded string */
);
+/* Create a new string from a buffer of Py_UCS1, Py_UCS2 or Py_UCS4 characters.
+ Scan the string to find the maximum character. */
+#ifndef Py_LIMITED_API
+PyAPI_FUNC(PyObject*) PyUnicode_FromKindAndData(
+ int kind,
+ const void *buffer,
+ Py_ssize_t size);
+#endif
+
+PyAPI_FUNC(PyObject*) PyUnicode_Substring(
+ PyObject *str,
+ Py_ssize_t start,
+ Py_ssize_t end);
+
+/* Copy the string into a UCS4 buffer including the null character if copy_null
+ is set. Return NULL and raise an exception on error. Raise a ValueError if
+ the buffer is smaller than the string. Return buffer on success.
+
+ buflen is the length of the buffer in (Py_UCS4) characters. */
+PyAPI_FUNC(Py_UCS4*) PyUnicode_AsUCS4(
+ PyObject *unicode,
+ Py_UCS4* buffer,
+ Py_ssize_t buflen,
+ int copy_null);
+
+/* Copy the string into a UCS4 buffer. A new buffer is allocated using
+   PyMem_Malloc; if this fails, NULL is returned with a memory error
+   exception set. */
+PyAPI_FUNC(Py_UCS4*) PyUnicode_AsUCS4Copy(PyObject *unicode);
+
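An iteration sketch via a flat UCS4 copy (max_char_slow is hypothetical;
assumes *str* is ready): simple, but it costs an allocation that must be
released with PyMem_Free.

    static Py_UCS4
    max_char_slow(PyObject *str)
    {
        Py_ssize_t i, n = PyUnicode_GET_LENGTH(str);
        Py_UCS4 maxch = 0;
        Py_UCS4 *buf = PyUnicode_AsUCS4Copy(str);
        if (buf == NULL)
            return (Py_UCS4)-1;         /* memory error set */
        for (i = 0; i < n; i++)
            if (buf[i] > maxch)
                maxch = buf[i];
        PyMem_Free(buf);
        return maxch;
    }
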
/* Return a read-only pointer to the Unicode object's internal
- Py_UNICODE buffer. */
+ Py_UNICODE buffer.
+ If the wchar_t/Py_UNICODE representation is not yet available, this
+ function will calculate it. */
#ifndef Py_LIMITED_API
PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode(
@@ -462,30 +737,69 @@ PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode(
);
#endif
+/* Return a read-only pointer to the Unicode object's internal
+   Py_UNICODE buffer and store the length in *size.
+ If the wchar_t/Py_UNICODE representation is not yet available, this
+ function will calculate it. */
+
+#ifndef Py_LIMITED_API
+PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicodeAndSize(
+ PyObject *unicode, /* Unicode object */
+ Py_ssize_t *size /* location where to save the length */
+ );
+#endif
+
/* Get the length of the Unicode object. */
+PyAPI_FUNC(Py_ssize_t) PyUnicode_GetLength(
+ PyObject *unicode
+);
+
+/* Get the number of Py_UNICODE units in the
+ string representation. */
+
PyAPI_FUNC(Py_ssize_t) PyUnicode_GetSize(
PyObject *unicode /* Unicode object */
);
+/* Read a character from the string. */
+
+PyAPI_FUNC(Py_UCS4) PyUnicode_ReadChar(
+ PyObject *unicode,
+ Py_ssize_t index
+ );
+
+/* Write a character to the string. The string must have been created through
+ PyUnicode_New, must not be shared, and must not have been hashed yet.
+
+ Return 0 on success, -1 on error. */
+
+PyAPI_FUNC(int) PyUnicode_WriteChar(
+ PyObject *unicode,
+ Py_ssize_t index,
+ Py_UCS4 character
+ );
+
#ifndef Py_LIMITED_API
/* Get the maximum ordinal for a Unicode character. */
PyAPI_FUNC(Py_UNICODE) PyUnicode_GetMax(void);
#endif
-/* Resize an already allocated Unicode object to the new size length.
+/* Resize a Unicode object. The length is the number of characters, except
+ if the kind of the string is PyUnicode_WCHAR_KIND: in this case, the length
+ is the number of Py_UNICODE characters.
*unicode is modified to point to the new (resized) object and 0
returned on success.
- This API may only be called by the function which also called the
- Unicode constructor. The refcount on the object must be 1. Otherwise,
- an error is returned.
+ Try to resize the string in place (which is usually faster than allocating
+   a new string and copying characters), or create a new string.
Error handling is implemented as follows: an exception is set, -1
is returned and *unicode left untouched.
-*/
+   WARNING: The function doesn't check the string content, so the result may
+   not be a string in canonical representation. */
PyAPI_FUNC(int) PyUnicode_Resize(
PyObject **unicode, /* Pointer to the Unicode object */
@@ -545,8 +859,9 @@ PyAPI_FUNC(PyObject *) PyUnicode_FromFormat(
/* Format the object based on the format_spec, as defined in PEP 3101
(Advanced String Formatting). */
PyAPI_FUNC(PyObject *) _PyUnicode_FormatAdvanced(PyObject *obj,
- Py_UNICODE *format_spec,
- Py_ssize_t format_spec_len);
+ PyObject *format_spec,
+ Py_ssize_t start,
+ Py_ssize_t end);
#endif
PyAPI_FUNC(void) PyUnicode_InternInPlace(PyObject **);
@@ -559,7 +874,8 @@ PyAPI_FUNC(void) _Py_ReleaseInternedUnicodeStrings(void);
#endif
/* Use only if you know it's a string */
-#define PyUnicode_CHECK_INTERNED(op) (((PyUnicodeObject *)(op))->state)
+#define PyUnicode_CHECK_INTERNED(op) \
+ (((PyASCIIObject *)(op))->state.interned)
/* --- wchar_t support for platforms which support it --------------------- */
@@ -606,6 +922,10 @@ PyAPI_FUNC(wchar_t*) PyUnicode_AsWideCharString(
Py_ssize_t *size /* number of characters of the result */
);
+#ifndef Py_LIMITED_API
+PyAPI_FUNC(void*) _PyUnicode_AsKind(PyObject *s, unsigned int kind);
+#endif
+
#endif
/* --- Unicode ordinals --------------------------------------------------- */
@@ -651,50 +971,42 @@ PyAPI_FUNC(int) PyUnicode_ClearFreeList(void);
/* --- Manage the default encoding ---------------------------------------- */
-/* Return a Python string holding the default encoded value of the
- Unicode object.
-
- Same as PyUnicode_AsUTF8String() except
- the resulting string is cached in the Unicode object for subsequent
- usage by this function. The cached version is needed to implement
- the character buffer interface and will live (at least) as long as
- the Unicode object itself.
-
- The refcount of the string is *not* incremented.
-
- *** Exported for internal use by the interpreter only !!! ***
-
-*/
-
-#ifndef Py_LIMITED_API
-PyAPI_FUNC(PyObject *) _PyUnicode_AsDefaultEncodedString(
- PyObject *unicode,
- const char *errors);
-#endif
-
/* Returns a pointer to the default encoding (UTF-8) of the
Unicode object unicode and the size of the encoded representation
in bytes stored in *size.
In case of an error, no *size is set.
+ This function caches the UTF-8 encoded string in the unicodeobject
+ and subsequent calls will return the same string. The memory is released
+ when the unicodeobject is deallocated.
+
+ _PyUnicode_AsStringAndSize is a #define for PyUnicode_AsUTF8AndSize to
+ support the previous internal function with the same behaviour.
+
*** This API is for interpreter INTERNAL USE ONLY and will likely
*** be removed or changed in the future.
*** If you need to access the Unicode object as UTF-8 bytes string,
*** please use PyUnicode_AsUTF8String() instead.
-
*/
#ifndef Py_LIMITED_API
-PyAPI_FUNC(char *) _PyUnicode_AsStringAndSize(
+PyAPI_FUNC(char *) PyUnicode_AsUTF8AndSize(
PyObject *unicode,
Py_ssize_t *size);
+#define _PyUnicode_AsStringAndSize PyUnicode_AsUTF8AndSize
#endif
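
A usage sketch (log_str is hypothetical; assumes <stdio.h>): borrow the
cached UTF-8 buffer, which stays valid as long as the string object is
alive.

    static void
    log_str(PyObject *str)
    {
        Py_ssize_t size;
        char *utf8 = PyUnicode_AsUTF8AndSize(str, &size);
        if (utf8 == NULL)
            return;                     /* exception set */
        fprintf(stderr, "%.*s (%zd bytes)\n", (int)size, utf8, size);
    }
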
/* Returns a pointer to the default encoding (UTF-8) of the
Unicode object unicode.
+ Like PyUnicode_AsUTF8AndSize(), this also caches the UTF-8 representation
+ in the unicodeobject.
+
+ _PyUnicode_AsString is a #define for PyUnicode_AsUTF8 to
+ support the previous internal function with the same behaviour.
+
Use of this API is DEPRECATED since no size information can be
extracted from the returned data.
@@ -707,7 +1019,8 @@ PyAPI_FUNC(char *) _PyUnicode_AsStringAndSize(
*/
#ifndef Py_LIMITED_API
-PyAPI_FUNC(char *) _PyUnicode_AsString(PyObject *unicode);
+PyAPI_FUNC(char *) PyUnicode_AsUTF8(PyObject *unicode);
+#define _PyUnicode_AsString PyUnicode_AsUTF8
#endif
/* Returns "utf-8". */
@@ -812,6 +1125,12 @@ PyAPI_FUNC(PyObject*) PyUnicode_EncodeUTF7(
int base64WhiteSpace, /* Encode whitespace (sp, ht, nl, cr) in base64 */
const char *errors /* error handling */
);
+PyAPI_FUNC(PyObject*) _PyUnicode_EncodeUTF7(
+ PyObject *unicode, /* Unicode object */
+ int base64SetO, /* Encode RFC2152 Set O characters in base64 */
+ int base64WhiteSpace, /* Encode whitespace (sp, ht, nl, cr) in base64 */
+ const char *errors /* error handling */
+ );
#endif
/* --- UTF-8 Codecs ------------------------------------------------------- */
@@ -834,6 +1153,10 @@ PyAPI_FUNC(PyObject*) PyUnicode_AsUTF8String(
);
#ifndef Py_LIMITED_API
+PyAPI_FUNC(PyObject*) _PyUnicode_AsUTF8String(
+ PyObject *unicode,
+ const char *errors);
+
PyAPI_FUNC(PyObject*) PyUnicode_EncodeUTF8(
const Py_UNICODE *data, /* Unicode char buffer */
Py_ssize_t length, /* number of Py_UNICODE chars to encode */
@@ -915,6 +1238,11 @@ PyAPI_FUNC(PyObject*) PyUnicode_EncodeUTF32(
const char *errors, /* error handling */
int byteorder /* byteorder to use 0=BOM+native;-1=LE,1=BE */
);
+PyAPI_FUNC(PyObject*) _PyUnicode_EncodeUTF32(
+ PyObject *object, /* Unicode object */
+ const char *errors, /* error handling */
+ int byteorder /* byteorder to use 0=BOM+native;-1=LE,1=BE */
+ );
#endif
/* --- UTF-16 Codecs ------------------------------------------------------ */
@@ -995,6 +1323,11 @@ PyAPI_FUNC(PyObject*) PyUnicode_EncodeUTF16(
const char *errors, /* error handling */
int byteorder /* byteorder to use 0=BOM+native;-1=LE,1=BE */
);
+PyAPI_FUNC(PyObject*) _PyUnicode_EncodeUTF16(
+ PyObject* unicode, /* Unicode object */
+ const char *errors, /* error handling */
+ int byteorder /* byteorder to use 0=BOM+native;-1=LE,1=BE */
+ );
#endif
/* --- Unicode-Escape Codecs ---------------------------------------------- */
@@ -1064,6 +1397,10 @@ PyAPI_FUNC(PyObject*) PyUnicode_AsLatin1String(
);
#ifndef Py_LIMITED_API
+PyAPI_FUNC(PyObject*) _PyUnicode_AsLatin1String(
+ PyObject* unicode,
+ const char* errors);
+
PyAPI_FUNC(PyObject*) PyUnicode_EncodeLatin1(
const Py_UNICODE *data, /* Unicode char buffer */
Py_ssize_t length, /* Number of Py_UNICODE chars to encode */
@@ -1088,6 +1425,10 @@ PyAPI_FUNC(PyObject*) PyUnicode_AsASCIIString(
);
#ifndef Py_LIMITED_API
+PyAPI_FUNC(PyObject*) _PyUnicode_AsASCIIString(
+ PyObject* unicode,
+ const char* errors);
+
PyAPI_FUNC(PyObject*) PyUnicode_EncodeASCII(
const Py_UNICODE *data, /* Unicode char buffer */
Py_ssize_t length, /* Number of Py_UNICODE chars to encode */
@@ -1139,6 +1480,12 @@ PyAPI_FUNC(PyObject*) PyUnicode_EncodeCharmap(
(unicode ordinal -> char ordinal) */
const char *errors /* error handling */
);
+PyAPI_FUNC(PyObject*) _PyUnicode_EncodeCharmap(
+ PyObject *unicode, /* Unicode object */
+ PyObject *mapping, /* character mapping
+ (unicode ordinal -> char ordinal) */
+ const char *errors /* error handling */
+ );
#endif
/* Translate a Py_UNICODE buffer of the given length by applying a
@@ -1163,7 +1510,7 @@ PyAPI_FUNC(PyObject *) PyUnicode_TranslateCharmap(
);
#endif
-#ifdef MS_WIN32
+#ifdef HAVE_MBCS
/* --- MBCS codecs for Windows -------------------------------------------- */
@@ -1180,6 +1527,14 @@ PyAPI_FUNC(PyObject*) PyUnicode_DecodeMBCSStateful(
Py_ssize_t *consumed /* bytes consumed */
);
+PyAPI_FUNC(PyObject*) PyUnicode_DecodeCodePageStateful(
+ int code_page, /* code page number */
+ const char *string, /* encoded string */
+ Py_ssize_t length, /* size of string */
+ const char *errors, /* error handling */
+ Py_ssize_t *consumed /* bytes consumed */
+ );
+
PyAPI_FUNC(PyObject*) PyUnicode_AsMBCSString(
PyObject *unicode /* Unicode object */
);
@@ -1187,12 +1542,18 @@ PyAPI_FUNC(PyObject*) PyUnicode_AsMBCSString(
#ifndef Py_LIMITED_API
PyAPI_FUNC(PyObject*) PyUnicode_EncodeMBCS(
const Py_UNICODE *data, /* Unicode char buffer */
- Py_ssize_t length, /* Number of Py_UNICODE chars to encode */
+ Py_ssize_t length, /* number of Py_UNICODE chars to encode */
const char *errors /* error handling */
);
#endif
-#endif /* MS_WIN32 */
+PyAPI_FUNC(PyObject*) PyUnicode_EncodeCodePage(
+ int code_page, /* code page number */
+ PyObject *unicode, /* Unicode object */
+ const char *errors /* error handling */
+ );
+
+#endif /* HAVE_MBCS */
/* --- Decimal Encoder ---------------------------------------------------- */
@@ -1240,6 +1601,49 @@ PyAPI_FUNC(PyObject*) PyUnicode_TransformDecimalToASCII(
);
#endif
+/* Similar to PyUnicode_TransformDecimalToASCII(), but takes a PyObject
+ as argument instead of a raw buffer and length. This function additionally
+ transforms spaces to ASCII because this is what the callers in longobject,
+   floatobject, and complexobject did anyway. */
+
+#ifndef Py_LIMITED_API
+PyAPI_FUNC(PyObject*) _PyUnicode_TransformDecimalAndSpaceToASCII(
+ PyObject *unicode /* Unicode object */
+ );
+#endif
+
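+
At the Python level, this internal helper is what lets int(), float() and complex() accept non-ASCII decimal digits; a rough illustration (the C function itself is not callable from Python):

    # Eastern Arabic digits U+0661..U+0665 are transformed to their ASCII
    # equivalents before the usual numeric parsing runs.
    assert int('\u0661\u0662\u0663') == 123
    assert float('\u0661.\u0665') == 1.5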
+/* --- Locale encoding --------------------------------------------------- */
+
+/* Decode a string from the current locale encoding. The decoder is strict if
+ *surrogateescape* is equal to zero, otherwise it uses the 'surrogateescape'
+ error handler (PEP 383) to escape undecodable bytes. If a byte sequence can
+ be decoded as a surrogate character and *surrogateescape* is not equal to
+ zero, the byte sequence is escaped using the 'surrogateescape' error handler
+ instead of being decoded. *str* must end with a null character but cannot
+ contain embedded null characters. */
+
+PyAPI_FUNC(PyObject*) PyUnicode_DecodeLocaleAndSize(
+ const char *str,
+ Py_ssize_t len,
+ const char *errors);
+
+/* Similar to PyUnicode_DecodeLocaleAndSize(), but compute the string
+ length using strlen(). */
+
+PyAPI_FUNC(PyObject*) PyUnicode_DecodeLocale(
+ const char *str,
+ const char *errors);
+
+/* Encode a Unicode object to the current locale encoding. The encoder is
+   strict if *surrogateescape* is equal to zero, otherwise the
+   "surrogateescape" error handler is used. Return a bytes object. The string
+   cannot contain embedded null characters. */
+
+PyAPI_FUNC(PyObject*) PyUnicode_EncodeLocale(
+ PyObject *unicode,
+ const char *errors
+ );
+
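+
The 'surrogateescape' behaviour these locale codecs rely on (PEP 383) can be sketched from pure Python; the byte string below is illustrative:

    raw = b'valid\xff'                        # \xff is undecodable as ASCII
    text = raw.decode('ascii', 'surrogateescape')
    assert text == 'valid\udcff'              # bad byte escaped to a lone surrogate
    assert text.encode('ascii', 'surrogateescape') == raw   # lossless round-trip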
/* --- File system encoding ---------------------------------------------- */
/* ParseTuple converter: encode str objects to bytes using
@@ -1292,7 +1696,7 @@ PyAPI_FUNC(PyObject*) PyUnicode_EncodeFSDefault(
These are capable of handling Unicode objects and strings on input
(we refer to them as strings in the descriptions) and return
- Unicode objects or integers as apporpriate. */
+ Unicode objects or integers as appropriate. */
/* Concat two strings giving a new Unicode string. */
@@ -1427,6 +1831,15 @@ PyAPI_FUNC(Py_ssize_t) PyUnicode_Find(
int direction /* Find direction: +1 forward, -1 backward */
);
+/* Like PyUnicode_Find, but searches for a single character only. */
+PyAPI_FUNC(Py_ssize_t) PyUnicode_FindChar(
+ PyObject *str,
+ Py_UCS4 ch,
+ Py_ssize_t start,
+ Py_ssize_t end,
+ int direction
+ );
+
/* Count the number of occurrences of substr in str[start:end]. */
PyAPI_FUNC(Py_ssize_t) PyUnicode_Count(
@@ -1463,7 +1876,7 @@ PyAPI_FUNC(int) PyUnicode_CompareWithASCIIString(
/* Rich compare two strings and return one of the following:
- NULL in case an exception was raised
- - Py_True or Py_False for successfuly comparisons
+   - Py_True or Py_False for successful comparisons
- Py_NotImplemented in case the type combination is unknown
Note that Py_EQ and Py_NE comparisons can cause a UnicodeWarning in
@@ -1514,35 +1927,26 @@ PyAPI_FUNC(int) PyUnicode_IsIdentifier(PyObject *s);
#ifndef Py_LIMITED_API
/* Externally visible for str.strip(unicode) */
PyAPI_FUNC(PyObject *) _PyUnicode_XStrip(
- PyUnicodeObject *self,
+ PyObject *self,
int striptype,
PyObject *sepobj
);
#endif
-/* Using the current locale, insert the thousands grouping
- into the string pointed to by buffer. For the argument descriptions,
- see Objects/stringlib/localeutil.h */
-
-#ifndef Py_LIMITED_API
-PyAPI_FUNC(Py_ssize_t) _PyUnicode_InsertThousandsGroupingLocale(Py_UNICODE *buffer,
- Py_ssize_t n_buffer,
- Py_UNICODE *digits,
- Py_ssize_t n_digits,
- Py_ssize_t min_width);
-#endif
-
/* Using explicit passed-in values, insert the thousands grouping
into the string pointed to by buffer. For the argument descriptions,
see Objects/stringlib/localeutil.h */
#ifndef Py_LIMITED_API
-PyAPI_FUNC(Py_ssize_t) _PyUnicode_InsertThousandsGrouping(Py_UNICODE *buffer,
- Py_ssize_t n_buffer,
- Py_UNICODE *digits,
- Py_ssize_t n_digits,
- Py_ssize_t min_width,
- const char *grouping,
- const char *thousands_sep);
+PyAPI_FUNC(Py_ssize_t) _PyUnicode_InsertThousandsGrouping(
+ PyObject *unicode,
+ Py_ssize_t index,
+ Py_ssize_t n_buffer,
+ void *digits,
+ Py_ssize_t n_digits,
+ Py_ssize_t min_width,
+ const char *grouping,
+ PyObject *thousands_sep,
+ Py_UCS4 *maxchar);
#endif
/* === Characters Type APIs =============================================== */
@@ -1598,6 +2002,34 @@ PyAPI_FUNC(Py_UCS4) _PyUnicode_ToTitlecase(
Py_UCS4 ch /* Unicode character */
);
+PyAPI_FUNC(int) _PyUnicode_ToLowerFull(
+ Py_UCS4 ch, /* Unicode character */
+ Py_UCS4 *res
+ );
+
+PyAPI_FUNC(int) _PyUnicode_ToTitleFull(
+ Py_UCS4 ch, /* Unicode character */
+ Py_UCS4 *res
+ );
+
+PyAPI_FUNC(int) _PyUnicode_ToUpperFull(
+ Py_UCS4 ch, /* Unicode character */
+ Py_UCS4 *res
+ );
+
+PyAPI_FUNC(int) _PyUnicode_ToFoldedFull(
+ Py_UCS4 ch, /* Unicode character */
+ Py_UCS4 *res
+ );
+
+PyAPI_FUNC(int) _PyUnicode_IsCaseIgnorable(
+ Py_UCS4 ch /* Unicode character */
+ );
+
+PyAPI_FUNC(int) _PyUnicode_IsCased(
+ Py_UCS4 ch /* Unicode character */
+ );
+
PyAPI_FUNC(int) _PyUnicode_ToDecimalDigit(
Py_UCS4 ch /* Unicode character */
);
@@ -1676,6 +2108,17 @@ PyAPI_FUNC(Py_UNICODE*) PyUnicode_AsUnicodeCopy(
);
#endif /* Py_LIMITED_API */
+#if defined(Py_DEBUG) && !defined(Py_LIMITED_API)
+PyAPI_FUNC(int) _PyUnicode_CheckConsistency(
+ PyObject *op,
+ int check_content);
+#endif
+
+/* Return an interned Unicode object for an identifier; may fail if there is no memory. */
+PyAPI_FUNC(PyObject*) _PyUnicode_FromId(_Py_Identifier*);
+/* Clear all static strings. */
+PyAPI_FUNC(void) _PyUnicode_ClearStaticStrings(void);
+
#ifdef __cplusplus
}
#endif
diff --git a/LICENSE b/LICENSE
index 43388e7..235b568 100644
--- a/LICENSE
+++ b/LICENSE
@@ -73,7 +73,7 @@ the various releases.
3.2 3.1 2011 PSF yes
3.2.1 3.2 2011 PSF yes
3.2.2 3.2.1 2011 PSF yes
- 3.2.3 3.2.2 2012 PSF yes
+ 3.3.0 3.2 2012 PSF yes
Footnotes:
diff --git a/Lib/_dummy_thread.py b/Lib/_dummy_thread.py
index ed50520..13b1f26 100644
--- a/Lib/_dummy_thread.py
+++ b/Lib/_dummy_thread.py
@@ -24,11 +24,7 @@ TIMEOUT_MAX = 2**31
# imports are done when needed on a function-by-function basis. Since threads
# are disabled, the import lock should not be an issue anyway (??).
-class error(Exception):
- """Dummy implementation of _thread.error."""
-
- def __init__(self, *args):
- self.args = args
+error = RuntimeError
def start_new_thread(function, args, kwargs={}):
"""Dummy implementation of _thread.start_new_thread().
diff --git a/Lib/_pyio.py b/Lib/_pyio.py
index a2faeb3..f66290f 100644
--- a/Lib/_pyio.py
+++ b/Lib/_pyio.py
@@ -15,7 +15,6 @@ except ImportError:
import io
from io import (__all__, SEEK_SET, SEEK_CUR, SEEK_END)
-from errno import EINTR
# open() uses st_blksize whenever we can
DEFAULT_BUFFER_SIZE = 8 * 1024 # bytes
@@ -24,20 +23,12 @@ DEFAULT_BUFFER_SIZE = 8 * 1024 # bytes
# defined in io.py. We don't use real inheritance though, because we don't
# want to inherit the C implementations.
-
-class BlockingIOError(IOError):
-
- """Exception raised when I/O would block on a non-blocking I/O stream."""
-
- def __init__(self, errno, strerror, characters_written=0):
- super().__init__(errno, strerror)
- if not isinstance(characters_written, int):
- raise TypeError("characters_written must be a integer")
- self.characters_written = characters_written
+# Rebind for compatibility
+BlockingIOError = BlockingIOError
def open(file, mode="r", buffering=-1, encoding=None, errors=None,
- newline=None, closefd=True):
+ newline=None, closefd=True, opener=None):
r"""Open file and return a stream. Raise IOError upon failure.
@@ -47,21 +38,22 @@ def open(file, mode="r", buffering=-1, encoding=None, errors=None,
wrapped. (If a file descriptor is given, it is closed when the
returned I/O object is closed, unless closefd is set to False.)
- mode is an optional string that specifies the mode in which the file
- is opened. It defaults to 'r' which means open for reading in text
- mode. Other common values are 'w' for writing (truncating the file if
- it already exists), and 'a' for appending (which on some Unix systems,
- means that all writes append to the end of the file regardless of the
- current seek position). In text mode, if encoding is not specified the
- encoding used is platform dependent. (For reading and writing raw
- bytes use binary mode and leave encoding unspecified.) The available
- modes are:
+ mode is an optional string that specifies the mode in which the file is
+ opened. It defaults to 'r' which means open for reading in text mode. Other
+ common values are 'w' for writing (truncating the file if it already
+ exists), 'x' for exclusive creation of a new file, and 'a' for appending
+ (which on some Unix systems, means that all writes append to the end of the
+ file regardless of the current seek position). In text mode, if encoding is
+ not specified the encoding used is platform dependent. (For reading and
+ writing raw bytes use binary mode and leave encoding unspecified.) The
+ available modes are:
========= ===============================================================
Character Meaning
--------- ---------------------------------------------------------------
'r' open for reading (default)
'w' open for writing, truncating the file first
+ 'x' create a new file and open it for writing
'a' open for writing, appending to the end of the file if it exists
'b' binary mode
't' text mode (default)
@@ -72,7 +64,8 @@ def open(file, mode="r", buffering=-1, encoding=None, errors=None,
The default mode is 'rt' (open for reading text). For binary random
access, the mode 'w+b' opens and truncates the file to 0 bytes, while
- 'r+b' opens the file without truncation.
+ 'r+b' opens the file without truncation. The 'x' mode implies 'w' and
+   raises a `FileExistsError` if the file already exists.
Python distinguishes between files opened in binary and text modes,
even when the underlying operating system doesn't. Files opened in
@@ -132,6 +125,12 @@ def open(file, mode="r", buffering=-1, encoding=None, errors=None,
be kept open when the file is closed. This does not work when a file name is
given and must be True in that case.
+ A custom opener can be used by passing a callable as *opener*. The
+ underlying file descriptor for the file object is then obtained by calling
+ *opener* with (*file*, *flags*). *opener* must return an open file
+ descriptor (passing os.open as *opener* results in functionality similar to
+ passing None).
+
open() returns a file object whose type depends on the mode, and
through which the standard file operations such as reading and writing
are performed. When open() is used to open a file in a text mode ('w',
@@ -157,8 +156,9 @@ def open(file, mode="r", buffering=-1, encoding=None, errors=None,
if errors is not None and not isinstance(errors, str):
raise TypeError("invalid errors: %r" % errors)
modes = set(mode)
- if modes - set("arwb+tU") or len(mode) > len(modes):
+ if modes - set("axrwb+tU") or len(mode) > len(modes):
raise ValueError("invalid mode: %r" % mode)
+ creating = "x" in modes
reading = "r" in modes
writing = "w" in modes
appending = "a" in modes
@@ -166,14 +166,14 @@ def open(file, mode="r", buffering=-1, encoding=None, errors=None,
text = "t" in modes
binary = "b" in modes
if "U" in modes:
- if writing or appending:
+ if creating or writing or appending:
raise ValueError("can't use U and writing mode at once")
reading = True
if text and binary:
raise ValueError("can't have text and binary mode at once")
- if reading + writing + appending > 1:
+ if creating + reading + writing + appending > 1:
raise ValueError("can't have read/write/append mode at once")
- if not (reading or writing or appending):
+ if not (creating or reading or writing or appending):
raise ValueError("must have exactly one of read/write/append mode")
if binary and encoding is not None:
raise ValueError("binary mode doesn't take an encoding argument")
@@ -182,11 +182,12 @@ def open(file, mode="r", buffering=-1, encoding=None, errors=None,
if binary and newline is not None:
raise ValueError("binary mode doesn't take a newline argument")
raw = FileIO(file,
+ (creating and "x" or "") +
(reading and "r" or "") +
(writing and "w" or "") +
(appending and "a" or "") +
(updating and "+" or ""),
- closefd)
+ closefd, opener=opener)
line_buffering = False
if buffering == 1 or buffering < 0 and raw.isatty():
buffering = -1
@@ -208,7 +209,7 @@ def open(file, mode="r", buffering=-1, encoding=None, errors=None,
raise ValueError("can't have unbuffered text I/O")
if updating:
buffer = BufferedRandom(raw, buffering)
- elif writing or appending:
+ elif creating or writing or appending:
buffer = BufferedWriter(raw, buffering)
elif reading:
buffer = BufferedReader(raw, buffering)
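A small usage sketch of the two features added to open() above, 'x' mode and the opener callable (the file name and permission bits are illustrative):

    import os

    def restrictive_opener(path, flags):
        # opener receives (file, flags) and must return an open file descriptor
        return os.open(path, flags, 0o600)

    with open('fresh.txt', 'x', opener=restrictive_opener) as f:
        f.write('created exactly once\n')   # a second 'x' open raises FileExistsError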
@@ -948,15 +949,19 @@ class BufferedReader(_BufferedIOMixin):
# Special case for when the number of bytes to read is unspecified.
if n is None or n == -1:
self._reset_read_buf()
+ if hasattr(self.raw, 'readall'):
+ chunk = self.raw.readall()
+ if chunk is None:
+ return buf[pos:] or None
+ else:
+ return buf[pos:] + chunk
chunks = [buf[pos:]] # Strip the consumed bytes.
current_size = 0
while True:
# Read until EOF or until read() would block.
try:
chunk = self.raw.read()
- except IOError as e:
- if e.errno != EINTR:
- raise
+ except InterruptedError:
continue
if chunk in empty_values:
nodata_val = chunk
@@ -978,9 +983,7 @@ class BufferedReader(_BufferedIOMixin):
while avail < n:
try:
chunk = self.raw.read(wanted)
- except IOError as e:
- if e.errno != EINTR:
- raise
+ except InterruptedError:
continue
if chunk in empty_values:
nodata_val = chunk
@@ -1013,9 +1016,7 @@ class BufferedReader(_BufferedIOMixin):
while True:
try:
current = self.raw.read(to_read)
- except IOError as e:
- if e.errno != EINTR:
- raise
+ except InterruptedError:
continue
break
if current:
@@ -1120,13 +1121,11 @@ class BufferedWriter(_BufferedIOMixin):
while self._write_buf:
try:
n = self.raw.write(self._write_buf)
+ except InterruptedError:
+ continue
except BlockingIOError:
raise RuntimeError("self.raw should implement RawIOBase: it "
"should not raise BlockingIOError")
- except IOError as e:
- if e.errno != EINTR:
- raise
- continue
if n is None:
raise BlockingIOError(
errno.EAGAIN,
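The EINTR handling above all follows one pattern, enabled by the PEP 3151 exception hierarchy in which InterruptedError replaces manual errno checks; a minimal sketch, where `stream` stands in for any raw I/O object:

    def read_retrying(stream):
        # Retry the raw read when a signal interrupts the system call (EINTR),
        # instead of catching IOError and inspecting e.errno.
        while True:
            try:
                return stream.read()
            except InterruptedError:
                continue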
@@ -1515,6 +1514,7 @@ class TextIOWrapper(TextIOBase):
self._snapshot = None # info for reconstructing decoder state
self._seekable = self._telling = self.buffer.seekable()
self._has_read1 = hasattr(self.buffer, 'read1')
+ self._b2cratio = 0.0
if self._seekable and self.writable():
position = self.buffer.tell()
@@ -1685,7 +1685,12 @@ class TextIOWrapper(TextIOBase):
else:
input_chunk = self.buffer.read(self._CHUNK_SIZE)
eof = not input_chunk
- self._set_decoded_chars(self._decoder.decode(input_chunk, eof))
+ decoded_chars = self._decoder.decode(input_chunk, eof)
+ self._set_decoded_chars(decoded_chars)
+ if decoded_chars:
+ self._b2cratio = len(input_chunk) / len(self._decoded_chars)
+ else:
+ self._b2cratio = 0.0
if self._telling:
# At the snapshot point, len(dec_buffer) bytes before the read,
@@ -1739,20 +1744,56 @@ class TextIOWrapper(TextIOBase):
# forward until it gives us enough decoded characters.
saved_state = decoder.getstate()
try:
+ # Fast search for an acceptable start point, close to our
+ # current pos.
+ # Rationale: calling decoder.decode() has a large overhead
+ # regardless of chunk size; we want the number of such calls to
+ # be O(1) in most situations (common decoders, non-crazy input).
+ # Actually, it will be exactly 1 for fixed-size codecs (all
+ # 8-bit codecs, also UTF-16 and UTF-32).
+ skip_bytes = int(self._b2cratio * chars_to_skip)
+ skip_back = 1
+ assert skip_bytes <= len(next_input)
+ while skip_bytes > 0:
+ decoder.setstate((b'', dec_flags))
+                # Decode up to the tentative start point
+ n = len(decoder.decode(next_input[:skip_bytes]))
+ if n <= chars_to_skip:
+ b, d = decoder.getstate()
+ if not b:
+ # Before pos and no bytes buffered in decoder => OK
+ dec_flags = d
+ chars_to_skip -= n
+ break
+ # Skip back by buffered amount and reset heuristic
+ skip_bytes -= len(b)
+ skip_back = 1
+ else:
+ # We're too far ahead, skip back a bit
+ skip_bytes -= skip_back
+ skip_back = skip_back * 2
+ else:
+ skip_bytes = 0
+ decoder.setstate((b'', dec_flags))
+
# Note our initial start point.
- decoder.setstate((b'', dec_flags))
- start_pos = position
- start_flags, bytes_fed, chars_decoded = dec_flags, 0, 0
- need_eof = 0
+ start_pos = position + skip_bytes
+ start_flags = dec_flags
+ if chars_to_skip == 0:
+ # We haven't moved from the start point.
+ return self._pack_cookie(start_pos, start_flags)
# Feed the decoder one byte at a time. As we go, note the
# nearest "safe start point" before the current location
# (a point where the decoder has nothing buffered, so seek()
# can safely start from there and advance to this location).
- next_byte = bytearray(1)
- for next_byte[0] in next_input:
+ bytes_fed = 0
+ need_eof = 0
+ # Chars decoded since `start_pos`
+ chars_decoded = 0
+ for i in range(skip_bytes, len(next_input)):
bytes_fed += 1
- chars_decoded += len(decoder.decode(next_byte))
+ chars_decoded += len(decoder.decode(next_input[i:i+1]))
dec_buffer, dec_flags = decoder.getstate()
if not dec_buffer and chars_decoded <= chars_to_skip:
# Decoder buffer is empty, so this is a safe start point.
diff --git a/Lib/abc.py b/Lib/abc.py
index a6c2dc4..09778e8 100644
--- a/Lib/abc.py
+++ b/Lib/abc.py
@@ -26,7 +26,8 @@ def abstractmethod(funcobj):
class abstractclassmethod(classmethod):
- """A decorator indicating abstract classmethods.
+ """
+ A decorator indicating abstract classmethods.
Similar to abstractmethod.
@@ -36,6 +37,9 @@ class abstractclassmethod(classmethod):
@abstractclassmethod
def my_abstract_classmethod(cls, ...):
...
+
+ 'abstractclassmethod' is deprecated. Use 'classmethod' with
+ 'abstractmethod' instead.
"""
__isabstractmethod__ = True
@@ -46,7 +50,8 @@ class abstractclassmethod(classmethod):
class abstractstaticmethod(staticmethod):
- """A decorator indicating abstract staticmethods.
+ """
+ A decorator indicating abstract staticmethods.
Similar to abstractmethod.
@@ -56,6 +61,9 @@ class abstractstaticmethod(staticmethod):
@abstractstaticmethod
def my_abstract_staticmethod(...):
...
+
+ 'abstractstaticmethod' is deprecated. Use 'staticmethod' with
+ 'abstractmethod' instead.
"""
__isabstractmethod__ = True
@@ -66,7 +74,8 @@ class abstractstaticmethod(staticmethod):
class abstractproperty(property):
- """A decorator indicating abstract properties.
+ """
+ A decorator indicating abstract properties.
Requires that the metaclass is ABCMeta or derived from it. A
class that has a metaclass derived from ABCMeta cannot be
@@ -88,7 +97,11 @@ class abstractproperty(property):
def getx(self): ...
def setx(self, value): ...
x = abstractproperty(getx, setx)
+
+ 'abstractproperty' is deprecated. Use 'property' with 'abstractmethod'
+ instead.
"""
+
__isabstractmethod__ = True
@@ -133,11 +146,14 @@ class ABCMeta(type):
return cls
def register(cls, subclass):
- """Register a virtual subclass of an ABC."""
+ """Register a virtual subclass of an ABC.
+
+ Returns the subclass, to allow usage as a class decorator.
+ """
if not isinstance(subclass, type):
raise TypeError("Can only register classes")
if issubclass(subclass, cls):
- return # Already a subclass
+ return subclass # Already a subclass
# Subtle: test for cycles *after* testing for "already a subclass";
# this means we allow X.register(X) and interpret it as a no-op.
if issubclass(cls, subclass):
@@ -145,6 +161,7 @@ class ABCMeta(type):
raise RuntimeError("Refusing to create an inheritance cycle")
cls._abc_registry.add(subclass)
ABCMeta._abc_invalidation_counter += 1 # Invalidate negative cache
+ return subclass
def _dump_registry(cls, file=None):
"""Debug helper to print the ABC registry."""
diff --git a/Lib/aifc.py b/Lib/aifc.py
index 775f39c..ec4f822 100644
--- a/Lib/aifc.py
+++ b/Lib/aifc.py
@@ -136,6 +136,7 @@ writeframesraw.
import struct
import builtins
+import warnings
__all__ = ["Error", "open", "openfp"]
@@ -440,7 +441,7 @@ class Aifc_read:
kludge = 0
if chunk.chunksize == 18:
kludge = 1
- print('Warning: bad COMM chunk size')
+ warnings.warn('Warning: bad COMM chunk size')
chunk.chunksize = 23
#DEBUG end
self._comptype = chunk.read(4)
@@ -484,11 +485,10 @@ class Aifc_read:
# a position 0 and name ''
self._markers.append((id, pos, name))
except EOFError:
- print('Warning: MARK chunk contains only', end=' ')
- print(len(self._markers), end=' ')
- if len(self._markers) == 1: print('marker', end=' ')
- else: print('markers', end=' ')
- print('instead of', nmarkers)
+ w = ('Warning: MARK chunk contains only %s marker%s instead of %s' %
+ (len(self._markers), '' if len(self._markers) == 1 else 's',
+ nmarkers))
+ warnings.warn(w)
class Aifc_write:
# Variables used in this class:
diff --git a/Lib/argparse.py b/Lib/argparse.py
index 2202b57..87d0cef 100644
--- a/Lib/argparse.py
+++ b/Lib/argparse.py
@@ -71,6 +71,7 @@ __all__ = [
'ArgumentDefaultsHelpFormatter',
'RawDescriptionHelpFormatter',
'RawTextHelpFormatter',
+ 'MetavarTypeHelpFormatter',
'Namespace',
'Action',
'ONE_OR_MORE',
@@ -419,7 +420,8 @@ class HelpFormatter(object):
# produce all arg strings
elif not action.option_strings:
- part = self._format_args(action, action.dest)
+ default = self._get_default_metavar_for_positional(action)
+ part = self._format_args(action, default)
# if it's in a group, strip the outer []
if action in group_actions:
@@ -441,7 +443,7 @@ class HelpFormatter(object):
# if the Optional takes a value, format is:
# -s ARGS or --long ARGS
else:
- default = action.dest.upper()
+ default = self._get_default_metavar_for_optional(action)
args_string = self._format_args(action, default)
part = '%s %s' % (option_string, args_string)
@@ -527,7 +529,8 @@ class HelpFormatter(object):
def _format_action_invocation(self, action):
if not action.option_strings:
- metavar, = self._metavar_formatter(action, action.dest)(1)
+ default = self._get_default_metavar_for_positional(action)
+ metavar, = self._metavar_formatter(action, default)(1)
return metavar
else:
@@ -541,7 +544,7 @@ class HelpFormatter(object):
# if the Optional takes a value, format is:
# -s ARGS, --long ARGS
else:
- default = action.dest.upper()
+ default = self._get_default_metavar_for_optional(action)
args_string = self._format_args(action, default)
for option_string in action.option_strings:
parts.append('%s %s' % (option_string, args_string))
@@ -619,6 +622,12 @@ class HelpFormatter(object):
def _get_help_string(self, action):
return action.help
+ def _get_default_metavar_for_optional(self, action):
+ return action.dest.upper()
+
+ def _get_default_metavar_for_positional(self, action):
+ return action.dest
+
class RawDescriptionHelpFormatter(HelpFormatter):
"""Help message formatter which retains any formatting in descriptions.
@@ -628,7 +637,7 @@ class RawDescriptionHelpFormatter(HelpFormatter):
"""
def _fill_text(self, text, width, indent):
- return ''.join([indent + line for line in text.splitlines(True)])
+ return ''.join(indent + line for line in text.splitlines(keepends=True))
class RawTextHelpFormatter(RawDescriptionHelpFormatter):
@@ -659,6 +668,22 @@ class ArgumentDefaultsHelpFormatter(HelpFormatter):
return help
+class MetavarTypeHelpFormatter(HelpFormatter):
+ """Help message formatter which uses the argument 'type' as the default
+ metavar value (instead of the argument 'dest')
+
+ Only the name of this class is considered a public API. All the methods
+ provided by the class are considered an implementation detail.
+ """
+
+ def _get_default_metavar_for_optional(self, action):
+ return action.type.__name__
+
+ def _get_default_metavar_for_positional(self, action):
+ return action.type.__name__
+
+
+
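A short sketch of the new formatter; note that every argument needs an explicit type, since the metavar is derived from type.__name__:

    import argparse

    parser = argparse.ArgumentParser(
        prog='demo', formatter_class=argparse.MetavarTypeHelpFormatter)
    parser.add_argument('--count', type=int)   # shown in help as "--count int"
    parser.add_argument('rate', type=float)    # shown in help as "float"
    parser.print_help()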
# =====================
# Options and Arguments
# =====================
@@ -1940,17 +1965,12 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer):
# if we didn't consume all the argument strings, there were extras
extras.extend(arg_strings[stop_index:])
- # if we didn't use all the Positional objects, there were too few
- # arg strings supplied.
- if positionals:
- self.error(_('too few arguments'))
-
# make sure all required actions were present
- for action in self._actions:
- if action.required:
- if action not in seen_actions:
- name = _get_action_name(action)
- self.error(_('argument %s is required') % name)
+ required_actions = [_get_action_name(action) for action in self._actions
+ if action.required and action not in seen_actions]
+ if required_actions:
+ self.error(_('the following arguments are required: %s') %
+ ', '.join(required_actions))
# make sure all required groups had one option present
for group in self._mutually_exclusive_groups:
diff --git a/Lib/ast.py b/Lib/ast.py
index fb5adac..13f59f9 100644
--- a/Lib/ast.py
+++ b/Lib/ast.py
@@ -25,7 +25,6 @@
:license: Python License.
"""
from _ast import *
-from _ast import __version__
def parse(source, filename='<unknown>', mode='exec'):
diff --git a/Lib/asynchat.py b/Lib/asynchat.py
index 6558512..2199d1b 100644
--- a/Lib/asynchat.py
+++ b/Lib/asynchat.py
@@ -75,7 +75,7 @@ class async_chat (asyncore.dispatcher):
# sign of an application bug that we don't want to pass silently
use_encoding = 0
- encoding = 'latin1'
+ encoding = 'latin-1'
def __init__ (self, sock=None, map=None):
# for string terminator matching
diff --git a/Lib/asyncore.py b/Lib/asyncore.py
index 7f42d39..920444d 100644
--- a/Lib/asyncore.py
+++ b/Lib/asyncore.py
@@ -54,7 +54,7 @@ import warnings
import os
from errno import EALREADY, EINPROGRESS, EWOULDBLOCK, ECONNRESET, EINVAL, \
- ENOTCONN, ESHUTDOWN, EINTR, EISCONN, EBADF, ECONNABORTED, EPIPE, EAGAIN, \
+ ENOTCONN, ESHUTDOWN, EISCONN, EBADF, ECONNABORTED, EPIPE, EAGAIN, \
errorcode
_DISCONNECTED = frozenset((ECONNRESET, ENOTCONN, ESHUTDOWN, ECONNABORTED, EPIPE,
@@ -143,11 +143,8 @@ def poll(timeout=0.0, map=None):
try:
r, w, e = select.select(r, w, e, timeout)
- except select.error as err:
- if err.args[0] != EINTR:
- raise
- else:
- return
+ except InterruptedError:
+ return
for fd in r:
obj = map.get(fd)
@@ -184,15 +181,10 @@ def poll2(timeout=0.0, map=None):
if obj.writable() and not obj.accepting:
flags |= select.POLLOUT
if flags:
- # Only check for exceptions if object was either readable
- # or writable.
- flags |= select.POLLERR | select.POLLHUP | select.POLLNVAL
pollster.register(fd, flags)
try:
r = pollster.poll(timeout)
- except select.error as err:
- if err.args[0] != EINTR:
- raise
+ except InterruptedError:
r = []
for fd, flags in r:
obj = map.get(fd)
@@ -291,7 +283,7 @@ class dispatcher:
del map[fd]
self._fileno = None
- def create_socket(self, family, type):
+ def create_socket(self, family=socket.AF_INET, type=socket.SOCK_STREAM):
self.family_and_type = family, type
sock = socket.socket(family, type)
sock.setblocking(0)
diff --git a/Lib/base64.py b/Lib/base64.py
index 895d813..edcc4be 100755
--- a/Lib/base64.py
+++ b/Lib/base64.py
@@ -29,6 +29,16 @@ __all__ = [
bytes_types = (bytes, bytearray) # Types acceptable as binary data
+def _bytes_from_decode_data(s):
+ if isinstance(s, str):
+ try:
+ return s.encode('ascii')
+ except UnicodeEncodeError:
+ raise ValueError('string argument should contain only ASCII characters')
+ elif isinstance(s, bytes_types):
+ return s
+ else:
+ raise TypeError("argument should be bytes or ASCII string, not %s" % s.__class__.__name__)
def _translate(s, altchars):
if not isinstance(s, bytes_types):
@@ -79,12 +89,9 @@ def b64decode(s, altchars=None, validate=False):
discarded prior to the padding check. If validate is True,
non-base64-alphabet characters in the input result in a binascii.Error.
"""
- if not isinstance(s, bytes_types):
- raise TypeError("expected bytes, not %s" % s.__class__.__name__)
+ s = _bytes_from_decode_data(s)
if altchars is not None:
- if not isinstance(altchars, bytes_types):
- raise TypeError("expected bytes, not %s"
- % altchars.__class__.__name__)
+ altchars = _bytes_from_decode_data(altchars)
assert len(altchars) == 2, repr(altchars)
s = _translate(s, {chr(altchars[0]): b'+', chr(altchars[1]): b'/'})
if validate and not re.match(b'^[A-Za-z0-9+/]*={0,2}$', s):
@@ -211,8 +218,7 @@ def b32decode(s, casefold=False, map01=None):
the input is incorrectly padded or if there are non-alphabet
characters present in the input.
"""
- if not isinstance(s, bytes_types):
- raise TypeError("expected bytes, not %s" % s.__class__.__name__)
+ s = _bytes_from_decode_data(s)
quanta, leftover = divmod(len(s), 8)
if leftover:
raise binascii.Error('Incorrect padding')
@@ -220,8 +226,7 @@ def b32decode(s, casefold=False, map01=None):
# False, or the character to map the digit 1 (one) to. It should be
# either L (el) or I (eye).
if map01 is not None:
- if not isinstance(map01, bytes_types):
- raise TypeError("expected bytes, not %s" % map01.__class__.__name__)
+ map01 = _bytes_from_decode_data(map01)
assert len(map01) == 1, repr(map01)
s = _translate(s, {b'0': b'O', b'1': map01})
if casefold:
@@ -292,8 +297,7 @@ def b16decode(s, casefold=False):
s were incorrectly padded or if there are non-alphabet characters
present in the string.
"""
- if not isinstance(s, bytes_types):
- raise TypeError("expected bytes, not %s" % s.__class__.__name__)
+ s = _bytes_from_decode_data(s)
if casefold:
s = s.upper()
if re.search(b'[^0-9A-F]', s):
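With _bytes_from_decode_data in place, the decoding functions accept ASCII-only str as well as bytes:

    import base64

    assert base64.b64decode('aGVsbG8=') == b'hello'    # str input now accepted
    assert base64.b64decode(b'aGVsbG8=') == b'hello'   # bytes still works
    assert base64.b32decode('NBSWY3DP') == b'hello'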
diff --git a/Lib/binhex.py b/Lib/binhex.py
index 999a675..7bf9278 100644
--- a/Lib/binhex.py
+++ b/Lib/binhex.py
@@ -23,7 +23,6 @@ hexbin(inputfilename, outputfilename)
#
import io
import os
-import sys
import struct
import binascii
diff --git a/Lib/bz2.py b/Lib/bz2.py
new file mode 100644
index 0000000..51b9ac4
--- /dev/null
+++ b/Lib/bz2.py
@@ -0,0 +1,424 @@
+"""Interface to the libbzip2 compression library.
+
+This module provides a file interface, classes for incremental
+(de)compression, and functions for one-shot (de)compression.
+"""
+
+__all__ = ["BZ2File", "BZ2Compressor", "BZ2Decompressor", "compress",
+ "decompress"]
+
+__author__ = "Nadeem Vawda <nadeem.vawda@gmail.com>"
+
+import io
+import warnings
+
+try:
+ from threading import RLock
+except ImportError:
+ from dummy_threading import RLock
+
+from _bz2 import BZ2Compressor, BZ2Decompressor
+
+
+_MODE_CLOSED = 0
+_MODE_READ = 1
+_MODE_READ_EOF = 2
+_MODE_WRITE = 3
+
+_BUFFER_SIZE = 8192
+
+
+class BZ2File(io.BufferedIOBase):
+
+ """A file object providing transparent bzip2 (de)compression.
+
+ A BZ2File can act as a wrapper for an existing file object, or refer
+ directly to a named file on disk.
+
+ Note that BZ2File provides a *binary* file interface - data read is
+ returned as bytes, and data to be written should be given as bytes.
+ """
+
+ def __init__(self, filename=None, mode="r", buffering=None,
+ compresslevel=9, *, fileobj=None):
+ """Open a bzip2-compressed file.
+
+ If filename is given, open the named file. Otherwise, operate on
+ the file object given by fileobj. Exactly one of these two
+ parameters should be provided.
+
+ mode can be 'r' for reading (default), 'w' for (over)writing, or
+ 'a' for appending.
+
+ buffering is ignored. Its use is deprecated.
+
+ If mode is 'w' or 'a', compresslevel can be a number between 1
+ and 9 specifying the level of compression: 1 produces the least
+ compression, and 9 (default) produces the most compression.
+
+ If mode is 'r', the input file may be the concatenation of
+ multiple compressed streams.
+ """
+ # This lock must be recursive, so that BufferedIOBase's
+ # readline(), readlines() and writelines() don't deadlock.
+ self._lock = RLock()
+ self._fp = None
+ self._closefp = False
+ self._mode = _MODE_CLOSED
+ self._pos = 0
+ self._size = -1
+
+ if buffering is not None:
+ warnings.warn("Use of 'buffering' argument is deprecated",
+ DeprecationWarning)
+
+ if not (1 <= compresslevel <= 9):
+ raise ValueError("compresslevel must be between 1 and 9")
+
+ if mode in ("", "r", "rb"):
+ mode = "rb"
+ mode_code = _MODE_READ
+ self._decompressor = BZ2Decompressor()
+ self._buffer = None
+ elif mode in ("w", "wb"):
+ mode = "wb"
+ mode_code = _MODE_WRITE
+ self._compressor = BZ2Compressor(compresslevel)
+ elif mode in ("a", "ab"):
+ mode = "ab"
+ mode_code = _MODE_WRITE
+ self._compressor = BZ2Compressor(compresslevel)
+ else:
+ raise ValueError("Invalid mode: {!r}".format(mode))
+
+ if filename is not None and fileobj is None:
+ self._fp = open(filename, mode)
+ self._closefp = True
+ self._mode = mode_code
+ elif fileobj is not None and filename is None:
+ self._fp = fileobj
+ self._mode = mode_code
+ else:
+ raise ValueError("Must give exactly one of filename and fileobj")
+
+ def close(self):
+ """Flush and close the file.
+
+ May be called more than once without error. Once the file is
+ closed, any other operation on it will raise a ValueError.
+ """
+ with self._lock:
+ if self._mode == _MODE_CLOSED:
+ return
+ try:
+ if self._mode in (_MODE_READ, _MODE_READ_EOF):
+ self._decompressor = None
+ elif self._mode == _MODE_WRITE:
+ self._fp.write(self._compressor.flush())
+ self._compressor = None
+ finally:
+ try:
+ if self._closefp:
+ self._fp.close()
+ finally:
+ self._fp = None
+ self._closefp = False
+ self._mode = _MODE_CLOSED
+ self._buffer = None
+
+ @property
+ def closed(self):
+ """True if this file is closed."""
+ return self._mode == _MODE_CLOSED
+
+ def fileno(self):
+ """Return the file descriptor for the underlying file."""
+ self._check_not_closed()
+ return self._fp.fileno()
+
+ def seekable(self):
+ """Return whether the file supports seeking."""
+ return self.readable() and self._fp.seekable()
+
+ def readable(self):
+ """Return whether the file was opened for reading."""
+ self._check_not_closed()
+ return self._mode in (_MODE_READ, _MODE_READ_EOF)
+
+ def writable(self):
+ """Return whether the file was opened for writing."""
+ self._check_not_closed()
+ return self._mode == _MODE_WRITE
+
+ # Mode-checking helper functions.
+
+ def _check_not_closed(self):
+ if self.closed:
+ raise ValueError("I/O operation on closed file")
+
+ def _check_can_read(self):
+ if not self.readable():
+ raise io.UnsupportedOperation("File not open for reading")
+
+ def _check_can_write(self):
+ if not self.writable():
+ raise io.UnsupportedOperation("File not open for writing")
+
+ def _check_can_seek(self):
+ if not self.readable():
+ raise io.UnsupportedOperation("Seeking is only supported "
+ "on files open for reading")
+ if not self._fp.seekable():
+ raise io.UnsupportedOperation("The underlying file object "
+ "does not support seeking")
+
+ # Fill the readahead buffer if it is empty. Returns False on EOF.
+ def _fill_buffer(self):
+ if self._buffer:
+ return True
+
+ if self._decompressor.unused_data:
+ rawblock = self._decompressor.unused_data
+ else:
+ rawblock = self._fp.read(_BUFFER_SIZE)
+
+ if not rawblock:
+ if self._decompressor.eof:
+ self._mode = _MODE_READ_EOF
+ self._size = self._pos
+ return False
+ else:
+ raise EOFError("Compressed file ended before the "
+ "end-of-stream marker was reached")
+
+ # Continue to next stream.
+ if self._decompressor.eof:
+ self._decompressor = BZ2Decompressor()
+
+ self._buffer = self._decompressor.decompress(rawblock)
+ return True
+
+ # Read data until EOF.
+ # If return_data is false, consume the data without returning it.
+ def _read_all(self, return_data=True):
+ blocks = []
+ while self._fill_buffer():
+ if return_data:
+ blocks.append(self._buffer)
+ self._pos += len(self._buffer)
+ self._buffer = None
+ if return_data:
+ return b"".join(blocks)
+
+ # Read a block of up to n bytes.
+ # If return_data is false, consume the data without returning it.
+ def _read_block(self, n, return_data=True):
+ blocks = []
+ while n > 0 and self._fill_buffer():
+ if n < len(self._buffer):
+ data = self._buffer[:n]
+ self._buffer = self._buffer[n:]
+ else:
+ data = self._buffer
+ self._buffer = None
+ if return_data:
+ blocks.append(data)
+ self._pos += len(data)
+ n -= len(data)
+ if return_data:
+ return b"".join(blocks)
+
+ def peek(self, n=0):
+ """Return buffered data without advancing the file position.
+
+ Always returns at least one byte of data, unless at EOF.
+ The exact number of bytes returned is unspecified.
+ """
+ with self._lock:
+ self._check_can_read()
+ if self._mode == _MODE_READ_EOF or not self._fill_buffer():
+ return b""
+ return self._buffer
+
+ def read(self, size=-1):
+ """Read up to size uncompressed bytes from the file.
+
+ If size is negative or omitted, read until EOF is reached.
+ Returns b'' if the file is already at EOF.
+ """
+ with self._lock:
+ self._check_can_read()
+ if self._mode == _MODE_READ_EOF or size == 0:
+ return b""
+ elif size < 0:
+ return self._read_all()
+ else:
+ return self._read_block(size)
+
+ def read1(self, size=-1):
+ """Read up to size uncompressed bytes with at most one read
+ from the underlying stream.
+
+ Returns b'' if the file is at EOF.
+ """
+ with self._lock:
+ self._check_can_read()
+ if (size == 0 or self._mode == _MODE_READ_EOF or
+ not self._fill_buffer()):
+ return b""
+ if 0 < size < len(self._buffer):
+ data = self._buffer[:size]
+ self._buffer = self._buffer[size:]
+ else:
+ data = self._buffer
+ self._buffer = None
+ self._pos += len(data)
+ return data
+
+ def readinto(self, b):
+ """Read up to len(b) bytes into b.
+
+ Returns the number of bytes read (0 for EOF).
+ """
+ with self._lock:
+ return io.BufferedIOBase.readinto(self, b)
+
+ def readline(self, size=-1):
+ """Read a line of uncompressed bytes from the file.
+
+ The terminating newline (if present) is retained. If size is
+ non-negative, no more than size bytes will be read (in which
+ case the line may be incomplete). Returns b'' if already at EOF.
+ """
+ if not hasattr(size, "__index__"):
+ raise TypeError("Integer argument expected")
+ size = size.__index__()
+ with self._lock:
+ return io.BufferedIOBase.readline(self, size)
+
+ def readlines(self, size=-1):
+ """Read a list of lines of uncompressed bytes from the file.
+
+ size can be specified to control the number of lines read: no
+ further lines will be read once the total size of the lines read
+ so far equals or exceeds size.
+ """
+ if not hasattr(size, "__index__"):
+ raise TypeError("Integer argument expected")
+ size = size.__index__()
+ with self._lock:
+ return io.BufferedIOBase.readlines(self, size)
+
+ def write(self, data):
+ """Write a byte string to the file.
+
+ Returns the number of uncompressed bytes written, which is
+ always len(data). Note that due to buffering, the file on disk
+ may not reflect the data written until close() is called.
+ """
+ with self._lock:
+ self._check_can_write()
+ compressed = self._compressor.compress(data)
+ self._fp.write(compressed)
+ self._pos += len(data)
+ return len(data)
+
+ def writelines(self, seq):
+ """Write a sequence of byte strings to the file.
+
+ Returns the number of uncompressed bytes written.
+ seq can be any iterable yielding byte strings.
+
+ Line separators are not added between the written byte strings.
+ """
+ with self._lock:
+ return io.BufferedIOBase.writelines(self, seq)
+
+ # Rewind the file to the beginning of the data stream.
+ def _rewind(self):
+ self._fp.seek(0, 0)
+ self._mode = _MODE_READ
+ self._pos = 0
+ self._decompressor = BZ2Decompressor()
+ self._buffer = None
+
+ def seek(self, offset, whence=0):
+ """Change the file position.
+
+ The new position is specified by offset, relative to the
+ position indicated by whence. Values for whence are:
+
+ 0: start of stream (default); offset must not be negative
+ 1: current stream position
+ 2: end of stream; offset must not be positive
+
+ Returns the new file position.
+
+ Note that seeking is emulated, so depending on the parameters,
+ this operation may be extremely slow.
+ """
+ with self._lock:
+ self._check_can_seek()
+
+ # Recalculate offset as an absolute file position.
+ if whence == 0:
+ pass
+ elif whence == 1:
+ offset = self._pos + offset
+ elif whence == 2:
+ # Seeking relative to EOF - we need to know the file's size.
+ if self._size < 0:
+ self._read_all(return_data=False)
+ offset = self._size + offset
+ else:
+ raise ValueError("Invalid value for whence: {}".format(whence))
+
+ # Make it so that offset is the number of bytes to skip forward.
+ if offset < self._pos:
+ self._rewind()
+ else:
+ offset -= self._pos
+
+ # Read and discard data until we reach the desired position.
+ if self._mode != _MODE_READ_EOF:
+ self._read_block(offset, return_data=False)
+
+ return self._pos
+
+ def tell(self):
+ """Return the current file position."""
+ with self._lock:
+ self._check_not_closed()
+ return self._pos
+
+
+def compress(data, compresslevel=9):
+ """Compress a block of data.
+
+ compresslevel, if given, must be a number between 1 and 9.
+
+ For incremental compression, use a BZ2Compressor object instead.
+ """
+ comp = BZ2Compressor(compresslevel)
+ return comp.compress(data) + comp.flush()
+
+
+def decompress(data):
+ """Decompress a block of data.
+
+ For incremental decompression, use a BZ2Decompressor object instead.
+ """
+ if len(data) == 0:
+ return b""
+
+ results = []
+ while True:
+ decomp = BZ2Decompressor()
+ results.append(decomp.decompress(data))
+ if not decomp.eof:
+ raise ValueError("Compressed data ended before the "
+ "end-of-stream marker was reached")
+ if not decomp.unused_data:
+ return b"".join(results)
+ # There is unused data left over. Proceed to next stream.
+ data = decomp.unused_data
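+
A round-trip sketch of the one-shot functions defined above, including the multiple-stream handling in decompress():

    import bz2

    payload = b'compressible ' * 1000
    blob = bz2.compress(payload, compresslevel=9)
    assert bz2.decompress(blob) == payload
    # decompress() walks concatenated streams until no unused data remains:
    assert bz2.decompress(blob + blob) == payload * 2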
diff --git a/Lib/cgi.py b/Lib/cgi.py
index 90a3345..e964f0c 100755
--- a/Lib/cgi.py
+++ b/Lib/cgi.py
@@ -76,7 +76,7 @@ def initlog(*allargs):
send an error message).
"""
- global logfp, log
+ global log, logfile, logfp
if logfile and not logfp:
try:
logfp = open(logfile, "a")
@@ -96,6 +96,15 @@ def nolog(*allargs):
"""Dummy function, assigned to log when logging is disabled."""
pass
+def closelog():
+ """Close the log file."""
+ global log, logfile, logfp
+ logfile = ''
+ if logfp:
+ logfp.close()
+ logfp = None
+ log = initlog
+
log = initlog # The current logging function
@@ -1003,7 +1012,7 @@ environment as well. Here are some common variable names:
def escape(s, quote=None):
"""Deprecated API."""
warn("cgi.escape is deprecated, use html.escape instead",
- PendingDeprecationWarning, stacklevel=2)
+ DeprecationWarning, stacklevel=2)
s = s.replace("&", "&amp;") # Must be done first!
s = s.replace("<", "&lt;")
s = s.replace(">", "&gt;")
diff --git a/Lib/cgitb.py b/Lib/cgitb.py
index 7b52c8e..e3ce2cb 100644
--- a/Lib/cgitb.py
+++ b/Lib/cgitb.py
@@ -31,7 +31,6 @@ import tempfile
import time
import tokenize
import traceback
-import types
def reset():
"""Return a string that resets the CGI and browser to a known state."""
diff --git a/Lib/codecs.py b/Lib/codecs.py
index b150d64..e63a0c6 100644
--- a/Lib/codecs.py
+++ b/Lib/codecs.py
@@ -484,7 +484,7 @@ class StreamReader(Codec):
if firstline:
newchars, decodedbytes = \
self.decode(data[:exc.start], self.errors)
- lines = newchars.splitlines(True)
+ lines = newchars.splitlines(keepends=True)
if len(lines)<=1:
raise
else:
@@ -526,7 +526,7 @@ class StreamReader(Codec):
self.charbuffer = self.linebuffer[0]
self.linebuffer = None
if not keepends:
- line = line.splitlines(False)[0]
+ line = line.splitlines(keepends=False)[0]
return line
readsize = size or 72
@@ -543,7 +543,7 @@ class StreamReader(Codec):
data += self.read(size=1, chars=1)
line += data
- lines = line.splitlines(True)
+ lines = line.splitlines(keepends=True)
if lines:
if len(lines) > 1:
# More than one line result; the first line is a full line
@@ -559,10 +559,10 @@ class StreamReader(Codec):
# only one remaining line, put it back into charbuffer
self.charbuffer = lines[0] + self.charbuffer
if not keepends:
- line = line.splitlines(False)[0]
+ line = line.splitlines(keepends=False)[0]
break
line0withend = lines[0]
- line0withoutend = lines[0].splitlines(False)[0]
+ line0withoutend = lines[0].splitlines(keepends=False)[0]
if line0withend != line0withoutend: # We really have a line end
# Put the rest back together and keep it until the next call
self.charbuffer = self._empty_charbuffer.join(lines[1:]) + \
@@ -575,7 +575,7 @@ class StreamReader(Codec):
# we didn't get anything or this was our only try
if not data or size is not None:
if line and not keepends:
- line = line.splitlines(False)[0]
+ line = line.splitlines(keepends=False)[0]
break
if readsize < 8000:
readsize *= 2
@@ -803,7 +803,7 @@ class StreamRecoder:
data = self.reader.read()
data, bytesencoded = self.encode(data, self.errors)
- return data.splitlines(1)
+ return data.splitlines(keepends=True)
def __next__(self):
diff --git a/Lib/collections.py b/Lib/collections/__init__.py
index eb20243..b1c82b4 100644
--- a/Lib/collections.py
+++ b/Lib/collections/__init__.py
@@ -1,10 +1,11 @@
__all__ = ['deque', 'defaultdict', 'namedtuple', 'UserDict', 'UserList',
- 'UserString', 'Counter', 'OrderedDict']
-# For bootstrapping reasons, the collection ABCs are defined in _abcoll.py.
-# They should however be considered an integral part of collections.py.
-from _abcoll import *
-import _abcoll
-__all__ += _abcoll.__all__
+ 'UserString', 'Counter', 'OrderedDict', 'ChainMap']
+
+# For backwards compatibility, continue to make the collections ABCs
+# available through the collections module.
+from collections.abc import *
+import collections.abc
+__all__ += collections.abc.__all__
from _collections import deque, defaultdict
from operator import itemgetter as _itemgetter
@@ -364,8 +365,9 @@ def namedtuple(typename, field_names, verbose=False, rename=False):
except SyntaxError as e:
raise SyntaxError(e.msg + ':\n\n' + class_definition)
result = namespace[typename]
+ result._source = class_definition
if verbose:
- print(class_definition)
+ print(result._source)
# For pickling to work, the __module__ variable needs to be set to the frame
# where the named tuple is created. Bypass this step in environments where
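The retained _source attribute (added just above) makes the generated class inspectable:

    from collections import namedtuple

    Point = namedtuple('Point', ['x', 'y'])
    print(Point._source)      # full source of the generated class definition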
@@ -674,12 +676,86 @@ class Counter(dict):
result[elem] = newcount
return result
+ def __pos__(self):
+ 'Adds an empty counter, effectively stripping negative and zero counts'
+ return self + Counter()
+
+ def __neg__(self):
+ '''Subtracts from an empty counter. Strips positive and zero counts,
+ and flips the sign on negative counts.
+
+ '''
+ return Counter() - self
+
+ def _keep_positive(self):
+ '''Internal method to strip elements with a negative or zero count'''
+ nonpositive = [elem for elem, count in self.items() if not count > 0]
+ for elem in nonpositive:
+ del self[elem]
+ return self
+
+ def __iadd__(self, other):
+ '''Inplace add from another counter, keeping only positive counts.
+
+ >>> c = Counter('abbb')
+ >>> c += Counter('bcc')
+ >>> c
+ Counter({'b': 4, 'c': 2, 'a': 1})
+
+ '''
+ for elem, count in other.items():
+ self[elem] += count
+ return self._keep_positive()
+
+ def __isub__(self, other):
+        '''Inplace subtract another counter, keeping only results with positive counts.
+
+ >>> c = Counter('abbbc')
+ >>> c -= Counter('bccd')
+ >>> c
+ Counter({'b': 2, 'a': 1})
+
+ '''
+ for elem, count in other.items():
+ self[elem] -= count
+ return self._keep_positive()
+
+ def __ior__(self, other):
+ '''Inplace union is the maximum of value from either counter.
+
+ >>> c = Counter('abbb')
+ >>> c |= Counter('bcc')
+ >>> c
+ Counter({'b': 3, 'c': 2, 'a': 1})
+
+ '''
+ for elem, other_count in other.items():
+ count = self[elem]
+ if other_count > count:
+ self[elem] = other_count
+ return self._keep_positive()
+
+ def __iand__(self, other):
+ '''Inplace intersection is the minimum of corresponding counts.
+
+ >>> c = Counter('abbb')
+ >>> c &= Counter('bcc')
+ >>> c
+ Counter({'b': 1})
+
+ '''
+ for elem, count in self.items():
+ other_count = other[elem]
+ if other_count < count:
+ self[elem] = other_count
+ return self._keep_positive()
+
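The new unary operations give a concise way to drop non-positive counts:

    from collections import Counter

    c = Counter(a=2, b=-4)
    assert +c == Counter(a=2)   # keeps only positive counts
    assert -c == Counter(b=4)   # flips negatives, drops the rest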
########################################################################
-### ChainMap (helper for configparser)
+### ChainMap (helper for configparser and string.Template)
########################################################################
-class _ChainMap(MutableMapping):
+class ChainMap(MutableMapping):
''' A ChainMap groups multiple dicts (or other mappings) together
to create a single, updateable view.
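Now that ChainMap is public, a quick usage sketch:

    from collections import ChainMap

    defaults = {'user': 'guest', 'color': 'red'}
    overrides = {'user': 'admin'}
    settings = ChainMap(overrides, defaults)
    assert settings['user'] == 'admin'   # first mapping wins
    assert settings['color'] == 'red'    # lookup falls through to defaults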
@@ -890,6 +966,8 @@ class UserList(MutableSequence):
def insert(self, i, item): self.data.insert(i, item)
def pop(self, i=-1): return self.data.pop(i)
def remove(self, item): self.data.remove(item)
+ def clear(self): self.data.clear()
+ def copy(self): return self.__class__(self)
def count(self, item): return self.data.count(item)
def index(self, item, *args): return self.data.index(item, *args)
def reverse(self): self.data.reverse()
@@ -1034,7 +1112,7 @@ class UserString(Sequence):
return self.data.split(sep, maxsplit)
def rsplit(self, sep=None, maxsplit=-1):
return self.data.rsplit(sep, maxsplit)
- def splitlines(self, keepends=0): return self.data.splitlines(keepends)
+ def splitlines(self, keepends=False): return self.data.splitlines(keepends)
def startswith(self, prefix, start=0, end=_sys.maxsize):
return self.data.startswith(prefix, start, end)
def strip(self, chars=None): return self.__class__(self.data.strip(chars))
diff --git a/Lib/_abcoll.py b/Lib/collections/abc.py
index 2417d18..7fbe84d 100644
--- a/Lib/_abcoll.py
+++ b/Lib/collections/abc.py
@@ -3,9 +3,7 @@
"""Abstract Base Classes (ABCs) for collections, according to PEP 3119.
-DON'T USE THIS MODULE DIRECTLY! The classes here should be imported
-via collections; they are defined here only to alleviate certain
-bootstrapping issues. Unit tests are in test_collections.
+Unit tests are in test_collections.
"""
from abc import ABCMeta, abstractmethod
@@ -48,6 +46,8 @@ dict_proxy = type(type.__dict__)
class Hashable(metaclass=ABCMeta):
+ __slots__ = ()
+
@abstractmethod
def __hash__(self):
return 0
@@ -65,6 +65,8 @@ class Hashable(metaclass=ABCMeta):
class Iterable(metaclass=ABCMeta):
+ __slots__ = ()
+
@abstractmethod
def __iter__(self):
while False:
@@ -80,6 +82,8 @@ class Iterable(metaclass=ABCMeta):
class Iterator(Iterable):
+ __slots__ = ()
+
@abstractmethod
def __next__(self):
raise StopIteration
@@ -111,6 +115,8 @@ Iterator.register(zip_iterator)
class Sized(metaclass=ABCMeta):
+ __slots__ = ()
+
@abstractmethod
def __len__(self):
return 0
@@ -125,6 +131,8 @@ class Sized(metaclass=ABCMeta):
class Container(metaclass=ABCMeta):
+ __slots__ = ()
+
@abstractmethod
def __contains__(self, x):
return False
@@ -139,6 +147,8 @@ class Container(metaclass=ABCMeta):
class Callable(metaclass=ABCMeta):
+ __slots__ = ()
+
@abstractmethod
def __call__(self, *args, **kwds):
return False
@@ -166,6 +176,8 @@ class Set(Sized, Iterable, Container):
then the other operations will automatically follow suit.
"""
+ __slots__ = ()
+
def __le__(self, other):
if not isinstance(other, Set):
return NotImplemented
@@ -277,6 +289,8 @@ Set.register(frozenset)
class MutableSet(Set):
+ __slots__ = ()
+
@abstractmethod
def add(self, value):
"""Add an element."""
@@ -350,6 +364,8 @@ MutableSet.register(set)
class Mapping(Sized, Iterable, Container):
+ __slots__ = ()
+
@abstractmethod
def __getitem__(self, key):
raise KeyError
@@ -453,6 +469,8 @@ ValuesView.register(dict_values)
class MutableMapping(Mapping):
+ __slots__ = ()
+
@abstractmethod
def __setitem__(self, key, value):
raise KeyError
@@ -532,6 +550,8 @@ class Sequence(Sized, Iterable, Container):
__getitem__, and __len__.
"""
+ __slots__ = ()
+
@abstractmethod
def __getitem__(self, index):
raise IndexError
@@ -577,12 +597,16 @@ class ByteString(Sequence):
XXX Should add all their methods.
"""
+ __slots__ = ()
+
ByteString.register(bytes)
ByteString.register(bytearray)
class MutableSequence(Sequence):
+ __slots__ = ()
+
@abstractmethod
def __setitem__(self, index, value):
raise IndexError
@@ -598,6 +622,13 @@ class MutableSequence(Sequence):
def append(self, value):
self.insert(len(self), value)
+ def clear(self):
+ try:
+ while True:
+ self.pop()
+ except IndexError:
+ pass
+
def reverse(self):
n = len(self)
for i in range(n//2):
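Any concrete MutableSequence inherits the new clear() mixin for free; a minimal illustrative subclass:

    from collections.abc import MutableSequence

    class Stack(MutableSequence):
        def __init__(self):              self._items = []
        def __getitem__(self, i):        return self._items[i]
        def __setitem__(self, i, value): self._items[i] = value
        def __delitem__(self, i):        del self._items[i]
        def __len__(self):               return len(self._items)
        def insert(self, i, value):      self._items.insert(i, value)

    s = Stack()
    s.append(1); s.append(2)
    s.clear()                            # pops until IndexError, per the mixin
    assert len(s) == 0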
diff --git a/Lib/concurrent/futures/_base.py b/Lib/concurrent/futures/_base.py
index 79b91d4..3b097b5 100644
--- a/Lib/concurrent/futures/_base.py
+++ b/Lib/concurrent/futures/_base.py
@@ -4,7 +4,6 @@
__author__ = 'Brian Quinlan (brian@sweetapp.com)'
import collections
-import functools
import logging
import threading
import time
@@ -536,15 +535,19 @@ class Executor(object):
fs = [self.submit(fn, *args) for args in zip(*iterables)]
- try:
- for future in fs:
- if timeout is None:
- yield future.result()
- else:
- yield future.result(end_time - time.time())
- finally:
- for future in fs:
- future.cancel()
+ # Yield must be hidden in closure so that the futures are submitted
+ # before the first iterator value is required.
+ def result_iterator():
+ try:
+ for future in fs:
+ if timeout is None:
+ yield future.result()
+ else:
+ yield future.result(end_time - time.time())
+ finally:
+ for future in fs:
+ future.cancel()
+ return result_iterator()
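The closure matters because a generator function runs none of its body until first iterated; the fix keeps submission eager while results stay lazy. A stripped-down sketch of the idea (names are illustrative):

    def eager_map(submit, fn, iterable):
        futures = [submit(fn, x) for x in iterable]   # submitted immediately
        def result_iterator():
            try:
                for future in futures:
                    yield future.result()             # consumed lazily
            finally:
                for future in futures:
                    future.cancel()
        return result_iterator()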
def shutdown(self, wait=True):
"""Clean-up the resources associated with the Executor.
diff --git a/Lib/concurrent/futures/process.py b/Lib/concurrent/futures/process.py
index d3bbe2c..04238a7 100644
--- a/Lib/concurrent/futures/process.py
+++ b/Lib/concurrent/futures/process.py
@@ -46,9 +46,12 @@ Process #1..n:
__author__ = 'Brian Quinlan (brian@sweetapp.com)'
import atexit
+import os
from concurrent.futures import _base
import queue
import multiprocessing
+from multiprocessing.queues import SimpleQueue, Full
+from multiprocessing.connection import wait
import threading
import weakref
@@ -121,7 +124,7 @@ def _process_worker(call_queue, result_queue):
call_item = call_queue.get(block=True)
if call_item is None:
# Wake up queue management thread
- result_queue.put(None)
+ result_queue.put(os.getpid())
return
try:
r = call_item.fn(*call_item.args, **call_item.kwargs)
@@ -193,46 +196,92 @@ def _queue_management_worker(executor_reference,
result_queue: A multiprocessing.Queue of _ResultItems generated by the
process workers.
"""
- nb_shutdown_processes = 0
- def shutdown_one_process():
- """Tell a worker to terminate, which will in turn wake us again"""
- nonlocal nb_shutdown_processes
- call_queue.put(None)
- nb_shutdown_processes += 1
+ executor = None
+
+ def shutting_down():
+ return _shutdown or executor is None or executor._shutdown_thread
+
+ def shutdown_worker():
+ # This is an upper bound
+ nb_children_alive = sum(p.is_alive() for p in processes.values())
+ for i in range(0, nb_children_alive):
+ call_queue.put_nowait(None)
+ # Release the queue's resources as soon as possible.
+ call_queue.close()
+ # If .join() is not called on the created processes then
+ # some multiprocessing.Queue methods may deadlock on Mac OS X.
+ for p in processes.values():
+ p.join()
+
+ reader = result_queue._reader
+
while True:
_add_call_item_to_queue(pending_work_items,
work_ids_queue,
call_queue)
- result_item = result_queue.get(block=True)
- if result_item is not None:
- work_item = pending_work_items[result_item.work_id]
- del pending_work_items[result_item.work_id]
-
- if result_item.exception:
- work_item.future.set_exception(result_item.exception)
- else:
- work_item.future.set_result(result_item.result)
+ sentinels = [p.sentinel for p in processes.values()]
+ assert sentinels
+ ready = wait([reader] + sentinels)
+ if reader in ready:
+ result_item = reader.recv()
+ else:
+ # Mark the process pool broken so that submits fail right now.
+ executor = executor_reference()
+ if executor is not None:
+ executor._broken = True
+ executor._shutdown_thread = True
+ executor = None
+ # All futures in flight must be marked failed
+ for work_id, work_item in pending_work_items.items():
+ work_item.future.set_exception(
+ BrokenProcessPool(
+ "A process in the process pool was "
+ "terminated abruptly while the future was "
+ "running or pending."
+ ))
+ pending_work_items.clear()
+ # Terminate remaining workers forcibly: the queues or their
+ # locks may be in a dirty state and block forever.
+ for p in processes.values():
+ p.terminate()
+ shutdown_worker()
+ return
+ if isinstance(result_item, int):
+ # Clean shutdown of a worker using its PID
+ # (avoids marking the executor broken)
+ assert shutting_down()
+ p = processes.pop(result_item)
+ p.join()
+ if not processes:
+ shutdown_worker()
+ return
+ elif result_item is not None:
+ work_item = pending_work_items.pop(result_item.work_id, None)
+ # work_item can be None if another process terminated (see above)
+ if work_item is not None:
+ if result_item.exception:
+ work_item.future.set_exception(result_item.exception)
+ else:
+ work_item.future.set_result(result_item.result)
# Check whether we should start shutting down.
executor = executor_reference()
# No more work items can be added if:
# - The interpreter is shutting down OR
# - The executor that owns this worker has been collected OR
# - The executor that owns this worker has been shutdown.
- if _shutdown or executor is None or executor._shutdown_thread:
- # Since no new work items can be added, it is safe to shutdown
- # this thread if there are no pending work items.
- if not pending_work_items:
- while nb_shutdown_processes < len(processes):
- shutdown_one_process()
- # If .join() is not called on the created processes then
- # some multiprocessing.Queue methods may deadlock on Mac OS
- # X.
- for p in processes:
- p.join()
- call_queue.close()
- return
- del executor
+ if shutting_down():
+ try:
+ # Since no new work items can be added, it is safe to shutdown
+ # this thread if there are no pending work items.
+ if not pending_work_items:
+ shutdown_worker()
+ return
+ except Full:
+ # This is not a problem: we will eventually be woken up (in
+ # result_queue.get()) and be able to send a sentinel again.
+ pass
+ executor = None
_system_limits_checked = False
_system_limited = None
@@ -243,7 +292,6 @@ def _check_system_limits():
raise NotImplementedError(_system_limited)
_system_limits_checked = True
try:
- import os
nsems_max = os.sysconf("SC_SEM_NSEMS_MAX")
except (AttributeError, ValueError):
# sysconf not available or setting not available
@@ -259,6 +307,14 @@ def _check_system_limits():
_system_limited = "system provides too few semaphores (%d available, 256 necessary)" % nsems_max
raise NotImplementedError(_system_limited)
+
+class BrokenProcessPool(RuntimeError):
+ """
+ Raised when a process in a ProcessPoolExecutor terminated abruptly
+ while a future was in the running state.
+ """
+
+
class ProcessPoolExecutor(_base.Executor):
def __init__(self, max_workers=None):
"""Initializes a new ProcessPoolExecutor instance.
@@ -280,14 +336,20 @@ class ProcessPoolExecutor(_base.Executor):
# because futures in the call queue cannot be cancelled.
self._call_queue = multiprocessing.Queue(self._max_workers +
EXTRA_QUEUED_CALLS)
- self._result_queue = multiprocessing.Queue()
+ # Killed worker processes can produce spurious "broken pipe"
+ # tracebacks in the queue's own worker thread. But we detect killed
+ # processes anyway, so silence the tracebacks.
+ self._call_queue._ignore_epipe = True
+ self._result_queue = SimpleQueue()
self._work_ids = queue.Queue()
self._queue_management_thread = None
- self._processes = set()
+ # Map of pids to processes
+ self._processes = {}
# Shutdown is a two-step process.
self._shutdown_thread = False
self._shutdown_lock = threading.Lock()
+ self._broken = False
self._queue_count = 0
self._pending_work_items = {}
@@ -297,6 +359,8 @@ class ProcessPoolExecutor(_base.Executor):
def weakref_cb(_, q=self._result_queue):
q.put(None)
if self._queue_management_thread is None:
+ # Start the processes so that their sentinels are known.
+ self._adjust_process_count()
self._queue_management_thread = threading.Thread(
target=_queue_management_worker,
args=(weakref.ref(self, weakref_cb),
@@ -316,10 +380,13 @@ class ProcessPoolExecutor(_base.Executor):
args=(self._call_queue,
self._result_queue))
p.start()
- self._processes.add(p)
+ self._processes[p.pid] = p
def submit(self, fn, *args, **kwargs):
with self._shutdown_lock:
+ if self._broken:
+ raise BrokenProcessPool('A child process terminated '
+ 'abruptly, the process pool is not usable anymore')
if self._shutdown_thread:
raise RuntimeError('cannot schedule new futures after shutdown')
@@ -333,7 +400,6 @@ class ProcessPoolExecutor(_base.Executor):
self._result_queue.put(None)
self._start_queue_management_thread()
- self._adjust_process_count()
return f
submit.__doc__ = _base.Executor.submit.__doc__
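
Taken together, the sentinel/wait() machinery lets a dead worker be detected instead of deadlocking the queue. A minimal POSIX-only sketch of the new failure mode (the crash() helper is ours, not from the patch):

    import os
    import signal
    from concurrent.futures import ProcessPoolExecutor
    from concurrent.futures.process import BrokenProcessPool

    def crash():
        # Kill the worker abruptly; an ordinary exception would not break the pool.
        os.kill(os.getpid(), signal.SIGKILL)

    if __name__ == '__main__':
        with ProcessPoolExecutor(max_workers=1) as executor:
            future = executor.submit(crash)
            try:
                future.result()
            except BrokenProcessPool as exc:
                print('pool marked broken:', exc)
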
diff --git a/Lib/concurrent/futures/thread.py b/Lib/concurrent/futures/thread.py
index fbac088..95bb682 100644
--- a/Lib/concurrent/futures/thread.py
+++ b/Lib/concurrent/futures/thread.py
@@ -74,7 +74,7 @@ def _worker(executor_reference, work_queue):
work_queue.put(None)
return
del executor
- except BaseException as e:
+ except BaseException:
_base.LOGGER.critical('Exception in worker', exc_info=True)
class ThreadPoolExecutor(_base.Executor):
diff --git a/Lib/configparser.py b/Lib/configparser.py
index d148b88..7bc4398 100644
--- a/Lib/configparser.py
+++ b/Lib/configparser.py
@@ -119,7 +119,8 @@ ConfigParser -- responsible for parsing a list of
between keys and values are surrounded by spaces.
"""
-from collections import MutableMapping, OrderedDict as _default_dict, _ChainMap
+from collections.abc import MutableMapping
+from collections import OrderedDict as _default_dict, ChainMap as _ChainMap
import functools
import io
import itertools
diff --git a/Lib/contextlib.py b/Lib/contextlib.py
index 5ebbbc6..2f8f00d 100644
--- a/Lib/contextlib.py
+++ b/Lib/contextlib.py
@@ -2,7 +2,6 @@
import sys
from functools import wraps
-from warnings import warn
__all__ = ["contextmanager", "closing", "ContextDecorator"]
diff --git a/Lib/copy.py b/Lib/copy.py
index 089d101..d96201e 100644
--- a/Lib/copy.py
+++ b/Lib/copy.py
@@ -173,8 +173,10 @@ def deepcopy(x, memo=None, _nil=[]):
"un(deep)copyable object of type %s" % cls)
y = _reconstruct(x, rv, 1, memo)
- memo[d] = y
- _keep_alive(x, memo) # Make sure x lives at least as long as d
+    # If the copy is the object itself (y is x), don't memoize.
+ if y is not x:
+ memo[d] = y
+ _keep_alive(x, memo) # Make sure x lives at least as long as d
return y
_deepcopy_dispatch = d = {}
@@ -214,9 +216,10 @@ def _deepcopy_tuple(x, memo):
y = []
for a in x:
y.append(deepcopy(a, memo))
- d = id(x)
+ # We're not going to put the tuple in the memo, but it's still important we
+ # check for it, in case the tuple contains recursive mutable structures.
try:
- return memo[d]
+ return memo[id(x)]
except KeyError:
pass
for i in range(len(x)):
@@ -225,7 +228,6 @@ def _deepcopy_tuple(x, memo):
break
else:
y = x
- memo[d] = y
return y
d[tuple] = _deepcopy_tuple
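
deepcopy() already returned the original object for a tuple holding only immutables; the change simply stops recording that identity result in the memo. A quick sketch of both paths:

    from copy import deepcopy

    t = (1, 'two', (3, 4))
    assert deepcopy(t) is t            # fully immutable: same object, not memoized

    u = (1, [2, 3])
    v = deepcopy(u)
    assert v is not u and v[1] is not u[1]   # a mutable member forces a real copy
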
@@ -321,68 +323,3 @@ del types
# Helper for instance creation without calling __init__
class _EmptyClass:
pass
-
-def _test():
- l = [None, 1, 2, 3.14, 'xyzzy', (1, 2), [3.14, 'abc'],
- {'abc': 'ABC'}, (), [], {}]
- l1 = copy(l)
- print(l1==l)
- l1 = map(copy, l)
- print(l1==l)
- l1 = deepcopy(l)
- print(l1==l)
- class C:
- def __init__(self, arg=None):
- self.a = 1
- self.arg = arg
- if __name__ == '__main__':
- import sys
- file = sys.argv[0]
- else:
- file = __file__
- self.fp = open(file)
- self.fp.close()
- def __getstate__(self):
- return {'a': self.a, 'arg': self.arg}
- def __setstate__(self, state):
- for key, value in state.items():
- setattr(self, key, value)
- def __deepcopy__(self, memo=None):
- new = self.__class__(deepcopy(self.arg, memo))
- new.a = self.a
- return new
- c = C('argument sketch')
- l.append(c)
- l2 = copy(l)
- print(l == l2)
- print(l)
- print(l2)
- l2 = deepcopy(l)
- print(l == l2)
- print(l)
- print(l2)
- l.append({l[1]: l, 'xyz': l[2]})
- l3 = copy(l)
- import reprlib
- print(map(reprlib.repr, l))
- print(map(reprlib.repr, l1))
- print(map(reprlib.repr, l2))
- print(map(reprlib.repr, l3))
- l3 = deepcopy(l)
- print(map(reprlib.repr, l))
- print(map(reprlib.repr, l1))
- print(map(reprlib.repr, l2))
- print(map(reprlib.repr, l3))
- class odict(dict):
- def __init__(self, d = {}):
- self.a = 99
- dict.__init__(self, d)
- def __setitem__(self, k, i):
- dict.__setitem__(self, k, i)
- self.a
- o = odict({"A" : "B"})
- x = deepcopy(o)
- print(o, x)
-
-if __name__ == '__main__':
- _test()
diff --git a/Lib/crypt.py b/Lib/crypt.py
new file mode 100644
index 0000000..e65b0cb
--- /dev/null
+++ b/Lib/crypt.py
@@ -0,0 +1,62 @@
+"""Wrapper to the POSIX crypt library call and associated functionality."""
+
+import _crypt
+import string
+from random import choice
+from collections import namedtuple
+
+
+_saltchars = string.ascii_letters + string.digits + './'
+
+
+class _Method(namedtuple('_Method', 'name ident salt_chars total_size')):
+
+ """Class representing a salt method per the Modular Crypt Format or the
+ legacy 2-character crypt method."""
+
+ def __repr__(self):
+ return '<crypt.METHOD_{}>'.format(self.name)
+
+
+def mksalt(method=None):
+ """Generate a salt for the specified method.
+
+ If not specified, the strongest available method will be used.
+
+ """
+ if method is None:
+ method = methods[0]
+ s = '${}$'.format(method.ident) if method.ident else ''
+ s += ''.join(choice(_saltchars) for _ in range(method.salt_chars))
+ return s
+
+
+def crypt(word, salt=None):
+ """Return a string representing the one-way hash of a password, with a salt
+ prepended.
+
+ If ``salt`` is not specified or is ``None``, the strongest
+ available method will be selected and a salt generated. Otherwise,
+ ``salt`` may be one of the ``crypt.METHOD_*`` values, or a string as
+ returned by ``crypt.mksalt()``.
+
+ """
+ if salt is None or isinstance(salt, _Method):
+ salt = mksalt(salt)
+ return _crypt.crypt(word, salt)
+
+
+# available salting/crypto methods
+METHOD_CRYPT = _Method('CRYPT', None, 2, 13)
+METHOD_MD5 = _Method('MD5', '1', 8, 34)
+METHOD_SHA256 = _Method('SHA256', '5', 16, 63)
+METHOD_SHA512 = _Method('SHA512', '6', 16, 106)
+
+methods = []
+for _method in (METHOD_SHA512, METHOD_SHA256, METHOD_MD5):
+ _result = crypt('', _method)
+ if _result and len(_result) == _method.total_size:
+ methods.append(_method)
+methods.append(METHOD_CRYPT)
+del _result, _method
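
Typical use of the new module, as the docstrings suggest (the salt and hash values shown are illustrative):

    import crypt

    salt = crypt.mksalt(crypt.METHOD_SHA512)    # e.g. '$6$I3CNnDvTKoo6Kwmt'
    hashed = crypt.crypt('s3kr1t', salt)
    # Verification: hashing again with the full hash as the salt must round-trip.
    assert crypt.crypt('s3kr1t', hashed) == hashed
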
diff --git a/Lib/ctypes/test/test_callbacks.py b/Lib/ctypes/test/test_callbacks.py
index c7207ea..5600b43 100644
--- a/Lib/ctypes/test/test_callbacks.py
+++ b/Lib/ctypes/test/test_callbacks.py
@@ -140,7 +140,7 @@ class Callbacks(unittest.TestCase):
def __del__(self):
gc.collect()
CFUNCTYPE(None)(lambda x=Nasty(): None)
-
+
try:
WINFUNCTYPE
diff --git a/Lib/ctypes/test/test_memfunctions.py b/Lib/ctypes/test/test_memfunctions.py
index aa2113b..aec4aaa 100644
--- a/Lib/ctypes/test/test_memfunctions.py
+++ b/Lib/ctypes/test/test_memfunctions.py
@@ -1,4 +1,5 @@
import sys
+from test import support
import unittest
from ctypes import *
@@ -49,6 +50,7 @@ class MemFunctionsTest(unittest.TestCase):
self.assertEqual(cast(a, POINTER(c_byte))[:7:7],
[97])
+ @support.refcount_test
def test_string_at(self):
s = string_at(b"foo bar")
# XXX The following may be wrong, depending on how Python
diff --git a/Lib/ctypes/test/test_parameters.py b/Lib/ctypes/test/test_parameters.py
index e83fd9a..9762fb9 100644
--- a/Lib/ctypes/test/test_parameters.py
+++ b/Lib/ctypes/test/test_parameters.py
@@ -73,13 +73,10 @@ class SimpleTypesTestCase(unittest.TestCase):
except ImportError:
## print "(No c_wchar_p)"
return
- s = "123"
- if sys.platform == "win32":
- self.assertTrue(c_wchar_p.from_param(s)._obj is s)
- self.assertRaises(TypeError, c_wchar_p.from_param, 42)
- # new in 0.9.1: convert (decode) ascii to unicode
- self.assertEqual(c_wchar_p.from_param("123")._obj, "123")
+ c_wchar_p.from_param("123")
+
+ self.assertRaises(TypeError, c_wchar_p.from_param, 42)
self.assertRaises(TypeError, c_wchar_p.from_param, b"123\377")
pa = c_wchar_p.from_param(c_wchar_p("123"))
diff --git a/Lib/ctypes/test/test_pep3118.py b/Lib/ctypes/test/test_pep3118.py
index fa6461f..ad13b01 100644
--- a/Lib/ctypes/test/test_pep3118.py
+++ b/Lib/ctypes/test/test_pep3118.py
@@ -25,14 +25,17 @@ class Test(unittest.TestCase):
v = memoryview(ob)
try:
self.assertEqual(normalize(v.format), normalize(fmt))
- if shape is not None:
+ if shape:
self.assertEqual(len(v), shape[0])
else:
self.assertEqual(len(v) * sizeof(itemtp), sizeof(ob))
self.assertEqual(v.itemsize, sizeof(itemtp))
self.assertEqual(v.shape, shape)
- # ctypes object always have a non-strided memory block
- self.assertEqual(v.strides, None)
+ # XXX Issue #12851: PyCData_NewGetBuffer() must provide strides
+ # if requested. memoryview currently reconstructs missing
+ # stride information, so this assert will fail.
+ # self.assertEqual(v.strides, ())
+
# they are always read/write
self.assertFalse(v.readonly)
@@ -52,14 +55,15 @@ class Test(unittest.TestCase):
v = memoryview(ob)
try:
self.assertEqual(v.format, fmt)
- if shape is not None:
+ if shape:
self.assertEqual(len(v), shape[0])
else:
self.assertEqual(len(v) * sizeof(itemtp), sizeof(ob))
self.assertEqual(v.itemsize, sizeof(itemtp))
self.assertEqual(v.shape, shape)
- # ctypes object always have a non-strided memory block
- self.assertEqual(v.strides, None)
+ # XXX Issue #12851
+ # self.assertEqual(v.strides, ())
+
# they are always read/write
self.assertFalse(v.readonly)
@@ -110,34 +114,34 @@ native_types = [
## simple types
- (c_char, "<c", None, c_char),
- (c_byte, "<b", None, c_byte),
- (c_ubyte, "<B", None, c_ubyte),
- (c_short, "<h", None, c_short),
- (c_ushort, "<H", None, c_ushort),
+ (c_char, "<c", (), c_char),
+ (c_byte, "<b", (), c_byte),
+ (c_ubyte, "<B", (), c_ubyte),
+ (c_short, "<h", (), c_short),
+ (c_ushort, "<H", (), c_ushort),
# c_int and c_uint may be aliases to c_long
- #(c_int, "<i", None, c_int),
- #(c_uint, "<I", None, c_uint),
+ #(c_int, "<i", (), c_int),
+ #(c_uint, "<I", (), c_uint),
- (c_long, "<l", None, c_long),
- (c_ulong, "<L", None, c_ulong),
+ (c_long, "<l", (), c_long),
+ (c_ulong, "<L", (), c_ulong),
# c_longlong and c_ulonglong are aliases on 64-bit platforms
#(c_longlong, "<q", None, c_longlong),
#(c_ulonglong, "<Q", None, c_ulonglong),
- (c_float, "<f", None, c_float),
- (c_double, "<d", None, c_double),
+ (c_float, "<f", (), c_float),
+ (c_double, "<d", (), c_double),
# c_longdouble may be an alias to c_double
- (c_bool, "<?", None, c_bool),
- (py_object, "<O", None, py_object),
+ (c_bool, "<?", (), c_bool),
+ (py_object, "<O", (), py_object),
## pointers
- (POINTER(c_byte), "&<b", None, POINTER(c_byte)),
- (POINTER(POINTER(c_long)), "&&<l", None, POINTER(POINTER(c_long))),
+ (POINTER(c_byte), "&<b", (), POINTER(c_byte)),
+ (POINTER(POINTER(c_long)), "&&<l", (), POINTER(POINTER(c_long))),
## arrays and pointers
@@ -145,32 +149,32 @@ native_types = [
(c_float * 4 * 3 * 2, "(2,3,4)<f", (2,3,4), c_float),
(POINTER(c_short) * 2, "(2)&<h", (2,), POINTER(c_short)),
(POINTER(c_short) * 2 * 3, "(3,2)&<h", (3,2,), POINTER(c_short)),
- (POINTER(c_short * 2), "&(2)<h", None, POINTER(c_short)),
+ (POINTER(c_short * 2), "&(2)<h", (), POINTER(c_short)),
## structures and unions
- (Point, "T{<l:x:<l:y:}", None, Point),
+ (Point, "T{<l:x:<l:y:}", (), Point),
# packed structures do not implement the pep
- (PackedPoint, "B", None, PackedPoint),
- (Point2, "T{<l:x:<l:y:}", None, Point2),
- (EmptyStruct, "T{}", None, EmptyStruct),
+ (PackedPoint, "B", (), PackedPoint),
+ (Point2, "T{<l:x:<l:y:}", (), Point2),
+ (EmptyStruct, "T{}", (), EmptyStruct),
    # the pep doesn't support unions
- (aUnion, "B", None, aUnion),
+ (aUnion, "B", (), aUnion),
## pointer to incomplete structure
- (Incomplete, "B", None, Incomplete),
- (POINTER(Incomplete), "&B", None, POINTER(Incomplete)),
+ (Incomplete, "B", (), Incomplete),
+ (POINTER(Incomplete), "&B", (), POINTER(Incomplete)),
# 'Complete' is a structure that starts incomplete, but is completed after the
# pointer type to it has been created.
- (Complete, "T{<l:a:}", None, Complete),
+ (Complete, "T{<l:a:}", (), Complete),
# Unfortunately the pointer format string is not fixed...
- (POINTER(Complete), "&B", None, POINTER(Complete)),
+ (POINTER(Complete), "&B", (), POINTER(Complete)),
## other
# function signatures are not implemented
- (CFUNCTYPE(None), "X{}", None, CFUNCTYPE(None)),
+ (CFUNCTYPE(None), "X{}", (), CFUNCTYPE(None)),
]
@@ -186,10 +190,10 @@ class LEPoint(LittleEndianStructure):
# and little endian machines.
#
endian_types = [
- (BEPoint, "T{>l:x:>l:y:}", None, BEPoint),
- (LEPoint, "T{<l:x:<l:y:}", None, LEPoint),
- (POINTER(BEPoint), "&T{>l:x:>l:y:}", None, POINTER(BEPoint)),
- (POINTER(LEPoint), "&T{<l:x:<l:y:}", None, POINTER(LEPoint)),
+ (BEPoint, "T{>l:x:>l:y:}", (), BEPoint),
+ (LEPoint, "T{<l:x:<l:y:}", (), LEPoint),
+ (POINTER(BEPoint), "&T{>l:x:>l:y:}", (), POINTER(BEPoint)),
+ (POINTER(LEPoint), "&T{<l:x:<l:y:}", (), POINTER(LEPoint)),
]
if __name__ == "__main__":
diff --git a/Lib/ctypes/test/test_python_api.py b/Lib/ctypes/test/test_python_api.py
index 1f4c603..9de3980 100644
--- a/Lib/ctypes/test/test_python_api.py
+++ b/Lib/ctypes/test/test_python_api.py
@@ -1,5 +1,6 @@
from ctypes import *
import unittest, sys
+from test import support
from ctypes.test import is_resource_enabled
################################################################
@@ -25,6 +26,7 @@ class PythonAPITestCase(unittest.TestCase):
self.assertEqual(PyBytes_FromStringAndSize(b"abcdefghi", 3), b"abc")
+ @support.refcount_test
def test_PyString_FromString(self):
pythonapi.PyBytes_FromString.restype = py_object
pythonapi.PyBytes_FromString.argtypes = (c_char_p,)
@@ -56,6 +58,7 @@ class PythonAPITestCase(unittest.TestCase):
del res
self.assertEqual(grc(42), ref42)
+ @support.refcount_test
def test_PyObj_FromPtr(self):
s = "abc def ghi jkl"
ref = grc(s)
diff --git a/Lib/ctypes/test/test_refcounts.py b/Lib/ctypes/test/test_refcounts.py
index 35a81aa..5613e7a 100644
--- a/Lib/ctypes/test/test_refcounts.py
+++ b/Lib/ctypes/test/test_refcounts.py
@@ -1,4 +1,5 @@
import unittest
+from test import support
import ctypes
import gc
@@ -10,6 +11,7 @@ dll = ctypes.CDLL(_ctypes_test.__file__)
class RefcountTestCase(unittest.TestCase):
+ @support.refcount_test
def test_1(self):
from sys import getrefcount as grc
@@ -34,6 +36,7 @@ class RefcountTestCase(unittest.TestCase):
self.assertEqual(grc(callback), 2)
+ @support.refcount_test
def test_refcount(self):
from sys import getrefcount as grc
def func(*args):
diff --git a/Lib/ctypes/test/test_stringptr.py b/Lib/ctypes/test/test_stringptr.py
index 3d25fa5..95cd161 100644
--- a/Lib/ctypes/test/test_stringptr.py
+++ b/Lib/ctypes/test/test_stringptr.py
@@ -1,4 +1,5 @@
import unittest
+from test import support
from ctypes import *
import _ctypes_test
@@ -7,6 +8,7 @@ lib = CDLL(_ctypes_test.__file__)
class StringPtrTestCase(unittest.TestCase):
+ @support.refcount_test
def test__POINTER_c_char(self):
class X(Structure):
_fields_ = [("str", POINTER(c_char))]
diff --git a/Lib/ctypes/util.py b/Lib/ctypes/util.py
index 1bb7d1d..97d0c2f 100644
--- a/Lib/ctypes/util.py
+++ b/Lib/ctypes/util.py
@@ -1,5 +1,6 @@
import sys, os
import contextlib
+import subprocess
# find_library(name) returns the pathname of a library, or None.
if os.name == "nt":
@@ -136,16 +137,12 @@ elif os.name == "posix":
rv = f.close()
if rv == 10:
raise OSError('objdump command not found')
- with contextlib.closing(os.popen(cmd)) as f:
- data = f.read()
- res = re.search(r'\sSONAME\s+([^\s]+)', data)
+ res = re.search(r'\sSONAME\s+([^\s]+)', dump)
if not res:
return None
return res.group(1)
- if (sys.platform.startswith("freebsd")
- or sys.platform.startswith("openbsd")
- or sys.platform.startswith("dragonfly")):
+ if sys.platform.startswith(("freebsd", "openbsd", "dragonfly")):
def _num_version(libname):
# "libxyz.so.MAJOR.MINOR" => [ MAJOR, MINOR ]
@@ -187,13 +184,19 @@ elif os.name == "posix":
abi_type = mach_map.get(machine, 'libc6')
# XXX assuming GLIBC's ldconfig (with option -p)
- expr = r'\s+(lib%s\.[^\s]+)\s+\(%s' % (re.escape(name), abi_type)
- with contextlib.closing(os.popen('LC_ALL=C LANG=C /sbin/ldconfig -p 2>/dev/null')) as f:
- data = f.read()
- res = re.search(expr, data)
- if not res:
- return None
- return res.group(1)
+ regex = os.fsencode(
+ '\s+(lib%s\.[^\s]+)\s+\(%s' % (re.escape(name), abi_type))
+ try:
+ with subprocess.Popen(['/sbin/ldconfig', '-p'],
+ stdin=subprocess.DEVNULL,
+ stderr=subprocess.DEVNULL,
+ stdout=subprocess.PIPE,
+ env={'LC_ALL': 'C', 'LANG': 'C'}) as p:
+ res = re.search(regex, p.stdout.read())
+ if res:
+ return os.fsdecode(res.group(1))
+ except OSError:
+ pass
def find_library(name):
return _findSoname_ldconfig(name) or _get_soname(_findLib_gcc(name))
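
Callers are unaffected by the switch from os.popen() to subprocess; the public entry point is still find_library(). For example:

    from ctypes.util import find_library

    # Returns a soname such as 'libm.so.6' on glibc systems, or None if not found.
    print(find_library('m'))
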
diff --git a/Lib/curses/__init__.py b/Lib/curses/__init__.py
index a3e9def..7bfa667 100644
--- a/Lib/curses/__init__.py
+++ b/Lib/curses/__init__.py
@@ -11,7 +11,6 @@ the package, and perhaps a particular module inside it.
"""
from _curses import *
-from curses.wrapper import wrapper
import os as _os
import sys as _sys
@@ -55,3 +54,48 @@ try:
has_key
except NameError:
from .has_key import has_key
+
+# Wrapper for the entire curses-based application. Runs a function which
+# should be the rest of your curses-based application. If the application
+# raises an exception, wrapper() will restore the terminal to a sane state so
+# you can read the resulting traceback.
+
+def wrapper(func, *args, **kwds):
+ """Wrapper function that initializes curses and calls another function,
+ restoring normal keyboard/screen behavior on error.
+ The callable object 'func' is then passed the main window 'stdscr'
+ as its first argument, followed by any other arguments passed to
+ wrapper().
+ """
+
+ try:
+ # Initialize curses
+ stdscr = initscr()
+
+ # Turn off echoing of keys, and enter cbreak mode,
+ # where no buffering is performed on keyboard input
+ noecho()
+ cbreak()
+
+ # In keypad mode, escape sequences for special keys
+ # (like the cursor keys) will be interpreted and
+ # a special value like curses.KEY_LEFT will be returned
+ stdscr.keypad(1)
+
+ # Start color, too. Harmless if the terminal doesn't have
+        # color; user can test with has_colors() later on.  The try/except
+ # works around a minor bit of over-conscientiousness in the curses
+ # module -- the error return from C start_color() is ignorable.
+ try:
+ start_color()
+ except:
+ pass
+
+ return func(stdscr, *args, **kwds)
+ finally:
+ # Set everything back to normal
+ if 'stdscr' in locals():
+ stdscr.keypad(0)
+ echo()
+ nocbreak()
+ endwin()
diff --git a/Lib/curses/wrapper.py b/Lib/curses/wrapper.py
deleted file mode 100644
index 5183ce7..0000000
--- a/Lib/curses/wrapper.py
+++ /dev/null
@@ -1,50 +0,0 @@
-"""curses.wrapper
-
-Contains one function, wrapper(), which runs another function which
-should be the rest of your curses-based application. If the
-application raises an exception, wrapper() will restore the terminal
-to a sane state so you can read the resulting traceback.
-
-"""
-
-import curses
-
-def wrapper(func, *args, **kwds):
- """Wrapper function that initializes curses and calls another function,
- restoring normal keyboard/screen behavior on error.
- The callable object 'func' is then passed the main window 'stdscr'
- as its first argument, followed by any other arguments passed to
- wrapper().
- """
-
- try:
- # Initialize curses
- stdscr = curses.initscr()
-
- # Turn off echoing of keys, and enter cbreak mode,
- # where no buffering is performed on keyboard input
- curses.noecho()
- curses.cbreak()
-
- # In keypad mode, escape sequences for special keys
- # (like the cursor keys) will be interpreted and
- # a special value like curses.KEY_LEFT will be returned
- stdscr.keypad(1)
-
- # Start color, too. Harmless if the terminal doesn't have
- # color; user can test with has_color() later on. The try/catch
- # works around a minor bit of over-conscientiousness in the curses
- # module -- the error return from C start_color() is ignorable.
- try:
- curses.start_color()
- except:
- pass
-
- return func(stdscr, *args, **kwds)
- finally:
- # Set everything back to normal
- if 'stdscr' in locals():
- stdscr.keypad(0)
- curses.echo()
- curses.nocbreak()
- curses.endwin()
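
The relocated wrapper is used exactly as before; only the import changes (curses.wrapper the module is gone, curses.wrapper the function remains). A minimal sketch:

    import curses

    def main(stdscr):
        stdscr.addstr(0, 0, 'Hello from curses; press any key to exit.')
        stdscr.getkey()

    curses.wrapper(main)   # terminal state is restored even if main() raises
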
diff --git a/Lib/datetime.py b/Lib/datetime.py
index 65f95d2..59f3c68 100644
--- a/Lib/datetime.py
+++ b/Lib/datetime.py
@@ -172,10 +172,6 @@ def _format_time(hh, mm, ss, us):
# Correctly substitute for %z and %Z escapes in strftime formats.
def _wrap_strftime(object, format, timetuple):
- year = timetuple[0]
- if year < 1000:
- raise ValueError("year=%d is before 1000; the datetime strftime() "
- "methods require year >= 1000" % year)
# Don't call utcoffset() or tzname() unless actually needed.
freplace = None # the string to use for %f
zreplace = None # the string to use for %z
@@ -1364,7 +1360,7 @@ class datetime(date):
converter = _time.localtime if tz is None else _time.gmtime
t, frac = divmod(t, 1.0)
- us = round(frac * 1e6)
+ us = int(frac * 1e6)
# If timestamp is less than one microsecond smaller than a
# full second, us can be rounded up to 1000000. In this case,
@@ -1384,7 +1380,7 @@ class datetime(date):
def utcfromtimestamp(cls, t):
"Construct a UTC datetime from a POSIX timestamp (like time.time())."
t, frac = divmod(t, 1.0)
- us = round(frac * 1e6)
+ us = int(frac * 1e6)
# If timestamp is less than one microsecond smaller than a
# full second, us can be rounded up to 1000000. In this case,
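
Truncating with int() instead of round() keeps the microseconds from spilling into the next second. A small sketch of the boundary case:

    from datetime import datetime

    dt = datetime.utcfromtimestamp(1.9999999)
    # round() would have produced us == 1000000; int() truncates instead.
    assert (dt.second, dt.microsecond) == (1, 999999)
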
diff --git a/Lib/decimal.py b/Lib/decimal.py
index f5277c5..e946182 100644
--- a/Lib/decimal.py
+++ b/Lib/decimal.py
@@ -1871,6 +1871,7 @@ class Decimal(object):
"""
other = _convert_other(other, raiseit=True)
+ third = _convert_other(third, raiseit=True)
# compute product; raise InvalidOperation if either operand is
# a signaling NaN or if the product is zero times infinity.
@@ -1900,7 +1901,6 @@ class Decimal(object):
str(int(self._int) * int(other._int)),
self._exp + other._exp)
- third = _convert_other(third, raiseit=True)
return product.__add__(third, context)
def _power_modulo(self, other, modulo, context=None):
@@ -2001,9 +2001,9 @@ class Decimal(object):
nonzero. For efficiency, other._exp should not be too large,
so that 10**abs(other._exp) is a feasible calculation."""
- # In the comments below, we write x for the value of self and
- # y for the value of other. Write x = xc*10**xe and y =
- # yc*10**ye.
+ # In the comments below, we write x for the value of self and y for the
+ # value of other. Write x = xc*10**xe and abs(y) = yc*10**ye, with xc
+ # and yc positive integers not divisible by 10.
# The main purpose of this method is to identify the *failure*
# of x**y to be exactly representable with as little effort as
@@ -2011,13 +2011,12 @@ class Decimal(object):
# eliminate the possibility of x**y being exact. Only if all
# these tests are passed do we go on to actually compute x**y.
- # Here's the main idea. First normalize both x and y. We
- # express y as a rational m/n, with m and n relatively prime
- # and n>0. Then for x**y to be exactly representable (at
- # *any* precision), xc must be the nth power of a positive
- # integer and xe must be divisible by n. If m is negative
- # then additionally xc must be a power of either 2 or 5, hence
- # a power of 2**n or 5**n.
+ # Here's the main idea. Express y as a rational number m/n, with m and
+ # n relatively prime and n>0. Then for x**y to be exactly
+ # representable (at *any* precision), xc must be the nth power of a
+ # positive integer and xe must be divisible by n. If y is negative
+ # then additionally xc must be a power of either 2 or 5, hence a power
+ # of 2**n or 5**n.
#
# There's a limit to how small |y| can be: if y=m/n as above
# then:
@@ -2089,21 +2088,43 @@ class Decimal(object):
return None
# now xc is a power of 2; e is its exponent
e = _nbits(xc)-1
- # find e*y and xe*y; both must be integers
- if ye >= 0:
- y_as_int = yc*10**ye
- e = e*y_as_int
- xe = xe*y_as_int
- else:
- ten_pow = 10**-ye
- e, remainder = divmod(e*yc, ten_pow)
- if remainder:
- return None
- xe, remainder = divmod(xe*yc, ten_pow)
- if remainder:
- return None
-
- if e*65 >= p*93: # 93/65 > log(10)/log(5)
+
+ # We now have:
+ #
+ # x = 2**e * 10**xe, e > 0, and y < 0.
+ #
+ # The exact result is:
+ #
+ # x**y = 5**(-e*y) * 10**(e*y + xe*y)
+ #
+ # provided that both e*y and xe*y are integers. Note that if
+ # 5**(-e*y) >= 10**p, then the result can't be expressed
+ # exactly with p digits of precision.
+ #
+ # Using the above, we can guard against large values of ye.
+ # 93/65 is an upper bound for log(10)/log(5), so if
+ #
+ # ye >= len(str(93*p//65))
+ #
+ # then
+ #
+ # -e*y >= -y >= 10**ye > 93*p/65 > p*log(10)/log(5),
+ #
+ # so 5**(-e*y) >= 10**p, and the coefficient of the result
+ # can't be expressed in p digits.
+
+ # emax >= largest e such that 5**e < 10**p.
+ emax = p*93//65
+ if ye >= len(str(emax)):
+ return None
+
+ # Find -e*y and -xe*y; both must be integers
+ e = _decimal_lshift_exact(e * yc, ye)
+ xe = _decimal_lshift_exact(xe * yc, ye)
+ if e is None or xe is None:
+ return None
+
+ if e > emax:
return None
xc = 5**e
@@ -2117,19 +2138,20 @@ class Decimal(object):
while xc % 5 == 0:
xc //= 5
e -= 1
- if ye >= 0:
- y_as_integer = yc*10**ye
- e = e*y_as_integer
- xe = xe*y_as_integer
- else:
- ten_pow = 10**-ye
- e, remainder = divmod(e*yc, ten_pow)
- if remainder:
- return None
- xe, remainder = divmod(xe*yc, ten_pow)
- if remainder:
- return None
- if e*3 >= p*10: # 10/3 > log(10)/log(2)
+
+ # Guard against large values of ye, using the same logic as in
+ # the 'xc is a power of 2' branch. 10/3 is an upper bound for
+ # log(10)/log(2).
+ emax = p*10//3
+ if ye >= len(str(emax)):
+ return None
+
+ e = _decimal_lshift_exact(e * yc, ye)
+ xe = _decimal_lshift_exact(xe * yc, ye)
+ if e is None or xe is None:
+ return None
+
+ if e > emax:
return None
xc = 2**e
else:
@@ -3881,28 +3903,6 @@ class Context(object):
return nc
__copy__ = copy
- # _clamp is provided for backwards compatibility with third-party
- # code. May be removed in Python >= 3.3.
- def _get_clamp(self):
- "_clamp mirrors the clamp attribute. Its use is deprecated."
- import warnings
- warnings.warn('Use of the _clamp attribute is deprecated. '
- 'Please use clamp instead.',
- DeprecationWarning)
- return self.clamp
-
- def _set_clamp(self, clamp):
- "_clamp mirrors the clamp attribute. Its use is deprecated."
- import warnings
- warnings.warn('Use of the _clamp attribute is deprecated. '
- 'Please use clamp instead.',
- DeprecationWarning)
- self.clamp = clamp
-
- # don't bother with _del_clamp; no sane 3rd party code should
- # be deleting the _clamp attribute
- _clamp = property(_get_clamp, _set_clamp)
-
def _raise_error(self, condition, explanation = None, *args):
"""Handles an error
@@ -5529,6 +5529,27 @@ def _normalize(op1, op2, prec = 0):
_nbits = int.bit_length
+def _decimal_lshift_exact(n, e):
+ """ Given integers n and e, return n * 10**e if it's an integer, else None.
+
+ The computation is designed to avoid computing large powers of 10
+ unnecessarily.
+
+ >>> _decimal_lshift_exact(3, 4)
+ 30000
+ >>> _decimal_lshift_exact(300, -999999999) # returns None
+
+ """
+ if n == 0:
+ return 0
+ elif e >= 0:
+ return n * 10**e
+ else:
+        # val_n = exponent of the largest power of 10 dividing n.
+ str_n = str(abs(n))
+ val_n = len(str_n) - len(str_n.rstrip('0'))
+ return None if val_n < -e else n // 10**-e
+
def _sqrt_nearest(n, a):
"""Closest integer to the square root of the positive integer n. a is
an initial approximation to the square root. Any positive integer
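
The helper's contract, restated as assertions. It is module-private, so the direct import below is purely illustrative and only works when the pure-Python decimal implementation is in use (the C-accelerated _decimal does not expose it):

    from decimal import _decimal_lshift_exact   # private helper, for illustration

    assert _decimal_lshift_exact(3, 4) == 30000      # 3 * 10**4
    assert _decimal_lshift_exact(4500, -2) == 45     # exact division
    assert _decimal_lshift_exact(450, -2) is None    # 4.5 is not an integer
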
diff --git a/Lib/difflib.py b/Lib/difflib.py
index e6cc6ee..ae377d7 100644
--- a/Lib/difflib.py
+++ b/Lib/difflib.py
@@ -204,7 +204,7 @@ class SequenceMatcher:
# returning true iff the element is "junk" -- this has
# subtle but helpful effects on the algorithm, which I'll
# get around to writing up someday <0.9 wink>.
- # DON'T USE! Only __chain_b uses this. Use isbjunk.
+ # DON'T USE! Only __chain_b uses this. Use "in self.bjunk".
# bjunk
# the items in b for which isjunk is True.
# bpopular
@@ -287,7 +287,6 @@ class SequenceMatcher:
# when self.isjunk is defined, junk elements don't show up in this
# map at all, which stops the central find_longest_match method
# from starting any matching block at a junk element ...
- # also creates the fast isbjunk function ...
# b2j also does not contain entries for "popular" elements, meaning
# elements that account for more than 1 + 1% of the total elements, and
# when the sequence is reasonably large (>= 200 elements); this can
@@ -800,7 +799,7 @@ class Differ:
... 2. Explicit is better than implicit.
... 3. Simple is better than complex.
... 4. Complex is better than complicated.
- ... '''.splitlines(1)
+ ... '''.splitlines(keepends=True)
>>> len(text1)
4
>>> text1[0][-1]
@@ -809,7 +808,7 @@ class Differ:
... 3. Simple is better than complex.
... 4. Complicated is better than complex.
... 5. Flat is better than nested.
- ... '''.splitlines(1)
+ ... '''.splitlines(keepends=True)
Next we instantiate a Differ object:
@@ -896,8 +895,8 @@ class Differ:
Example:
- >>> print(''.join(Differ().compare('one\ntwo\nthree\n'.splitlines(1),
- ... 'ore\ntree\nemu\n'.splitlines(1))),
+ >>> print(''.join(Differ().compare('one\ntwo\nthree\n'.splitlines(True),
+ ... 'ore\ntree\nemu\n'.splitlines(True))),
... end="")
- one
? ^
@@ -1269,8 +1268,8 @@ def context_diff(a, b, fromfile='', tofile='',
Example:
- >>> print(''.join(context_diff('one\ntwo\nthree\nfour\n'.splitlines(1),
- ... 'zero\none\ntree\nfour\n'.splitlines(1), 'Original', 'Current')),
+ >>> print(''.join(context_diff('one\ntwo\nthree\nfour\n'.splitlines(True),
+ ... 'zero\none\ntree\nfour\n'.splitlines(True), 'Original', 'Current')),
... end="")
*** Original
--- Current
@@ -1339,8 +1338,8 @@ def ndiff(a, b, linejunk=None, charjunk=IS_CHARACTER_JUNK):
Example:
- >>> diff = ndiff('one\ntwo\nthree\n'.splitlines(1),
- ... 'ore\ntree\nemu\n'.splitlines(1))
+ >>> diff = ndiff('one\ntwo\nthree\n'.splitlines(keepends=True),
+ ... 'ore\ntree\nemu\n'.splitlines(keepends=True))
>>> print(''.join(diff), end="")
- one
? ^
@@ -2034,8 +2033,8 @@ def restore(delta, which):
Examples:
- >>> diff = ndiff('one\ntwo\nthree\n'.splitlines(1),
- ... 'ore\ntree\nemu\n'.splitlines(1))
+ >>> diff = ndiff('one\ntwo\nthree\n'.splitlines(keepends=True),
+ ... 'ore\ntree\nemu\n'.splitlines(keepends=True))
>>> diff = list(diff)
>>> print(''.join(restore(diff, 1)), end="")
one
diff --git a/Lib/distutils/__init__.py b/Lib/distutils/__init__.py
index 3697505..8b4261a 100644
--- a/Lib/distutils/__init__.py
+++ b/Lib/distutils/__init__.py
@@ -13,5 +13,5 @@ used from a setup script as
# Updated automatically by the Python release process.
#
#--start constants--
-__version__ = "3.2.3rc1"
+__version__ = "3.3.0a1"
#--end constants--
diff --git a/Lib/distutils/command/bdist_wininst.py b/Lib/distutils/command/bdist_wininst.py
index e3ed3ad..959a8bf 100644
--- a/Lib/distutils/command/bdist_wininst.py
+++ b/Lib/distutils/command/bdist_wininst.py
@@ -265,11 +265,11 @@ class bdist_wininst(Command):
cfgdata = cfgdata + b"\0"
if self.pre_install_script:
# We need to normalize newlines, so we open in text mode and
- # convert back to bytes. "latin1" simply avoids any possible
+ # convert back to bytes. "latin-1" simply avoids any possible
# failures.
with open(self.pre_install_script, "r",
- encoding="latin1") as script:
- script_data = script.read().encode("latin1")
+ encoding="latin-1") as script:
+ script_data = script.read().encode("latin-1")
cfgdata = cfgdata + script_data + b"\n\0"
else:
# empty pre-install script
diff --git a/Lib/distutils/command/build_ext.py b/Lib/distutils/command/build_ext.py
index 34b61bd..59d0cd2 100644
--- a/Lib/distutils/command/build_ext.py
+++ b/Lib/distutils/command/build_ext.py
@@ -239,8 +239,7 @@ class build_ext(Command):
# for extensions under Linux or Solaris with a shared Python library,
# Python's library directory must be appended to library_dirs
sysconfig.get_config_var('Py_ENABLE_SHARED')
- if ((sys.platform.startswith('linux') or sys.platform.startswith('gnu')
- or sys.platform.startswith('sunos'))
+ if (sys.platform.startswith(('linux', 'gnu', 'sunos'))
and sysconfig.get_config_var('Py_ENABLE_SHARED')):
if sys.executable.startswith(os.path.join(sys.exec_prefix, "bin")):
# building third party extensions
diff --git a/Lib/distutils/command/build_scripts.py b/Lib/distutils/command/build_scripts.py
index ec43477..4b5b22e 100644
--- a/Lib/distutils/command/build_scripts.py
+++ b/Lib/distutils/command/build_scripts.py
@@ -126,10 +126,9 @@ class build_scripts(Command):
"The shebang ({!r}) is not decodable "
"from the script encoding ({})"
.format(shebang, encoding))
- outf = open(outfile, "wb")
- outf.write(shebang)
- outf.writelines(f.readlines())
- outf.close()
+ with open(outfile, "wb") as outf:
+ outf.write(shebang)
+ outf.writelines(f.readlines())
if f:
f.close()
else:
diff --git a/Lib/distutils/tests/test_archive_util.py b/Lib/distutils/tests/test_archive_util.py
index 8edfab4..1afdd46 100644
--- a/Lib/distutils/tests/test_archive_util.py
+++ b/Lib/distutils/tests/test_archive_util.py
@@ -1,6 +1,8 @@
+# -*- coding: utf-8 -*-
"""Tests for distutils.archive_util."""
import unittest
import os
+import sys
import tarfile
from os.path import splitdrive
import warnings
@@ -25,6 +27,18 @@ try:
except ImportError:
ZLIB_SUPPORT = False
+def can_fs_encode(filename):
+ """
+ Return True if the filename can be saved in the file system.
+ """
+ if os.path.supports_unicode_filenames:
+ return True
+ try:
+ filename.encode(sys.getfilesystemencoding())
+ except UnicodeEncodeError:
+ return False
+ return True
+
class ArchiveUtilTestCase(support.TempdirManager,
support.LoggingSilencer,
@@ -32,6 +46,28 @@ class ArchiveUtilTestCase(support.TempdirManager,
@unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run')
def test_make_tarball(self):
+ self._make_tarball('archive')
+
+ @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run')
+ @unittest.skipUnless(can_fs_encode('årchiv'),
+ 'File system cannot handle this filename')
+ def test_make_tarball_latin1(self):
+ """
+        Mirror test_make_tarball, except filename contains Latin characters.
+ """
+ self._make_tarball('årchiv') # note this isn't a real word
+
+ @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run')
+ @unittest.skipUnless(can_fs_encode('のアーカイブ'),
+ 'File system cannot handle this filename')
+ def test_make_tarball_extended(self):
+ """
+ Mirror test_make_tarball, except filename contains extended
+        characters outside the Latin charset.
+ """
+        self._make_tarball('のアーカイブ') # Japanese for "archive"
+
+ def _make_tarball(self, target_name):
# creating something to tar
tmpdir = self.mkdtemp()
self.write_file([tmpdir, 'file1'], 'xxx')
@@ -43,7 +79,7 @@ class ArchiveUtilTestCase(support.TempdirManager,
unittest.skipUnless(splitdrive(tmpdir)[0] == splitdrive(tmpdir2)[0],
"Source and target should be on same drive")
- base_name = os.path.join(tmpdir2, 'archive')
+ base_name = os.path.join(tmpdir2, target_name)
# working with relative paths to avoid tar warnings
old_dir = os.getcwd()
@@ -58,7 +94,7 @@ class ArchiveUtilTestCase(support.TempdirManager,
self.assertTrue(os.path.exists(tarball))
# trying an uncompressed one
- base_name = os.path.join(tmpdir2, 'archive')
+ base_name = os.path.join(tmpdir2, target_name)
old_dir = os.getcwd()
os.chdir(tmpdir)
try:
diff --git a/Lib/distutils/tests/test_bdist_rpm.py b/Lib/distutils/tests/test_bdist_rpm.py
index ab7a1bf..b090b79 100644
--- a/Lib/distutils/tests/test_bdist_rpm.py
+++ b/Lib/distutils/tests/test_bdist_rpm.py
@@ -28,6 +28,11 @@ class BuildRpmTestCase(support.TempdirManager,
unittest.TestCase):
def setUp(self):
+ try:
+ sys.executable.encode("UTF-8")
+ except UnicodeEncodeError:
+ raise unittest.SkipTest("sys.executable is not encodable to UTF-8")
+
super(BuildRpmTestCase, self).setUp()
self.old_location = os.getcwd()
self.old_sys_argv = sys.argv, sys.argv[:]
@@ -42,7 +47,7 @@ class BuildRpmTestCase(support.TempdirManager,
# XXX I am unable yet to make this test work without
        # spurious stdout/stderr output under Mac OS X
- if sys.platform != 'linux2':
+ if not sys.platform.startswith('linux'):
return
# this test will run only if the rpm commands are found
@@ -86,7 +91,7 @@ class BuildRpmTestCase(support.TempdirManager,
# XXX I am unable yet to make this test work without
        # spurious stdout/stderr output under Mac OS X
- if sys.platform != 'linux2':
+ if not sys.platform.startswith('linux'):
return
# http://bugs.python.org/issue1533164
diff --git a/Lib/doctest.py b/Lib/doctest.py
index 234733e..aba98dc 100644
--- a/Lib/doctest.py
+++ b/Lib/doctest.py
@@ -458,7 +458,6 @@ class Example:
return hash((self.source, self.want, self.lineno, self.indent,
self.exc_msg))
-
class DocTest:
"""
A collection of doctest examples that should be run in a single
@@ -1367,7 +1366,7 @@ class DocTestRunner:
m = self.__LINECACHE_FILENAME_RE.match(filename)
if m and m.group('name') == self.test.name:
example = self.test.examples[int(m.group('examplenum'))]
- return example.source.splitlines(True)
+ return example.source.splitlines(keepends=True)
else:
return self.save_linecache_getlines(filename, module_globals)
@@ -1413,6 +1412,7 @@ class DocTestRunner:
# Note that the interactive output will go to *our*
# save_stdout, even if that's not the real sys.stdout; this
# allows us to write test cases for the set_trace behavior.
+ save_trace = sys.gettrace()
save_set_trace = pdb.set_trace
self.debugger = _OutputRedirectingPdb(save_stdout)
self.debugger.reset()
@@ -1432,6 +1432,7 @@ class DocTestRunner:
finally:
sys.stdout = save_stdout
pdb.set_trace = save_set_trace
+ sys.settrace(save_trace)
linecache.getlines = self.save_linecache_getlines
sys.displayhook = save_displayhook
if clear_globs:
@@ -1628,8 +1629,8 @@ class OutputChecker:
# Check if we should use diff.
if self._do_a_fancy_diff(want, got, optionflags):
# Split want & got into lines.
- want_lines = want.splitlines(True) # True == keep line ends
- got_lines = got.splitlines(True)
+ want_lines = want.splitlines(keepends=True)
+ got_lines = got.splitlines(keepends=True)
# Use difflib to find their differences.
if optionflags & REPORT_UDIFF:
diff = difflib.unified_diff(want_lines, got_lines, n=2)
diff --git a/Lib/email/_parseaddr.py b/Lib/email/_parseaddr.py
index a295757..0342469 100644
--- a/Lib/email/_parseaddr.py
+++ b/Lib/email/_parseaddr.py
@@ -47,6 +47,21 @@ def parsedate_tz(data):
Accounts for military timezones.
"""
+ res = _parsedate_tz(data)
+ if res[9] is None:
+ res[9] = 0
+ return tuple(res)
+
+def _parsedate_tz(data):
+ """Convert date to extended time tuple.
+
+ The last (additional) element is the time zone offset in seconds, except if
+ the timezone was specified as -0000. In that case the last element is
+ None. This indicates a UTC timestamp that explicitly declaims knowledge of
+ the source timezone, as opposed to a +0000 timestamp that indicates the
+ source timezone really was UTC.
+
+ """
data = data.split()
# The FWS after the comma after the day-of-week is optional, so search and
# adjust for this.
@@ -99,6 +114,14 @@ def parsedate_tz(data):
tss = '0'
elif len(tm) == 3:
[thh, tmm, tss] = tm
+ elif len(tm) == 1 and '.' in tm[0]:
+ # Some non-compliant MUAs use '.' to separate time elements.
+ tm = tm[0].split('.')
+ if len(tm) == 2:
+ [thh, tmm] = tm
+ tss = 0
+ elif len(tm) == 3:
+ [thh, tmm, tss] = tm
else:
return None
try:
@@ -130,6 +153,8 @@ def parsedate_tz(data):
tzoffset = int(tz)
except ValueError:
pass
+    if tzoffset == 0 and tz.startswith('-'):
+ tzoffset = None
# Convert a timezone offset into seconds ; -0500 -> -18000
if tzoffset:
if tzoffset < 0:
@@ -139,7 +164,7 @@ def parsedate_tz(data):
tzsign = 1
tzoffset = tzsign * ( (tzoffset//100)*3600 + (tzoffset % 100)*60)
# Daylight Saving Time flag is set to -1, since DST is unknown.
- return yy, mm, dd, thh, tmm, tss, 0, 1, -1, tzoffset
+ return [yy, mm, dd, thh, tmm, tss, 0, 1, -1, tzoffset]
def parsedate(data):
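
The public parsedate_tz() still maps -0000 to a zero offset; only the new private _parsedate_tz() preserves the distinction. A sketch:

    from email._parseaddr import parsedate_tz, _parsedate_tz

    date = 'Fri, 25 Nov 2011 14:30:00 -0000'
    print(parsedate_tz(date)[9])     # 0     (backward-compatible tuple)
    print(_parsedate_tz(date)[9])    # None  (offset knowledge explicitly declaimed)
    print(_parsedate_tz(date.replace('-0000', '+0000'))[9])   # 0
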
diff --git a/Lib/email/errors.py b/Lib/email/errors.py
index d52a624..c04deb4 100644
--- a/Lib/email/errors.py
+++ b/Lib/email/errors.py
@@ -32,7 +32,7 @@ class CharsetError(MessageError):
# These are parsing defects which the parser was able to work around.
-class MessageDefect:
+class MessageDefect(Exception):
"""Base class for a message defect."""
def __init__(self, line=None):
@@ -55,3 +55,6 @@ class MalformedHeaderDefect(MessageDefect):
class MultipartInvariantViolationDefect(MessageDefect):
"""A message claimed to be a multipart but no subparts were found."""
+
+class InvalidMultipartContentTransferEncodingDefect(MessageDefect):
+ """An invalid content transfer encoding was set on the multipart itself."""
diff --git a/Lib/email/feedparser.py b/Lib/email/feedparser.py
index 60a8325..e754d89 100644
--- a/Lib/email/feedparser.py
+++ b/Lib/email/feedparser.py
@@ -25,6 +25,7 @@ import re
from email import errors
from email import message
+from email import policy
NLCRE = re.compile('\r\n|\r|\n')
NLCRE_bol = re.compile('(\r\n|\r|\n)')
@@ -120,9 +121,6 @@ class BufferedSubFile(object):
# Reverse and insert at the front of the lines.
self._lines[:0] = lines[::-1]
- def is_closed(self):
- return self._closed
-
def __iter__(self):
return self
@@ -137,9 +135,16 @@ class BufferedSubFile(object):
class FeedParser:
"""A feed-style parser of email."""
- def __init__(self, _factory=message.Message):
- """_factory is called with no arguments to create a new message obj"""
+ def __init__(self, _factory=message.Message, *, policy=policy.default):
+ """_factory is called with no arguments to create a new message obj
+
+ The policy keyword specifies a policy object that controls a number of
+ aspects of the parser's operation. The default policy maintains
+ backward compatibility.
+
+ """
self._factory = _factory
+ self.policy = policy
self._input = BufferedSubFile()
self._msgstack = []
self._parse = self._parsegen().__next__
@@ -171,7 +176,8 @@ class FeedParser:
# Look for final set of defects
if root.get_content_maintype() == 'multipart' \
and not root.is_multipart():
- root.defects.append(errors.MultipartInvariantViolationDefect())
+ defect = errors.MultipartInvariantViolationDefect()
+ self.policy.handle_defect(root, defect)
return root
def _new_message(self):
@@ -284,7 +290,8 @@ class FeedParser:
# defined a boundary. That's a problem which we'll handle by
# reading everything until the EOF and marking the message as
# defective.
- self._cur.defects.append(errors.NoBoundaryInMultipartDefect())
+ defect = errors.NoBoundaryInMultipartDefect()
+ self.policy.handle_defect(self._cur, defect)
lines = []
for line in self._input:
if line is NeedMoreData:
@@ -293,6 +300,11 @@ class FeedParser:
lines.append(line)
self._cur.set_payload(EMPTYSTRING.join(lines))
return
+            # Make sure a valid content transfer encoding was specified
+            # per RFC 2045, section 6.4.
+ if (self._cur.get('content-transfer-encoding', '8bit').lower()
+ not in ('7bit', '8bit', 'binary')):
+ defect = errors.InvalidMultipartContentTransferEncodingDefect()
+ self.policy.handle_defect(self._cur, defect)
# Create a line match predicate which matches the inter-part
# boundary as well as the end-of-multipart boundary. Don't push
# this onto the input stream until we've scanned past the
@@ -388,7 +400,8 @@ class FeedParser:
# that as a defect and store the captured text as the payload.
# Everything from here to the EOF is epilogue.
if capturing_preamble:
- self._cur.defects.append(errors.StartBoundaryNotFoundDefect())
+ defect = errors.StartBoundaryNotFoundDefect()
+ self.policy.handle_defect(self._cur, defect)
self._cur.set_payload(EMPTYSTRING.join(preamble))
epilogue = []
for line in self._input:
@@ -440,7 +453,7 @@ class FeedParser:
# is illegal, so let's note the defect, store the illegal
# line, and ignore it for purposes of headers.
defect = errors.FirstHeaderLineIsContinuationDefect(line)
- self._cur.defects.append(defect)
+ self.policy.handle_defect(self._cur, defect)
continue
lastvalue.append(line)
continue
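
Routing defects through policy.handle_defect() means a strict policy can surface them as exceptions (MessageDefect now subclasses Exception). A sketch, assuming the strict policy listed in email.policy's __all__ raises each defect as its name suggests:

    from email import errors, policy
    from email.parser import Parser

    broken = 'Content-Type: multipart/mixed\n\nno boundary, no subparts\n'
    try:
        Parser(policy=policy.strict).parsestr(broken)
    except errors.MessageDefect as defect:
        print('raised:', type(defect).__name__)   # e.g. NoBoundaryInMultipartDefect
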
diff --git a/Lib/email/generator.py b/Lib/email/generator.py
index 430ee73..edba13f 100644
--- a/Lib/email/generator.py
+++ b/Lib/email/generator.py
@@ -13,8 +13,10 @@ import random
import warnings
from io import StringIO, BytesIO
+from email import policy
from email.header import Header
from email.message import _has_surrogates
+import email.charset as _charset
UNDERSCORE = '_'
NL = '\n' # XXX: no longer used by the code below.
@@ -33,7 +35,8 @@ class Generator:
# Public interface
#
- def __init__(self, outfp, mangle_from_=True, maxheaderlen=78):
+ def __init__(self, outfp, mangle_from_=True, maxheaderlen=None, *,
+ policy=policy.default):
"""Create the generator for message flattening.
outfp is the output file-like object for writing the message to. It
@@ -49,16 +52,23 @@ class Generator:
defined in the Header class. Set maxheaderlen to zero to disable
header wrapping. The default is 78, as recommended (but not required)
by RFC 2822.
+
+ The policy keyword specifies a policy object that controls a number of
+ aspects of the generator's operation. The default policy maintains
+ backward compatibility.
+
"""
self._fp = outfp
self._mangle_from_ = mangle_from_
- self._maxheaderlen = maxheaderlen
+ self._maxheaderlen = (maxheaderlen if maxheaderlen is not None else
+ policy.max_line_length)
+ self.policy = policy
def write(self, s):
# Just delegate to the file object
self._fp.write(s)
- def flatten(self, msg, unixfrom=False, linesep='\n'):
+ def flatten(self, msg, unixfrom=False, linesep=None):
r"""Print the message object tree rooted at msg to the output file
specified when the Generator instance was created.
@@ -70,17 +80,15 @@ class Generator:
Note that for subobjects, no From_ line is printed.
linesep specifies the characters used to indicate a new line in
- the output. The default value is the most useful for typical
- Python applications, but it can be set to \r\n to produce RFC-compliant
- line separators when needed.
+ the output. The default value is determined by the policy.
"""
# We use the _XXX constants for operating on data that comes directly
# from the msg, and _encoded_XXX constants for operating on data that
# has already been converted (to bytes in the BytesGenerator) and
# inserted into a temporary buffer.
- self._NL = linesep
- self._encoded_NL = self._encode(linesep)
+ self._NL = linesep if linesep is not None else self.policy.linesep
+ self._encoded_NL = self._encode(self._NL)
self._EMPTY = ''
        self._encoded_EMPTY = self._encode('')
if unixfrom:
@@ -297,10 +305,12 @@ class Generator:
# message/rfc822. Such messages are generated by, for example,
# Groupwise when forwarding unadorned messages. (Issue 7970.) So
# in that case we just emit the string body.
- payload = msg.get_payload()
+ payload = msg._payload
if isinstance(payload, list):
g.flatten(msg.get_payload(0), unixfrom=False, linesep=self._NL)
payload = s.getvalue()
+ else:
+ payload = self._encode(payload)
self._fp.write(payload)
# This used to be a module level function; we use a classmethod for this
@@ -336,7 +346,10 @@ class BytesGenerator(Generator):
Functionally identical to the base Generator except that the output is
bytes and not string. When surrogates were used in the input to encode
- bytes, these are decoded back to bytes for output.
+ bytes, these are decoded back to bytes for output. If the policy has
+ must_be_7bit set true, then the message is transformed such that the
+ non-ASCII bytes are properly content transfer encoded, using the
+ charset unknown-8bit.
The outfp object must accept bytes in its write method.
"""
@@ -359,21 +372,25 @@ class BytesGenerator(Generator):
# strings with 8bit bytes.
for h, v in msg._headers:
self.write('%s: ' % h)
- if isinstance(v, Header):
- self.write(v.encode(maxlinelen=self._maxheaderlen)+self._NL)
- elif _has_surrogates(v):
- # If we have raw 8bit data in a byte string, we have no idea
- # what the encoding is. There is no safe way to split this
- # string. If it's ascii-subset, then we could do a normal
- # ascii split, but if it's multibyte then we could break the
- # string. There's no way to know so the least harm seems to
- # be to not split the string and risk it being too long.
- self.write(v+NL)
+ if isinstance(v, str):
+ if _has_surrogates(v):
+ if not self.policy.must_be_7bit:
+ # If we have raw 8bit data in a byte string, we have no idea
+ # what the encoding is. There is no safe way to split this
+ # string. If it's ascii-subset, then we could do a normal
+ # ascii split, but if it's multibyte then we could break the
+ # string. There's no way to know so the least harm seems to
+ # be to not split the string and risk it being too long.
+ self.write(v+NL)
+ continue
+ h = Header(v, charset=_charset.UNKNOWN8BIT, header_name=h)
+ else:
+ h = Header(v, header_name=h)
else:
- # Header's got lots of smarts and this string is safe...
- header = Header(v, maxlinelen=self._maxheaderlen,
- header_name=h)
- self.write(header.encode(linesep=self._NL)+self._NL)
+ # Assume it is a Header-like object.
+ h = v
+ self.write(h.encode(linesep=self._NL,
+ maxlinelen=self._maxheaderlen)+self._NL)
# A blank line always separates headers from body
self.write(self._NL)
@@ -382,7 +399,7 @@ class BytesGenerator(Generator):
# just write it back out.
if msg._payload is None:
return
- if _has_surrogates(msg._payload):
+ if _has_surrogates(msg._payload) and not self.policy.must_be_7bit:
self.write(msg._payload)
else:
super(BytesGenerator,self)._handle_text(msg)
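
With the policy plumbed through, line endings and wrapping come from one object instead of per-call arguments. A sketch, assuming the SMTP policy declared in the new email.policy module carries linesep='\r\n':

    from io import StringIO
    from email.message import Message
    from email.generator import Generator
    from email import policy

    msg = Message()
    msg['Subject'] = 'hello'
    msg.set_payload('body\n')

    out = StringIO()
    Generator(out, policy=policy.SMTP).flatten(msg)
    assert '\r\n' in out.getvalue()   # CRLF supplied by the policy, not by flatten()
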
diff --git a/Lib/email/parser.py b/Lib/email/parser.py
index 6caaff5..0f92160 100644
--- a/Lib/email/parser.py
+++ b/Lib/email/parser.py
@@ -4,18 +4,19 @@
"""A parser of RFC 2822 and MIME email messages."""
-__all__ = ['Parser', 'HeaderParser']
+__all__ = ['Parser', 'HeaderParser', 'BytesParser', 'BytesHeaderParser']
import warnings
from io import StringIO, TextIOWrapper
from email.feedparser import FeedParser
from email.message import Message
+from email import policy
class Parser:
- def __init__(self, *args, **kws):
+ def __init__(self, _class=Message, *, policy=policy.default):
"""Parser of RFC 2822 and MIME email messages.
Creates an in-memory object tree representing the email message, which
@@ -30,28 +31,14 @@ class Parser:
_class is the class to instantiate for new message objects when they
must be created. This class must have a constructor that can take
zero arguments. Default is Message.Message.
+
+ The policy keyword specifies a policy object that controls a number of
+ aspects of the parser's operation. The default policy maintains
+ backward compatibility.
+
"""
- if len(args) >= 1:
- if '_class' in kws:
- raise TypeError("Multiple values for keyword arg '_class'")
- kws['_class'] = args[0]
- if len(args) == 2:
- if 'strict' in kws:
- raise TypeError("Multiple values for keyword arg 'strict'")
- kws['strict'] = args[1]
- if len(args) > 2:
- raise TypeError('Too many arguments')
- if '_class' in kws:
- self._class = kws['_class']
- del kws['_class']
- else:
- self._class = Message
- if 'strict' in kws:
- warnings.warn("'strict' argument is deprecated (and ignored)",
- DeprecationWarning, 2)
- del kws['strict']
- if kws:
- raise TypeError('Unexpected keyword arguments')
+ self._class = _class
+ self.policy = policy
def parse(self, fp, headersonly=False):
"""Create a message structure from the data in a file.
@@ -61,7 +48,7 @@ class Parser:
parsing after reading the headers or not. The default is False,
meaning it parses the entire contents of the file.
"""
- feedparser = FeedParser(self._class)
+ feedparser = FeedParser(self._class, policy=self.policy)
if headersonly:
feedparser._set_headersonly()
while True:
@@ -134,3 +121,11 @@ class BytesParser:
"""
text = text.decode('ASCII', errors='surrogateescape')
return self.parser.parsestr(text, headersonly)
+
+
+class BytesHeaderParser(BytesParser):
+ def parse(self, fp, headersonly=True):
+ return BytesParser.parse(self, fp, headersonly=True)
+
+ def parsebytes(self, text, headersonly=True):
+ return BytesParser.parsebytes(self, text, headersonly=True)
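
BytesHeaderParser mirrors HeaderParser for bytes input: the headers are parsed, the body is kept as an opaque payload. For example:

    from email.parser import BytesHeaderParser

    raw = b'Subject: hello\nFrom: a@example.com\n\nbody is not parsed\n'
    msg = BytesHeaderParser().parsebytes(raw)
    print(msg['Subject'])       # hello
    print(msg.get_payload())    # the raw remainder, untouched
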
diff --git a/Lib/email/policy.py b/Lib/email/policy.py
new file mode 100644
index 0000000..88877a2
--- /dev/null
+++ b/Lib/email/policy.py
@@ -0,0 +1,174 @@
+"""Policy framework for the email package.
+
+Allows fine-grained feature control of how the package parses and emits data.
+"""
+
+__all__ = [
+ 'Policy',
+ 'default',
+ 'strict',
+ 'SMTP',
+ 'HTTP',
+ ]
+
+
+class _PolicyBase:
+
+ """Policy Object basic framework.
+
+ This class is useless unless subclassed. A subclass should define
+ class attributes with defaults for any values that are to be
+ managed by the Policy object. The constructor will then allow
+ non-default values to be set for these attributes at instance
+ creation time. The instance will be callable, taking these same
+ attributes as keyword arguments, and returning a new instance
+ identical to the called instance except for those values changed
+ by the keyword arguments. Instances may be added, yielding new
+ instances with any non-default values from the right hand
+ operand overriding those in the left hand operand. That is,
+
+ A + B == A(<non-default values of B>)
+
+ The repr of an instance can be used to reconstruct the object
+ if and only if the repr of the values can be used to reconstruct
+ those values.
+
+ """
+
+ def __init__(self, **kw):
+ """Create new Policy, possibly overriding some defaults.
+
+ See class docstring for a list of overridable attributes.
+
+ """
+ for name, value in kw.items():
+ if hasattr(self, name):
+ super(_PolicyBase,self).__setattr__(name, value)
+ else:
+ raise TypeError(
+ "{!r} is an invalid keyword argument for {}".format(
+ name, self.__class__.__name__))
+
+ def __repr__(self):
+ args = [ "{}={!r}".format(name, value)
+ for name, value in self.__dict__.items() ]
+ return "{}({})".format(self.__class__.__name__, args if args else '')
+
+ def clone(self, **kw):
+ """Return a new instance with specified attributes changed.
+
+ The new instance has the same attribute values as the current object,
+ except for the changes passed in as keyword arguments.
+
+ """
+ for attr, value in self.__dict__.items():
+ if attr not in kw:
+ kw[attr] = value
+ return self.__class__(**kw)
+
+ def __setattr__(self, name, value):
+ if hasattr(self, name):
+ msg = "{!r} object attribute {!r} is read-only"
+ else:
+ msg = "{!r} object has no attribute {!r}"
+ raise AttributeError(msg.format(self.__class__.__name__, name))
+
+ def __add__(self, other):
+ """Non-default values from right operand override those from left.
+
+ The object returned is a new instance of the subclass.
+
+ """
+ return self.clone(**other.__dict__)
+
+
+class Policy(_PolicyBase):
+
+ """Controls for how messages are interpreted and formatted.
+
+ Most of the classes and many of the methods in the email package
+ accept Policy objects as parameters. A Policy object contains a set
+ of values and functions that control how input is interpreted and how
+ output is rendered. For example, the parameter 'raise_on_defect'
+ controls whether an RFC violation raises an error,
+ while 'max_line_length' controls the maximum length of output lines
+ when a Message is serialized.
+
+ Any valid attribute may be overridden when a Policy is created by
+ passing it as a keyword argument to the constructor. Policy
+ objects are immutable, but a new Policy object can be created
+ with only certain values changed by calling the Policy instance
+ with keyword arguments. Policy objects can also be added,
+ producing a new Policy object in which the non-default attributes
+ set in the right hand operand overwrite those specified in the
+ left operand.
+
+ Settable attributes:
+
+ raise_on_defect -- If true, then defects should be raised
+ as errors. Default False.
+
+ linesep -- string containing the value to use as
+ separation between output lines. Default '\n'.
+
+ must_be_7bit -- output must contain only 7bit clean data.
+ Default False.
+
+ max_line_length -- maximum length of lines, excluding 'linesep',
+ during serialization. None means no line
+ wrapping is done. Default is 78.
+
+ Methods:
+
+ register_defect(obj, defect)
+ defect is a Defect instance. The default implementation appends defect
+ to the obj's 'defects' attribute.
+
+ handle_defect(obj, defect)
+ intended to be called by parser code that finds a defect. If
+ raise_on_defect is True, defect is raised as an error, otherwise
+ register_defect is called.
+
+ """
+
+ raise_on_defect = False
+ linesep = '\n'
+ must_be_7bit = False
+ max_line_length = 78
+
+ def handle_defect(self, obj, defect):
+ """Based on policy, either raise defect or call register_defect.
+
+ handle_defect(obj, defect)
+
+ defect should be a Defect subclass, but in any case must be an
+ Exception subclass. obj is the object on which the defect should be
+ registered if it is not raised. If raise_on_defect is True, the
+ defect is raised as an error, otherwise the object and the defect are
+ passed to register_defect.
+
+ This method is intended to be called by parsers that discover defects,
+ and will not be called from code using the library unless that code is
+ implementing an alternate parser.
+
+ """
+ if self.raise_on_defect:
+ raise defect
+ self.register_defect(obj, defect)
+
+ def register_defect(self, obj, defect):
+ """Record 'defect' on 'obj'.
+
+ Called by handle_defect if raise_on_defect is False. This method is
+ part of the Policy API so that Policy subclasses can implement custom
+ defect handling. The default implementation calls the append method
+ of the defects attribute of obj.
+
+ """
+ obj.defects.append(defect)
+
+
+default = Policy()
+strict = default.clone(raise_on_defect=True)
+SMTP = default.clone(linesep='\r\n')
+HTTP = default.clone(linesep='\r\n', max_line_length=None)
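
A sketch of the clone/addition semantics defined above (illustrative only):

    from email import policy

    # clone() copies the instance with selected attributes changed.
    strict_smtp = policy.SMTP.clone(raise_on_defect=True)
    # Addition keeps non-default values from the right operand.
    also_strict = policy.SMTP + policy.strict
    assert also_strict.linesep == '\r\n'
    assert also_strict.raise_on_defect is True
    # Instances are immutable: assignment raises AttributeError.
    try:
        policy.default.linesep = '\r\n'
    except AttributeError:
        pass
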
diff --git a/Lib/email/utils.py b/Lib/email/utils.py
index ac4da37..138f05d 100644
--- a/Lib/email/utils.py
+++ b/Lib/email/utils.py
@@ -11,12 +11,14 @@ __all__ = [
'encode_rfc2231',
'formataddr',
'formatdate',
+ 'format_datetime',
'getaddresses',
'make_msgid',
'mktime_tz',
'parseaddr',
'parsedate',
'parsedate_tz',
+ 'parsedate_to_datetime',
'unquote',
]
@@ -26,6 +28,7 @@ import time
import base64
import random
import socket
+import datetime
import urllib.parse
import warnings
from io import StringIO
@@ -37,11 +40,13 @@ from email._parseaddr import mktime_tz
# We need workarounds for bugs in these methods in older Pythons (see below)
from email._parseaddr import parsedate as _parsedate
from email._parseaddr import parsedate_tz as _parsedate_tz
+from email._parseaddr import _parsedate_tz as __parsedate_tz
from quopri import decodestring as _qdecode
# Intrapackage imports
from email.encoders import _bencode, _qencode
+from email.charset import Charset
COMMASPACE = ', '
EMPTYSTRING = ''
@@ -50,27 +55,42 @@ CRLF = '\r\n'
TICK = "'"
specialsre = re.compile(r'[][\\()<>@,:;".]')
-escapesre = re.compile(r'[][\\()"]')
+escapesre = re.compile(r'[\\"]')
# Helpers
-def formataddr(pair):
+def formataddr(pair, charset='utf-8'):
"""The inverse of parseaddr(), this takes a 2-tuple of the form
(realname, email_address) and returns the string value suitable
for an RFC 2822 From, To or Cc header.
If the first element of pair is false, then the second element is
returned unmodified.
+
+ Optional charset if given is the character set that is used to encode
+ realname in case realname is not ASCII safe. Can be an instance of str or
+ a Charset-like object which has a header_encode method. Default is
+ 'utf-8'.
"""
name, address = pair
+ # The address MUST (per RFC) be ascii, so throw a UnicodeError if it isn't.
+ address.encode('ascii')
if name:
- quotes = ''
- if specialsre.search(name):
- quotes = '"'
- name = escapesre.sub(r'\\\g<0>', name)
- return '%s%s%s <%s>' % (quotes, name, quotes, address)
+ try:
+ name.encode('ascii')
+ except UnicodeEncodeError:
+ if isinstance(charset, str):
+ charset = Charset(charset)
+ encoded_name = charset.header_encode(name)
+ return "%s <%s>" % (encoded_name, address)
+ else:
+ quotes = ''
+ if specialsre.search(name):
+ quotes = '"'
+ name = escapesre.sub(r'\\\g<0>', name)
+ return '%s%s%s <%s>' % (quotes, name, quotes, address)
return address
@@ -94,6 +114,14 @@ ecre = re.compile(r'''
''', re.VERBOSE | re.IGNORECASE)
+def _format_timetuple_and_zone(timetuple, zone):
+ return '%s, %02d %s %04d %02d:%02d:%02d %s' % (
+ ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'][timetuple[6]],
+ timetuple[2],
+ ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
+ 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'][timetuple[1] - 1],
+ timetuple[0], timetuple[3], timetuple[4], timetuple[5],
+ zone)
def formatdate(timeval=None, localtime=False, usegmt=False):
"""Returns a date string as specified by RFC 2822, e.g.:
@@ -138,14 +166,25 @@ def formatdate(timeval=None, localtime=False, usegmt=False):
zone = 'GMT'
else:
zone = '-0000'
- return '%s, %02d %s %04d %02d:%02d:%02d %s' % (
- ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'][now[6]],
- now[2],
- ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
- 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'][now[1] - 1],
- now[0], now[3], now[4], now[5],
- zone)
+ return _format_timetuple_and_zone(now, zone)
+
+def format_datetime(dt, usegmt=False):
+ """Turn a datetime into a date string as specified in RFC 2822.
+ If usegmt is True, dt must be an aware datetime with an offset of zero. In
+ this case 'GMT' will be rendered instead of the normal +0000 required by
+ RFC2822. This is to support HTTP headers involving date stamps.
+ """
+ now = dt.timetuple()
+ if usegmt:
+ if dt.tzinfo is None or dt.tzinfo != datetime.timezone.utc:
+ raise ValueError("usegmt option requires a UTC datetime")
+ zone = 'GMT'
+ elif dt.tzinfo is None:
+ zone = '-0000'
+ else:
+ zone = dt.strftime("%z")
+ return _format_timetuple_and_zone(now, zone)
def make_msgid(idstring=None, domain=None):
@@ -187,6 +226,15 @@ def parsedate_tz(data):
return None
return _parsedate_tz(data)
+def parsedate_to_datetime(data):
+ if not data:
+ return None
+ *dtuple, tz = __parsedate_tz(data)
+ if tz is None:
+ return datetime.datetime(*dtuple[:6])
+ return datetime.datetime(*dtuple[:6],
+ tzinfo=datetime.timezone(datetime.timedelta(seconds=tz)))
+
def parseaddr(addr):
addrs = _AddressList(addr).addresslist
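
A sketch of the new utils behaviour (illustrative only; the address is made up):

    import datetime
    from email.utils import (formataddr, format_datetime,
                             parsedate_to_datetime)

    # A non-ASCII realname is now RFC 2047-encoded with the given charset.
    print(formataddr(('Jürgen', 'j@example.com')))
    # -> '=?utf-8?b?SsO8cmdlbg==?= <j@example.com>'

    # format_datetime/parsedate_to_datetime round-trip aware datetimes.
    dt = datetime.datetime(2011, 1, 1, tzinfo=datetime.timezone.utc)
    s = format_datetime(dt, usegmt=True)  # 'Sat, 01 Jan 2011 00:00:00 GMT'
    assert parsedate_to_datetime(s) == dt
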
diff --git a/Lib/encodings/cp65001.py b/Lib/encodings/cp65001.py
new file mode 100644
index 0000000..287eb87
--- /dev/null
+++ b/Lib/encodings/cp65001.py
@@ -0,0 +1,40 @@
+"""
+Code page 65001: Windows UTF-8 (CP_UTF8).
+"""
+
+import codecs
+import functools
+
+if not hasattr(codecs, 'code_page_encode'):
+ raise LookupError("cp65001 encoding is only available on Windows")
+
+### Codec APIs
+
+encode = functools.partial(codecs.code_page_encode, 65001)
+decode = functools.partial(codecs.code_page_decode, 65001)
+
+class IncrementalEncoder(codecs.IncrementalEncoder):
+ def encode(self, input, final=False):
+ return encode(input, self.errors)[0]
+
+class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
+ _buffer_decode = decode
+
+class StreamWriter(codecs.StreamWriter):
+ encode = encode
+
+class StreamReader(codecs.StreamReader):
+ decode = decode
+
+### encodings module API
+
+def getregentry():
+ return codecs.CodecInfo(
+ name='cp65001',
+ encode=encode,
+ decode=decode,
+ incrementalencoder=IncrementalEncoder,
+ incrementaldecoder=IncrementalDecoder,
+ streamreader=StreamReader,
+ streamwriter=StreamWriter,
+ )
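
Since the codec only wraps codecs.code_page_encode/decode, it is importable on Windows alone; a hedged sketch:

    # Illustrative only: cp65001 requires the Windows code-page API.
    import codecs
    try:
        data = codecs.encode('héllo', 'cp65001')
        assert data == 'héllo'.encode('utf-8')  # same bytes for BMP text
    except LookupError:
        pass  # non-Windows: the codec module refuses to import
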
diff --git a/Lib/encodings/idna.py b/Lib/encodings/idna.py
index 583bdf1..ea40585 100644
--- a/Lib/encodings/idna.py
+++ b/Lib/encodings/idna.py
@@ -153,6 +153,20 @@ class Codec(codecs.Codec):
if not input:
return b'', 0
+ try:
+ result = input.encode('ascii')
+ except UnicodeEncodeError:
+ pass
+ else:
+ # ASCII name: fast path
+ labels = result.split(b'.')
+ for label in labels[:-1]:
+ if not (0 < len(label) < 64):
+ raise UnicodeError("label empty or too long")
+ if len(labels[-1]) >= 64:
+ raise UnicodeError("label too long")
+ return result, len(input)
+
result = bytearray()
labels = dots.split(input)
if labels and not labels[-1]:
@@ -179,6 +193,14 @@ class Codec(codecs.Codec):
if not isinstance(input, bytes):
# XXX obviously wrong, see #3232
input = bytes(input)
+
+ if ace_prefix not in input:
+ # Fast path
+ try:
+ return input.decode('ascii'), len(input)
+ except UnicodeDecodeError:
+ pass
+
labels = input.split(b".")
if labels and len(labels[-1]) == 0:
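
The effect of the two fast paths above, sketched (illustrative only):

    # Pure-ASCII names now skip nameprep/punycode entirely.
    assert 'python.org'.encode('idna') == b'python.org'
    assert b'python.org'.decode('idna') == 'python.org'
    # Non-ASCII labels still take the full ToASCII path.
    assert 'bücher.example'.encode('idna') == b'xn--bcher-kva.example'
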
diff --git a/Lib/fileinput.py b/Lib/fileinput.py
index 554beb2..dbbbb21 100644
--- a/Lib/fileinput.py
+++ b/Lib/fileinput.py
@@ -398,9 +398,8 @@ def hook_compressed(filename, mode):
def hook_encoded(encoding):
- import codecs
def openhook(filename, mode):
- return codecs.open(filename, mode, encoding)
+ return open(filename, mode, encoding=encoding)
return openhook
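
Usage of the rewritten hook, sketched (the filename is made up):

    import fileinput

    # hook_encoded now returns an open()-based opener with an encoding.
    for line in fileinput.input(['notes.txt'],
                                openhook=fileinput.hook_encoded('utf-8')):
        print(line.rstrip())
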
diff --git a/Lib/fnmatch.py b/Lib/fnmatch.py
index 726fbe5..f446769 100644
--- a/Lib/fnmatch.py
+++ b/Lib/fnmatch.py
@@ -35,9 +35,9 @@ def fnmatch(name, pat):
pat = os.path.normcase(pat)
return fnmatchcase(name, pat)
-@functools.lru_cache(maxsize=250)
-def _compile_pattern(pat, is_bytes=False):
- if is_bytes:
+@functools.lru_cache(maxsize=250, typed=True)
+def _compile_pattern(pat):
+ if isinstance(pat, bytes):
pat_str = str(pat, 'ISO-8859-1')
res_str = translate(pat_str)
res = bytes(res_str, 'ISO-8859-1')
@@ -49,7 +49,7 @@ def filter(names, pat):
"""Return the subset of the list NAMES that match PAT."""
result = []
pat = os.path.normcase(pat)
- match = _compile_pattern(pat, isinstance(pat, bytes))
+ match = _compile_pattern(pat)
if os.path is posixpath:
# normcase on posix is NOP. Optimize it away from the loop.
for name in names:
@@ -67,7 +67,7 @@ def fnmatchcase(name, pat):
This is a version of fnmatch() which doesn't case-normalize
its arguments.
"""
- match = _compile_pattern(pat, isinstance(pat, bytes))
+ match = _compile_pattern(pat)
return match(name) is not None
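
With typed=True the compiled-pattern cache is keyed on the pattern alone, replacing the explicit is_bytes flag; a sketch:

    import fnmatch

    assert fnmatch.fnmatch('foo.py', '*.py')          # str pattern
    assert fnmatch.fnmatch(b'foo.py', b'*.py')        # bytes pattern
    print(fnmatch.filter(['a.py', 'b.txt'], '*.py'))  # ['a.py']
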
diff --git a/Lib/ftplib.py b/Lib/ftplib.py
index 8e53023..18887a6 100644
--- a/Lib/ftplib.py
+++ b/Lib/ftplib.py
@@ -100,14 +100,15 @@ class FTP:
file = None
welcome = None
passiveserver = 1
- encoding = "latin1"
+ encoding = "latin-1"
# Initialization method (called by class instantiation).
# Initialize host to localhost, port to standard ftp port
# Optional arguments are host (for connect()),
# and user, passwd, acct (for login())
def __init__(self, host='', user='', passwd='', acct='',
- timeout=_GLOBAL_DEFAULT_TIMEOUT):
+ timeout=_GLOBAL_DEFAULT_TIMEOUT, source_address=None):
+ self.source_address = source_address
self.timeout = timeout
if host:
self.connect(host)
@@ -128,10 +129,12 @@ class FTP:
if self.sock is not None:
self.close()
- def connect(self, host='', port=0, timeout=-999):
+ def connect(self, host='', port=0, timeout=-999, source_address=None):
'''Connect to host. Arguments are:
- host: hostname to connect to (string, default previous host)
- port: port to connect to (integer, default previous port)
+ - source_address: a 2-tuple (host, port) for the socket to bind
+ to as its source address before connecting.
'''
if host != '':
self.host = host
@@ -139,7 +142,10 @@ class FTP:
self.port = port
if timeout != -999:
self.timeout = timeout
- self.sock = socket.create_connection((self.host, self.port), self.timeout)
+ if source_address is not None:
+ self.source_address = source_address
+ self.sock = socket.create_connection((self.host, self.port), self.timeout,
+ source_address=self.source_address)
self.af = self.sock.family
self.file = self.sock.makefile('r', encoding=self.encoding)
self.welcome = self.getresp()
@@ -169,10 +175,8 @@ class FTP:
# Internal: "sanitize" a string for printing
def sanitize(self, s):
- if s[:5] == 'pass ' or s[:5] == 'PASS ':
- i = len(s)
- while i > 5 and s[i-1] in {'\r', '\n'}:
- i = i-1
+ if s[:5] in {'pass ', 'PASS '}:
+ i = len(s.rstrip('\r\n'))
s = s[:5] + '*'*(i-5) + s[i:]
return repr(s)
@@ -335,7 +339,8 @@ class FTP:
size = None
if self.passiveserver:
host, port = self.makepasv()
- conn = socket.create_connection((host, port), self.timeout)
+ conn = socket.create_connection((host, port), self.timeout,
+ source_address=self.source_address)
try:
if rest is not None:
self.sendcmd("REST %s" % rest)
@@ -354,8 +359,7 @@ class FTP:
conn.close()
raise
else:
- sock = self.makeport()
- try:
+ with self.makeport() as sock:
if rest is not None:
self.sendcmd("REST %s" % rest)
resp = self.sendcmd(cmd)
@@ -367,8 +371,6 @@ class FTP:
conn, sockaddr = sock.accept()
if self.timeout is not _GLOBAL_DEFAULT_TIMEOUT:
conn.settimeout(self.timeout)
- finally:
- sock.close()
if resp[:3] == '150':
# this is conditional in case we received a 125
size = parse150(resp)
@@ -426,7 +428,7 @@ class FTP:
"""Retrieve data in line mode. A new port is created for you.
Args:
- cmd: A RETR, LIST, NLST, or MLSD command.
+ cmd: A RETR, LIST, or NLST command.
callback: An optional single parameter callable that is called
for each line with the trailing CRLF stripped.
[default: print_line()]
@@ -527,6 +529,34 @@ class FTP:
cmd = cmd + (' ' + arg)
self.retrlines(cmd, func)
+ def mlsd(self, path="", facts=[]):
+ '''List a directory in a standardized format by using MLSD
+ command (RFC-3659). If path is omitted the current directory
+ is assumed. "facts" is a list of strings representing the type
+ of information desired (e.g. ["type", "size", "perm"]).
+
+ Return a generator object yielding a tuple of two elements
+ for every file found in path.
+ First element is the file name, the second one is a dictionary
+ including a variable number of "facts" depending on the server
+ and whether "facts" argument has been provided.
+ '''
+ if facts:
+ self.sendcmd("OPTS MLST " + ";".join(facts) + ";")
+ if path:
+ cmd = "MLSD %s" % path
+ else:
+ cmd = "MLSD"
+ lines = []
+ self.retrlines(cmd, lines.append)
+ for line in lines:
+ facts_found, _, name = line.rstrip(CRLF).partition(' ')
+ entry = {}
+ for fact in facts_found[:-1].split(";"):
+ key, _, value = fact.partition("=")
+ entry[key.lower()] = value
+ yield (name, entry)
+
def rename(self, fromname, toname):
'''Rename a file.'''
resp = self.sendcmd('RNFR ' + fromname)
@@ -561,10 +591,7 @@ class FTP:
resp = self.sendcmd('SIZE ' + filename)
if resp[:3] == '213':
s = resp[3:].strip()
- try:
- return int(s)
- except (OverflowError, ValueError):
- return int(s)
+ return int(s)
def mkd(self, dirname):
'''Make a directory, return its full pathname.'''
@@ -596,11 +623,11 @@ class FTP:
def close(self):
'''Close the connection without assuming anything about it.'''
- if self.file:
+ if self.file is not None:
self.file.close()
+ if self.sock is not None:
self.sock.close()
- self.file = self.sock = None
-
+ self.file = self.sock = None
try:
import ssl
@@ -644,7 +671,7 @@ else:
def __init__(self, host='', user='', passwd='', acct='', keyfile=None,
certfile=None, context=None,
- timeout=_GLOBAL_DEFAULT_TIMEOUT):
+ timeout=_GLOBAL_DEFAULT_TIMEOUT, source_address=None):
if context is not None and keyfile is not None:
raise ValueError("context and keyfile arguments are mutually "
"exclusive")
@@ -655,7 +682,7 @@ else:
self.certfile = certfile
self.context = context
self._prot_p = False
- FTP.__init__(self, host, user, passwd, acct, timeout)
+ FTP.__init__(self, host, user, passwd, acct, timeout, source_address)
def login(self, user='', passwd='', acct='', secure=True):
if secure and not isinstance(self.sock, ssl.SSLSocket):
@@ -679,6 +706,14 @@ else:
self.file = self.sock.makefile(mode='r', encoding=self.encoding)
return resp
+ def ccc(self):
+ '''Switch back to a clear-text control connection.'''
+ if not isinstance(self.sock, ssl.SSLSocket):
+ raise ValueError("not using TLS")
+ resp = self.voidcmd('CCC')
+ self.sock = self.sock.unwrap()
+ return resp
+
def prot_p(self):
'''Set up secure data connection.'''
# PROT defines whether or not the data channel is to be protected.
@@ -715,8 +750,7 @@ else:
def retrbinary(self, cmd, callback, blocksize=8192, rest=None):
self.voidcmd('TYPE I')
- conn = self.transfercmd(cmd, rest)
- try:
+ with self.transfercmd(cmd, rest) as conn:
while 1:
data = conn.recv(blocksize)
if not data:
@@ -725,8 +759,6 @@ else:
# shutdown ssl layer
if isinstance(conn, ssl.SSLSocket):
conn.unwrap()
- finally:
- conn.close()
return self.voidresp()
def retrlines(self, cmd, callback = None):
@@ -734,7 +766,7 @@ else:
resp = self.sendcmd('TYPE A')
conn = self.transfercmd(cmd)
fp = conn.makefile('r', encoding=self.encoding)
- try:
+ with fp, conn:
while 1:
line = fp.readline()
if self.debugging > 2: print('*retr*', repr(line))
@@ -748,15 +780,11 @@ else:
# shutdown ssl layer
if isinstance(conn, ssl.SSLSocket):
conn.unwrap()
- finally:
- fp.close()
- conn.close()
return self.voidresp()
def storbinary(self, cmd, fp, blocksize=8192, callback=None, rest=None):
self.voidcmd('TYPE I')
- conn = self.transfercmd(cmd, rest)
- try:
+ with self.transfercmd(cmd, rest) as conn:
while 1:
buf = fp.read(blocksize)
if not buf: break
@@ -765,14 +793,11 @@ else:
# shutdown ssl layer
if isinstance(conn, ssl.SSLSocket):
conn.unwrap()
- finally:
- conn.close()
return self.voidresp()
def storlines(self, cmd, fp, callback=None):
self.voidcmd('TYPE A')
- conn = self.transfercmd(cmd)
- try:
+ with self.transfercmd(cmd) as conn:
while 1:
buf = fp.readline()
if not buf: break
@@ -784,8 +809,6 @@ else:
# shutdown ssl layer
if isinstance(conn, ssl.SSLSocket):
conn.unwrap()
- finally:
- conn.close()
return self.voidresp()
def abort(self):
@@ -818,11 +841,7 @@ def parse150(resp):
m = _150_re.match(resp)
if not m:
return None
- s = m.group(1)
- try:
- return int(s)
- except (OverflowError, ValueError):
- return int(s)
+ return int(m.group(1))
_227_re = None
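
A sketch of the new ftplib surface (illustrative only; host and credentials are placeholders):

    from ftplib import FTP

    ftp = FTP()
    # source_address binds the control and data sockets before connecting.
    ftp.connect('ftp.example.com', 21, source_address=('0.0.0.0', 0))
    ftp.login()  # anonymous
    # mlsd() yields (name, facts-dict) pairs parsed from MLSD (RFC 3659).
    for name, facts in ftp.mlsd(facts=['type', 'size']):
        print(name, facts.get('type'), facts.get('size'))
    ftp.quit()
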
diff --git a/Lib/functools.py b/Lib/functools.py
index 85ea257..fec4b9e 100644
--- a/Lib/functools.py
+++ b/Lib/functools.py
@@ -12,7 +12,7 @@ __all__ = ['update_wrapper', 'wraps', 'WRAPPER_ASSIGNMENTS', 'WRAPPER_UPDATES',
'total_ordering', 'cmp_to_key', 'lru_cache', 'reduce', 'partial']
from _functools import partial, reduce
-from collections import OrderedDict, namedtuple
+from collections import namedtuple
try:
from _thread import allocate_lock as Lock
except:
@@ -21,7 +21,8 @@ except:
# update_wrapper() and wraps() are tools to help write
# wrapper functions that can handle naive introspection
-WRAPPER_ASSIGNMENTS = ('__module__', '__name__', '__doc__', '__annotations__')
+WRAPPER_ASSIGNMENTS = ('__module__', '__name__', '__qualname__', '__doc__',
+ '__annotations__')
WRAPPER_UPDATES = ('__dict__',)
def update_wrapper(wrapper,
wrapped,
@@ -114,14 +115,23 @@ def cmp_to_key(mycmp):
__hash__ = None
return K
+try:
+ from _functools import cmp_to_key
+except ImportError:
+ pass
+
_CacheInfo = namedtuple("CacheInfo", "hits misses maxsize currsize")
-def lru_cache(maxsize=100):
+def lru_cache(maxsize=100, typed=False):
"""Least-recently-used cache decorator.
If *maxsize* is set to None, the LRU features are disabled and the cache
can grow without bound.
+ If *typed* is True, arguments of different types will be cached separately.
+ For example, f(3.0) and f(3) will be treated as distinct calls with
+ distinct results.
+
Arguments to the cached function must be hashable.
View the cache statistics named tuple (hits, misses, maxsize, currsize) with
@@ -136,57 +146,74 @@ def lru_cache(maxsize=100):
# The internals of the lru_cache are encapsulated for thread safety and
# to allow the implementation to change (including a possible C version).
- def decorating_function(user_function,
- tuple=tuple, sorted=sorted, len=len, KeyError=KeyError):
+ def decorating_function(user_function):
+ cache = dict()
hits = misses = 0
- kwd_mark = (object(),) # separates positional and keyword args
- lock = Lock() # needed because OrderedDict isn't threadsafe
+ cache_get = cache.get # bound method to lookup key or return None
+ _len = len # localize the global len() function
+ kwd_mark = (object(),) # separate positional and keyword args
+ lock = Lock() # because linkedlist updates aren't threadsafe
+ root = [] # root of the circular doubly linked list
+ root[:] = [root, root, None, None] # initialize by pointing to self
+ PREV, NEXT, KEY, RESULT = 0, 1, 2, 3 # names for the link fields
+
+ def make_key(args, kwds, typed, tuple=tuple, sorted=sorted, type=type):
+ key = args
+ if kwds:
+ sorted_items = tuple(sorted(kwds.items()))
+ key += kwd_mark + sorted_items
+ if typed:
+ key += tuple(type(v) for v in args)
+ if kwds:
+ key += tuple(type(v) for k, v in sorted_items)
+ return key
if maxsize is None:
- cache = dict() # simple cache without ordering or size limit
-
@wraps(user_function)
def wrapper(*args, **kwds):
+ # simple caching without ordering or size limit
nonlocal hits, misses
- key = args
- if kwds:
- key += kwd_mark + tuple(sorted(kwds.items()))
- try:
- result = cache[key]
+ key = make_key(args, kwds, typed) if kwds or typed else args
+ result = cache_get(key, root) # root used here as a unique not-found sentinel
+ if result is not root:
hits += 1
return result
- except KeyError:
- pass
result = user_function(*args, **kwds)
cache[key] = result
misses += 1
return result
else:
- cache = OrderedDict() # ordered least recent to most recent
- cache_popitem = cache.popitem
- cache_renew = cache.move_to_end
-
@wraps(user_function)
def wrapper(*args, **kwds):
+ # size limited caching that tracks accesses by recency
nonlocal hits, misses
- key = args
- if kwds:
- key += kwd_mark + tuple(sorted(kwds.items()))
+ key = make_key(args, kwds, typed) if kwds or typed else args
with lock:
- try:
- result = cache[key]
- cache_renew(key) # record recent use of this key
+ link = cache_get(key)
+ if link is not None:
+ # record recent use of the key by moving it to the front of the list
+ link_prev, link_next, key, result = link
+ link_prev[NEXT] = link_next
+ link_next[PREV] = link_prev
+ last = root[PREV]
+ last[NEXT] = root[PREV] = link
+ link[PREV] = last
+ link[NEXT] = root
hits += 1
return result
- except KeyError:
- pass
result = user_function(*args, **kwds)
with lock:
- cache[key] = result # record recent use of this key
+ last = root[PREV]
+ link = [last, root, key, result]
+ cache[key] = last[NEXT] = root[PREV] = link
+ if _len(cache) > maxsize:
+ # purge least recently used cache entry
+ old_prev, old_next, old_key, old_result = root[NEXT]
+ root[NEXT] = old_next
+ old_next[PREV] = root
+ del cache[old_key]
misses += 1
- if len(cache) > maxsize:
- cache_popitem(0) # purge least recently used cache entry
return result
def cache_info():
@@ -199,6 +226,7 @@ def lru_cache(maxsize=100):
nonlocal hits, misses
with lock:
cache.clear()
+ root[:] = [root, root, None, None]
hits = misses = 0
wrapper.cache_info = cache_info
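
A sketch of the typed cache in action (illustrative only):

    from functools import lru_cache

    @lru_cache(maxsize=32, typed=True)
    def square(x):
        return x * x

    square(3); square(3); square(3.0)
    info = square.cache_info()
    # typed=True keys on argument types too, so 3 and 3.0 miss separately.
    assert (info.hits, info.misses) == (1, 2)
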
diff --git a/Lib/getopt.py b/Lib/getopt.py
index 980861d..3d6ecbd 100644
--- a/Lib/getopt.py
+++ b/Lib/getopt.py
@@ -19,7 +19,7 @@ option involved with the exception.
# Gerrit Holl <gerrit@nl.linux.org> moved the string-based exceptions
# to class-based exceptions.
#
-# Peter Åstrand <astrand@lysator.liu.se> added gnu_getopt().
+# Peter Åstrand <astrand@lysator.liu.se> added gnu_getopt().
#
# TODO for gnu_getopt():
#
@@ -34,6 +34,11 @@ option involved with the exception.
__all__ = ["GetoptError","error","getopt","gnu_getopt"]
import os
+try:
+ from gettext import gettext as _
+except ImportError:
+ # Bootstrapping Python: gettext's dependencies not built yet
+ def _(s): return s
class GetoptError(Exception):
opt = ''
@@ -153,10 +158,10 @@ def do_longs(opts, opt, longopts, args):
if has_arg:
if optarg is None:
if not args:
- raise GetoptError('option --%s requires argument' % opt, opt)
+ raise GetoptError(_('option --%s requires argument') % opt, opt)
optarg, args = args[0], args[1:]
elif optarg is not None:
- raise GetoptError('option --%s must not have an argument' % opt, opt)
+ raise GetoptError(_('option --%s must not have an argument') % opt, opt)
opts.append(('--' + opt, optarg or ''))
return opts, args
@@ -166,7 +171,7 @@ def do_longs(opts, opt, longopts, args):
def long_has_args(opt, longopts):
possibilities = [o for o in longopts if o.startswith(opt)]
if not possibilities:
- raise GetoptError('option --%s not recognized' % opt, opt)
+ raise GetoptError(_('option --%s not recognized') % opt, opt)
# Is there an exact match?
if opt in possibilities:
return False, opt
@@ -176,7 +181,7 @@ def long_has_args(opt, longopts):
if len(possibilities) > 1:
# XXX since possibilities contains all valid continuations, might be
# nice to work them into the error msg
- raise GetoptError('option --%s not a unique prefix' % opt, opt)
+ raise GetoptError(_('option --%s not a unique prefix') % opt, opt)
assert len(possibilities) == 1
unique_match = possibilities[0]
has_arg = unique_match.endswith('=')
@@ -190,7 +195,7 @@ def do_shorts(opts, optstring, shortopts, args):
if short_has_arg(opt, shortopts):
if optstring == '':
if not args:
- raise GetoptError('option -%s requires argument' % opt,
+ raise GetoptError(_('option -%s requires argument') % opt,
opt)
optstring, args = args[0], args[1:]
optarg, optstring = optstring, ''
@@ -203,7 +208,7 @@ def short_has_arg(opt, shortopts):
for i in range(len(shortopts)):
if opt == shortopts[i] != ':':
return shortopts.startswith(':', i+1)
- raise GetoptError('option -%s not recognized' % opt, opt)
+ raise GetoptError(_('option -%s not recognized') % opt, opt)
if __name__ == '__main__':
import sys
diff --git a/Lib/getpass.py b/Lib/getpass.py
index dc02bd1..0044742 100644
--- a/Lib/getpass.py
+++ b/Lib/getpass.py
@@ -72,7 +72,7 @@ def unix_getpass(prompt='Password: ', stream=None):
finally:
termios.tcsetattr(fd, tcsetattr_flags, old)
stream.flush() # issue7208
- except termios.error as e:
+ except termios.error:
if passwd is not None:
# _raw_input succeeded. The final tcsetattr failed. Reraise
# instead of leaving the terminal in an unknown state.
@@ -145,8 +145,6 @@ def getuser():
"""
- import os
-
for name in ('LOGNAME', 'USER', 'LNAME', 'USERNAME'):
user = os.environ.get(name)
if user:
diff --git a/Lib/gzip.py b/Lib/gzip.py
index 1de23b6..85c3e15 100644
--- a/Lib/gzip.py
+++ b/Lib/gzip.py
@@ -16,18 +16,6 @@ FTEXT, FHCRC, FEXTRA, FNAME, FCOMMENT = 1, 2, 4, 8, 16
READ, WRITE = 1, 2
-def U32(i):
- """Return i as an unsigned integer, assuming it fits in 32 bits.
- If it's >= 2GB when viewed as a 32-bit unsigned int, return a long.
- """
- if i < 0:
- i += 1 << 32
- return i
-
-def LOWU32(i):
- """Return the low-order 32 bits, as a non-negative int"""
- return i & 0xFFFFFFFF
-
def write32u(output, value):
# The L format writes the bit pattern correctly whether signed
# or unsigned.
@@ -153,7 +141,7 @@ class GzipFile(io.BufferedIOBase):
"""
if mode and ('t' in mode or 'U' in mode):
- raise IOError("Mode " + mode + " not supported")
+ raise ValueError("Invalid mode: {!r}".format(mode))
if mode and 'b' not in mode:
mode += 'b'
if fileobj is None:
@@ -164,10 +152,9 @@ class GzipFile(io.BufferedIOBase):
else:
filename = ''
if mode is None:
- if hasattr(fileobj, 'mode'): mode = fileobj.mode
- else: mode = 'rb'
+ mode = getattr(fileobj, 'mode', 'rb')
- if mode[0:1] == 'r':
+ if mode.startswith('r'):
self.mode = READ
# Set flag indicating start of a new member
self._new_member = True
@@ -182,7 +169,7 @@ class GzipFile(io.BufferedIOBase):
self.min_readsize = 100
fileobj = _PaddedFile(fileobj)
- elif mode[0:1] == 'w' or mode[0:1] == 'a':
+ elif mode.startswith(('w', 'a')):
self.mode = WRITE
self._init_write(filename)
self.compress = zlib.compressobj(compresslevel,
@@ -191,7 +178,7 @@ class GzipFile(io.BufferedIOBase):
zlib.DEF_MEM_LEVEL,
0)
else:
- raise IOError("Mode " + mode + " not supported")
+ raise ValueError("Invalid mode: {!r}".format(mode))
self.fileobj = fileobj
self.offset = 0
@@ -353,6 +340,28 @@ class GzipFile(io.BufferedIOBase):
self.offset += size
return chunk
+ def read1(self, size=-1):
+ self._check_closed()
+ if self.mode != READ:
+ import errno
+ raise IOError(errno.EBADF, "read1() on write-only GzipFile object")
+
+ if self.extrasize <= 0 and self.fileobj is None:
+ return b''
+
+ try:
+ self._read()
+ except EOFError:
+ pass
+ if size < 0 or size > self.extrasize:
+ size = self.extrasize
+
+ offset = self.offset - self.extrastart
+ chunk = self.extrabuf[offset: offset + size]
+ self.extrasize -= size
+ self.offset += size
+ return chunk
+
def peek(self, n):
if self.mode != READ:
import errno
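
read1() is what io.TextIOWrapper calls for incremental reads, so a GzipFile can now be wrapped directly; a sketch (the filename is made up):

    import gzip, io

    with gzip.open('log.gz', 'rb') as binary:
        with io.TextIOWrapper(binary, encoding='utf-8') as text:
            for line in text:
                print(line.rstrip())
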
diff --git a/Lib/http/client.py b/Lib/http/client.py
index 745b999..0002072 100644
--- a/Lib/http/client.py
+++ b/Lib/http/client.py
@@ -485,11 +485,17 @@ class HTTPResponse(io.RawIOBase):
self.close()
return b""
- if self.chunked:
- return self._read_chunked(amt)
+ if amt is not None:
+ # Amount is given, so call base class version
+ # (which is implemented in terms of self.readinto)
+ return super(HTTPResponse, self).read(amt)
+ else:
+ # Amount is not given (unbounded read) so we must check self.length
+ # and self.chunked
+
+ if self.chunked:
+ return self._readall_chunked()
- if amt is None:
- # unbounded read
if self.length is None:
s = self.fp.read()
else:
@@ -498,78 +504,127 @@ class HTTPResponse(io.RawIOBase):
self.close() # we read everything
return s
+ def readinto(self, b):
+ if self.fp is None:
+ return 0
+
+ if self._method == "HEAD":
+ self.close()
+ return 0
+
+ if self.chunked:
+ return self._readinto_chunked(b)
+
if self.length is not None:
- if amt > self.length:
+ if len(b) > self.length:
# clip the read to the "end of response"
- amt = self.length
+ b = memoryview(b)[0:self.length]
# we do not use _safe_read() here because this may be a .will_close
# connection, and the user is reading more bytes than will be provided
# (for example, reading in 1k chunks)
- s = self.fp.read(amt)
+ n = self.fp.readinto(b)
if self.length is not None:
- self.length -= len(s)
+ self.length -= n
if not self.length:
self.close()
- return s
+ return n
+
+ def _read_next_chunk_size(self):
+ # Read the next chunk size from the file
+ line = self.fp.readline(_MAXLINE + 1)
+ if len(line) > _MAXLINE:
+ raise LineTooLong("chunk size")
+ i = line.find(b";")
+ if i >= 0:
+ line = line[:i] # strip chunk-extensions
+ try:
+ return int(line, 16)
+ except ValueError:
+ # close the connection as protocol synchronisation is
+ # probably lost
+ self.close()
+ raise
- def _read_chunked(self, amt):
+ def _read_and_discard_trailer(self):
+ # read and discard trailer up to the CRLF terminator
+ ### note: we shouldn't have any trailers!
+ while True:
+ line = self.fp.readline(_MAXLINE + 1)
+ if len(line) > _MAXLINE:
+ raise LineTooLong("trailer line")
+ if not line:
+ # a vanishingly small number of sites EOF without
+ # sending the trailer
+ break
+ if line == b"\r\n":
+ break
+
+ def _readall_chunked(self):
assert self.chunked != _UNKNOWN
chunk_left = self.chunk_left
value = []
while True:
if chunk_left is None:
- line = self.fp.readline(_MAXLINE + 1)
- if len(line) > _MAXLINE:
- raise LineTooLong("chunk size")
- i = line.find(b";")
- if i >= 0:
- line = line[:i] # strip chunk-extensions
try:
- chunk_left = int(line, 16)
+ chunk_left = self._read_next_chunk_size()
+ if chunk_left == 0:
+ break
except ValueError:
- # close the connection as protocol synchronisation is
- # probably lost
- self.close()
raise IncompleteRead(b''.join(value))
- if chunk_left == 0:
- break
- if amt is None:
- value.append(self._safe_read(chunk_left))
- elif amt < chunk_left:
- value.append(self._safe_read(amt))
- self.chunk_left = chunk_left - amt
- return b''.join(value)
- elif amt == chunk_left:
- value.append(self._safe_read(amt))
+ value.append(self._safe_read(chunk_left))
+
+ # we read the whole chunk, get another
+ self._safe_read(2) # toss the CRLF at the end of the chunk
+ chunk_left = None
+
+ self._read_and_discard_trailer()
+
+ # we read everything; close the "file"
+ self.close()
+
+ return b''.join(value)
+
+ def _readinto_chunked(self, b):
+ assert self.chunked != _UNKNOWN
+ chunk_left = self.chunk_left
+
+ total_bytes = 0
+ mvb = memoryview(b)
+ while True:
+ if chunk_left is None:
+ try:
+ chunk_left = self._read_next_chunk_size()
+ if chunk_left == 0:
+ break
+ except ValueError:
+ raise IncompleteRead(bytes(b[0:total_bytes]))
+
+ if len(mvb) < chunk_left:
+ n = self._safe_readinto(mvb)
+ self.chunk_left = chunk_left - n
+ return total_bytes + n
+ elif len(mvb) == chunk_left:
+ n = self._safe_readinto(mvb)
self._safe_read(2) # toss the CRLF at the end of the chunk
self.chunk_left = None
- return b''.join(value)
+ return total_bytes + n
else:
- value.append(self._safe_read(chunk_left))
- amt -= chunk_left
+ temp_mvb = mvb[0:chunk_left]
+ n = self._safe_readinto(temp_mvb)
+ mvb = mvb[n:]
+ total_bytes += n
# we read the whole chunk, get another
self._safe_read(2) # toss the CRLF at the end of the chunk
chunk_left = None
- # read and discard trailer up to the CRLF terminator
- ### note: we shouldn't have any trailers!
- while True:
- line = self.fp.readline(_MAXLINE + 1)
- if len(line) > _MAXLINE:
- raise LineTooLong("trailer line")
- if not line:
- # a vanishingly small number of sites EOF without
- # sending the trailer
- break
- if line == b"\r\n":
- break
+ self._read_and_discard_trailer()
# we read everything; close the "file"
self.close()
- return b''.join(value)
+ return total_bytes
def _safe_read(self, amt):
"""Read the number of bytes requested, compensating for partial reads.
@@ -594,6 +649,22 @@ class HTTPResponse(io.RawIOBase):
amt -= len(chunk)
return b"".join(s)
+ def _safe_readinto(self, b):
+ """Same as _safe_read, but for reading into a buffer."""
+ total_bytes = 0
+ mvb = memoryview(b)
+ while total_bytes < len(b):
+ if MAXAMOUNT < len(mvb):
+ temp_mvb = mvb[0:MAXAMOUNT]
+ n = self.fp.readinto(temp_mvb)
+ else:
+ n = self.fp.readinto(mvb)
+ if not n:
+ raise IncompleteRead(bytes(mvb[0:total_bytes]), len(b))
+ mvb = mvb[n:]
+ total_bytes += n
+ return total_bytes
+
def fileno(self):
return self.fp.fileno()
@@ -700,7 +771,7 @@ class HTTPConnection:
self.send(connect_bytes)
for header, value in self._tunnel_headers.items():
header_str = "%s: %s\r\n" % (header, value)
- header_bytes = header_str.encode("latin1")
+ header_bytes = header_str.encode("latin-1")
self.send(header_bytes)
self.send(b'\r\n')
@@ -940,7 +1011,7 @@ class HTTPConnection:
values = list(values)
for i, one_value in enumerate(values):
if hasattr(one_value, 'encode'):
- values[i] = one_value.encode('latin1')
+ values[i] = one_value.encode('latin-1')
elif isinstance(one_value, int):
values[i] = str(one_value).encode('ascii')
value = b'\r\n\t'.join(values)
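
With readinto() in place, bounded reads go through the standard RawIOBase machinery; a sketch (the host is a placeholder):

    from http.client import HTTPConnection

    conn = HTTPConnection('www.example.com')
    conn.request('GET', '/')
    resp = conn.getresponse()
    buf = bytearray(8192)
    n = resp.readinto(buf)  # fills buf, returns the byte count
    print(resp.status, n)
    conn.close()
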
diff --git a/Lib/http/server.py b/Lib/http/server.py
index 537df90..8326f30 100644
--- a/Lib/http/server.py
+++ b/Lib/http/server.py
@@ -105,6 +105,7 @@ import copy
DEFAULT_ERROR_MESSAGE = """\
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN"
"http://www.w3.org/TR/html4/strict.dtd">
+<html>
<head>
<meta http-equiv="Content-Type" content="text/html;charset=utf-8">
<title>Error response</title>
@@ -352,6 +353,7 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler):
"""
self.send_response_only(100)
+ self.flush_headers()
return True
def handle_one_request(self):
@@ -429,7 +431,8 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler):
self.wfile.write(content.encode('UTF-8', 'replace'))
def send_response(self, code, message=None):
- """Send the response header and log the response code.
+ """Add the response header to the headers buffer and log the
+ response code.
Also send two standard headers with the server software
version and the current date.
@@ -448,16 +451,19 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler):
else:
message = ''
if self.request_version != 'HTTP/0.9':
- self.wfile.write(("%s %d %s\r\n" %
- (self.protocol_version, code, message)).encode('latin1', 'strict'))
+ if not hasattr(self, '_headers_buffer'):
+ self._headers_buffer = []
+ self._headers_buffer.append(("%s %d %s\r\n" %
+ (self.protocol_version, code, message)).encode(
+ 'latin-1', 'strict'))
def send_header(self, keyword, value):
- """Send a MIME header."""
+ """Send a MIME header to the headers buffer."""
if self.request_version != 'HTTP/0.9':
if not hasattr(self, '_headers_buffer'):
self._headers_buffer = []
self._headers_buffer.append(
- ("%s: %s\r\n" % (keyword, value)).encode('latin1', 'strict'))
+ ("%s: %s\r\n" % (keyword, value)).encode('latin-1', 'strict'))
if keyword.lower() == 'connection':
if value.lower() == 'close':
@@ -469,6 +475,10 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler):
"""Send the blank line ending the MIME headers."""
if self.request_version != 'HTTP/0.9':
self._headers_buffer.append(b"\r\n")
+ self.flush_headers()
+
+ def flush_headers(self):
+ if hasattr(self, '_headers_buffer'):
self.wfile.write(b"".join(self._headers_buffer))
self._headers_buffer = []
@@ -722,10 +732,16 @@ class SimpleHTTPRequestHandler(BaseHTTPRequestHandler):
list.sort(key=lambda a: a.lower())
r = []
displaypath = html.escape(urllib.parse.unquote(self.path))
- r.append('<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">')
- r.append("<html>\n<title>Directory listing for %s</title>\n" % displaypath)
- r.append("<body>\n<h2>Directory listing for %s</h2>\n" % displaypath)
- r.append("<hr>\n<ul>\n")
+ enc = sys.getfilesystemencoding()
+ title = 'Directory listing for %s' % displaypath
+ r.append('<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" '
+ '"http://www.w3.org/TR/html4/strict.dtd">')
+ r.append('<html>\n<head>')
+ r.append('<meta http-equiv="Content-Type" '
+ 'content="text/html; charset=%s">' % enc)
+ r.append('<title>%s</title>\n</head>' % title)
+ r.append('<body>\n<h1>%s</h1>' % title)
+ r.append('<hr>\n<ul>')
for name in list:
fullname = os.path.join(path, name)
displayname = linkname = name
@@ -736,11 +752,10 @@ class SimpleHTTPRequestHandler(BaseHTTPRequestHandler):
if os.path.islink(fullname):
displayname = name + "@"
# Note: a link to a directory displays with @ and links with /
- r.append('<li><a href="%s">%s</a>\n'
+ r.append('<li><a href="%s">%s</a></li>'
% (urllib.parse.quote(linkname), html.escape(displayname)))
- r.append("</ul>\n<hr>\n</body>\n</html>\n")
- enc = sys.getfilesystemencoding()
- encoded = ''.join(r).encode(enc)
+ r.append('</ul>\n<hr>\n</body>\n</html>\n')
+ encoded = '\n'.join(r).encode(enc)
f = io.BytesIO()
f.write(encoded)
f.seek(0)
@@ -892,11 +907,7 @@ def nobody_uid():
def executable(path):
"""Test for executable file."""
- try:
- st = os.stat(path)
- except os.error:
- return False
- return st.st_mode & 0o111 != 0
+ return os.access(path, os.X_OK)
class CGIHTTPRequestHandler(SimpleHTTPRequestHandler):
@@ -1010,7 +1021,7 @@ class CGIHTTPRequestHandler(SimpleHTTPRequestHandler):
scriptname)
return
ispy = self.is_python(scriptname)
- if not ispy:
+ if self.have_fork or not ispy:
if not self.is_executable(scriptfile):
self.send_error(403, "CGI script is not executable (%r)" %
scriptname)
@@ -1085,6 +1096,7 @@ class CGIHTTPRequestHandler(SimpleHTTPRequestHandler):
env.setdefault(k, "")
self.send_response(200, "Script output follows")
+ self.flush_headers()
decoded_query = query.replace('+', ' ')
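
A handler sketch showing the buffered header API above: the status line and headers accumulate in _headers_buffer and hit the wire in one write at end_headers():

    from http.server import BaseHTTPRequestHandler, HTTPServer

    class Hello(BaseHTTPRequestHandler):
        def do_GET(self):
            body = b'hello\n'
            self.send_response(200)                     # buffered
            self.send_header('Content-Length', str(len(body)))
            self.send_header('Content-Type', 'text/plain')
            self.end_headers()                          # single write
            self.wfile.write(body)

    # HTTPServer(('', 8000), Hello).serve_forever()  # run manually
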
diff --git a/Lib/idlelib/PyShell.py b/Lib/idlelib/PyShell.py
index 6bf0a8c..6b75a8d 100644
--- a/Lib/idlelib/PyShell.py
+++ b/Lib/idlelib/PyShell.py
@@ -468,6 +468,10 @@ class ModifiedInterpreter(InteractiveInterpreter):
def kill_subprocess(self):
try:
+ self.rpcclt.listening_sock.close()
+ except AttributeError: # no socket
+ pass
+ try:
self.rpcclt.close()
except AttributeError: # no socket
pass
@@ -1217,6 +1221,16 @@ class PyShell(OutputWindow):
self.set_line_and_column()
def write(self, s, tags=()):
+ if isinstance(s, str) and len(s) and max(s) > '\uffff':
+ # Tk doesn't support outputting non-BMP characters
+ # Let's assume the printed string is not very long; find the
+ # first non-BMP character and construct an informative
+ # UnicodeEncodeError exception.
+ for start, char in enumerate(s):
+ if char > '\uffff':
+ break
+ raise UnicodeEncodeError("UCS-2", char, start, start+1,
+ 'Non-BMP character not supported in Tk')
try:
self.text.mark_gravity("iomark", "right")
OutputWindow.write(self, s, tags, "iomark")
diff --git a/Lib/idlelib/configHandler.py b/Lib/idlelib/configHandler.py
index 73b8db5..79315ef 100644
--- a/Lib/idlelib/configHandler.py
+++ b/Lib/idlelib/configHandler.py
@@ -145,7 +145,8 @@ class IdleUserConfParser(IdleConfParser):
except IOError:
os.unlink(fname)
cfgFile = open(fname, 'w')
- self.write(cfgFile)
+ with cfgFile:
+ self.write(cfgFile)
else:
self.RemoveFile()
diff --git a/Lib/idlelib/idlever.py b/Lib/idlelib/idlever.py
index cbcda05..76b8bb8 100644
--- a/Lib/idlelib/idlever.py
+++ b/Lib/idlelib/idlever.py
@@ -1 +1 @@
-IDLE_VERSION = "3.2.3rc1"
+IDLE_VERSION = "3.3.0a1"
diff --git a/Lib/idlelib/rpc.py b/Lib/idlelib/rpc.py
index def4394..301305e 100644
--- a/Lib/idlelib/rpc.py
+++ b/Lib/idlelib/rpc.py
@@ -196,8 +196,12 @@ class SocketIO(object):
return ("ERROR", "Unsupported message type: %s" % how)
except SystemExit:
raise
+ except KeyboardInterrupt:
+ raise
except socket.error:
raise
+ except Exception as ex:
+ return ("CALLEXC", ex)
except:
msg = "*** Internal Error: rpc.py:SocketIO.localcall()\n\n"\
" Object: %s \n Method: %s \n Args: %s\n"
@@ -257,6 +261,9 @@ class SocketIO(object):
if how == "ERROR":
self.debug("decoderesponse: Internal ERROR:", what)
raise RuntimeError(what)
+ if how == "CALLEXC":
+ self.debug("decoderesponse: Call Exception:", what)
+ raise what
raise SystemError(how, what)
def decode_interrupthook(self):
diff --git a/Lib/idlelib/run.py b/Lib/idlelib/run.py
index 25338ff..a161a93 100644
--- a/Lib/idlelib/run.py
+++ b/Lib/idlelib/run.py
@@ -6,6 +6,7 @@ import traceback
import _thread as thread
import threading
import queue
+import builtins
from idlelib import CallTips
from idlelib import AutoComplete
@@ -38,6 +39,21 @@ else:
return s
warnings.formatwarning = idle_formatwarning_subproc
+
+def handle_tk_events():
+ """Process any tk events that are ready to be dispatched if tkinter
+ has been imported, a tcl interpreter has been created and tk has been
+ loaded."""
+ tkinter = sys.modules.get('tkinter')
+ if tkinter and tkinter._default_root:
+ # tkinter has been imported, a Tcl interpreter was created and
+ # tk has been loaded.
+ root = tkinter._default_root
+ while root.tk.dooneevent(tkinter._tkinter.DONT_WAIT):
+ # Process pending events.
+ pass
+
+
# Thread shared globals: Establish a queue between a subthread (which handles
# the socket) and the main thread (which runs user code), plus global
# completion, exit and interruptable (the main thread) flags:
@@ -93,6 +109,7 @@ def main(del_exitfunc=False):
try:
seq, request = rpc.request_queue.get(block=True, timeout=0.05)
except queue.Empty:
+ handle_tk_events()
continue
method, args, kwargs = request
ret = method(*args, **kwargs)
@@ -245,6 +262,25 @@ class MyRPCServer(rpc.RPCServer):
thread.interrupt_main()
+def displayhook(value):
+ """Override standard display hook to use non-locale encoding"""
+ if value is None:
+ return
+ # Set '_' to None to avoid recursion
+ builtins._ = None
+ text = repr(value)
+ try:
+ sys.stdout.write(text)
+ except UnicodeEncodeError:
+ # fall back to ascii until a BMP-aware utf-8 codec is available
+ encoding = 'ascii'
+ bytes = text.encode(encoding, 'backslashreplace')
+ text = bytes.decode(encoding, 'strict')
+ sys.stdout.write(text)
+ sys.stdout.write("\n")
+ builtins._ = value
+
+
class MyHandler(rpc.RPCHandler):
def handle(self):
@@ -254,6 +290,7 @@ class MyHandler(rpc.RPCHandler):
sys.stdin = self.console = self.get_remote_proxy("stdin")
sys.stdout = self.get_remote_proxy("stdout")
sys.stderr = self.get_remote_proxy("stderr")
+ sys.displayhook = displayhook
# page help() text to shell.
import pydoc # import must be done here to capture i/o binding
pydoc.pager = pydoc.plainpager
diff --git a/Lib/imaplib.py b/Lib/imaplib.py
index 1fcba21..bda2ae9 100644
--- a/Lib/imaplib.py
+++ b/Lib/imaplib.py
@@ -249,15 +249,7 @@ class IMAP4:
def read(self, size):
"""Read 'size' bytes from remote."""
- chunks = []
- read = 0
- while read < size:
- data = self.file.read(min(size-read, 4096))
- if not data:
- break
- read += len(data)
- chunks.append(data)
- return b''.join(chunks)
+ return self.file.read(size)
def readline(self):
@@ -1177,25 +1169,40 @@ if HAVE_SSL:
"""IMAP4 client class over SSL connection
- Instantiate with: IMAP4_SSL([host[, port[, keyfile[, certfile]]]])
+ Instantiate with: IMAP4_SSL([host[, port[, keyfile[, certfile[, ssl_context]]]]])
host - host's name (default: localhost);
- port - port number (default: standard IMAP4 SSL port).
+ port - port number (default: standard IMAP4 SSL port);
keyfile - PEM formatted file that contains your private key (default: None);
certfile - PEM formatted certificate chain file (default: None);
+ ssl_context - a SSLContext object that contains your certificate chain
+ and private key (default: None)
+ Note: if ssl_context is provided, then parameters keyfile or
+ certfile must not be set, otherwise ValueError is raised.
for more documentation see the docstring of the parent class IMAP4.
"""
- def __init__(self, host = '', port = IMAP4_SSL_PORT, keyfile = None, certfile = None):
+ def __init__(self, host='', port=IMAP4_SSL_PORT, keyfile=None, certfile=None, ssl_context=None):
+ if ssl_context is not None and keyfile is not None:
+ raise ValueError("ssl_context and keyfile arguments are mutually "
+ "exclusive")
+ if ssl_context is not None and certfile is not None:
+ raise ValueError("ssl_context and certfile arguments are mutually "
+ "exclusive")
+
self.keyfile = keyfile
self.certfile = certfile
+ self.ssl_context = ssl_context
IMAP4.__init__(self, host, port)
def _create_socket(self):
sock = IMAP4._create_socket(self)
- return ssl.wrap_socket(sock, self.keyfile, self.certfile)
+ if self.ssl_context:
+ return self.ssl_context.wrap_socket(sock)
+ else:
+ return ssl.wrap_socket(sock, self.keyfile, self.certfile)
def open(self, host='', port=IMAP4_SSL_PORT):
"""Setup connection to remote server on "host:port".
diff --git a/Lib/importlib/__init__.py b/Lib/importlib/__init__.py
index 2baaf93..57fb284 100644
--- a/Lib/importlib/__init__.py
+++ b/Lib/importlib/__init__.py
@@ -1,103 +1,19 @@
-"""A pure Python implementation of import.
-
-References on import:
-
- * Language reference
- http://docs.python.org/ref/import.html
- * __import__ function
- http://docs.python.org/lib/built-in-funcs.html
- * Packages
- http://www.python.org/doc/essays/packages.html
- * PEP 235: Import on Case-Insensitive Platforms
- http://www.python.org/dev/peps/pep-0235
- * PEP 275: Import Modules from Zip Archives
- http://www.python.org/dev/peps/pep-0273
- * PEP 302: New Import Hooks
- http://www.python.org/dev/peps/pep-0302/
- * PEP 328: Imports: Multi-line and Absolute/Relative
- http://www.python.org/dev/peps/pep-0328
-
-"""
-__all__ = ['__import__', 'import_module']
+"""A pure Python implementation of import."""
+__all__ = ['__import__', 'import_module', 'invalidate_caches']
from . import _bootstrap
-import os
-import re
-import tokenize
-
-# Bootstrap help #####################################################
-
-def _case_ok(directory, check):
- """Check if the directory contains something matching 'check'.
- No check is done if the file/directory exists or not.
+# To simplify imports in test code
+_w_long = _bootstrap._w_long
+_r_long = _bootstrap._r_long
- """
- if 'PYTHONCASEOK' in os.environ:
- return True
- elif check in os.listdir(directory if directory else os.getcwd()):
- return True
- return False
-
-
-def _w_long(x):
- """Convert a 32-bit integer to little-endian.
-
- XXX Temporary until marshal's long functions are exposed.
-
- """
- x = int(x)
- int_bytes = []
- int_bytes.append(x & 0xFF)
- int_bytes.append((x >> 8) & 0xFF)
- int_bytes.append((x >> 16) & 0xFF)
- int_bytes.append((x >> 24) & 0xFF)
- return bytearray(int_bytes)
+# Bootstrap help #####################################################
+import imp
+import sys
-def _r_long(int_bytes):
- """Convert 4 bytes in little-endian to an integer.
-
- XXX Temporary until marshal's long function are exposed.
-
- """
- x = int_bytes[0]
- x |= int_bytes[1] << 8
- x |= int_bytes[2] << 16
- x |= int_bytes[3] << 24
- return x
-
-
-# Required built-in modules.
-try:
- import posix as _os
-except ImportError:
- try:
- import nt as _os
- except ImportError:
- try:
- import os2 as _os
- except ImportError:
- raise ImportError('posix, nt, or os2 module required for importlib')
-_bootstrap._os = _os
-import imp, sys, marshal, errno, _io
-_bootstrap.imp = imp
-_bootstrap.sys = sys
-_bootstrap.marshal = marshal
-_bootstrap.errno = errno
-_bootstrap._io = _io
-import _warnings
-_bootstrap._warnings = _warnings
-
-
-from os import sep
-# For os.path.join replacement; pull from Include/osdefs.h:SEP .
-_bootstrap.path_sep = sep
-
-_bootstrap._case_ok = _case_ok
-marshal._w_long = _w_long
-marshal._r_long = _r_long
+_bootstrap._setup(sys, imp)
# Public API #########################################################
@@ -105,6 +21,14 @@ marshal._r_long = _r_long
from ._bootstrap import __import__
+def invalidate_caches():
+ """Call the invalidate_caches() method on all finders stored in
+ sys.path_importer_cache (where implemented)."""
+ for finder in sys.path_importer_cache.values():
+ if hasattr(finder, 'invalidate_caches'):
+ finder.invalidate_caches()
+
+
def import_module(name, package=None):
"""Import a module.
diff --git a/Lib/importlib/_bootstrap.py b/Lib/importlib/_bootstrap.py
index 90eb1a7..e0f86fc 100644
--- a/Lib/importlib/_bootstrap.py
+++ b/Lib/importlib/_bootstrap.py
@@ -7,9 +7,10 @@ work. One should use importlib as the public-facing version of this module.
"""
-# Injected modules are '_warnings', 'imp', 'sys', 'marshal', 'errno', '_io',
+# Injected modules are '_warnings', 'imp', 'sys', 'marshal', '_io',
# and '_os' (a.k.a. 'posix', 'nt' or 'os2').
# Injected attribute is path_sep.
+# Most injection is handled by _setup().
#
# When editing this code be aware that code executed at import time CANNOT
# reference any injected objects! This includes not only global code but also
@@ -18,11 +19,57 @@ work. One should use importlib as the public-facing version of this module.
# Bootstrap-related code ######################################################
+CASE_INSENSITIVE_PLATFORMS = 'win', 'cygwin', 'darwin'
+
+
+def _make_relax_case():
+ if any(map(sys.platform.startswith, CASE_INSENSITIVE_PLATFORMS)):
+ def _relax_case():
+ """True if filenames must be checked case-insensitively."""
+ return b'PYTHONCASEOK' in _os.environ
+ else:
+ def _relax_case():
+ """True if filenames must be checked case-insensitively."""
+ return False
+ return _relax_case
+
+
+# TODO: Expose from marshal
+def _w_long(x):
+ """Convert a 32-bit integer to little-endian.
+
+ XXX Temporary until marshal's long functions are exposed.
+
+ """
+ x = int(x)
+ int_bytes = []
+ int_bytes.append(x & 0xFF)
+ int_bytes.append((x >> 8) & 0xFF)
+ int_bytes.append((x >> 16) & 0xFF)
+ int_bytes.append((x >> 24) & 0xFF)
+ return bytearray(int_bytes)
+
+
+# TODO: Expose from marshal
+def _r_long(int_bytes):
+ """Convert 4 bytes in little-endian to an integer.
+
+ XXX Temporary until marshal's long functions are exposed.
+
+ """
+ x = int_bytes[0]
+ x |= int_bytes[1] << 8
+ x |= int_bytes[2] << 16
+ x |= int_bytes[3] << 24
+ return x
+
+
+
# XXX Could also expose Modules/getpath.c:joinpath()
def _path_join(*args):
"""Replacement for os.path.join."""
return path_sep.join(x[:-len(path_sep)] if x.endswith(path_sep) else x
- for x in args if x)
+ for x in args if x)
def _path_exists(path):
@@ -80,10 +127,32 @@ def _path_absolute(path):
return _path_join(_os.getcwd(), path)
+def _write_atomic(path, data):
+ """Best-effort function to write data to a path atomically.
+ Be prepared to handle a FileExistsError if concurrent writing of the
+ temporary file is attempted."""
+ # id() is used to generate a pseudo-random filename.
+ path_tmp = '{}.{}'.format(path, id(path))
+ fd = _os.open(path_tmp, _os.O_EXCL | _os.O_CREAT | _os.O_WRONLY, 0o666)
+ try:
+ # We first write data to a temporary file, and then use os.replace() to
+ # perform an atomic rename.
+ with _io.FileIO(fd, 'wb') as file:
+ file.write(data)
+ _os.replace(path_tmp, path)
+ except OSError:
+ try:
+ _os.unlink(path_tmp)
+ except OSError:
+ pass
+ raise
+
+
def _wrap(new, old):
"""Simple substitute for functools.wraps."""
- for replace in ['__module__', '__name__', '__doc__']:
- setattr(new, replace, getattr(old, replace))
+ for replace in ['__module__', '__name__', '__qualname__', '__doc__']:
+ if hasattr(old, replace):
+ setattr(new, replace, getattr(old, replace))
new.__dict__.update(old.__dict__)
@@ -91,28 +160,29 @@ code_type = type(_wrap.__code__)
# Finder/loader utility code ##################################################
+
def set_package(fxn):
"""Set __package__ on the returned module."""
- def wrapper(*args, **kwargs):
+ def set_package_wrapper(*args, **kwargs):
module = fxn(*args, **kwargs)
if not hasattr(module, '__package__') or module.__package__ is None:
module.__package__ = module.__name__
if not hasattr(module, '__path__'):
module.__package__ = module.__package__.rpartition('.')[0]
return module
- _wrap(wrapper, fxn)
- return wrapper
+ _wrap(set_package_wrapper, fxn)
+ return set_package_wrapper
def set_loader(fxn):
"""Set __loader__ on the returned module."""
- def wrapper(self, *args, **kwargs):
+ def set_loader_wrapper(self, *args, **kwargs):
module = fxn(self, *args, **kwargs)
if not hasattr(module, '__loader__'):
module.__loader__ = self
return module
- _wrap(wrapper, fxn)
- return wrapper
+ _wrap(set_loader_wrapper, fxn)
+ return set_loader_wrapper
def module_for_loader(fxn):
@@ -128,7 +198,7 @@ def module_for_loader(fxn):
the second argument.
"""
- def decorated(self, fullname, *args, **kwargs):
+ def module_for_loader_wrapper(self, fullname, *args, **kwargs):
module = sys.modules.get(fullname)
is_reload = bool(module)
if not is_reload:
@@ -143,8 +213,8 @@ def module_for_loader(fxn):
if not is_reload:
del sys.modules[fullname]
raise
- _wrap(decorated, fxn)
- return decorated
+ _wrap(module_for_loader_wrapper, fxn)
+ return module_for_loader_wrapper
def _check_name(method):
@@ -155,32 +225,32 @@ def _check_name(method):
compared against. If the comparison fails then ImportError is raised.
"""
- def inner(self, name, *args, **kwargs):
+ def _check_name_wrapper(self, name, *args, **kwargs):
if self._name != name:
raise ImportError("loader cannot handle %s" % name)
return method(self, name, *args, **kwargs)
- _wrap(inner, method)
- return inner
+ _wrap(_check_name_wrapper, method)
+ return _check_name_wrapper
def _requires_builtin(fxn):
"""Decorator to verify the named module is built-in."""
- def wrapper(self, fullname):
+ def _requires_builtin_wrapper(self, fullname):
if fullname not in sys.builtin_module_names:
raise ImportError("{0} is not a built-in module".format(fullname))
return fxn(self, fullname)
- _wrap(wrapper, fxn)
- return wrapper
+ _wrap(_requires_builtin_wrapper, fxn)
+ return _requires_builtin_wrapper
def _requires_frozen(fxn):
"""Decorator to verify the named module is frozen."""
- def wrapper(self, fullname):
+ def _requires_frozen_wrapper(self, fullname):
if not imp.is_frozen(fullname):
raise ImportError("{0} is not a frozen module".format(fullname))
return fxn(self, fullname)
- _wrap(wrapper, fxn)
- return wrapper
+ _wrap(_requires_frozen_wrapper, fxn)
+ return _requires_frozen_wrapper
def _suffix_list(suffix_type):
@@ -240,7 +310,7 @@ class BuiltinImporter:
@classmethod
@_requires_builtin
def is_package(cls, fullname):
- """Return None as built-in module are never packages."""
+ """Return None as built-in modules are never packages."""
return False
@@ -302,25 +372,42 @@ class _LoaderBasics:
filename = self.get_filename(fullname).rpartition(path_sep)[2]
return filename.rsplit('.', 1)[0] == '__init__'
- def _bytes_from_bytecode(self, fullname, data, source_mtime):
+ def _bytes_from_bytecode(self, fullname, data, source_stats):
"""Return the marshalled bytes from bytecode, verifying the magic
- number and timestamp along the way.
+ number, timestamp and source size along the way.
- If source_mtime is None then skip the timestamp check.
+ If source_stats is None then skip the timestamp check.
"""
magic = data[:4]
raw_timestamp = data[4:8]
+ raw_size = data[8:12]
if len(magic) != 4 or magic != imp.get_magic():
raise ImportError("bad magic number in {}".format(fullname))
elif len(raw_timestamp) != 4:
raise EOFError("bad timestamp in {}".format(fullname))
- elif source_mtime is not None:
- if marshal._r_long(raw_timestamp) != source_mtime:
- raise ImportError("bytecode is stale for {}".format(fullname))
+ elif len(raw_size) != 4:
+ raise EOFError("bad size in {}".format(fullname))
+ if source_stats is not None:
+ try:
+ source_mtime = int(source_stats['mtime'])
+ except KeyError:
+ pass
+ else:
+ if _r_long(raw_timestamp) != source_mtime:
+ raise ImportError(
+ "bytecode is stale for {}".format(fullname))
+ try:
+ source_size = source_stats['size'] & 0xFFFFFFFF
+ except KeyError:
+ pass
+ else:
+ if _r_long(raw_size) != source_size:
+ raise ImportError(
+ "bytecode is stale for {}".format(fullname))
# Can't return the code object as errors from marshal loading need to
# propagate even when source is available.
- return data[8:]
+ return data[12:]
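The bytecode header thus grows from 8 to 12 bytes: 4-byte magic, 4-byte
little-endian source mtime, and now a 4-byte source size. A sketch of building
the new layout, mirroring the write path later in this patch (`_w_long` is the
helper above; `code_obj`, `mtime`, and `src_size` are illustrative names):

    import imp, marshal

    def make_pyc(code_obj, mtime, src_size):
        data = bytearray(imp.get_magic())     # bytes 0:4   magic number
        data.extend(_w_long(mtime))           # bytes 4:8   source mtime
        data.extend(_w_long(src_size))        # bytes 8:12  source size
        data.extend(marshal.dumps(code_obj))  # bytes 12:   marshalled code
        return data
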
@module_for_loader
def _load_module(self, module, *, sourceless=False):
@@ -348,11 +435,20 @@ class SourceLoader(_LoaderBasics):
def path_mtime(self, path):
"""Optional method that returns the modification time (an int) for the
specified path, where path is a str.
+ """
+ raise NotImplementedError
- Implementing this method allows the loader to read bytecode files.
+ def path_stats(self, path):
+ """Optional method returning a metadata dict for the specified path
+ to by the path (str).
+ Possible keys:
+ - 'mtime' (mandatory) is the numeric timestamp of last source
+ code modification;
+ - 'size' (optional) is the size in bytes of the source code.
+ Implementing this method allows the loader to read bytecode files.
"""
- raise NotImplementedError
+ return {'mtime': self.path_mtime(path)}
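A loader only needs to supply path_stats() to opt in to bytecode reading; the
default above derives it from path_mtime() for older loaders. A hypothetical
in-memory subclass for illustration (class and attribute names invented,
remaining abstract methods omitted):

    class DictSourceLoader(SourceLoader):
        def __init__(self, sources):
            self._sources = sources          # maps path -> (bytes, mtime)

        def get_data(self, path):
            return self._sources[path][0]

        def path_stats(self, path):
            data, mtime = self._sources[path]
            return {'mtime': mtime, 'size': len(data)}  # 'size' is optional
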
def set_data(self, path, data):
"""Optional method which writes data (bytes) to a file path (a str).
@@ -378,7 +474,7 @@ class SourceLoader(_LoaderBasics):
def get_code(self, fullname):
"""Concrete implementation of InspectLoader.get_code.
- Reading of bytecode requires path_mtime to be implemented. To write
+ Reading of bytecode requires path_stats to be implemented. To write
bytecode, set_data must also be implemented.
"""
@@ -387,10 +483,11 @@ class SourceLoader(_LoaderBasics):
source_mtime = None
if bytecode_path is not None:
try:
- source_mtime = self.path_mtime(source_path)
+ st = self.path_stats(source_path)
except NotImplementedError:
pass
else:
+ source_mtime = int(st['mtime'])
try:
data = self.get_data(bytecode_path)
except IOError:
@@ -398,12 +495,13 @@ class SourceLoader(_LoaderBasics):
else:
try:
bytes_data = self._bytes_from_bytecode(fullname, data,
- source_mtime)
+ st)
except (ImportError, EOFError):
pass
else:
found = marshal.loads(bytes_data)
if isinstance(found, code_type):
+ imp._fix_co_filename(found, source_path)
return found
else:
msg = "Non-code object in {}"
@@ -412,12 +510,13 @@ class SourceLoader(_LoaderBasics):
code_object = compile(source_bytes, source_path, 'exec',
dont_inherit=True)
if (not sys.dont_write_bytecode and bytecode_path is not None and
- source_mtime is not None):
+ source_mtime is not None):
# If e.g. Jython ever implements imp.cache_from_source to have
# their own cached file format, this block of code will most likely
# throw an exception.
data = bytearray(imp.get_magic())
- data.extend(marshal._w_long(source_mtime))
+ data.extend(_w_long(source_mtime))
+ data.extend(_w_long(len(source_bytes)))
data.extend(marshal.dumps(code_object))
try:
self.set_data(bytecode_path, data)
@@ -462,9 +561,10 @@ class _SourceFileLoader(_FileLoader, SourceLoader):
"""Concrete implementation of SourceLoader using the file system."""
- def path_mtime(self, path):
- """Return the modification time for the path."""
- return int(_os.stat(path).st_mtime)
+ def path_stats(self, path):
+ """Return the metadat for the path."""
+ st = _os.stat(path)
+ return {'mtime': st.st_mtime, 'size': st.st_size}
def set_data(self, path, data):
"""Write bytes data to a file."""
@@ -479,28 +579,19 @@ class _SourceFileLoader(_FileLoader, SourceLoader):
parent = _path_join(parent, part)
try:
_os.mkdir(parent)
- except OSError as exc:
+ except FileExistsError:
# Probably another Python process already created the dir.
- if exc.errno == errno.EEXIST:
- continue
- else:
- raise
- except IOError as exc:
+ continue
+ except PermissionError:
# If can't get proper access, then just forget about writing
# the data.
- if exc.errno == errno.EACCES:
- return
- else:
- raise
- try:
- with _io.FileIO(path, 'wb') as file:
- file.write(data)
- except IOError as exc:
- # Don't worry if you can't write bytecode.
- if exc.errno == errno.EACCES:
return
- else:
- raise
+ try:
+ _write_atomic(path, data)
+ except (PermissionError, FileExistsError):
+ # Don't worry if you can't write bytecode or someone is writing
+ # it at the same time.
+ pass
class _SourcelessFileLoader(_FileLoader, _LoaderBasics):
@@ -534,12 +625,6 @@ class _ExtensionFileLoader:
"""
def __init__(self, name, path):
- """Initialize the loader.
-
- If is_pkg is True then an exception is raised as extension modules
- cannot be the __init__ module for an extension module.
-
- """
self._name = name
self._path = path
@@ -608,6 +693,8 @@ class PathFinder:
the default hook, for which ImportError is raised.
"""
+ if path == '':
+ path = '.'
try:
finder = sys.path_importer_cache[path]
except KeyError:
@@ -659,29 +746,61 @@ class _FileFinder:
for suffix in detail.suffixes)
self.packages = packages
self.modules = modules
- self.path = path
+ # Base (directory) path
+ self.path = path or '.'
+ self._path_mtime = -1
+ self._path_cache = set()
+ self._relaxed_path_cache = set()
+
+ def invalidate_caches(self):
+ """Invalidate the directory mtime."""
+ self._path_mtime = -1
def find_module(self, fullname):
"""Try to find a loader for the specified module."""
tail_module = fullname.rpartition('.')[2]
- base_path = _path_join(self.path, tail_module)
- if _path_isdir(base_path) and _case_ok(self.path, tail_module):
- for suffix, loader in self.packages:
- init_filename = '__init__' + suffix
- full_path = _path_join(base_path, init_filename)
- if (_path_isfile(full_path) and
- _case_ok(base_path, init_filename)):
- return loader(fullname, full_path)
- else:
- msg = "Not importing directory {}: missing __init__"
- _warnings.warn(msg.format(base_path), ImportWarning)
+ try:
+ mtime = _os.stat(self.path).st_mtime
+ except OSError:
+ mtime = -1
+ if mtime != self._path_mtime:
+ self._fill_cache()
+ self._path_mtime = mtime
+ # tail_module keeps the original casing, for __file__ and friends
+ if _relax_case():
+ cache = self._relaxed_path_cache
+ cache_module = tail_module.lower()
+ else:
+ cache = self._path_cache
+ cache_module = tail_module
+ if cache_module in cache:
+ base_path = _path_join(self.path, tail_module)
+ if _path_isdir(base_path):
+ for suffix, loader in self.packages:
+ init_filename = '__init__' + suffix
+ full_path = _path_join(base_path, init_filename)
+ if _path_isfile(full_path):
+ return loader(fullname, full_path)
+ else:
+ msg = "Not importing directory {}: missing __init__"
+ _warnings.warn(msg.format(base_path), ImportWarning)
for suffix, loader in self.modules:
- mod_filename = tail_module + suffix
- full_path = _path_join(self.path, mod_filename)
- if _path_isfile(full_path) and _case_ok(self.path, mod_filename):
- return loader(fullname, full_path)
+ if cache_module + suffix in cache:
+ full_path = _path_join(self.path, tail_module + suffix)
+ if _path_isfile(full_path):
+ return loader(fullname, full_path)
return None
+ def _fill_cache(self):
+ """Fill the cache of potential modules and packages for this directory."""
+ path = self.path
+ contents = _os.listdir(path)
+ # We store two cached versions, to handle runtime changes of the
+ # PYTHONCASEOK environment variable.
+ self._path_cache = set(contents)
+ self._relaxed_path_cache = set(fn.lower() for fn in contents)
+
+
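The finder now stats the directory once per change and answers lookups from a
cached listdir(), so a module created after the cache was filled stays
invisible until the directory mtime ticks or the cache is invalidated. A
hypothetical session (path invented):

    finder = _FileFinder('/tmp/pkgs', _SourceFinderDetails())
    finder.find_module('spam')    # stats '/tmp/pkgs', fills the name caches
    # ...write a new spam.py into '/tmp/pkgs'...
    finder.invalidate_caches()    # reset the mtime so the next lookup re-lists
    finder.find_module('spam')    # now consults the refreshed cache
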
class _SourceFinderDetails:
loader = _SourceFileLoader
@@ -756,40 +875,100 @@ class _ImportLockContext:
imp.release_lock()
+def _resolve_name(name, package, level):
+ """Resolve a relative module name to an absolute one."""
+ bits = package.rsplit('.', level - 1)
+ if len(bits) < level:
+ raise ValueError('attempted relative import beyond top-level package')
+ base = bits[0]
+ return '{0}.{1}'.format(base, name) if name else base
+
+
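Concretely, one leading dot keeps the full package and each additional dot
strips one trailing component (values illustrative):

    _resolve_name('mod', 'pkg.sub', 1)  # -> 'pkg.sub.mod'  (from . import mod)
    _resolve_name('mod', 'pkg.sub', 2)  # -> 'pkg.mod'      (from .. import mod)
    _resolve_name('',    'pkg.sub', 2)  # -> 'pkg'
    _resolve_name('mod', 'pkg',     2)  # ValueError: beyond top-level package
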
+def _find_module(name, path):
+ """Find a module's loader."""
+ meta_path = sys.meta_path + _IMPLICIT_META_PATH
+ for finder in meta_path:
+ loader = finder.find_module(name, path)
+ if loader is not None:
+ # The parent import may have already imported this module.
+ if name not in sys.modules:
+ return loader
+ else:
+ return sys.modules[name].__loader__
+ else:
+ return None
+
+
+def _sanity_check(name, package, level):
+ """Verify arguments are "sane"."""
+ if not isinstance(name, str):
+ raise TypeError("module name must be str, not {}".format(type(name)))
+ if level < 0:
+ raise ValueError('level must be >= 0')
+ if package:
+ if not isinstance(package, str):
+ raise TypeError("__package__ not set to a string")
+ elif package not in sys.modules:
+ msg = ("Parent module {0!r} not loaded, cannot perform relative "
+ "import")
+ raise SystemError(msg.format(package))
+ if not name and level == 0:
+ raise ValueError("Empty module name")
+
+
_IMPLICIT_META_PATH = [BuiltinImporter, FrozenImporter, _DefaultPathFinder]
-_ERR_MSG = 'No module named {}'
+_ERR_MSG = 'No module named {!r}'
+
+def _find_and_load(name, import_):
+ """Find and load the module."""
+ path = None
+ parent = name.rpartition('.')[0]
+ if parent:
+ if parent not in sys.modules:
+ import_(parent)
+ # Backwards-compatibility; be nicer to skip the dict lookup.
+ parent_module = sys.modules[parent]
+ try:
+ path = parent_module.__path__
+ except AttributeError:
+ msg = (_ERR_MSG + '; {} is not a package').format(name, parent)
+ raise ImportError(msg)
+ loader = _find_module(name, path)
+ if loader is None:
+ raise ImportError(_ERR_MSG.format(name))
+ elif name not in sys.modules:
+ # The parent import may have already imported this module.
+ loader.load_module(name)
+ # Backwards-compatibility; be nicer to skip the dict lookup.
+ module = sys.modules[name]
+ if parent:
+ # Set the module as an attribute on its parent.
+ parent_module = sys.modules[parent]
+ setattr(parent_module, name.rpartition('.')[2], module)
+ # Set __package__ if the loader did not.
+ if not hasattr(module, '__package__') or module.__package__ is None:
+ try:
+ module.__package__ = module.__name__
+ if not hasattr(module, '__path__'):
+ module.__package__ = module.__package__.rpartition('.')[0]
+ except AttributeError:
+ pass
+ return module
+
def _gcd_import(name, package=None, level=0):
"""Import and return the module based on its name, the package the call is
being made from, and the level adjustment.
This function represents the greatest common denominator of functionality
- between import_module and __import__. This includes settting __package__ if
+ between import_module and __import__. This includes setting __package__ if
the loader did not.
"""
- if package:
- if not hasattr(package, 'rindex'):
- raise ValueError("__package__ not set to a string")
- elif package not in sys.modules:
- msg = ("Parent module {0!r} not loaded, cannot perform relative "
- "import")
- raise SystemError(msg.format(package))
- if not name and level == 0:
- raise ValueError("Empty module name")
+ _sanity_check(name, package, level)
if level > 0:
- dot = len(package)
- for x in range(level, 1, -1):
- try:
- dot = package.rindex('.', 0, dot)
- except ValueError:
- raise ValueError("attempted relative import beyond "
- "top-level package")
- if name:
- name = "{0}.{1}".format(package[:dot], name)
- else:
- name = package[:dot]
+ name = _resolve_name(name, package, level)
with _ImportLockContext():
try:
module = sys.modules[name]
@@ -799,45 +978,46 @@ def _gcd_import(name, package=None, level=0):
raise ImportError(message)
return module
except KeyError:
- pass
- parent = name.rpartition('.')[0]
- path = None
- if parent:
- if parent not in sys.modules:
- _gcd_import(parent)
- # Backwards-compatibility; be nicer to skip the dict lookup.
- parent_module = sys.modules[parent]
- try:
- path = parent_module.__path__
- except AttributeError:
- msg = (_ERR_MSG + '; {} is not a package').format(name, parent)
- raise ImportError(msg)
- meta_path = sys.meta_path + _IMPLICIT_META_PATH
- for finder in meta_path:
- loader = finder.find_module(name, path)
- if loader is not None:
- # The parent import may have already imported this module.
- if name not in sys.modules:
- loader.load_module(name)
- break
- else:
- raise ImportError(_ERR_MSG.format(name))
- # Backwards-compatibility; be nicer to skip the dict lookup.
- module = sys.modules[name]
- if parent:
- # Set the module as an attribute on its parent.
- setattr(parent_module, name.rpartition('.')[2], module)
- # Set __package__ if the loader did not.
- if not hasattr(module, '__package__') or module.__package__ is None:
- # Watch out for what comes out of sys.modules to not be a module,
- # e.g. an int.
+ pass # Don't want to chain the exception
+ return _find_and_load(name, _gcd_import)
+
+
+def _handle_fromlist(module, fromlist, import_):
+ """Figure out what __import__ should return.
+
+ The import_ parameter is a callable which takes the name of module to
+ import. It is required to decouple the function from assuming importlib's
+ import implementation is desired.
+
+ """
+ # The hell that is fromlist ...
+ # If a package was imported, try to import stuff from fromlist.
+ if hasattr(module, '__path__'):
+ if '*' in fromlist and hasattr(module, '__all__'):
+ fromlist = list(fromlist)
+ fromlist.remove('*')
+ fromlist.extend(module.__all__)
+ for x in (y for y in fromlist if not hasattr(module, y)):
try:
- module.__package__ = module.__name__
- if not hasattr(module, '__path__'):
- module.__package__ = module.__package__.rpartition('.')[0]
- except AttributeError:
+ import_('{0}.{1}'.format(module.__name__, x))
+ except ImportError:
pass
- return module
+ return module
+
+
+def _calc___package__(globals):
+ """Calculate what __package__ should be.
+
+ __package__ is not guaranteed to be defined or could be set to None
+ to represent that its proper value is unknown.
+
+ """
+ package = globals.get('__package__')
+ if package is None:
+ package = globals['__name__']
+ if '__path__' not in globals:
+ package = package.rpartition('.')[0]
+ return package
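For typical module globals this resolves as follows (dicts illustrative):

    _calc___package__({'__package__': 'pkg'})                   # -> 'pkg'
    _calc___package__({'__package__': None,
                       '__name__': 'pkg.mod'})                  # -> 'pkg'
    _calc___package__({'__package__': None,
                       '__name__': 'pkg', '__path__': []})      # -> 'pkg'
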
def __import__(name, globals={}, locals={}, fromlist=[], level=0):
@@ -851,20 +1031,11 @@ def __import__(name, globals={}, locals={}, fromlist=[], level=0):
import (e.g. ``from ..pkg import mod`` would have a 'level' of 2).
"""
- if not hasattr(name, 'rpartition'):
- raise TypeError("module name must be str, not {}".format(type(name)))
if level == 0:
module = _gcd_import(name)
else:
- # __package__ is not guaranteed to be defined or could be set to None
- # to represent that it's proper value is unknown
- package = globals.get('__package__')
- if package is None:
- package = globals['__name__']
- if '__path__' not in globals:
- package = package.rpartition('.')[0]
+ package = _calc___package__(globals)
module = _gcd_import(name, package, level)
- # The hell that is fromlist ...
if not fromlist:
# Return up to the first dot in 'name'. This is complicated by the fact
# that 'name' may be relative.
@@ -876,15 +1047,62 @@ def __import__(name, globals={}, locals={}, fromlist=[], level=0):
cut_off = len(name) - len(name.partition('.')[0])
return sys.modules[module.__name__[:-cut_off]]
else:
- # If a package was imported, try to import stuff from fromlist.
- if hasattr(module, '__path__'):
- if '*' in fromlist and hasattr(module, '__all__'):
- fromlist = list(fromlist)
- fromlist.remove('*')
- fromlist.extend(module.__all__)
- for x in (y for y in fromlist if not hasattr(module,y)):
- try:
- _gcd_import('{0}.{1}'.format(module.__name__, x))
- except ImportError:
- pass
- return module
+ return _handle_fromlist(module, fromlist, _gcd_import)
+
+
+def _setup(sys_module, imp_module):
+ """Setup importlib by importing needed built-in modules and injecting them
+ into the global namespace.
+
+ As sys is needed for sys.modules access and imp is needed to load built-in
+ modules, those two modules must be explicitly passed in.
+
+ """
+ global imp, sys
+ imp = imp_module
+ sys = sys_module
+
+ for module in (imp, sys):
+ if not hasattr(module, '__loader__'):
+ module.__loader__ = BuiltinImporter
+
+ self_module = sys.modules[__name__]
+ for builtin_name in ('_io', '_warnings', 'builtins', 'marshal'):
+ if builtin_name not in sys.modules:
+ builtin_module = BuiltinImporter.load_module(builtin_name)
+ else:
+ builtin_module = sys.modules[builtin_name]
+ setattr(self_module, builtin_name, builtin_module)
+
+ for builtin_os, path_sep in [('posix', '/'), ('nt', '\\'), ('os2', '\\')]:
+ if builtin_os in sys.modules:
+ os_module = sys.modules[builtin_os]
+ break
+ else:
+ try:
+ os_module = BuiltinImporter.load_module(builtin_os)
+ # TODO: rip out os2 code after 3.3 is released as per PEP 11
+ if builtin_os == 'os2' and 'EMX GCC' in sys.version:
+ path_sep = '/'
+ break
+ except ImportError:
+ continue
+ else:
+ raise ImportError('importlib requires posix or nt')
+ setattr(self_module, '_os', os_module)
+ setattr(self_module, 'path_sep', path_sep)
+ # Constants
+ setattr(self_module, '_relax_case', _make_relax_case())
+
+
+def _install(sys_module, imp_module):
+ """Install importlib as the implementation of import.
+
+ _setup() is called first to import imp and sys and inject them into this
+ module's global namespace.
+
+ """
+ _setup(sys_module, imp_module)
+ orig_import = builtins.__import__
+ builtins.__import__ = __import__
+ builtins.__original_import__ = orig_import
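In normal operation the interpreter performs this dance during startup; a
hypothetical manual bootstrap would look like:

    import imp, sys
    from importlib import _bootstrap

    _bootstrap._install(sys, imp)  # _setup() injects _io, _os, marshal, ...
    # builtins.__import__ is now the pure-Python version;
    # builtins.__original_import__ still holds the previous one.
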
diff --git a/Lib/importlib/abc.py b/Lib/importlib/abc.py
index fa343f8..22a7c1a 100644
--- a/Lib/importlib/abc.py
+++ b/Lib/importlib/abc.py
@@ -1,15 +1,11 @@
"""Abstract base classes related to import."""
from . import _bootstrap
from . import machinery
-from . import util
import abc
import imp
-import io
import marshal
-import os.path
import sys
import tokenize
-import types
import warnings
@@ -123,7 +119,20 @@ class SourceLoader(_bootstrap.SourceLoader, ResourceLoader, ExecutionLoader):
def path_mtime(self, path):
"""Return the (int) modification time for the path (str)."""
- raise NotImplementedError
+ if self.path_stats.__func__ is SourceLoader.path_stats:
+ raise NotImplementedError
+ return int(self.path_stats(path)['mtime'])
+
+ def path_stats(self, path):
+ """Return a metadata dict for the source pointed to by the path (str).
+ Possible keys:
+ - 'mtime' (mandatory) is the numeric timestamp of last source
+ code modification;
+ - 'size' (optional) is the size in bytes of the source code.
+ """
+ if self.path_mtime.__func__ is SourceLoader.path_mtime:
+ raise NotImplementedError
+ return {'mtime': self.path_mtime(path)}
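The two defaults delegate to each other via the __func__ identity checks, so
overriding either method suffices; only a subclass that overrides neither hits
NotImplementedError. Hypothetical subclasses for illustration (other abstract
methods omitted):

    class OldStyle(SourceLoader):
        def path_mtime(self, path):    # legacy API: path_stats() is derived
            return 1357924680

    class NewStyle(SourceLoader):
        def path_stats(self, path):    # new API: path_mtime() is derived
            return {'mtime': 1357924680, 'size': 42}
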
def set_data(self, path, data):
"""Write the bytes to the path (if possible).
@@ -195,7 +204,7 @@ class PyLoader(SourceLoader):
"use SourceLoader instead. "
"See the importlib documentation on how to be "
"compatible with Python 3.1 onwards.",
- PendingDeprecationWarning)
+ DeprecationWarning)
path = self.source_path(fullname)
if path is None:
raise ImportError
@@ -234,7 +243,7 @@ class PyPycLoader(PyLoader):
"removal in Python 3.4; use SourceLoader instead. "
"If Python 3.1 compatibility is required, see the "
"latest documentation for PyLoader.",
- PendingDeprecationWarning)
+ DeprecationWarning)
source_timestamp = self.source_mtime(fullname)
# Try to use bytecode if it is available.
bytecode_path = self.bytecode_path(fullname)
@@ -243,15 +252,17 @@ class PyPycLoader(PyLoader):
try:
magic = data[:4]
if len(magic) < 4:
- raise ImportError("bad magic number in {}".format(fullname))
+ raise ImportError(
+ "bad magic number in {}".format(fullname))
raw_timestamp = data[4:8]
if len(raw_timestamp) < 4:
raise EOFError("bad timestamp in {}".format(fullname))
- pyc_timestamp = marshal._r_long(raw_timestamp)
+ pyc_timestamp = _bootstrap._r_long(raw_timestamp)
bytecode = data[8:]
# Verify that the magic number is valid.
if imp.get_magic() != magic:
- raise ImportError("bad magic number in {}".format(fullname))
+ raise ImportError(
+ "bad magic number in {}".format(fullname))
# Verify that the bytecode is not stale (only matters when
# there is source to fall back on).
if source_timestamp:
@@ -279,7 +290,7 @@ class PyPycLoader(PyLoader):
# Generate bytecode and write it out.
if not sys.dont_write_bytecode:
data = bytearray(imp.get_magic())
- data.extend(marshal._w_long(source_timestamp))
+ data.extend(_bootstrap._w_long(source_timestamp))
data.extend(marshal.dumps(code_object))
self.write_bytecode(fullname, data)
return code_object
diff --git a/Lib/importlib/test/__main__.py b/Lib/importlib/test/__main__.py
index decc53d..92171b2 100644
--- a/Lib/importlib/test/__main__.py
+++ b/Lib/importlib/test/__main__.py
@@ -4,26 +4,27 @@ Specifying the ``--builtin`` flag will run tests, where applicable, with
builtins.__import__ instead of importlib.__import__.
"""
-import importlib
from importlib.test.import_ import util
import os.path
from test.support import run_unittest
-import sys
import unittest
def test_main():
- if '__pycache__' in __file__:
- parts = __file__.split(os.path.sep)
- start_dir = sep.join(parts[:-2])
- else:
- start_dir = os.path.dirname(__file__)
+ start_dir = os.path.dirname(__file__)
top_dir = os.path.dirname(os.path.dirname(start_dir))
test_loader = unittest.TestLoader()
- if '--builtin' in sys.argv:
- util.using___import__ = True
run_unittest(test_loader.discover(start_dir, top_level_dir=top_dir))
if __name__ == '__main__':
+ import argparse
+
+ parser = argparse.ArgumentParser(description='Execute the importlib test '
+ 'suite')
+ parser.add_argument('-b', '--builtin', action='store_true', default=False,
+ help='use builtins.__import__() instead of importlib')
+ args = parser.parse_args()
+ if args.builtin:
+ util.using___import__ = True
test_main()
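With argparse in place, likely invocations of the suite would be:

    python -m importlib.test             # run against importlib.__import__
    python -m importlib.test --builtin   # run against builtins.__import__
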
diff --git a/Lib/importlib/test/benchmark.py b/Lib/importlib/test/benchmark.py
index b5de6c6..87b1775 100644
--- a/Lib/importlib/test/benchmark.py
+++ b/Lib/importlib/test/benchmark.py
@@ -9,9 +9,11 @@ from .source import util as source_util
import decimal
import imp
import importlib
+import json
import os
import py_compile
import sys
+import tabnanny
import timeit
@@ -59,7 +61,7 @@ def builtin_mod(seconds, repeat):
def source_wo_bytecode(seconds, repeat):
- """Source w/o bytecode: simple"""
+ """Source w/o bytecode: small"""
sys.dont_write_bytecode = True
try:
name = '__importlib_test_benchmark__'
@@ -73,23 +75,30 @@ def source_wo_bytecode(seconds, repeat):
sys.dont_write_bytecode = False
-def decimal_wo_bytecode(seconds, repeat):
- """Source w/o bytecode: decimal"""
- name = 'decimal'
- decimal_bytecode = imp.cache_from_source(decimal.__file__)
- if os.path.exists(decimal_bytecode):
- os.unlink(decimal_bytecode)
- sys.dont_write_bytecode = True
- try:
- for result in bench(name, lambda: sys.modules.pop(name), repeat=repeat,
- seconds=seconds):
- yield result
- finally:
- sys.dont_write_bytecode = False
+def _wo_bytecode(module):
+ name = module.__name__
+ def benchmark_wo_bytecode(seconds, repeat):
+ """Source w/o bytecode: {}"""
+ bytecode_path = imp.cache_from_source(module.__file__)
+ if os.path.exists(bytecode_path):
+ os.unlink(bytecode_path)
+ sys.dont_write_bytecode = True
+ try:
+ for result in bench(name, lambda: sys.modules.pop(name),
+ repeat=repeat, seconds=seconds):
+ yield result
+ finally:
+ sys.dont_write_bytecode = False
+
+ benchmark_wo_bytecode.__doc__ = benchmark_wo_bytecode.__doc__.format(name)
+ return benchmark_wo_bytecode
+
+tabnanny_wo_bytecode = _wo_bytecode(tabnanny)
+decimal_wo_bytecode = _wo_bytecode(decimal)
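The factory stamps the module name into the template docstring, which main()
later uses as the benchmark's display name and result key, e.g.:

    assert tabnanny_wo_bytecode.__doc__ == 'Source w/o bytecode: tabnanny'
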
def source_writing_bytecode(seconds, repeat):
- """Source writing bytecode: simple"""
+ """Source writing bytecode: small"""
assert not sys.dont_write_bytecode
name = '__importlib_test_benchmark__'
with source_util.create_modules(name) as mapping:
@@ -101,19 +110,27 @@ def source_writing_bytecode(seconds, repeat):
yield result
-def decimal_writing_bytecode(seconds, repeat):
- """Source writing bytecode: decimal"""
- assert not sys.dont_write_bytecode
- name = 'decimal'
- def cleanup():
- sys.modules.pop(name)
- os.unlink(imp.cache_from_source(decimal.__file__))
- for result in bench(name, cleanup, repeat=repeat, seconds=seconds):
- yield result
+def _writing_bytecode(module):
+ name = module.__name__
+ def writing_bytecode_benchmark(seconds, repeat):
+ """Source writing bytecode: {}"""
+ assert not sys.dont_write_bytecode
+ def cleanup():
+ sys.modules.pop(name)
+ os.unlink(imp.cache_from_source(module.__file__))
+ for result in bench(name, cleanup, repeat=repeat, seconds=seconds):
+ yield result
+
+ writing_bytecode_benchmark.__doc__ = (
+ writing_bytecode_benchmark.__doc__.format(name))
+ return writing_bytecode_benchmark
+
+tabnanny_writing_bytecode = _writing_bytecode(tabnanny)
+decimal_writing_bytecode = _writing_bytecode(decimal)
def source_using_bytecode(seconds, repeat):
- """Bytecode w/ source: simple"""
+ """Source w/ bytecode: small"""
name = '__importlib_test_benchmark__'
with source_util.create_modules(name) as mapping:
py_compile.compile(mapping[name])
@@ -123,27 +140,56 @@ def source_using_bytecode(seconds, repeat):
yield result
-def decimal_using_bytecode(seconds, repeat):
- """Bytecode w/ source: decimal"""
- name = 'decimal'
- py_compile.compile(decimal.__file__)
- for result in bench(name, lambda: sys.modules.pop(name), repeat=repeat,
- seconds=seconds):
- yield result
+def _using_bytecode(module):
+ name = module.__name__
+ def using_bytecode_benchmark(seconds, repeat):
+ """Source w/ bytecode: {}"""
+ py_compile.compile(module.__file__)
+ for result in bench(name, lambda: sys.modules.pop(name), repeat=repeat,
+ seconds=seconds):
+ yield result
+ using_bytecode_benchmark.__doc__ = (
+ using_bytecode_benchmark.__doc__.format(name))
+ return using_bytecode_benchmark
-def main(import_):
+tabnanny_using_bytecode = _using_bytecode(tabnanny)
+decimal_using_bytecode = _using_bytecode(decimal)
+
+
+def main(import_, options):
+ if options.source_file:
+ with options.source_file:
+ prev_results = json.load(options.source_file)
+ else:
+ prev_results = {}
__builtins__.__import__ = import_
benchmarks = (from_cache, builtin_mod,
- source_using_bytecode, source_wo_bytecode,
source_writing_bytecode,
- decimal_using_bytecode, decimal_writing_bytecode,
- decimal_wo_bytecode,)
+ source_wo_bytecode, source_using_bytecode,
+ tabnanny_writing_bytecode,
+ tabnanny_wo_bytecode, tabnanny_using_bytecode,
+ decimal_writing_bytecode,
+ decimal_wo_bytecode, decimal_using_bytecode,
+ )
+ if options.benchmark:
+ for b in benchmarks:
+ if b.__doc__ == options.benchmark:
+ benchmarks = [b]
+ break
+ else:
+ print('Unknown benchmark: {!r}'.format(options.benchmark),
+ file=sys.stderr)
+ sys.exit(1)
seconds = 1
seconds_plural = 's' if seconds > 1 else ''
repeat = 3
- header = "Measuring imports/second over {} second{}, best out of {}\n"
- print(header.format(seconds, seconds_plural, repeat))
+ header = ('Measuring imports/second over {} second{}, best out of {}\n'
+ 'Entire benchmark run should take about {} seconds\n'
+ 'Using {!r} as __import__\n')
+ print(header.format(seconds, seconds_plural, repeat,
+ len(benchmarks) * seconds * repeat, __import__))
+ new_results = {}
for benchmark in benchmarks:
print(benchmark.__doc__, "[", end=' ')
sys.stdout.flush()
@@ -154,19 +200,40 @@ def main(import_):
sys.stdout.flush()
assert not sys.dont_write_bytecode
print("]", "best is", format(max(results), ',d'))
+ new_results[benchmark.__doc__] = results
+ if prev_results:
+ print('\n\nComparing new vs. old\n')
+ for benchmark in benchmarks:
+ benchmark_name = benchmark.__doc__
+ old_result = max(prev_results[benchmark_name])
+ new_result = max(new_results[benchmark_name])
+ result = '{:,d} vs. {:,d} ({:%})'.format(new_result,
+ old_result,
+ new_result/old_result)
+ print(benchmark_name, ':', result)
+ if options.dest_file:
+ with options.dest_file:
+ json.dump(new_results, options.dest_file, indent=2)
if __name__ == '__main__':
- import optparse
+ import argparse
- parser = optparse.OptionParser()
- parser.add_option('-b', '--builtin', dest='builtin', action='store_true',
+ parser = argparse.ArgumentParser()
+ parser.add_argument('-b', '--builtin', dest='builtin', action='store_true',
default=False, help="use the built-in __import__")
- options, args = parser.parse_args()
- if args:
- raise RuntimeError("unrecognized args: {}".format(args))
+ parser.add_argument('-r', '--read', dest='source_file',
+ type=argparse.FileType('r'),
+ help='file to read benchmark data from to compare '
+ 'against')
+ parser.add_argument('-w', '--write', dest='dest_file',
+ type=argparse.FileType('w'),
+ help='file to write benchmark data to')
+ parser.add_argument('--benchmark', dest='benchmark',
+ help='specific benchmark to run')
+ options = parser.parse_args()
import_ = __import__
if not options.builtin:
import_ = importlib.__import__
- main(import_)
+ main(import_, options)
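A plausible before/after workflow with the new -r/-w flags (file name
invented):

    python -m importlib.test.benchmark -w baseline.json  # record a baseline
    # ...modify importlib...
    python -m importlib.test.benchmark -r baseline.json  # print new vs. old
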
diff --git a/Lib/importlib/test/extension/test_case_sensitivity.py b/Lib/importlib/test/extension/test_case_sensitivity.py
index e062fb6..add830d 100644
--- a/Lib/importlib/test/extension/test_case_sensitivity.py
+++ b/Lib/importlib/test/extension/test_case_sensitivity.py
@@ -20,12 +20,18 @@ class ExtensionModuleCaseSensitivityTest(unittest.TestCase):
def test_case_sensitive(self):
with support.EnvironmentVarGuard() as env:
env.unset('PYTHONCASEOK')
+ if b'PYTHONCASEOK' in _bootstrap._os.environ:
+ self.skipTest('os.environ changes not reflected in '
+ '_os.environ')
loader = self.find_module()
self.assertIsNone(loader)
def test_case_insensitivity(self):
with support.EnvironmentVarGuard() as env:
env.set('PYTHONCASEOK', '1')
+ if b'PYTHONCASEOK' not in _bootstrap._os.environ:
+ self.skipTest('os.environ changes not reflected in '
+ '_os.environ')
loader = self.find_module()
self.assertTrue(hasattr(loader, 'load_module'))
diff --git a/Lib/importlib/test/import_/test___package__.py b/Lib/importlib/test/import_/test___package__.py
index 5056ae5..783cde1 100644
--- a/Lib/importlib/test/import_/test___package__.py
+++ b/Lib/importlib/test/import_/test___package__.py
@@ -67,7 +67,7 @@ class Using__package__(unittest.TestCase):
def test_bunk__package__(self):
globals = {'__package__': 42}
- with self.assertRaises(ValueError):
+ with self.assertRaises(TypeError):
import_util.import_('', globals, {}, ['relimport'], 1)
diff --git a/Lib/importlib/test/import_/test_api.py b/Lib/importlib/test/import_/test_api.py
index 9075d42..2fa1f90 100644
--- a/Lib/importlib/test/import_/test_api.py
+++ b/Lib/importlib/test/import_/test_api.py
@@ -12,6 +12,13 @@ class APITest(unittest.TestCase):
with self.assertRaises(TypeError):
util.import_(42)
+ def test_negative_level(self):
+ # Raise ValueError when a negative level is specified.
+ # PEP 328 did away with sys.module None entries and the ambiguity of
+ # absolute/relative imports.
+ with self.assertRaises(ValueError):
+ util.import_('os', globals(), level=-1)
+
def test_main():
from test.support import run_unittest
diff --git a/Lib/importlib/test/import_/test_path.py b/Lib/importlib/test/import_/test_path.py
index 2faa231..5713319 100644
--- a/Lib/importlib/test/import_/test_path.py
+++ b/Lib/importlib/test/import_/test_path.py
@@ -73,6 +73,16 @@ class FinderTests(unittest.TestCase):
loader = machinery.PathFinder.find_module(module)
self.assertTrue(loader is importer)
+ def test_path_importer_cache_empty_string(self):
+ # The empty string should create a finder using the cwd.
+ path = ''
+ module = '<test module>'
+ importer = util.mock_modules(module)
+ hook = import_util.mock_path_hook(os.curdir, importer=importer)
+ with util.import_state(path=[path], path_hooks=[hook]):
+ loader = machinery.PathFinder.find_module(module)
+ self.assertIs(loader, importer)
+ self.assertIn(os.curdir, sys.path_importer_cache)
class DefaultPathFinderTests(unittest.TestCase):
diff --git a/Lib/importlib/test/regrtest.py b/Lib/importlib/test/regrtest.py
index b103ae7..dc0eb97 100644
--- a/Lib/importlib/test/regrtest.py
+++ b/Lib/importlib/test/regrtest.py
@@ -5,13 +5,6 @@ invalidates are automatically skipped if the entire test suite is run.
Otherwise all command-line options valid for test.regrtest are also valid for
this script.
-XXX FAILING
- * test_import
- - test_incorrect_code_name
- file name differing between __file__ and co_filename (r68360 on trunk)
- - test_import_by_filename
- exception for trying to import by file name does not match
-
"""
import importlib
import sys
diff --git a/Lib/importlib/test/source/test_abc_loader.py b/Lib/importlib/test/source/test_abc_loader.py
index 3245907..01acda4 100644
--- a/Lib/importlib/test/source/test_abc_loader.py
+++ b/Lib/importlib/test/source/test_abc_loader.py
@@ -40,8 +40,10 @@ class SourceLoaderMock(SourceOnlyLoaderMock):
def __init__(self, path, magic=imp.get_magic()):
super().__init__(path)
self.bytecode_path = imp.cache_from_source(self.path)
+ self.source_size = len(self.source)
data = bytearray(magic)
- data.extend(marshal._w_long(self.source_mtime))
+ data.extend(importlib._w_long(self.source_mtime))
+ data.extend(importlib._w_long(self.source_size))
code_object = compile(self.source, self.path, 'exec',
dont_inherit=True)
data.extend(marshal.dumps(code_object))
@@ -56,9 +58,9 @@ class SourceLoaderMock(SourceOnlyLoaderMock):
else:
raise IOError
- def path_mtime(self, path):
+ def path_stats(self, path):
assert path == self.path
- return self.source_mtime
+ return {'mtime': self.source_mtime, 'size': self.source_size}
def set_data(self, path, data):
self.written[path] = bytes(data)
@@ -102,7 +104,7 @@ class PyLoaderMock(abc.PyLoader):
warnings.simplefilter("always")
path = super().get_filename(name)
assert len(w) == 1
- assert issubclass(w[0].category, PendingDeprecationWarning)
+ assert issubclass(w[0].category, DeprecationWarning)
return path
@@ -198,7 +200,7 @@ class PyPycLoaderMock(abc.PyPycLoader, PyLoaderMock):
warnings.simplefilter("always")
code_object = super().get_code(name)
assert len(w) == 1
- assert issubclass(w[0].category, PendingDeprecationWarning)
+ assert issubclass(w[0].category, DeprecationWarning)
return code_object
class PyLoaderTests(testing_abc.LoaderTests):
@@ -656,7 +658,8 @@ class SourceLoaderBytecodeTests(SourceLoaderTestHarness):
if bytecode_written:
self.assertIn(self.cached, self.loader.written)
data = bytearray(imp.get_magic())
- data.extend(marshal._w_long(self.loader.source_mtime))
+ data.extend(importlib._w_long(self.loader.source_mtime))
+ data.extend(importlib._w_long(self.loader.source_size))
data.extend(marshal.dumps(code_object))
self.assertEqual(self.loader.written[self.cached], bytes(data))
@@ -847,7 +850,7 @@ class AbstractMethodImplTests(unittest.TestCase):
# Required abstractmethods.
self.raises_NotImplementedError(ins, 'get_filename', 'get_data')
# Optional abstractmethods.
- self.raises_NotImplementedError(ins,'path_mtime', 'set_data')
+ self.raises_NotImplementedError(ins,'path_stats', 'set_data')
def test_PyLoader(self):
self.raises_NotImplementedError(self.PyLoader(), 'source_path',
diff --git a/Lib/importlib/test/source/test_case_sensitivity.py b/Lib/importlib/test/source/test_case_sensitivity.py
index 73777de..569f516 100644
--- a/Lib/importlib/test/source/test_case_sensitivity.py
+++ b/Lib/importlib/test/source/test_case_sensitivity.py
@@ -37,6 +37,9 @@ class CaseSensitivityTest(unittest.TestCase):
def test_sensitive(self):
with test_support.EnvironmentVarGuard() as env:
env.unset('PYTHONCASEOK')
+ if b'PYTHONCASEOK' in _bootstrap._os.environ:
+ self.skipTest('os.environ changes not reflected in '
+ '_os.environ')
sensitive, insensitive = self.sensitivity_test()
self.assertTrue(hasattr(sensitive, 'load_module'))
self.assertIn(self.name, sensitive.get_filename(self.name))
@@ -45,6 +48,9 @@ class CaseSensitivityTest(unittest.TestCase):
def test_insensitive(self):
with test_support.EnvironmentVarGuard() as env:
env.set('PYTHONCASEOK', '1')
+ if b'PYTHONCASEOK' not in _bootstrap._os.environ:
+ self.skipTest('os.environ changes not reflected in '
+ '_os.environ')
sensitive, insensitive = self.sensitivity_test()
self.assertTrue(hasattr(sensitive, 'load_module'))
self.assertIn(self.name, sensitive.get_filename(self.name))
diff --git a/Lib/importlib/test/source/test_file_loader.py b/Lib/importlib/test/source/test_file_loader.py
index c7a7d8f..21e718f 100644
--- a/Lib/importlib/test/source/test_file_loader.py
+++ b/Lib/importlib/test/source/test_file_loader.py
@@ -71,11 +71,6 @@ class SimpleTest(unittest.TestCase):
module_dict_id = id(module.__dict__)
with open(mapping['_temp'], 'w') as file:
file.write("testing_var = 42\n")
- # For filesystems where the mtime is only to a second granularity,
- # everything that has happened above can be too fast;
- # force an mtime on the source that is guaranteed to be different
- # than the original mtime.
- loader.path_mtime = self.fake_mtime(loader.path_mtime)
module = loader.load_module('_temp')
self.assertTrue('testing_var' in module.__dict__,
"'testing_var' not in "
@@ -215,10 +210,17 @@ class BadBytecodeTest(unittest.TestCase):
del_source=del_source)
test('_temp', mapping, bc_path)
+ def _test_partial_size(self, test, *, del_source=False):
+ with source_util.create_modules('_temp') as mapping:
+ bc_path = self.manipulate_bytecode('_temp', mapping,
+ lambda bc: bc[:11],
+ del_source=del_source)
+ test('_temp', mapping, bc_path)
+
def _test_no_marshal(self, *, del_source=False):
with source_util.create_modules('_temp') as mapping:
bc_path = self.manipulate_bytecode('_temp', mapping,
- lambda bc: bc[:8],
+ lambda bc: bc[:12],
del_source=del_source)
file_path = mapping['_temp'] if not del_source else bc_path
with self.assertRaises(EOFError):
@@ -227,7 +229,7 @@ class BadBytecodeTest(unittest.TestCase):
def _test_non_code_marshal(self, *, del_source=False):
with source_util.create_modules('_temp') as mapping:
bytecode_path = self.manipulate_bytecode('_temp', mapping,
- lambda bc: bc[:8] + marshal.dumps(b'abcd'),
+ lambda bc: bc[:12] + marshal.dumps(b'abcd'),
del_source=del_source)
file_path = mapping['_temp'] if not del_source else bytecode_path
with self.assertRaises(ImportError):
@@ -236,7 +238,7 @@ class BadBytecodeTest(unittest.TestCase):
def _test_bad_marshal(self, *, del_source=False):
with source_util.create_modules('_temp') as mapping:
bytecode_path = self.manipulate_bytecode('_temp', mapping,
- lambda bc: bc[:8] + b'<test>',
+ lambda bc: bc[:12] + b'<test>',
del_source=del_source)
file_path = mapping['_temp'] if not del_source else bytecode_path
with self.assertRaises(EOFError):
@@ -260,7 +262,7 @@ class SourceLoaderBadBytecodeTest(BadBytecodeTest):
def test(name, mapping, bytecode_path):
self.import_(mapping[name], name)
with open(bytecode_path, 'rb') as file:
- self.assertGreater(len(file.read()), 8)
+ self.assertGreater(len(file.read()), 12)
self._test_empty_file(test)
@@ -268,7 +270,7 @@ class SourceLoaderBadBytecodeTest(BadBytecodeTest):
def test(name, mapping, bytecode_path):
self.import_(mapping[name], name)
with open(bytecode_path, 'rb') as file:
- self.assertGreater(len(file.read()), 8)
+ self.assertGreater(len(file.read()), 12)
self._test_partial_magic(test)
@@ -279,7 +281,7 @@ class SourceLoaderBadBytecodeTest(BadBytecodeTest):
def test(name, mapping, bytecode_path):
self.import_(mapping[name], name)
with open(bytecode_path, 'rb') as file:
- self.assertGreater(len(file.read()), 8)
+ self.assertGreater(len(file.read()), 12)
self._test_magic_only(test)
@@ -301,11 +303,22 @@ class SourceLoaderBadBytecodeTest(BadBytecodeTest):
def test(name, mapping, bc_path):
self.import_(mapping[name], name)
with open(bc_path, 'rb') as file:
- self.assertGreater(len(file.read()), 8)
+ self.assertGreater(len(file.read()), 12)
self._test_partial_timestamp(test)
@source_util.writes_bytecode_files
+ def test_partial_size(self):
+ # When the size is partial, regenerate the .pyc, else
+ # raise EOFError.
+ def test(name, mapping, bc_path):
+ self.import_(mapping[name], name)
+ with open(bc_path, 'rb') as file:
+ self.assertGreater(len(file.read()), 12)
+
+ self._test_partial_size(test)
+
+ @source_util.writes_bytecode_files
def test_no_marshal(self):
# When there is only the magic number and timestamp, raise EOFError.
self._test_no_marshal()
@@ -400,6 +413,13 @@ class SourcelessLoaderBadBytecodeTest(BadBytecodeTest):
self._test_partial_timestamp(test, del_source=True)
+ def test_partial_size(self):
+ def test(name, mapping, bytecode_path):
+ with self.assertRaises(EOFError):
+ self.import_(bytecode_path, name)
+
+ self._test_partial_size(test, del_source=True)
+
def test_no_marshal(self):
self._test_no_marshal(del_source=True)
diff --git a/Lib/importlib/test/source/test_finder.py b/Lib/importlib/test/source/test_finder.py
index 7b9088d..68e9ae7 100644
--- a/Lib/importlib/test/source/test_finder.py
+++ b/Lib/importlib/test/source/test_finder.py
@@ -143,6 +143,13 @@ class FinderTests(abc.FinderTests):
finally:
os.unlink('mod.py')
+ def test_invalidate_caches(self):
+ # invalidate_caches() should reset the mtime.
+ finder = _bootstrap._FileFinder('', _bootstrap._SourceFinderDetails())
+ finder._path_mtime = 42
+ finder.invalidate_caches()
+ self.assertEqual(finder._path_mtime, -1)
+
def test_main():
from test.support import run_unittest
diff --git a/Lib/importlib/test/test_api.py b/Lib/importlib/test/test_api.py
index a151626..cc147c2 100644
--- a/Lib/importlib/test/test_api.py
+++ b/Lib/importlib/test/test_api.py
@@ -84,6 +84,34 @@ class ImportModuleTests(unittest.TestCase):
importlib.import_module('a.b')
self.assertEqual(b_load_count, 1)
+
+class InvalidateCacheTests(unittest.TestCase):
+
+ def test_method_called(self):
+ # If defined the method should be called.
+ class InvalidatingNullFinder:
+ def __init__(self, *ignored):
+ self.called = False
+ def find_module(self, *args):
+ return None
+ def invalidate_caches(self):
+ self.called = True
+
+ key = 'gobledeegook'
+ ins = InvalidatingNullFinder()
+ sys.path_importer_cache[key] = ins
+ self.addCleanup(lambda: sys.path_importer_cache.__delitem__(key))
+ importlib.invalidate_caches()
+ self.assertTrue(ins.called)
+
+ def test_method_lacking(self):
+ # There should be no issues if the method is not defined.
+ key = 'gobbledeegook'
+ sys.path_importer_cache[key] = imp.NullImporter('abc')
+ self.addCleanup(lambda: sys.path_importer_cache.__delitem__(key))
+ importlib.invalidate_caches() # Shouldn't trigger an exception.
+
+
def test_main():
from test.support import run_unittest
run_unittest(ImportModuleTests)
diff --git a/Lib/importlib/test/test_util.py b/Lib/importlib/test/test_util.py
index 602447f..c7cdad1 100644
--- a/Lib/importlib/test/test_util.py
+++ b/Lib/importlib/test/test_util.py
@@ -59,6 +59,11 @@ class ModuleForLoaderTests(unittest.TestCase):
self.raise_exception(name)
self.assertIs(module, sys.modules[name])
+ def test_decorator_attrs(self):
+ def fxn(self, module): pass
+ wrapped = util.module_for_loader(fxn)
+ self.assertEqual(wrapped.__name__, fxn.__name__)
+ self.assertEqual(wrapped.__qualname__, fxn.__qualname__)
class SetPackageTests(unittest.TestCase):
@@ -108,6 +113,11 @@ class SetPackageTests(unittest.TestCase):
module.__package__ = value
self.verify(module, value)
+ def test_decorator_attrs(self):
+ def fxn(module): pass
+ wrapped = util.set_package(fxn)
+ self.assertEqual(wrapped.__name__, fxn.__name__)
+ self.assertEqual(wrapped.__qualname__, fxn.__qualname__)
def test_main():
from test import support
diff --git a/Lib/inspect.py b/Lib/inspect.py
index 2031755..fc9f612 100644
--- a/Lib/inspect.py
+++ b/Lib/inspect.py
@@ -33,7 +33,6 @@ import sys
import os
import types
import itertools
-import string
import re
import imp
import tokenize
@@ -931,6 +930,43 @@ def formatargvalues(args, varargs, varkw, locals,
specs.append(formatvarkw(varkw) + formatvalue(locals[varkw]))
return '(' + ', '.join(specs) + ')'
+def _missing_arguments(f_name, argnames, pos, values):
+ names = [repr(name) for name in argnames if name not in values]
+ missing = len(names)
+ if missing == 1:
+ s = names[0]
+ elif missing == 2:
+ s = "{} and {}".format(*names)
+ else:
+ tail = ", {} and {}".format(names[-2:])
+ del names[-2:]
+ s = ", ".join(names) + tail
+ raise TypeError("%s() missing %i required %s argument%s: %s" %
+ (f_name, missing,
+ "positional" if pos else "keyword-only",
+ "" if missing == 1 else "s", s))
+
+def _too_many(f_name, args, kwonly, varargs, defcount, given, values):
+ atleast = len(args) - defcount
+ kwonly_given = len([arg for arg in kwonly if arg in values])
+ if varargs:
+ plural = atleast != 1
+ sig = "at least %d" % (atleast,)
+ elif defcount:
+ plural = True
+ sig = "from %d to %d" % (atleast, len(args))
+ else:
+ plural = len(args) != 1
+ sig = str(len(args))
+ kwonly_sig = ""
+ if kwonly_given:
+ msg = " positional argument%s (and %d keyword-only argument%s)"
+ kwonly_sig = (msg % ("s" if given != 1 else "", kwonly_given,
+ "s" if kwonly_given != 1 else ""))
+ raise TypeError("%s() takes %s positional argument%s but %d%s %s given" %
+ (f_name, sig, "s" if plural else "", given, kwonly_sig,
+ "was" if given == 1 and not kwonly_given else "were"))
+
def getcallargs(func, *positional, **named):
"""Get the mapping of arguments to values.
@@ -942,64 +978,53 @@ def getcallargs(func, *positional, **named):
f_name = func.__name__
arg2value = {}
+
if ismethod(func) and func.__self__ is not None:
# implicit 'self' (or 'cls' for classmethods) argument
positional = (func.__self__,) + positional
num_pos = len(positional)
- num_total = num_pos + len(named)
num_args = len(args)
num_defaults = len(defaults) if defaults else 0
- for arg, value in zip(args, positional):
- arg2value[arg] = value
+
+ n = min(num_pos, num_args)
+ for i in range(n):
+ arg2value[args[i]] = positional[i]
if varargs:
- if num_pos > num_args:
- arg2value[varargs] = positional[-(num_pos-num_args):]
- else:
- arg2value[varargs] = ()
- elif 0 < num_args < num_pos:
- raise TypeError('%s() takes %s %d positional %s (%d given)' % (
- f_name, 'at most' if defaults else 'exactly', num_args,
- 'arguments' if num_args > 1 else 'argument', num_total))
- elif num_args == 0 and num_total:
- if varkw or kwonlyargs:
- if num_pos:
- # XXX: We should use num_pos, but Python also uses num_total:
- raise TypeError('%s() takes exactly 0 positional arguments '
- '(%d given)' % (f_name, num_total))
- else:
- raise TypeError('%s() takes no arguments (%d given)' %
- (f_name, num_total))
-
- for arg in itertools.chain(args, kwonlyargs):
- if arg in named:
- if arg in arg2value:
- raise TypeError("%s() got multiple values for keyword "
- "argument '%s'" % (f_name, arg))
- else:
- arg2value[arg] = named.pop(arg)
- for kwonlyarg in kwonlyargs:
- if kwonlyarg not in arg2value:
- try:
- arg2value[kwonlyarg] = kwonlydefaults[kwonlyarg]
- except KeyError:
- raise TypeError("%s() needs keyword-only argument %s" %
- (f_name, kwonlyarg))
- if defaults: # fill in any missing values with the defaults
- for arg, value in zip(args[-num_defaults:], defaults):
- if arg not in arg2value:
- arg2value[arg] = value
+ arg2value[varargs] = tuple(positional[n:])
+ possible_kwargs = set(args + kwonlyargs)
if varkw:
- arg2value[varkw] = named
- elif named:
- unexpected = next(iter(named))
- raise TypeError("%s() got an unexpected keyword argument '%s'" %
- (f_name, unexpected))
- unassigned = num_args - len([arg for arg in args if arg in arg2value])
- if unassigned:
- num_required = num_args - num_defaults
- raise TypeError('%s() takes %s %d %s (%d given)' % (
- f_name, 'at least' if defaults else 'exactly', num_required,
- 'arguments' if num_required > 1 else 'argument', num_total))
+ arg2value[varkw] = {}
+ for kw, value in named.items():
+ if kw not in possible_kwargs:
+ if not varkw:
+ raise TypeError("%s() got an unexpected keyword argument %r" %
+ (f_name, kw))
+ arg2value[varkw][kw] = value
+ continue
+ if kw in arg2value:
+ raise TypeError("%s() got multiple values for argument %r" %
+ (f_name, kw))
+ arg2value[kw] = value
+ if num_pos > num_args and not varargs:
+ _too_many(f_name, args, kwonlyargs, varargs, num_defaults,
+ num_pos, arg2value)
+ if num_pos < num_args:
+ req = args[:num_args - num_defaults]
+ for arg in req:
+ if arg not in arg2value:
+ _missing_arguments(f_name, req, True, arg2value)
+ for i, arg in enumerate(args[num_args - num_defaults:]):
+ if arg not in arg2value:
+ arg2value[arg] = defaults[i]
+ missing = 0
+ for kwarg in kwonlyargs:
+ if kwarg not in arg2value:
+ if kwonlydefaults and kwarg in kwonlydefaults:
+ arg2value[kwarg] = kwonlydefaults[kwarg]
+ else:
+ missing += 1
+ if missing:
+ _missing_arguments(f_name, kwonlyargs, False, arg2value)
return arg2value
# -------------------------------------------------- stack frame extraction
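The rewrite reports missing and excess arguments the way the interpreter
itself now does. A sketch of the resulting TypeErrors (wording per the
helpers above, abbreviated in the comments):

    import inspect

    def f(a, b, *, c): pass

    # Each call below raises TypeError with the new wording:
    inspect.getcallargs(f)           # missing 2 required positional
                                     #   arguments: 'a' and 'b'
    inspect.getcallargs(f, 1, 2, 3)  # takes 2 positional arguments but
                                     #   3 were given
    inspect.getcallargs(f, 1, 2)     # missing 1 required keyword-only
                                     #   argument: 'c'
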
diff --git a/Lib/json/decoder.py b/Lib/json/decoder.py
index 3174e31..e7c0539 100644
--- a/Lib/json/decoder.py
+++ b/Lib/json/decoder.py
@@ -121,8 +121,7 @@ def py_scanstring(s, end, strict=True,
msg = "Invalid \\uXXXX escape"
raise ValueError(errmsg(msg, s, end))
uni = int(esc, 16)
- # Check for surrogate pair on UCS-4 systems
- if 0xd800 <= uni <= 0xdbff and sys.maxunicode > 65535:
+ if 0xd800 <= uni <= 0xdbff:
msg = "Invalid \\uXXXX\\uXXXX surrogate pair"
if not s[end + 5:end + 7] == '\\u':
raise ValueError(errmsg(msg, s, end))
diff --git a/Lib/lib2to3/__main__.py b/Lib/lib2to3/__main__.py
new file mode 100644
index 0000000..80688ba
--- /dev/null
+++ b/Lib/lib2to3/__main__.py
@@ -0,0 +1,4 @@
+import sys
+from .main import main
+
+sys.exit(main("lib2to3.fixes"))
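This makes the fixer CLI runnable as a module; a typical invocation would be
(file name invented):

    python -m lib2to3 -f print -w example.py
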
diff --git a/Lib/lib2to3/fixer_base.py b/Lib/lib2to3/fixer_base.py
index afc0467..b176056 100644
--- a/Lib/lib2to3/fixer_base.py
+++ b/Lib/lib2to3/fixer_base.py
@@ -27,7 +27,6 @@ class BaseFix(object):
pattern_tree = None # Tree representation of the pattern
options = None # Options object passed to initializer
filename = None # The filename (set by set_filename)
- logger = None # A logger (set by set_filename)
numbers = itertools.count(1) # For new_name()
used_names = set() # A set of all used NAMEs
order = "post" # Does the fixer prefer pre- or post-order traversal
@@ -70,12 +69,11 @@ class BaseFix(object):
with_tree=True)
def set_filename(self, filename):
- """Set the filename, and a logger derived from it.
+ """Set the filename.
The main refactoring tool should call this.
"""
self.filename = filename
- self.logger = logging.getLogger(filename)
def match(self, node):
"""Returns match for a given parse tree node.
diff --git a/Lib/lib2to3/pytree.py b/Lib/lib2to3/pytree.py
index fa4942f3..17cbf0a 100644
--- a/Lib/lib2to3/pytree.py
+++ b/Lib/lib2to3/pytree.py
@@ -109,26 +109,6 @@ class Base(object):
"""
raise NotImplementedError
- def set_prefix(self, prefix):
- """
- Set the prefix for the node (see Leaf class).
-
- DEPRECATED; use the prefix property directly.
- """
- warnings.warn("set_prefix() is deprecated; use the prefix property",
- DeprecationWarning, stacklevel=2)
- self.prefix = prefix
-
- def get_prefix(self):
- """
- Return the prefix for the node (see Leaf class).
-
- DEPRECATED; use the prefix property directly.
- """
- warnings.warn("get_prefix() is deprecated; use the prefix property",
- DeprecationWarning, stacklevel=2)
- return self.prefix
-
def replace(self, new):
"""Replace this node with a new one in the parent."""
assert self.parent is not None, str(self)
diff --git a/Lib/lib2to3/refactor.py b/Lib/lib2to3/refactor.py
index 7cd034a..1e85810 100644
--- a/Lib/lib2to3/refactor.py
+++ b/Lib/lib2to3/refactor.py
@@ -566,7 +566,7 @@ class RefactoringTool(object):
block_lineno = None
indent = None
lineno = 0
- for line in input.splitlines(True):
+ for line in input.splitlines(keepends=True):
lineno += 1
if line.lstrip().startswith(self.PS1):
if block is not None:
@@ -610,7 +610,7 @@ class RefactoringTool(object):
filename, lineno, err.__class__.__name__, err)
return block
if self.refactor_tree(tree, filename):
- new = str(tree).splitlines(True)
+ new = str(tree).splitlines(keepends=True)
# Undo the adjustment of the line numbers in wrap_toks() below.
clipped, new = new[:lineno-1], new[lineno-1:]
assert clipped == ["\n"] * (lineno-1), clipped
diff --git a/Lib/lib2to3/tests/test_pytree.py b/Lib/lib2to3/tests/test_pytree.py
index ac7d900..a2ab1f3 100644
--- a/Lib/lib2to3/tests/test_pytree.py
+++ b/Lib/lib2to3/tests/test_pytree.py
@@ -31,23 +31,6 @@ class TestNodes(support.TestCase):
"""Unit tests for nodes (Base, Leaf, Node)."""
- if sys.version_info >= (2,6):
- # warnings.catch_warnings is new in 2.6.
- def test_deprecated_prefix_methods(self):
- l = pytree.Leaf(100, "foo")
- with warnings.catch_warnings(record=True) as w:
- warnings.simplefilter("always", DeprecationWarning)
- self.assertEqual(l.get_prefix(), "")
- l.set_prefix("hi")
- self.assertEqual(l.prefix, "hi")
- self.assertEqual(len(w), 2)
- for warning in w:
- self.assertTrue(warning.category is DeprecationWarning)
- self.assertEqual(str(w[0].message), "get_prefix() is deprecated; " \
- "use the prefix property")
- self.assertEqual(str(w[1].message), "set_prefix() is deprecated; " \
- "use the prefix property")
-
def test_instantiate_base(self):
if __debug__:
# Test that instantiating Base() raises an AssertionError
diff --git a/Lib/logging/__init__.py b/Lib/logging/__init__.py
index 4191b22..e79018f 100644
--- a/Lib/logging/__init__.py
+++ b/Lib/logging/__init__.py
@@ -36,15 +36,9 @@ __all__ = ['BASIC_FORMAT', 'BufferingFormatter', 'CRITICAL', 'DEBUG', 'ERROR',
'getLogRecordFactory', 'setLogRecordFactory', 'lastResort']
try:
- import codecs
-except ImportError:
- codecs = None
-
-try:
- import _thread as thread
import threading
-except ImportError:
- thread = None
+except ImportError: #pragma: no cover
+ threading = None
__author__ = "Vinay Sajip <vinay_sajip@red-dove.com>"
__status__ = "production"
@@ -65,16 +59,16 @@ else:
_srcfile = __file__
_srcfile = os.path.normcase(_srcfile)
-# next bit filched from 1.5.2's inspect.py
-def currentframe():
- """Return the frame object for the caller's stack frame."""
- try:
- raise Exception
- except:
- return sys.exc_info()[2].tb_frame.f_back
-if hasattr(sys, '_getframe'): currentframe = lambda: sys._getframe(3)
-# done filching
+if hasattr(sys, '_getframe'):
+ currentframe = lambda: sys._getframe(3)
+else: #pragma: no cover
+ def currentframe():
+ """Return the frame object for the caller's stack frame."""
+ try:
+ raise Exception
+ except:
+ return sys.exc_info()[2].tb_frame.f_back
# _srcfile is only used in conjunction with sys._getframe().
# To provide compatibility with older versions of Python, set _srcfile
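A sketch of the two strategies (illustrative only): the fast path asks CPython for the caller's frame directly, while the portable fallback raises an exception and walks back from the traceback's frame.

import sys

frame = sys._getframe(0)                  # fast path; depth 0 == this frame
print(frame.f_code.co_name)

def currentframe_fallback():
    # Portable fallback: raise, then walk back from the traceback's frame.
    try:
        raise Exception
    except Exception:
        return sys.exc_info()[2].tb_frame.f_back

print(currentframe_fallback().f_code.co_name)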
@@ -92,22 +86,22 @@ _startTime = time.time()
#raiseExceptions is used to see if exceptions during handling should be
#propagated
#
-raiseExceptions = 1
+raiseExceptions = True
#
# If you don't want threading information in the log, set this to zero
#
-logThreads = 1
+logThreads = True
#
# If you don't want multiprocessing information in the log, set this to zero
#
-logMultiprocessing = 1
+logMultiprocessing = True
#
# If you don't want process information in the log, set this to zero
#
-logProcesses = 1
+logProcesses = True
#---------------------------------------------------------------------------
# Level related stuff
@@ -197,9 +191,9 @@ def _checkLevel(level):
#the lock would already have been acquired - so we need an RLock.
#The same argument applies to Loggers and Manager.loggerDict.
#
-if thread:
+if threading:
_lock = threading.RLock()
-else:
+else: #pragma: no cover
_lock = None
@@ -252,7 +246,7 @@ class LogRecord(object):
# during formatting, we test to see if the arg is present using
# 'if self.args:'. If the event being logged is e.g. 'Value is %d'
# and if the passed arg fails 'if self.args:' then no formatting
- # is done. For example, logger.warn('Value is %d', 0) would log
+ # is done. For example, logger.warning('Value is %d', 0) would log
# 'Value is %d' instead of 'Value is 0'.
# For the use case of passing a dictionary, this should not be a
# problem.
@@ -276,13 +270,13 @@ class LogRecord(object):
self.created = ct
self.msecs = (ct - int(ct)) * 1000
self.relativeCreated = (self.created - _startTime) * 1000
- if logThreads and thread:
- self.thread = thread.get_ident()
+ if logThreads and threading:
+ self.thread = threading.get_ident()
self.threadName = threading.current_thread().name
- else:
+ else: # pragma: no cover
self.thread = None
self.threadName = None
- if not logMultiprocessing:
+ if not logMultiprocessing: # pragma: no cover
self.processName = None
else:
self.processName = 'MainProcess'
@@ -294,7 +288,7 @@ class LogRecord(object):
# for an example
try:
self.processName = mp.current_process().name
- except Exception:
+ except Exception: #pragma: no cover
pass
if logProcesses and hasattr(os, 'getpid'):
self.process = os.getpid()
@@ -466,6 +460,9 @@ class Formatter(object):
self._fmt = self._style._fmt
self.datefmt = datefmt
+ default_time_format = '%Y-%m-%d %H:%M:%S'
+ default_msec_format = '%s,%03d'
+
def formatTime(self, record, datefmt=None):
"""
Return the creation time of the specified LogRecord as formatted text.
@@ -488,8 +485,8 @@ class Formatter(object):
if datefmt:
s = time.strftime(datefmt, ct)
else:
- t = time.strftime("%Y-%m-%d %H:%M:%S", ct)
- s = "%s,%03d" % (t, record.msecs) # the use of % here is internal
+ t = time.strftime(self.default_time_format, ct)
+ s = self.default_msec_format % (t, record.msecs)
return s
def formatException(self, ei):
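A minimal sketch, assuming the patched Formatter above: the new class attributes let a subclass restyle asctime without reimplementing formatTime() (class name hypothetical).

import logging

class ISOFormatter(logging.Formatter):
    default_time_format = '%Y-%m-%dT%H:%M:%S'   # ISO-style separator
    default_msec_format = '%s.%03d'             # dot instead of comma

handler = logging.StreamHandler()
handler.setFormatter(ISOFormatter('%(asctime)s %(message)s'))
logging.getLogger().addHandler(handler)
logging.getLogger().warning("hello")   # e.g. 2011-01-01T12:00:00.123 hello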
@@ -642,11 +639,11 @@ class Filter(object):
yes. If deemed appropriate, the record may be modified in-place.
"""
if self.nlen == 0:
- return 1
+ return True
elif self.name == record.name:
- return 1
+ return True
elif record.name.find(self.name, 0, self.nlen) != 0:
- return 0
+ return False
return (record.name[self.nlen] == ".")
class Filterer(object):
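The hierarchy test above is easy to misread, so a small worked example: a Filter named "a.b" passes that logger and its children, but not the lookalike sibling "a.bc".

import logging

f = logging.Filter("a.b")
rec = lambda name: logging.LogRecord(name, logging.INFO, __file__, 1,
                                     "msg", None, None)
assert f.filter(rec("a.b"))
assert f.filter(rec("a.b.c"))
assert not f.filter(rec("a.bc"))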
@@ -686,14 +683,14 @@ class Filterer(object):
Allow filters to be just callables.
"""
- rv = 1
+ rv = True
for f in self.filters:
if hasattr(f, 'filter'):
result = f.filter(record)
else:
result = f(record) # assume callable - will raise if not
if not result:
- rv = 0
+ rv = False
break
return rv
@@ -771,9 +768,9 @@ class Handler(Filterer):
"""
Acquire a thread lock for serializing access to the underlying I/O.
"""
- if thread:
+ if threading:
self.lock = threading.RLock()
- else:
+ else: #pragma: no cover
self.lock = None
def acquire(self):
@@ -792,7 +789,7 @@ class Handler(Filterer):
def setLevel(self, level):
"""
- Set the logging level of this handler.
+ Set the logging level of this handler. level must be an int or a str.
"""
self.level = _checkLevel(level)
@@ -888,7 +885,7 @@ class Handler(Filterer):
None, sys.stderr)
sys.stderr.write('Logged from file %s, line %s\n' % (
record.filename, record.lineno))
- except IOError:
+ except IOError: #pragma: no cover
pass # see issue 5971
finally:
del ei
@@ -941,7 +938,7 @@ class StreamHandler(Handler):
stream.write(msg)
stream.write(self.terminator)
self.flush()
- except (KeyboardInterrupt, SystemExit):
+ except (KeyboardInterrupt, SystemExit): #pragma: no cover
raise
except:
self.handleError(record)
@@ -950,14 +947,12 @@ class FileHandler(StreamHandler):
"""
A handler class which writes formatted logging records to disk files.
"""
- def __init__(self, filename, mode='a', encoding=None, delay=0):
+ def __init__(self, filename, mode='a', encoding=None, delay=False):
"""
Open the specified file and use it as the stream for logging.
"""
#keep the absolute path, otherwise derived classes which use this
#may come a cropper when the current directory changes
- if codecs is None:
- encoding = None
self.baseFilename = os.path.abspath(filename)
self.mode = mode
self.encoding = encoding
@@ -989,11 +984,7 @@ class FileHandler(StreamHandler):
Open the current base file with the (original) mode and encoding.
Return the resulting stream.
"""
- if self.encoding is None:
- stream = open(self.baseFilename, self.mode)
- else:
- stream = codecs.open(self.baseFilename, self.mode, self.encoding)
- return stream
+ return open(self.baseFilename, self.mode, encoding=self.encoding)
def emit(self, record):
"""
@@ -1205,13 +1196,13 @@ class Logger(Filterer):
self.name = name
self.level = _checkLevel(level)
self.parent = None
- self.propagate = 1
+ self.propagate = True
self.handlers = []
- self.disabled = 0
+ self.disabled = False
def setLevel(self, level):
"""
- Set the logging level of this logger.
+ Set the logging level of this logger. level must be an int or a str.
"""
self.level = _checkLevel(level)
@@ -1251,7 +1242,10 @@ class Logger(Filterer):
if self.isEnabledFor(WARNING):
self._log(WARNING, msg, args, **kwargs)
- warn = warning
+ def warn(self, msg, *args, **kwargs):
+ warnings.warn("The 'warn' method is deprecated, "
+ "use 'warning' instead", DeprecationWarning, 2)
+ self.warning(msg, *args, **kwargs)
def error(self, msg, *args, **kwargs):
"""
@@ -1360,9 +1354,9 @@ class Logger(Filterer):
#IronPython can use logging.
try:
fn, lno, func, sinfo = self.findCaller(stack_info)
- except ValueError:
+ except ValueError: # pragma: no cover
fn, lno, func = "(unknown file)", 0, "(unknown function)"
- else:
+ else: # pragma: no cover
fn, lno, func = "(unknown file)", 0, "(unknown function)"
if exc_info:
if not isinstance(exc_info, tuple):
@@ -1474,7 +1468,7 @@ class Logger(Filterer):
Is this logger enabled for level 'level'?
"""
if self.manager.disable >= level:
- return 0
+ return False
return level >= self.getEffectiveLevel()
def getChild(self, suffix):
@@ -1564,7 +1558,10 @@ class LoggerAdapter(object):
"""
self.log(WARNING, msg, *args, **kwargs)
- warn = warning
+ def warn(self, msg, *args, **kwargs):
+ warnings.warn("The 'warn' method is deprecated, "
+ "use 'warning' instead", DeprecationWarning, 2)
+ self.warning(msg, *args, **kwargs)
def error(self, msg, *args, **kwargs):
"""
@@ -1576,7 +1573,7 @@ class LoggerAdapter(object):
"""
Delegate an exception call to the underlying logger.
"""
- kwargs["exc_info"] = 1
+ kwargs["exc_info"] = True
self.log(ERROR, msg, *args, **kwargs)
def critical(self, msg, *args, **kwargs):
@@ -1659,6 +1656,10 @@ def basicConfig(**kwargs):
stream Use the specified stream to initialize the StreamHandler. Note
that this argument is incompatible with 'filename' - if both
are present, 'stream' is ignored.
+ handlers If specified, this should be an iterable of already created
+ handlers, which will be added to the root handler. Any handler
+ in the list which does not have a formatter assigned will be
+ assigned the formatter created in this function.
Note that you could specify a stream created using open(filename, mode)
rather than passing the filename and mode in. However, it should be
@@ -1666,27 +1667,47 @@ def basicConfig(**kwargs):
using sys.stdout or sys.stderr), whereas FileHandler closes its stream
when the handler is closed.
- .. versionchanged: 3.2
+ .. versionchanged:: 3.2
Added the ``style`` parameter.
+
+ .. versionchanged:: 3.3
+ Added the ``handlers`` parameter. A ``ValueError`` is now raised for
+ incompatible arguments (e.g. ``handlers`` specified together with
+ ``filename``/``filemode``, or ``filename``/``filemode`` specified
+ together with ``stream``, or ``handlers`` specified together with
+ ``stream``).
"""
# Add thread safety in case someone mistakenly calls
# basicConfig() from multiple threads
_acquireLock()
try:
if len(root.handlers) == 0:
- filename = kwargs.get("filename")
- if filename:
- mode = kwargs.get("filemode", 'a')
- hdlr = FileHandler(filename, mode)
+ handlers = kwargs.get("handlers")
+ if handlers is None:
+ if "stream" in kwargs and "filename" in kwargs:
+ raise ValueError("'stream' and 'filename' should not be "
+ "specified together")
else:
- stream = kwargs.get("stream")
- hdlr = StreamHandler(stream)
+ if "stream" in kwargs or "filename" in kwargs:
+ raise ValueError("'stream' or 'filename' should not be "
+ "specified together with 'handlers'")
+ if handlers is None:
+ filename = kwargs.get("filename")
+ if filename:
+ mode = kwargs.get("filemode", 'a')
+ h = FileHandler(filename, mode)
+ else:
+ stream = kwargs.get("stream")
+ h = StreamHandler(stream)
+ handlers = [h]
fs = kwargs.get("format", BASIC_FORMAT)
dfs = kwargs.get("datefmt", None)
style = kwargs.get("style", '%')
fmt = Formatter(fs, dfs, style)
- hdlr.setFormatter(fmt)
- root.addHandler(hdlr)
+ for h in handlers:
+ if h.formatter is None:
+ h.setFormatter(fmt)
+ root.addHandler(h)
level = kwargs.get("level")
if level is not None:
root.setLevel(level)
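A usage sketch of the new handlers argument (filename illustrative): handlers without a formatter pick up the one basicConfig builds, while pre-configured handlers keep their own.

import logging, sys

console = logging.StreamHandler(sys.stderr)   # gets the default formatter
logfile = logging.FileHandler("app.log")
logfile.setFormatter(logging.Formatter("%(asctime)s %(message)s"))
logging.basicConfig(handlers=[console, logfile], level=logging.INFO)
logging.info("configured via handlers=")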
@@ -1750,7 +1771,10 @@ def warning(msg, *args, **kwargs):
basicConfig()
root.warning(msg, *args, **kwargs)
-warn = warning
+def warn(msg, *args, **kwargs):
+ warnings.warn("The 'warn' function is deprecated, "
+ "use 'warning' instead", DeprecationWarning, 2)
+ warning(msg, *args, **kwargs)
def info(msg, *args, **kwargs):
"""
@@ -1835,10 +1859,10 @@ class NullHandler(Handler):
package.
"""
def handle(self, record):
- pass
+ """Stub."""
def emit(self, record):
- pass
+ """Stub."""
def createLock(self):
self.lock = None
diff --git a/Lib/logging/config.py b/Lib/logging/config.py
index 373da2b..5ef5c91 100644
--- a/Lib/logging/config.py
+++ b/Lib/logging/config.py
@@ -24,13 +24,13 @@ Copyright (C) 2001-2010 Vinay Sajip. All Rights Reserved.
To use, simply 'import logging' and log away!
"""
-import sys, logging, logging.handlers, socket, struct, os, traceback, re
-import types, io
+import sys, logging, logging.handlers, socket, struct, traceback, re
+import io
try:
import _thread as thread
import threading
-except ImportError:
+except ImportError: #pragma: no cover
thread = None
from socketserver import ThreadingTCPServer, StreamRequestHandler
@@ -98,9 +98,6 @@ def _resolve(name):
def _strip_spaces(alist):
return map(lambda x: x.strip(), alist)
-def _encoded(s):
- return s if isinstance(s, str) else s.encode('utf-8')
-
def _create_formatters(cp):
"""Create and return formatters"""
flist = cp["formatters"]["keys"]
@@ -215,7 +212,7 @@ def _install_loggers(cp, handlers, disable_existing):
#avoid disabling child loggers of explicitly
#named loggers. With a sorted list it is easier
#to find the child loggers.
- existing.sort(key=_encoded)
+ existing.sort()
#We'll keep the list of existing loggers
#which are children of named loggers here...
child_loggers = []
@@ -588,7 +585,7 @@ class DictConfigurator(BaseConfigurator):
#avoid disabling child loggers of explicitly
#named loggers. With a sorted list it is easier
#to find the child loggers.
- existing.sort(key=_encoded)
+ existing.sort()
#We'll keep the list of existing loggers
#which are children of named loggers here...
child_loggers = []
@@ -786,7 +783,7 @@ def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
and which you can join() when appropriate. To stop the server, call
stopListening().
"""
- if not thread:
+ if not thread: #pragma: no cover
raise NotImplementedError("listen() needs threading to work")
class ConfigStreamHandler(StreamRequestHandler):
@@ -804,7 +801,6 @@ def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
struct.pack(">L", n), followed by the config file.
Uses fileConfig() to do the grunt work.
"""
- import tempfile
try:
conn = self.connection
chunk = conn.recv(4)
@@ -825,7 +821,7 @@ def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
file = io.StringIO(chunk)
try:
fileConfig(file)
- except (KeyboardInterrupt, SystemExit):
+ except (KeyboardInterrupt, SystemExit): #pragma: no cover
raise
except:
traceback.print_exc()
diff --git a/Lib/logging/handlers.py b/Lib/logging/handlers.py
index 73ce031..f8632ce 100644
--- a/Lib/logging/handlers.py
+++ b/Lib/logging/handlers.py
@@ -24,18 +24,14 @@ To use, simply 'import logging.handlers' and log away!
"""
import logging, socket, os, pickle, struct, time, re
+from codecs import BOM_UTF8
from stat import ST_DEV, ST_INO, ST_MTIME
import queue
try:
import threading
-except ImportError:
+except ImportError: #pragma: no cover
threading = None
-try:
- import codecs
-except ImportError:
- codecs = None
-
#
# Some constants...
#
@@ -55,15 +51,15 @@ class BaseRotatingHandler(logging.FileHandler):
Not meant to be instantiated directly. Instead, use RotatingFileHandler
or TimedRotatingFileHandler.
"""
- def __init__(self, filename, mode, encoding=None, delay=0):
+ def __init__(self, filename, mode, encoding=None, delay=False):
"""
Use the specified filename for streamed logging
"""
- if codecs is None:
- encoding = None
logging.FileHandler.__init__(self, filename, mode, encoding, delay)
self.mode = mode
self.encoding = encoding
+ self.namer = None
+ self.rotator = None
def emit(self, record):
"""
@@ -76,17 +72,55 @@ class BaseRotatingHandler(logging.FileHandler):
if self.shouldRollover(record):
self.doRollover()
logging.FileHandler.emit(self, record)
- except (KeyboardInterrupt, SystemExit):
+ except (KeyboardInterrupt, SystemExit): #pragma: no cover
raise
except:
self.handleError(record)
+ def rotation_filename(self, default_name):
+ """
+ Modify the filename of a log file when rotating.
+
+ This is provided so that a custom filename can be used.
+
+ The default implementation calls the 'namer' attribute of the
+ handler, if it's callable, passing the default name to
+ it. If the attribute isn't callable (the default is None), the name
+ is returned unchanged.
+
+ :param default_name: The default name for the log file.
+ """
+ if not callable(self.namer):
+ result = default_name
+ else:
+ result = self.namer(default_name)
+ return result
+
+ def rotate(self, source, dest):
+ """
+ When rotating, rotate the current log.
+
+ The default implementation calls the 'rotator' attribute of the
+ handler, if it's callable, passing the source and dest arguments to
+ it. If the attribute isn't callable (the default is None), the source
+ is simply renamed to the destination.
+
+ :param source: The source filename. This is normally the base
+ filename, e.g. 'test.log'
+ :param dest: The destination filename. This is normally
+ what the source is rotated to, e.g. 'test.log.1'.
+ """
+ if not callable(self.rotator):
+ os.rename(source, dest)
+ else:
+ self.rotator(source, dest)
+
class RotatingFileHandler(BaseRotatingHandler):
"""
Handler for logging to a set of files, which switches from one file
to the next when the current file reaches a certain size.
"""
- def __init__(self, filename, mode='a', maxBytes=0, backupCount=0, encoding=None, delay=0):
+ def __init__(self, filename, mode='a', maxBytes=0, backupCount=0, encoding=None, delay=False):
"""
Open the specified file and use it as the stream for logging.
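A sketch of the new hooks under stated assumptions (filenames illustrative): compress rotated logs with gzip and add a .gz suffix. For TimedRotatingFileHandler, the extMatch changes further down exist precisely so that such namer-added suffixes still match when old files are cleaned up.

import gzip, os
import logging.handlers

def gz_namer(default_name):
    # Called via rotation_filename(); tags rotated files with .gz
    return default_name + ".gz"

def gz_rotator(source, dest):
    # Called via rotate(); compresses instead of a plain os.rename()
    with open(source, "rb") as sf, gzip.open(dest, "wb") as df:
        df.write(sf.read())
    os.remove(source)

handler = logging.handlers.RotatingFileHandler("app.log",
                                               maxBytes=1 << 20,
                                               backupCount=5)
handler.namer = gz_namer
handler.rotator = gz_rotator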
@@ -127,16 +161,17 @@ class RotatingFileHandler(BaseRotatingHandler):
self.stream = None
if self.backupCount > 0:
for i in range(self.backupCount - 1, 0, -1):
- sfn = "%s.%d" % (self.baseFilename, i)
- dfn = "%s.%d" % (self.baseFilename, i + 1)
+ sfn = self.rotation_filename("%s.%d" % (self.baseFilename, i))
+ dfn = self.rotation_filename("%s.%d" % (self.baseFilename,
+ i + 1))
if os.path.exists(sfn):
if os.path.exists(dfn):
os.remove(dfn)
os.rename(sfn, dfn)
- dfn = self.baseFilename + ".1"
+ dfn = self.rotation_filename(self.baseFilename + ".1")
if os.path.exists(dfn):
os.remove(dfn)
- os.rename(self.baseFilename, dfn)
+ self.rotate(self.baseFilename, dfn)
self.mode = 'w'
self.stream = self._open()
@@ -184,19 +219,19 @@ class TimedRotatingFileHandler(BaseRotatingHandler):
if self.when == 'S':
self.interval = 1 # one second
self.suffix = "%Y-%m-%d_%H-%M-%S"
- self.extMatch = r"^\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2}$"
+ self.extMatch = r"^\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2}(\.\w+)?$"
elif self.when == 'M':
self.interval = 60 # one minute
self.suffix = "%Y-%m-%d_%H-%M"
- self.extMatch = r"^\d{4}-\d{2}-\d{2}_\d{2}-\d{2}$"
+ self.extMatch = r"^\d{4}-\d{2}-\d{2}_\d{2}-\d{2}(\.\w+)?$"
elif self.when == 'H':
self.interval = 60 * 60 # one hour
self.suffix = "%Y-%m-%d_%H"
- self.extMatch = r"^\d{4}-\d{2}-\d{2}_\d{2}$"
+ self.extMatch = r"^\d{4}-\d{2}-\d{2}_\d{2}(\.\w+)?$"
elif self.when == 'D' or self.when == 'MIDNIGHT':
self.interval = 60 * 60 * 24 # one day
self.suffix = "%Y-%m-%d"
- self.extMatch = r"^\d{4}-\d{2}-\d{2}$"
+ self.extMatch = r"^\d{4}-\d{2}-\d{2}(\.\w+)?$"
elif self.when.startswith('W'):
self.interval = 60 * 60 * 24 * 7 # one week
if len(self.when) != 2:
@@ -205,7 +240,7 @@ class TimedRotatingFileHandler(BaseRotatingHandler):
raise ValueError("Invalid day specified for weekly rollover: %s" % self.when)
self.dayOfWeek = int(self.when[1])
self.suffix = "%Y-%m-%d"
- self.extMatch = r"^\d{4}-\d{2}-\d{2}$"
+ self.extMatch = r"^\d{4}-\d{2}-\d{2}(\.\w+)?$"
else:
raise ValueError("Invalid rollover interval specified: %s" % self.when)
@@ -338,10 +373,11 @@ class TimedRotatingFileHandler(BaseRotatingHandler):
else:
addend = -3600
timeTuple = time.localtime(t + addend)
- dfn = self.baseFilename + "." + time.strftime(self.suffix, timeTuple)
+ dfn = self.rotation_filename(self.baseFilename + "." +
+ time.strftime(self.suffix, timeTuple))
if os.path.exists(dfn):
os.remove(dfn)
- os.rename(self.baseFilename, dfn)
+ self.rotate(self.baseFilename, dfn)
if self.backupCount > 0:
for s in self.getFilesToDelete():
os.remove(s)
@@ -381,7 +417,7 @@ class WatchedFileHandler(logging.FileHandler):
This handler is based on a suggestion and patch by Chad J.
Schroeder.
"""
- def __init__(self, filename, mode='a', encoding=None, delay=0):
+ def __init__(self, filename, mode='a', encoding=None, delay=False):
logging.FileHandler.__init__(self, filename, mode, encoding, delay)
if not os.path.exists(self.baseFilename):
self.dev, self.ino = -1, -1
@@ -399,7 +435,7 @@ class WatchedFileHandler(logging.FileHandler):
"""
if not os.path.exists(self.baseFilename):
stat = None
- changed = 1
+ changed = True
else:
stat = os.stat(self.baseFilename)
changed = (stat[ST_DEV] != self.dev) or (stat[ST_INO] != self.ino)
@@ -429,15 +465,15 @@ class SocketHandler(logging.Handler):
"""
Initializes the handler with a specific host address and port.
- The attribute 'closeOnError' is set to 1 - which means that if
- a socket error occurs, the socket is silently closed and then
- reopened on the next logging call.
+ If the attribute *closeOnError* is True, then when a socket error
+ occurs the socket is silently closed and reopened on the next
+ logging call.
"""
logging.Handler.__init__(self)
self.host = host
self.port = port
self.sock = None
- self.closeOnError = 0
+ self.closeOnError = False
self.retryTime = None
#
# Exponential backoff parameters.
@@ -454,8 +490,12 @@ class SocketHandler(logging.Handler):
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if hasattr(s, 'settimeout'):
s.settimeout(timeout)
- s.connect((self.host, self.port))
- return s
+ try:
+ s.connect((self.host, self.port))
+ return s
+ except socket.error:
+ s.close()
+ raise
def createSocket(self):
"""
@@ -468,7 +508,7 @@ class SocketHandler(logging.Handler):
# is the first time back after a disconnect, or
# we've waited long enough.
if self.retryTime is None:
- attempt = 1
+ attempt = True
else:
attempt = (now >= self.retryTime)
if attempt:
@@ -501,14 +541,14 @@ class SocketHandler(logging.Handler):
try:
if hasattr(self.sock, "sendall"):
self.sock.sendall(s)
- else:
+ else: #pragma: no cover
sentsofar = 0
left = len(s)
while left > 0:
sent = self.sock.send(s[sentsofar:])
sentsofar = sentsofar + sent
left = left - sent
- except socket.error:
+ except socket.error: #pragma: no cover
self.sock.close()
self.sock = None # so we can call createSocket next time
@@ -553,7 +593,7 @@ class SocketHandler(logging.Handler):
try:
s = self.makePickle(record)
self.send(s)
- except (KeyboardInterrupt, SystemExit):
+ except (KeyboardInterrupt, SystemExit): #pragma: no cover
raise
except:
self.handleError(record)
@@ -587,7 +627,7 @@ class DatagramHandler(SocketHandler):
Initializes the handler with a specific host address and port.
"""
SocketHandler.__init__(self, host, port)
- self.closeOnError = 0
+ self.closeOnError = False
def makeSocket(self):
"""
@@ -728,10 +768,10 @@ class SysLogHandler(logging.Handler):
self.socktype = socktype
if isinstance(address, str):
- self.unixsocket = 1
+ self.unixsocket = True
self._connect_unixsocket(address)
else:
- self.unixsocket = 0
+ self.unixsocket = False
self.socket = socket.socket(socket.AF_INET, socktype)
if socktype == socket.SOCK_STREAM:
self.socket.connect(address)
@@ -766,8 +806,7 @@ class SysLogHandler(logging.Handler):
"""
self.acquire()
try:
- if self.unixsocket:
- self.socket.close()
+ self.socket.close()
logging.Handler.close(self)
finally:
self.release()
@@ -782,6 +821,7 @@ class SysLogHandler(logging.Handler):
"""
return self.priority_map.get(levelName, "warning")
+ ident = '' # prepended to all messages
append_nul = True # some old syslog daemons expect a NUL terminator
def emit(self, record):
@@ -792,6 +832,8 @@ class SysLogHandler(logging.Handler):
exception information is present, it is NOT sent to the server.
"""
msg = self.format(record)
+ if self.ident:
+ msg = self.ident + msg
if self.append_nul:
msg += '\000'
"""
@@ -803,9 +845,7 @@ class SysLogHandler(logging.Handler):
prio = prio.encode('utf-8')
# Message is a string. Convert to bytes as required by RFC 5424
msg = msg.encode('utf-8')
- if codecs:
- msg = codecs.BOM_UTF8 + msg
- msg = prio + msg
+ msg = prio + BOM_UTF8 + msg
try:
if self.unixsocket:
try:
@@ -817,7 +857,7 @@ class SysLogHandler(logging.Handler):
self.socket.sendto(msg, self.address)
else:
self.socket.sendall(msg)
- except (KeyboardInterrupt, SystemExit):
+ except (KeyboardInterrupt, SystemExit): #pragma: no cover
raise
except:
self.handleError(record)
@@ -827,7 +867,7 @@ class SMTPHandler(logging.Handler):
A handler class which sends an SMTP email for each logging event.
"""
def __init__(self, mailhost, fromaddr, toaddrs, subject,
- credentials=None, secure=None):
+ credentials=None, secure=None, timeout=1.0):
"""
Initialize the handler.
@@ -841,6 +881,8 @@ class SMTPHandler(logging.Handler):
will be either an empty tuple, or a single-value tuple with the name
of a keyfile, or a 2-value tuple with the names of the keyfile and
certificate file. (This tuple is passed to the `starttls` method).
+ A timeout in seconds can be specified for the SMTP connection (the
+ default is one second).
"""
logging.Handler.__init__(self)
if isinstance(mailhost, tuple):
@@ -857,6 +899,7 @@ class SMTPHandler(logging.Handler):
self.toaddrs = toaddrs
self.subject = subject
self.secure = secure
+ self.timeout = timeout
def getSubject(self, record):
"""
@@ -879,7 +922,7 @@ class SMTPHandler(logging.Handler):
port = self.mailport
if not port:
port = smtplib.SMTP_PORT
- smtp = smtplib.SMTP(self.mailhost, port)
+ smtp = smtplib.SMTP(self.mailhost, port, timeout=self.timeout)
msg = self.format(record)
msg = "From: %s\r\nTo: %s\r\nSubject: %s\r\nDate: %s\r\n\r\n%s" % (
self.fromaddr,
@@ -894,7 +937,7 @@ class SMTPHandler(logging.Handler):
smtp.login(self.username, self.password)
smtp.sendmail(self.fromaddr, self.toaddrs, msg)
smtp.quit()
- except (KeyboardInterrupt, SystemExit):
+ except (KeyboardInterrupt, SystemExit): #pragma: no cover
raise
except:
self.handleError(record)
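A usage sketch of the new timeout parameter (all addresses hypothetical); the point is that emit() fails fast instead of blocking on an unresponsive mail server.

import logging.handlers

mail = logging.handlers.SMTPHandler(
    mailhost=("smtp.example.com", 25),
    fromaddr="app@example.com",
    toaddrs=["ops@example.com"],
    subject="Application error",
    timeout=5.0,
)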
@@ -981,7 +1024,7 @@ class NTEventLogHandler(logging.Handler):
type = self.getEventType(record)
msg = self.format(record)
self._welu.ReportEvent(self.appname, id, cat, type, [msg])
- except (KeyboardInterrupt, SystemExit):
+ except (KeyboardInterrupt, SystemExit): #pragma: no cover
raise
except:
self.handleError(record)
@@ -1064,9 +1107,11 @@ class HTTPHandler(logging.Handler):
s = ('u%s:%s' % self.credentials).encode('utf-8')
s = 'Basic ' + base64.b64encode(s).strip()
h.putheader('Authorization', s)
- h.endheaders(data if self.method == "POST" else None)
+ h.endheaders()
+ if self.method == "POST":
+ h.send(data.encode('utf-8'))
h.getresponse() #can't do anything with the result
- except (KeyboardInterrupt, SystemExit):
+ except (KeyboardInterrupt, SystemExit): #pragma: no cover
raise
except:
self.handleError(record)
@@ -1248,7 +1293,7 @@ class QueueHandler(logging.Handler):
"""
try:
self.enqueue(self.prepare(record))
- except (KeyboardInterrupt, SystemExit):
+ except (KeyboardInterrupt, SystemExit): #pragma: no cover
raise
except:
self.handleError(record)
@@ -1345,6 +1390,16 @@ if threading:
except queue.Empty:
break
+ def enqueue_sentinel(self):
+ """
+ This is used to enqueue the sentinel record.
+
+ The base implementation uses put_nowait. You may want to override this
+ method if you want to use timeouts or work with custom queue
+ implementations.
+ """
+ self.queue.put_nowait(self._sentinel)
+
def stop(self):
"""
Stop the listener.
@@ -1354,6 +1409,6 @@ if threading:
may be some records still left on the queue, which won't be processed.
"""
self._stop.set()
- self.queue.put_nowait(self._sentinel)
+ self.enqueue_sentinel()
self._thread.join()
self._thread = None
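A sketch of overriding the new hook, e.g. to use a blocking put with a timeout on a bounded queue (class name hypothetical):

import queue
import logging.handlers

class PatientListener(logging.handlers.QueueListener):
    def enqueue_sentinel(self):
        # Block up to 5s rather than failing if the queue is full.
        self.queue.put(self._sentinel, timeout=5)

q = queue.Queue(maxsize=100)
listener = PatientListener(q, logging.StreamHandler())
listener.start()
listener.stop()    # stop() now goes through enqueue_sentinel()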
diff --git a/Lib/lzma.py b/Lib/lzma.py
new file mode 100644
index 0000000..3786993
--- /dev/null
+++ b/Lib/lzma.py
@@ -0,0 +1,401 @@
+"""Interface to the liblzma compression library.
+
+This module provides a class for reading and writing compressed files,
+classes for incremental (de)compression, and convenience functions for
+one-shot (de)compression.
+
+These classes and functions support both the XZ and legacy LZMA
+container formats, as well as raw compressed data streams.
+"""
+
+__all__ = [
+ "CHECK_NONE", "CHECK_CRC32", "CHECK_CRC64", "CHECK_SHA256",
+ "CHECK_ID_MAX", "CHECK_UNKNOWN",
+ "FILTER_LZMA1", "FILTER_LZMA2", "FILTER_DELTA", "FILTER_X86", "FILTER_IA64",
+ "FILTER_ARM", "FILTER_ARMTHUMB", "FILTER_POWERPC", "FILTER_SPARC",
+ "FORMAT_AUTO", "FORMAT_XZ", "FORMAT_ALONE", "FORMAT_RAW",
+ "MF_HC3", "MF_HC4", "MF_BT2", "MF_BT3", "MF_BT4",
+ "MODE_FAST", "MODE_NORMAL", "PRESET_DEFAULT", "PRESET_EXTREME",
+
+ "LZMACompressor", "LZMADecompressor", "LZMAFile", "LZMAError",
+ "compress", "decompress", "check_is_supported",
+]
+
+import io
+from _lzma import *
+
+
+_MODE_CLOSED = 0
+_MODE_READ = 1
+_MODE_READ_EOF = 2
+_MODE_WRITE = 3
+
+_BUFFER_SIZE = 8192
+
+
+class LZMAFile(io.BufferedIOBase):
+
+ """A file object providing transparent LZMA (de)compression.
+
+ An LZMAFile can act as a wrapper for an existing file object, or
+ refer directly to a named file on disk.
+
+ Note that LZMAFile provides a *binary* file interface - data read
+ is returned as bytes, and data to be written must be given as bytes.
+ """
+
+ def __init__(self, filename=None, mode="r", *,
+ fileobj=None, format=None, check=-1,
+ preset=None, filters=None):
+ """Open an LZMA-compressed file.
+
+ If filename is given, open the named file. Otherwise, operate on
+ the file object given by fileobj. Exactly one of these two
+ parameters should be provided.
+
+ mode can be "r" for reading (default), "w" for (over)writing, or
+ "a" for appending.
+
+ format specifies the container format to use for the file.
+ If mode is "r", this defaults to FORMAT_AUTO. Otherwise, the
+ default is FORMAT_XZ.
+
+ check specifies the integrity check to use. This argument can
+ only be used when opening a file for writing. For FORMAT_XZ,
+ the default is CHECK_CRC64. FORMAT_ALONE and FORMAT_RAW do not
+ support integrity checks - for these formats, check must be
+ omitted, or be CHECK_NONE.
+
+ When opening a file for reading, the *preset* argument is not
+ meaningful, and should be omitted. The *filters* argument should
+ also be omitted, except when format is FORMAT_RAW (in which case
+ it is required).
+
+ When opening a file for writing, the settings used by the
+ compressor can be specified either as a preset compression
+ level (with the *preset* argument), or in detail as a custom
+ filter chain (with the *filters* argument). For FORMAT_XZ and
+ FORMAT_ALONE, the default is to use the PRESET_DEFAULT preset
+ level. For FORMAT_RAW, the caller must always specify a filter
+ chain; the raw compressor does not support preset compression
+ levels.
+
+ preset (if provided) should be an integer in the range 0-9,
+ optionally OR-ed with the constant PRESET_EXTREME.
+
+ filters (if provided) should be a sequence of dicts. Each dict
+ should have an entry for "id" indicating ID of the filter, plus
+ additional entries for options to the filter.
+ """
+ self._fp = None
+ self._closefp = False
+ self._mode = _MODE_CLOSED
+ self._pos = 0
+ self._size = -1
+
+ if mode == "r":
+ if check != -1:
+ raise ValueError("Cannot specify an integrity check "
+ "when opening a file for reading")
+ if preset is not None:
+ raise ValueError("Cannot specify a preset compression "
+ "level when opening a file for reading")
+ if format is None:
+ format = FORMAT_AUTO
+ mode_code = _MODE_READ
+ # Save the args to pass to the LZMADecompressor initializer.
+ # If the file contains multiple compressed streams, each
+ # stream will need a separate decompressor object.
+ self._init_args = {"format":format, "filters":filters}
+ self._decompressor = LZMADecompressor(**self._init_args)
+ self._buffer = None
+ elif mode in ("w", "a"):
+ if format is None:
+ format = FORMAT_XZ
+ mode_code = _MODE_WRITE
+ self._compressor = LZMACompressor(format=format, check=check,
+ preset=preset, filters=filters)
+ else:
+ raise ValueError("Invalid mode: {!r}".format(mode))
+
+ if filename is not None and fileobj is None:
+ mode += "b"
+ self._fp = open(filename, mode)
+ self._closefp = True
+ self._mode = mode_code
+ elif fileobj is not None and filename is None:
+ self._fp = fileobj
+ self._mode = mode_code
+ else:
+ raise ValueError("Must give exactly one of filename and fileobj")
+
+ def close(self):
+ """Flush and close the file.
+
+ May be called more than once without error. Once the file is
+ closed, any other operation on it will raise a ValueError.
+ """
+ if self._mode == _MODE_CLOSED:
+ return
+ try:
+ if self._mode in (_MODE_READ, _MODE_READ_EOF):
+ self._decompressor = None
+ self._buffer = None
+ elif self._mode == _MODE_WRITE:
+ self._fp.write(self._compressor.flush())
+ self._compressor = None
+ finally:
+ try:
+ if self._closefp:
+ self._fp.close()
+ finally:
+ self._fp = None
+ self._closefp = False
+ self._mode = _MODE_CLOSED
+
+ @property
+ def closed(self):
+ """True if this file is closed."""
+ return self._mode == _MODE_CLOSED
+
+ def fileno(self):
+ """Return the file descriptor for the underlying file."""
+ self._check_not_closed()
+ return self._fp.fileno()
+
+ def seekable(self):
+ """Return whether the file supports seeking."""
+ return self.readable() and self._fp.seekable()
+
+ def readable(self):
+ """Return whether the file was opened for reading."""
+ self._check_not_closed()
+ return self._mode in (_MODE_READ, _MODE_READ_EOF)
+
+ def writable(self):
+ """Return whether the file was opened for writing."""
+ self._check_not_closed()
+ return self._mode == _MODE_WRITE
+
+ # Mode-checking helper functions.
+
+ def _check_not_closed(self):
+ if self.closed:
+ raise ValueError("I/O operation on closed file")
+
+ def _check_can_read(self):
+ if not self.readable():
+ raise io.UnsupportedOperation("File not open for reading")
+
+ def _check_can_write(self):
+ if not self.writable():
+ raise io.UnsupportedOperation("File not open for writing")
+
+ def _check_can_seek(self):
+ if not self.readable():
+ raise io.UnsupportedOperation("Seeking is only supported "
+ "on files open for reading")
+ if not self._fp.seekable():
+ raise io.UnsupportedOperation("The underlying file object "
+ "does not support seeking")
+
+ # Fill the readahead buffer if it is empty. Returns False on EOF.
+ def _fill_buffer(self):
+ if self._buffer:
+ return True
+
+ if self._decompressor.unused_data:
+ rawblock = self._decompressor.unused_data
+ else:
+ rawblock = self._fp.read(_BUFFER_SIZE)
+
+ if not rawblock:
+ if self._decompressor.eof:
+ self._mode = _MODE_READ_EOF
+ self._size = self._pos
+ return False
+ else:
+ raise EOFError("Compressed file ended before the "
+ "end-of-stream marker was reached")
+
+ # Continue to next stream.
+ if self._decompressor.eof:
+ self._decompressor = LZMADecompressor(**self._init_args)
+
+ self._buffer = self._decompressor.decompress(rawblock)
+ return True
+
+ # Read data until EOF.
+ # If return_data is false, consume the data without returning it.
+ def _read_all(self, return_data=True):
+ blocks = []
+ while self._fill_buffer():
+ if return_data:
+ blocks.append(self._buffer)
+ self._pos += len(self._buffer)
+ self._buffer = None
+ if return_data:
+ return b"".join(blocks)
+
+ # Read a block of up to n bytes.
+ # If return_data is false, consume the data without returning it.
+ def _read_block(self, n, return_data=True):
+ blocks = []
+ while n > 0 and self._fill_buffer():
+ if n < len(self._buffer):
+ data = self._buffer[:n]
+ self._buffer = self._buffer[n:]
+ else:
+ data = self._buffer
+ self._buffer = None
+ if return_data:
+ blocks.append(data)
+ self._pos += len(data)
+ n -= len(data)
+ if return_data:
+ return b"".join(blocks)
+
+ def peek(self, size=-1):
+ """Return buffered data without advancing the file position.
+
+ Always returns at least one byte of data, unless at EOF.
+ The exact number of bytes returned is unspecified.
+ """
+ self._check_can_read()
+ if self._mode == _MODE_READ_EOF or not self._fill_buffer():
+ return b""
+ return self._buffer
+
+ def read(self, size=-1):
+ """Read up to size uncompressed bytes from the file.
+
+ If size is negative or omitted, read until EOF is reached.
+ Returns b"" if the file is already at EOF.
+ """
+ self._check_can_read()
+ if self._mode == _MODE_READ_EOF or size == 0:
+ return b""
+ elif size < 0:
+ return self._read_all()
+ else:
+ return self._read_block(size)
+
+ def read1(self, size=-1):
+ """Read up to size uncompressed bytes with at most one read
+ from the underlying stream.
+
+ Returns b"" if the file is at EOF.
+ """
+ self._check_can_read()
+ if (size == 0 or self._mode == _MODE_READ_EOF or
+ not self._fill_buffer()):
+ return b""
+ if 0 < size < len(self._buffer):
+ data = self._buffer[:size]
+ self._buffer = self._buffer[size:]
+ else:
+ data = self._buffer
+ self._buffer = None
+ self._pos += len(data)
+ return data
+
+ def write(self, data):
+ """Write a bytes object to the file.
+
+ Returns the number of uncompressed bytes written, which is
+ always len(data). Note that due to buffering, the file on disk
+ may not reflect the data written until close() is called.
+ """
+ self._check_can_write()
+ compressed = self._compressor.compress(data)
+ self._fp.write(compressed)
+ self._pos += len(data)
+ return len(data)
+
+ # Rewind the file to the beginning of the data stream.
+ def _rewind(self):
+ self._fp.seek(0, 0)
+ self._mode = _MODE_READ
+ self._pos = 0
+ self._decompressor = LZMADecompressor(**self._init_args)
+ self._buffer = None
+
+ def seek(self, offset, whence=0):
+ """Change the file position.
+
+ The new position is specified by offset, relative to the
+ position indicated by whence. Possible values for whence are:
+
+ 0: start of stream (default): offset must not be negative
+ 1: current stream position
+ 2: end of stream; offset must not be positive
+
+ Returns the new file position.
+
+        Note that seeking is emulated, so depending on the parameters,
+ this operation may be extremely slow.
+ """
+ self._check_can_seek()
+
+ # Recalculate offset as an absolute file position.
+ if whence == 0:
+ pass
+ elif whence == 1:
+ offset = self._pos + offset
+ elif whence == 2:
+ # Seeking relative to EOF - we need to know the file's size.
+ if self._size < 0:
+ self._read_all(return_data=False)
+ offset = self._size + offset
+ else:
+ raise ValueError("Invalid value for whence: {}".format(whence))
+
+ # Make it so that offset is the number of bytes to skip forward.
+ if offset < self._pos:
+ self._rewind()
+ else:
+ offset -= self._pos
+
+ # Read and discard data until we reach the desired position.
+ if self._mode != _MODE_READ_EOF:
+ self._read_block(offset, return_data=False)
+
+ return self._pos
+
+ def tell(self):
+ """Return the current file position."""
+ self._check_not_closed()
+ return self._pos
+
+
+def compress(data, format=FORMAT_XZ, check=-1, preset=None, filters=None):
+ """Compress a block of data.
+
+ Refer to LZMACompressor's docstring for a description of the
+ optional arguments *format*, *check*, *preset* and *filters*.
+
+ For incremental compression, use an LZMACompressor object instead.
+ """
+ comp = LZMACompressor(format, check, preset, filters)
+ return comp.compress(data) + comp.flush()
+
+
+def decompress(data, format=FORMAT_AUTO, memlimit=None, filters=None):
+ """Decompress a block of data.
+
+ Refer to LZMADecompressor's docstring for a description of the
+ optional arguments *format*, *check* and *filters*.
+
+ For incremental decompression, use a LZMADecompressor object instead.
+ """
+ results = []
+ while True:
+ decomp = LZMADecompressor(format, memlimit, filters)
+ results.append(decomp.decompress(data))
+ if not decomp.eof:
+ raise LZMAError("Compressed data ended before the "
+ "end-of-stream marker was reached")
+ if not decomp.unused_data:
+ return b"".join(results)
+ # There is unused data left over. Proceed to next stream.
+ data = decomp.unused_data
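A quick usage sketch for the new module (file name illustrative), covering both the one-shot helpers and the file interface:

import lzma

payload = b"xz-compressed payload " * 100
blob = lzma.compress(payload)              # FORMAT_XZ by default
assert lzma.decompress(blob) == payload

with lzma.LZMAFile("archive.xz", mode="w") as f:
    f.write(payload)
with lzma.LZMAFile("archive.xz") as f:     # FORMAT_AUTO detects the container
    assert f.read() == payload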
diff --git a/Lib/mailbox.py b/Lib/mailbox.py
index a677729..325b9c9 100644
--- a/Lib/mailbox.py
+++ b/Lib/mailbox.py
@@ -1106,8 +1106,7 @@ class MH(Mailbox):
def get_sequences(self):
"""Return a name-to-key-list dictionary to define each sequence."""
results = {}
- f = open(os.path.join(self._path, '.mh_sequences'), 'r')
- try:
+ with open(os.path.join(self._path, '.mh_sequences'), 'r', encoding='ASCII') as f:
all_keys = set(self.keys())
for line in f:
try:
@@ -1126,13 +1125,11 @@ class MH(Mailbox):
except ValueError:
raise FormatError('Invalid sequence specification: %s' %
line.rstrip())
- finally:
- f.close()
return results
def set_sequences(self, sequences):
"""Set sequences using the given name-to-key-list dictionary."""
- f = open(os.path.join(self._path, '.mh_sequences'), 'r+')
+ f = open(os.path.join(self._path, '.mh_sequences'), 'r+', encoding='ASCII')
try:
os.close(os.open(f.name, os.O_WRONLY | os.O_TRUNC))
for name, keys in sequences.items():
diff --git a/Lib/mailcap.py b/Lib/mailcap.py
index 4ae13d7..99f4958 100644
--- a/Lib/mailcap.py
+++ b/Lib/mailcap.py
@@ -33,10 +33,10 @@ def getcaps():
def listmailcapfiles():
"""Return a list of all mailcap files found on the system."""
- # XXX Actually, this is Unix-specific
+ # This is mostly a Unix thing, but we use the OS path separator anyway
if 'MAILCAPS' in os.environ:
- str = os.environ['MAILCAPS']
- mailcaps = str.split(':')
+ pathstr = os.environ['MAILCAPS']
+ mailcaps = pathstr.split(os.pathsep)
else:
if 'HOME' in os.environ:
home = os.environ['HOME']
diff --git a/Lib/mimetypes.py b/Lib/mimetypes.py
index 4b1e2f9..fa62326 100644
--- a/Lib/mimetypes.py
+++ b/Lib/mimetypes.py
@@ -249,7 +249,6 @@ class MimeTypes:
yield ctype
i += 1
- default_encoding = sys.getdefaultencoding()
with _winreg.OpenKey(_winreg.HKEY_CLASSES_ROOT,
r'MIME\Database\Content Type') as mimedb:
for ctype in enum_types(mimedb):
@@ -434,6 +433,8 @@ def _default_mime_types():
'.ksh' : 'text/plain',
'.latex' : 'application/x-latex',
'.m1v' : 'video/mpeg',
+ '.m3u' : 'application/vnd.apple.mpegurl',
+ '.m3u8' : 'application/vnd.apple.mpegurl',
'.man' : 'application/x-troff-man',
'.me' : 'application/x-troff-me',
'.mht' : 'message/rfc822',
diff --git a/Lib/modulefinder.py b/Lib/modulefinder.py
index f033ba9..c0910aa 100644
--- a/Lib/modulefinder.py
+++ b/Lib/modulefinder.py
@@ -1,6 +1,5 @@
"""Find modules used by a script, using introspection."""
-from __future__ import generators
import dis
import imp
import marshal
@@ -9,8 +8,6 @@ import sys
import types
import struct
-READ_MODE = "rU"
-
# XXX Clean up once str8's cstor matches bytes.
LOAD_CONST = bytes([dis.opname.index('LOAD_CONST')])
IMPORT_NAME = bytes([dis.opname.index('IMPORT_NAME')])
@@ -29,9 +26,7 @@ packagePathMap = {}
# A Public interface
def AddPackagePath(packagename, path):
- paths = packagePathMap.get(packagename, [])
- paths.append(path)
- packagePathMap[packagename] = paths
+ packagePathMap.setdefault(packagename, []).append(path)
replacePackageMap = {}
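The setdefault() form is behaviour-for-behaviour equivalent to the removed three lines; a quick check:

packagePathMap = {}
packagePathMap.setdefault("pkg", []).append("/opt/pkg/a")
packagePathMap.setdefault("pkg", []).append("/opt/pkg/b")
assert packagePathMap == {"pkg": ["/opt/pkg/a", "/opt/pkg/b"]}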
@@ -106,14 +101,14 @@ class ModuleFinder:
def run_script(self, pathname):
self.msg(2, "run_script", pathname)
- with open(pathname, READ_MODE) as fp:
+ with open(pathname) as fp:
stuff = ("", "r", imp.PY_SOURCE)
self.load_module('__main__', fp, pathname, stuff)
def load_file(self, pathname):
dir, name = os.path.split(pathname)
name, ext = os.path.splitext(name)
- with open(pathname, READ_MODE) as fp:
+ with open(pathname) as fp:
stuff = (ext, "r", imp.PY_SOURCE)
self.load_module(name, fp, pathname, stuff)
@@ -270,7 +265,8 @@ class ModuleFinder:
try:
m = self.load_module(fqname, fp, pathname, stuff)
finally:
- if fp: fp.close()
+ if fp:
+ fp.close()
if parent:
setattr(parent, partname, m)
self.msgout(3, "import_module ->", m)
@@ -662,4 +658,4 @@ if __name__ == '__main__':
try:
mf = test()
except KeyboardInterrupt:
- print("\n[interrupt]")
+ print("\n[interrupted]")
diff --git a/Lib/multiprocessing/__init__.py b/Lib/multiprocessing/__init__.py
index e6e16c8..e012440 100644
--- a/Lib/multiprocessing/__init__.py
+++ b/Lib/multiprocessing/__init__.py
@@ -48,7 +48,7 @@ __all__ = [
'Manager', 'Pipe', 'cpu_count', 'log_to_stderr', 'get_logger',
'allow_connection_pickling', 'BufferTooShort', 'TimeoutError',
'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', 'Condition',
- 'Event', 'Queue', 'JoinableQueue', 'Pool', 'Value', 'Array',
+ 'Event', 'Queue', 'SimpleQueue', 'JoinableQueue', 'Pool', 'Value', 'Array',
'RawValue', 'RawArray', 'SUBDEBUG', 'SUBWARNING',
]
@@ -223,6 +223,13 @@ def JoinableQueue(maxsize=0):
from multiprocessing.queues import JoinableQueue
return JoinableQueue(maxsize)
+def SimpleQueue():
+ '''
+ Returns a queue object
+ '''
+ from multiprocessing.queues import SimpleQueue
+ return SimpleQueue()
+
def Pool(processes=None, initializer=None, initargs=(), maxtasksperchild=None):
'''
Returns a process pool object
diff --git a/Lib/multiprocessing/connection.py b/Lib/multiprocessing/connection.py
index df00f1d..ca0c973 100644
--- a/Lib/multiprocessing/connection.py
+++ b/Lib/multiprocessing/connection.py
@@ -32,21 +32,31 @@
# SUCH DAMAGE.
#
-__all__ = [ 'Client', 'Listener', 'Pipe' ]
+__all__ = [ 'Client', 'Listener', 'Pipe', 'wait' ]
+import io
import os
import sys
+import pickle
+import select
import socket
+import struct
import errno
import time
import tempfile
import itertools
import _multiprocessing
-from multiprocessing import current_process, AuthenticationError
-from multiprocessing.util import get_temp_dir, Finalize, sub_debug, debug
-from multiprocessing.forking import duplicate, close
-
+from multiprocessing import current_process, AuthenticationError, BufferTooShort
+from multiprocessing.util import (
+ get_temp_dir, Finalize, sub_debug, debug, _eintr_retry)
+try:
+ from _multiprocessing import win32
+ from _subprocess import WAIT_OBJECT_0, WAIT_TIMEOUT, INFINITE
+except ImportError:
+ if sys.platform == 'win32':
+ raise
+ win32 = None
#
#
@@ -111,6 +121,306 @@ def address_type(address):
raise ValueError('address type of %r unrecognized' % address)
#
+# Connection classes
+#
+
+class _ConnectionBase:
+ _handle = None
+
+ def __init__(self, handle, readable=True, writable=True):
+ handle = handle.__index__()
+ if handle < 0:
+ raise ValueError("invalid handle")
+ if not readable and not writable:
+ raise ValueError(
+ "at least one of `readable` and `writable` must be True")
+ self._handle = handle
+ self._readable = readable
+ self._writable = writable
+
+ # XXX should we use util.Finalize instead of a __del__?
+
+ def __del__(self):
+ if self._handle is not None:
+ self._close()
+
+ def _check_closed(self):
+ if self._handle is None:
+ raise IOError("handle is closed")
+
+ def _check_readable(self):
+ if not self._readable:
+ raise IOError("connection is write-only")
+
+ def _check_writable(self):
+ if not self._writable:
+ raise IOError("connection is read-only")
+
+ def _bad_message_length(self):
+ if self._writable:
+ self._readable = False
+ else:
+ self.close()
+ raise IOError("bad message length")
+
+ @property
+ def closed(self):
+ """True if the connection is closed"""
+ return self._handle is None
+
+ @property
+ def readable(self):
+ """True if the connection is readable"""
+ return self._readable
+
+ @property
+ def writable(self):
+ """True if the connection is writable"""
+ return self._writable
+
+ def fileno(self):
+ """File descriptor or handle of the connection"""
+ self._check_closed()
+ return self._handle
+
+ def close(self):
+ """Close the connection"""
+ if self._handle is not None:
+ try:
+ self._close()
+ finally:
+ self._handle = None
+
+ def send_bytes(self, buf, offset=0, size=None):
+ """Send the bytes data from a bytes-like object"""
+ self._check_closed()
+ self._check_writable()
+ m = memoryview(buf)
+ # HACK for byte-indexing of non-bytewise buffers (e.g. array.array)
+ if m.itemsize > 1:
+ m = memoryview(bytes(m))
+ n = len(m)
+ if offset < 0:
+ raise ValueError("offset is negative")
+ if n < offset:
+ raise ValueError("buffer length < offset")
+ if size is None:
+ size = n - offset
+ elif size < 0:
+ raise ValueError("size is negative")
+ elif offset + size > n:
+ raise ValueError("buffer length < offset + size")
+ self._send_bytes(m[offset:offset + size])
+
+ def send(self, obj):
+ """Send a (picklable) object"""
+ self._check_closed()
+ self._check_writable()
+ buf = pickle.dumps(obj, protocol=pickle.HIGHEST_PROTOCOL)
+ self._send_bytes(memoryview(buf))
+
+ def recv_bytes(self, maxlength=None):
+ """
+ Receive bytes data as a bytes object.
+ """
+ self._check_closed()
+ self._check_readable()
+ if maxlength is not None and maxlength < 0:
+ raise ValueError("negative maxlength")
+ buf = self._recv_bytes(maxlength)
+ if buf is None:
+ self._bad_message_length()
+ return buf.getvalue()
+
+ def recv_bytes_into(self, buf, offset=0):
+ """
+ Receive bytes data into a writeable buffer-like object.
+ Return the number of bytes read.
+ """
+ self._check_closed()
+ self._check_readable()
+ with memoryview(buf) as m:
+ # Get bytesize of arbitrary buffer
+ itemsize = m.itemsize
+ bytesize = itemsize * len(m)
+ if offset < 0:
+ raise ValueError("negative offset")
+ elif offset > bytesize:
+ raise ValueError("offset too large")
+ result = self._recv_bytes()
+ size = result.tell()
+ if bytesize < offset + size:
+ raise BufferTooShort(result.getvalue())
+ # Message can fit in dest
+ result.seek(0)
+ result.readinto(m[offset // itemsize :
+ (offset + size) // itemsize])
+ return size
+
+ def recv(self):
+ """Receive a (picklable) object"""
+ self._check_closed()
+ self._check_readable()
+ buf = self._recv_bytes()
+ return pickle.loads(buf.getbuffer())
+
+ def poll(self, timeout=0.0):
+ """Whether there is any input available to be read"""
+ self._check_closed()
+ self._check_readable()
+ return self._poll(timeout)
+
+
+if win32:
+
+ class PipeConnection(_ConnectionBase):
+ """
+ Connection class based on a Windows named pipe.
+ Overlapped I/O is used, so the handles must have been created
+ with FILE_FLAG_OVERLAPPED.
+ """
+ _got_empty_message = False
+
+ def _close(self, _CloseHandle=win32.CloseHandle):
+ _CloseHandle(self._handle)
+
+ def _send_bytes(self, buf):
+ ov, err = win32.WriteFile(self._handle, buf, overlapped=True)
+ try:
+ if err == win32.ERROR_IO_PENDING:
+ waitres = win32.WaitForMultipleObjects(
+ [ov.event], False, INFINITE)
+ assert waitres == WAIT_OBJECT_0
+ except:
+ ov.cancel()
+ raise
+ finally:
+ nwritten, err = ov.GetOverlappedResult(True)
+ assert err == 0
+ assert nwritten == len(buf)
+
+ def _recv_bytes(self, maxsize=None):
+ if self._got_empty_message:
+ self._got_empty_message = False
+ return io.BytesIO()
+ else:
+ bsize = 128 if maxsize is None else min(maxsize, 128)
+ try:
+ ov, err = win32.ReadFile(self._handle, bsize,
+ overlapped=True)
+ try:
+ if err == win32.ERROR_IO_PENDING:
+ waitres = win32.WaitForMultipleObjects(
+ [ov.event], False, INFINITE)
+ assert waitres == WAIT_OBJECT_0
+ except:
+ ov.cancel()
+ raise
+ finally:
+ nread, err = ov.GetOverlappedResult(True)
+ if err == 0:
+ f = io.BytesIO()
+ f.write(ov.getbuffer())
+ return f
+ elif err == win32.ERROR_MORE_DATA:
+ return self._get_more_data(ov, maxsize)
+ except IOError as e:
+ if e.winerror == win32.ERROR_BROKEN_PIPE:
+ raise EOFError
+ else:
+ raise
+ raise RuntimeError("shouldn't get here; expected KeyboardInterrupt")
+
+ def _poll(self, timeout):
+ if (self._got_empty_message or
+ win32.PeekNamedPipe(self._handle)[0] != 0):
+ return True
+ if timeout < 0:
+ timeout = None
+ return bool(wait([self], timeout))
+
+ def _get_more_data(self, ov, maxsize):
+ buf = ov.getbuffer()
+ f = io.BytesIO()
+ f.write(buf)
+ left = win32.PeekNamedPipe(self._handle)[1]
+ assert left > 0
+ if maxsize is not None and len(buf) + left > maxsize:
+ self._bad_message_length()
+ ov, err = win32.ReadFile(self._handle, left, overlapped=True)
+ rbytes, err = ov.GetOverlappedResult(True)
+ assert err == 0
+ assert rbytes == left
+ f.write(ov.getbuffer())
+ return f
+
+
+class Connection(_ConnectionBase):
+ """
+ Connection class based on an arbitrary file descriptor (Unix only), or
+ a socket handle (Windows).
+ """
+
+ if win32:
+ def _close(self, _close=win32.closesocket):
+ _close(self._handle)
+ _write = win32.send
+ _read = win32.recv
+ else:
+ def _close(self, _close=os.close):
+ _close(self._handle)
+ _write = os.write
+ _read = os.read
+
+ def _send(self, buf, write=_write):
+ remaining = len(buf)
+ while True:
+ n = write(self._handle, buf)
+ remaining -= n
+ if remaining == 0:
+ break
+ buf = buf[n:]
+
+ def _recv(self, size, read=_read):
+ buf = io.BytesIO()
+ handle = self._handle
+ remaining = size
+ while remaining > 0:
+ chunk = read(handle, remaining)
+ n = len(chunk)
+ if n == 0:
+ if remaining == size:
+ raise EOFError
+ else:
+ raise IOError("got end of file during message")
+ buf.write(chunk)
+ remaining -= n
+ return buf
+
+ def _send_bytes(self, buf):
+ # For wire compatibility with 3.2 and lower
+ n = len(buf)
+ self._send(struct.pack("!i", n))
+ # The condition is necessary to avoid "broken pipe" errors
+ # when sending a 0-length buffer if the other end closed the pipe.
+ if n > 0:
+ self._send(buf)
+
+ def _recv_bytes(self, maxsize=None):
+ buf = self._recv(4)
+ size, = struct.unpack("!i", buf.getvalue())
+ if maxsize is not None and size > maxsize:
+ return None
+ return self._recv(size)
+
+ def _poll(self, timeout):
+ if timeout < 0.0:
+ timeout = None
+ r = wait([self._handle], timeout)
+ return bool(r)
+
+
+#
# Public functions
#
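For reference, the wire format _send_bytes()/_recv_bytes() preserve for 3.2 compatibility is just a big-endian signed 32-bit length prefix followed by the payload; a standalone sketch:

import struct

def frame(payload):
    return struct.pack("!i", len(payload)) + payload

def unframe(data):
    size, = struct.unpack("!i", data[:4])
    return data[4:4 + size]

assert unframe(frame(b"ping")) == b"ping"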
@@ -186,21 +496,17 @@ if sys.platform != 'win32':
'''
if duplex:
s1, s2 = socket.socketpair()
- c1 = _multiprocessing.Connection(os.dup(s1.fileno()))
- c2 = _multiprocessing.Connection(os.dup(s2.fileno()))
- s1.close()
- s2.close()
+ c1 = Connection(s1.detach())
+ c2 = Connection(s2.detach())
else:
fd1, fd2 = os.pipe()
- c1 = _multiprocessing.Connection(fd1, writable=False)
- c2 = _multiprocessing.Connection(fd2, readable=False)
+ c1 = Connection(fd1, writable=False)
+ c2 = Connection(fd2, readable=False)
return c1, c2
else:
- from _multiprocessing import win32
-
def Pipe(duplex=True):
'''
Returns pair of connection objects at either end of a pipe
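Either implementation yields the same Connection API; a usage sketch (function name hypothetical):

import multiprocessing as mp

def child(conn):
    conn.send({"status": "ok"})
    conn.close()

if __name__ == '__main__':
    parent_conn, child_conn = mp.Pipe()
    p = mp.Process(target=child, args=(child_conn,))
    p.start()
    print(parent_conn.recv())      # {'status': 'ok'}
    p.join()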
@@ -216,26 +522,26 @@ else:
obsize, ibsize = 0, BUFSIZE
h1 = win32.CreateNamedPipe(
- address, openmode,
+ address, openmode | win32.FILE_FLAG_OVERLAPPED |
+ win32.FILE_FLAG_FIRST_PIPE_INSTANCE,
win32.PIPE_TYPE_MESSAGE | win32.PIPE_READMODE_MESSAGE |
win32.PIPE_WAIT,
1, obsize, ibsize, win32.NMPWAIT_WAIT_FOREVER, win32.NULL
)
h2 = win32.CreateFile(
- address, access, 0, win32.NULL, win32.OPEN_EXISTING, 0, win32.NULL
+ address, access, 0, win32.NULL, win32.OPEN_EXISTING,
+ win32.FILE_FLAG_OVERLAPPED, win32.NULL
)
win32.SetNamedPipeHandleState(
h2, win32.PIPE_READMODE_MESSAGE, None, None
)
- try:
- win32.ConnectNamedPipe(h1, win32.NULL)
- except WindowsError as e:
- if e.args[0] != win32.ERROR_PIPE_CONNECTED:
- raise
+ overlapped = win32.ConnectNamedPipe(h1, overlapped=True)
+ _, err = overlapped.GetOverlappedResult(True)
+ assert err == 0
- c1 = _multiprocessing.PipeConnection(h1, writable=duplex)
- c2 = _multiprocessing.PipeConnection(h2, readable=duplex)
+ c1 = PipeConnection(h1, writable=duplex)
+ c2 = PipeConnection(h2, readable=duplex)
return c1, c2
@@ -250,11 +556,14 @@ class SocketListener(object):
def __init__(self, address, family, backlog=1):
self._socket = socket.socket(getattr(socket, family))
try:
- self._socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+ # SO_REUSEADDR has different semantics on Windows (issue #2550).
+ if os.name == 'posix':
+ self._socket.setsockopt(socket.SOL_SOCKET,
+ socket.SO_REUSEADDR, 1)
self._socket.bind(address)
self._socket.listen(backlog)
self._address = self._socket.getsockname()
- except socket.error:
+ except OSError:
self._socket.close()
raise
self._family = family
@@ -270,7 +579,7 @@ class SocketListener(object):
def accept(self):
s, self._last_accepted = self._socket.accept()
fd = duplicate(s.fileno())
- conn = _multiprocessing.Connection(fd)
+ conn = Connection(fd)
s.close()
return conn
@@ -286,23 +595,9 @@ def SocketClient(address):
'''
family = address_type(address)
with socket.socket( getattr(socket, family) ) as s:
- t = _init_timeout()
-
- while 1:
- try:
- s.connect(address)
- except socket.error as e:
- if e.args[0] != errno.ECONNREFUSED or _check_timeout(t):
- debug('failed to connect to address %s', address)
- raise
- time.sleep(0.01)
- else:
- break
- else:
- raise
-
+ s.connect(address)
fd = duplicate(s.fileno())
- conn = _multiprocessing.Connection(fd)
+ conn = Connection(fd)
return conn
#
@@ -317,39 +612,41 @@ if sys.platform == 'win32':
'''
def __init__(self, address, backlog=None):
self._address = address
- handle = win32.CreateNamedPipe(
- address, win32.PIPE_ACCESS_DUPLEX,
- win32.PIPE_TYPE_MESSAGE | win32.PIPE_READMODE_MESSAGE |
- win32.PIPE_WAIT,
- win32.PIPE_UNLIMITED_INSTANCES, BUFSIZE, BUFSIZE,
- win32.NMPWAIT_WAIT_FOREVER, win32.NULL
- )
- self._handle_queue = [handle]
- self._last_accepted = None
+ self._handle_queue = [self._new_handle(first=True)]
+ self._last_accepted = None
sub_debug('listener created with address=%r', self._address)
-
self.close = Finalize(
self, PipeListener._finalize_pipe_listener,
args=(self._handle_queue, self._address), exitpriority=0
)
- def accept(self):
- newhandle = win32.CreateNamedPipe(
- self._address, win32.PIPE_ACCESS_DUPLEX,
+ def _new_handle(self, first=False):
+ flags = win32.PIPE_ACCESS_DUPLEX | win32.FILE_FLAG_OVERLAPPED
+ if first:
+ flags |= win32.FILE_FLAG_FIRST_PIPE_INSTANCE
+ return win32.CreateNamedPipe(
+ self._address, flags,
win32.PIPE_TYPE_MESSAGE | win32.PIPE_READMODE_MESSAGE |
win32.PIPE_WAIT,
win32.PIPE_UNLIMITED_INSTANCES, BUFSIZE, BUFSIZE,
win32.NMPWAIT_WAIT_FOREVER, win32.NULL
)
- self._handle_queue.append(newhandle)
+
+ def accept(self):
+ self._handle_queue.append(self._new_handle())
handle = self._handle_queue.pop(0)
+ ov = win32.ConnectNamedPipe(handle, overlapped=True)
try:
- win32.ConnectNamedPipe(handle, win32.NULL)
- except WindowsError as e:
- if e.args[0] != win32.ERROR_PIPE_CONNECTED:
- raise
- return _multiprocessing.PipeConnection(handle)
+ res = win32.WaitForMultipleObjects([ov.event], False, INFINITE)
+ except:
+ ov.cancel()
+ win32.CloseHandle(handle)
+ raise
+ finally:
+ _, err = ov.GetOverlappedResult(True)
+ assert err == 0
+ return PipeConnection(handle)
@staticmethod
def _finalize_pipe_listener(queue, address):
@@ -367,11 +664,12 @@ if sys.platform == 'win32':
win32.WaitNamedPipe(address, 1000)
h = win32.CreateFile(
address, win32.GENERIC_READ | win32.GENERIC_WRITE,
- 0, win32.NULL, win32.OPEN_EXISTING, 0, win32.NULL
+ 0, win32.NULL, win32.OPEN_EXISTING,
+ win32.FILE_FLAG_OVERLAPPED, win32.NULL
)
except WindowsError as e:
- if e.args[0] not in (win32.ERROR_SEM_TIMEOUT,
- win32.ERROR_PIPE_BUSY) or _check_timeout(t):
+ if e.winerror not in (win32.ERROR_SEM_TIMEOUT,
+ win32.ERROR_PIPE_BUSY) or _check_timeout(t):
raise
else:
break
@@ -381,7 +679,7 @@ if sys.platform == 'win32':
win32.SetNamedPipeHandleState(
h, win32.PIPE_READMODE_MESSAGE, None, None
)
- return _multiprocessing.PipeConnection(h)
+ return PipeConnection(h)
#
# Authentication stuff
@@ -438,10 +736,10 @@ class ConnectionWrapper(object):
return self._loads(s)
def _xml_dumps(obj):
- return xmlrpclib.dumps((obj,), None, None, None, 1).encode('utf8')
+ return xmlrpclib.dumps((obj,), None, None, None, 1).encode('utf-8')
def _xml_loads(s):
- (obj,), method = xmlrpclib.loads(s.decode('utf8'))
+ (obj,), method = xmlrpclib.loads(s.decode('utf-8'))
return obj
class XmlListener(Listener):
@@ -455,3 +753,126 @@ def XmlClient(*args, **kwds):
global xmlrpclib
import xmlrpc.client as xmlrpclib
return ConnectionWrapper(Client(*args, **kwds), _xml_dumps, _xml_loads)
+
+#
+# Wait
+#
+
+if sys.platform == 'win32':
+
+ def _exhaustive_wait(handles, timeout):
+ # Return ALL handles which are currently signalled. (Only
+ # returning the first signalled might create starvation issues.)
+ L = list(handles)
+ ready = []
+ while L:
+ res = win32.WaitForMultipleObjects(L, False, timeout)
+ if res == WAIT_TIMEOUT:
+ break
+ elif WAIT_OBJECT_0 <= res < WAIT_OBJECT_0 + len(L):
+ res -= WAIT_OBJECT_0
+ elif WAIT_ABANDONED_0 <= res < WAIT_ABANDONED_0 + len(L):
+ res -= WAIT_ABANDONED_0
+ else:
+ raise RuntimeError('Should not get here')
+ ready.append(L[res])
+ L = L[res+1:]
+ timeout = 0
+ return ready
+
+ _ready_errors = {win32.ERROR_BROKEN_PIPE, win32.ERROR_NETNAME_DELETED}
+
+ def wait(object_list, timeout=None):
+ '''
+ Wait till an object in object_list is ready/readable.
+
+ Returns list of those objects in object_list which are ready/readable.
+ '''
+ if timeout is None:
+ timeout = INFINITE
+ elif timeout < 0:
+ timeout = 0
+ else:
+ timeout = int(timeout * 1000 + 0.5)
+
+ object_list = list(object_list)
+ waithandle_to_obj = {}
+ ov_list = []
+ ready_objects = set()
+ ready_handles = set()
+
+ try:
+ for o in object_list:
+ try:
+ fileno = getattr(o, 'fileno')
+ except AttributeError:
+ waithandle_to_obj[o.__index__()] = o
+ else:
+ # start an overlapped read of length zero
+ try:
+ ov, err = win32.ReadFile(fileno(), 0, True)
+ except OSError as e:
+ err = e.winerror
+ if err not in _ready_errors:
+ raise
+ if err == win32.ERROR_IO_PENDING:
+ ov_list.append(ov)
+ waithandle_to_obj[ov.event] = o
+ else:
+ # If o.fileno() is an overlapped pipe handle and
+ # err == 0 then there is a zero length message
+ # in the pipe, but it HAS NOT been consumed.
+ ready_objects.add(o)
+ timeout = 0
+
+ ready_handles = _exhaustive_wait(waithandle_to_obj.keys(), timeout)
+ finally:
+ # request that overlapped reads stop
+ for ov in ov_list:
+ ov.cancel()
+
+ # wait for all overlapped reads to stop
+ for ov in ov_list:
+ try:
+ _, err = ov.GetOverlappedResult(True)
+ except OSError as e:
+ err = e.winerror
+ if err not in _ready_errors:
+ raise
+ if err != win32.ERROR_OPERATION_ABORTED:
+ o = waithandle_to_obj[ov.event]
+ ready_objects.add(o)
+ if err == 0:
+ # If o.fileno() is an overlapped pipe handle then
+ # a zero length message HAS been consumed.
+ if hasattr(o, '_got_empty_message'):
+ o._got_empty_message = True
+
+ ready_objects.update(waithandle_to_obj[h] for h in ready_handles)
+ return [o for o in object_list if o in ready_objects]
+
+else:
+
+ def wait(object_list, timeout=None):
+ '''
+ Wait till an object in object_list is ready/readable.
+
+ Returns list of those objects in object_list which are ready/readable.
+ '''
+ if timeout is not None:
+ if timeout <= 0:
+ return select.select(object_list, [], [], 0)[0]
+ else:
+ deadline = time.time() + timeout
+ while True:
+ try:
+ return select.select(object_list, [], [], timeout)[0]
+ except OSError as e:
+ if e.errno != errno.EINTR:
+ raise
+ if timeout is not None:
+ timeout = deadline - time.time()
+
+
+# Late import because of circular import
+from multiprocessing.forking import duplicate, close
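
As an illustration of the new API (a minimal usage sketch, not part of the patch), wait() multiplexes readable connections, sockets and process sentinels with one call:

    from multiprocessing import Pipe
    from multiprocessing.connection import wait

    r, w = Pipe(duplex=False)
    w.send('hello')
    # wait() returns the subset of objects that became ready in time.
    ready = wait([r], timeout=1.0)
    assert ready == [r]
    print(r.recv())              # -> 'hello'
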
diff --git a/Lib/multiprocessing/dummy/__init__.py b/Lib/multiprocessing/dummy/__init__.py
index c4933d9..056acfc 100644
--- a/Lib/multiprocessing/dummy/__init__.py
+++ b/Lib/multiprocessing/dummy/__init__.py
@@ -46,12 +46,10 @@ import threading
import sys
import weakref
import array
-import itertools
-from multiprocessing import TimeoutError, cpu_count
from multiprocessing.dummy.connection import Pipe
from threading import Lock, RLock, Semaphore, BoundedSemaphore
-from threading import Event
+from threading import Event, Condition
from queue import Queue
#
@@ -84,17 +82,6 @@ class DummyProcess(threading.Thread):
#
#
-class Condition(threading._Condition):
- # XXX
- if sys.version_info < (3, 0):
- notify_all = threading._Condition.notify_all.__func__
- else:
- notify_all = threading._Condition.notify_all
-
-#
-#
-#
-
Process = DummyProcess
current_process = threading.current_thread
current_process()._children = weakref.WeakKeyDictionary()
diff --git a/Lib/multiprocessing/forking.py b/Lib/multiprocessing/forking.py
index 4e24d6a..020508a 100644
--- a/Lib/multiprocessing/forking.py
+++ b/Lib/multiprocessing/forking.py
@@ -55,18 +55,18 @@ def assert_spawning(self):
# Try making some callable types picklable
#
-from pickle import _Pickler as Pickler
+from pickle import Pickler
+from copyreg import dispatch_table
+
class ForkingPickler(Pickler):
- dispatch = Pickler.dispatch.copy()
+ _extra_reducers = {}
+ def __init__(self, *args):
+ Pickler.__init__(self, *args)
+ self.dispatch_table = dispatch_table.copy()
+ self.dispatch_table.update(self._extra_reducers)
@classmethod
def register(cls, type, reduce):
- def dispatcher(self, obj):
- rv = reduce(obj)
- if isinstance(rv, str):
- self.save_global(obj, rv)
- else:
- self.save_reduce(obj=obj, *rv)
- cls.dispatch[type] = dispatcher
+ cls._extra_reducers[type] = reduce
def _reduce_method(m):
if m.__self__ is None:
@@ -100,11 +100,12 @@ else:
#
if sys.platform != 'win32':
- import time
+ import select
exit = os._exit
duplicate = os.dup
close = os.close
+ _select = util._eintr_retry(select.select)
#
# We define a Popen class similar to the one from subprocess, but
@@ -118,14 +119,23 @@ if sys.platform != 'win32':
sys.stderr.flush()
self.returncode = None
+ r, w = os.pipe()
+ self.sentinel = r
+
self.pid = os.fork()
if self.pid == 0:
+ os.close(r)
if 'random' in sys.modules:
import random
random.seed()
code = process_obj._bootstrap()
os._exit(code)
+ # `w` will be closed when the child exits, at which point `r`
+ # will become ready for reading (using e.g. select()).
+ os.close(w)
+ util.Finalize(self, os.close, (r,))
+
def poll(self, flag=os.WNOHANG):
if self.returncode is None:
try:
@@ -143,26 +153,20 @@ if sys.platform != 'win32':
return self.returncode
def wait(self, timeout=None):
- if timeout is None:
- return self.poll(0)
- deadline = time.time() + timeout
- delay = 0.0005
- while 1:
- res = self.poll()
- if res is not None:
- break
- remaining = deadline - time.time()
- if remaining <= 0:
- break
- delay = min(delay * 2, remaining, 0.05)
- time.sleep(delay)
- return res
+ if self.returncode is None:
+ if timeout is not None:
+ r = _select([self.sentinel], [], [], timeout)[0]
+ if not r:
+ return None
+ # This shouldn't block if select() returned successfully.
+ return self.poll(os.WNOHANG if timeout == 0.0 else 0)
+ return self.returncode
def terminate(self):
if self.returncode is None:
try:
os.kill(self.pid, signal.SIGTERM)
- except OSError as e:
+ except OSError:
if self.wait(timeout=0.1) is None:
raise
@@ -178,11 +182,9 @@ else:
import _thread
import msvcrt
import _subprocess
- import time
- from pickle import dump, load, HIGHEST_PROTOCOL
- from _multiprocessing import win32, Connection, PipeConnection
- from .util import Finalize
+ from pickle import load, HIGHEST_PROTOCOL
+ from _multiprocessing import win32
def dump(obj, file, protocol=None):
ForkingPickler(file, protocol).dump(obj)
@@ -256,6 +258,7 @@ else:
self.pid = pid
self.returncode = None
self._handle = hp
+ self.sentinel = int(hp)
# send information to child
prep_data = get_preparation_data(process_obj._name)
@@ -409,6 +412,9 @@ else:
# Make (Pipe)Connection picklable
#
+ # Late import because of circular import
+ from .connection import Connection, PipeConnection
+
def reduce_connection(conn):
if not Popen.thread_is_spawning():
raise RuntimeError(
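
The rewritten register() classmethod just records a reduce function which every pickler instance folds into its own dispatch_table, instead of mutating a shared dispatch dict. A sketch of the mechanism (Point and reduce_point are invented for illustration):

    import io
    from multiprocessing.forking import ForkingPickler

    class Point:
        def __init__(self, x, y):
            self.x, self.y = x, y

    def reduce_point(p):
        # copyreg-style reducer: (callable, args) rebuilds the object
        return Point, (p.x, p.y)

    ForkingPickler.register(Point, reduce_point)
    buf = io.BytesIO()
    ForkingPickler(buf, -1).dump(Point(1, 2))   # uses the custom reducer
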
diff --git a/Lib/multiprocessing/heap.py b/Lib/multiprocessing/heap.py
index 0a25ef0..7366bd2 100644
--- a/Lib/multiprocessing/heap.py
+++ b/Lib/multiprocessing/heap.py
@@ -34,7 +34,6 @@
import bisect
import mmap
-import tempfile
import os
import sys
import threading
diff --git a/Lib/multiprocessing/managers.py b/Lib/multiprocessing/managers.py
index 5588ead..eaf912c 100644
--- a/Lib/multiprocessing/managers.py
+++ b/Lib/multiprocessing/managers.py
@@ -39,19 +39,15 @@ __all__ = [ 'BaseManager', 'SyncManager', 'BaseProxy', 'Token' ]
# Imports
#
-import os
import sys
-import weakref
import threading
import array
import queue
from traceback import format_exc
-from pickle import PicklingError
from multiprocessing import Process, current_process, active_children, Pool, util, connection
from multiprocessing.process import AuthenticationString
-from multiprocessing.forking import exit, Popen, assert_spawning, ForkingPickler
-from multiprocessing.util import Finalize, info
+from multiprocessing.forking import exit, Popen, ForkingPickler
#
# Register some things for pickling
@@ -1070,11 +1066,12 @@ ArrayProxy = MakeProxyType('ArrayProxy', (
PoolProxy = MakeProxyType('PoolProxy', (
'apply', 'apply_async', 'close', 'imap', 'imap_unordered', 'join',
- 'map', 'map_async', 'terminate'
+ 'map', 'map_async', 'starmap', 'starmap_async', 'terminate'
))
PoolProxy._method_to_typeid_ = {
'apply_async': 'AsyncResult',
'map_async': 'AsyncResult',
+ 'starmap_async': 'AsyncResult',
'imap': 'Iterator',
'imap_unordered': 'Iterator'
}
diff --git a/Lib/multiprocessing/pool.py b/Lib/multiprocessing/pool.py
index 0c29e64..7039d16 100644
--- a/Lib/multiprocessing/pool.py
+++ b/Lib/multiprocessing/pool.py
@@ -64,6 +64,9 @@ job_counter = itertools.count()
def mapstar(args):
return list(map(*args))
+def starmapstar(args):
+ return list(itertools.starmap(args[0], args[1]))
+
#
# Code run by worker processes
#
@@ -248,7 +251,25 @@ class Pool(object):
in a list that is returned.
'''
assert self._state == RUN
- return self.map_async(func, iterable, chunksize).get()
+ return self._map_async(func, iterable, mapstar, chunksize).get()
+
+ def starmap(self, func, iterable, chunksize=None):
+ '''
+ Like `map()` method but the elements of the `iterable` are expected to
+ be iterables as well and will be unpacked as arguments. Hence
+ `func` and (a, b) becomes func(a, b).
+ '''
+ assert self._state == RUN
+ return self._map_async(func, iterable, starmapstar, chunksize).get()
+
+ def starmap_async(self, func, iterable, chunksize=None, callback=None,
+ error_callback=None):
+ '''
+ Asynchronous version of `starmap()` method.
+ '''
+ assert self._state == RUN
+ return self._map_async(func, iterable, starmapstar, chunksize,
+ callback, error_callback)
def imap(self, func, iterable, chunksize=1):
'''
@@ -302,6 +323,13 @@ class Pool(object):
Asynchronous version of `map()` method.
'''
assert self._state == RUN
+ return self._map_async(func, iterable, mapstar, chunksize)
+
+ def _map_async(self, func, iterable, mapper, chunksize=None, callback=None,
+ error_callback=None):
+ '''
+ Helper function to implement map, starmap and their async counterparts.
+ '''
if not hasattr(iterable, '__len__'):
iterable = list(iterable)
@@ -315,7 +343,7 @@ class Pool(object):
task_batches = Pool._get_tasks(func, iterable, chunksize)
result = MapResult(self._cache, chunksize, len(iterable), callback,
error_callback=error_callback)
- self._taskqueue.put((((result._job, i, mapstar, (x,), {})
+ self._taskqueue.put((((result._job, i, mapper, (x,), {})
for i, x in enumerate(task_batches)), None))
return result
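
Usage of the new methods (a minimal sketch): starmap() blocks like map(), while starmap_async() returns an AsyncResult:

    from multiprocessing import Pool

    def add(a, b):
        return a + b

    if __name__ == '__main__':
        pool = Pool(2)
        try:
            print(pool.starmap(add, [(1, 2), (3, 4)]))      # -> [3, 7]
            print(pool.starmap_async(add, [(5, 6)]).get())  # -> [11]
        finally:
            pool.close()
            pool.join()
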
diff --git a/Lib/multiprocessing/process.py b/Lib/multiprocessing/process.py
index 2b61ee9..b599f11 100644
--- a/Lib/multiprocessing/process.py
+++ b/Lib/multiprocessing/process.py
@@ -92,12 +92,16 @@ class Process(object):
'''
_Popen = None
- def __init__(self, group=None, target=None, name=None, args=(), kwargs={}):
+ def __init__(self, group=None, target=None, name=None, args=(), kwargs={},
+ *, daemon=None):
assert group is None, 'group argument must be None for now'
count = next(_current_process._counter)
self._identity = _current_process._identity + (count,)
self._authkey = _current_process._authkey
- self._daemonic = _current_process._daemonic
+ if daemon is not None:
+ self._daemonic = daemon
+ else:
+ self._daemonic = _current_process._daemonic
self._tempdir = _current_process._tempdir
self._parent_pid = os.getpid()
self._popen = None
@@ -130,6 +134,7 @@ class Process(object):
else:
from .forking import Popen
self._popen = Popen(self)
+ self._sentinel = self._popen.sentinel
_current_process._children.add(self)
def terminate(self):
@@ -216,6 +221,17 @@ class Process(object):
pid = ident
+ @property
+ def sentinel(self):
+ '''
+ Return a file descriptor (Unix) or handle (Windows) suitable for
+ waiting for process termination.
+ '''
+ try:
+ return self._sentinel
+ except AttributeError:
+ raise ValueError("process not started")
+
def __repr__(self):
if self is _current_process:
status = 'started'
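
Together with connection.wait() above, the new keyword-only daemon argument and the sentinel property look like this in use (sketch):

    import time
    from multiprocessing import Process
    from multiprocessing.connection import wait

    def worker():
        time.sleep(0.5)

    if __name__ == '__main__':
        p = Process(target=worker, daemon=False)
        p.start()
        # The sentinel becomes ready when the process exits, so it can
        # be multiplexed with other waitable objects.
        wait([p.sentinel])
        p.join()
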
diff --git a/Lib/multiprocessing/queues.py b/Lib/multiprocessing/queues.py
index 51d9912..262fd85 100644
--- a/Lib/multiprocessing/queues.py
+++ b/Lib/multiprocessing/queues.py
@@ -39,12 +39,12 @@ import os
import threading
import collections
import time
-import atexit
import weakref
+import errno
from queue import Empty, Full
import _multiprocessing
-from multiprocessing import Pipe
+from multiprocessing.connection import Pipe
from multiprocessing.synchronize import Lock, BoundedSemaphore, Semaphore, Condition
from multiprocessing.util import debug, info, Finalize, register_after_fork
from multiprocessing.forking import assert_spawning
@@ -67,6 +67,8 @@ class Queue(object):
else:
self._wlock = Lock()
self._sem = BoundedSemaphore(maxsize)
+ # For use by concurrent.futures
+ self._ignore_epipe = False
self._after_fork()
@@ -75,11 +77,11 @@ class Queue(object):
def __getstate__(self):
assert_spawning(self)
- return (self._maxsize, self._reader, self._writer,
+ return (self._ignore_epipe, self._maxsize, self._reader, self._writer,
self._rlock, self._wlock, self._sem, self._opid)
def __setstate__(self, state):
- (self._maxsize, self._reader, self._writer,
+ (self._ignore_epipe, self._maxsize, self._reader, self._writer,
self._rlock, self._wlock, self._sem, self._opid) = state
self._after_fork()
@@ -182,7 +184,7 @@ class Queue(object):
self._thread = threading.Thread(
target=Queue._feed,
args=(self._buffer, self._notempty, self._send,
- self._wlock, self._writer.close),
+ self._wlock, self._writer.close, self._ignore_epipe),
name='QueueFeederThread'
)
self._thread.daemon = True
@@ -233,7 +235,7 @@ class Queue(object):
notempty.release()
@staticmethod
- def _feed(buffer, notempty, send, writelock, close):
+ def _feed(buffer, notempty, send, writelock, close, ignore_epipe):
debug('starting thread to feed data to pipe')
from .util import is_exiting
@@ -275,6 +277,8 @@ class Queue(object):
except IndexError:
pass
except Exception as e:
+ if ignore_epipe and getattr(e, 'errno', 0) == errno.EPIPE:
+ return
# Since this runs in a daemon thread the resources it uses
# may become unusable while the process is cleaning up.
# We ignore errors which happen after the process has
@@ -356,6 +360,7 @@ class SimpleQueue(object):
def __init__(self):
self._reader, self._writer = Pipe(duplex=False)
self._rlock = Lock()
+ self._poll = self._reader.poll
if sys.platform == 'win32':
self._wlock = None
else:
@@ -363,7 +368,7 @@ class SimpleQueue(object):
self._make_methods()
def empty(self):
- return not self._reader.poll()
+ return not self._poll()
def __getstate__(self):
assert_spawning(self)
diff --git a/Lib/multiprocessing/reduction.py b/Lib/multiprocessing/reduction.py
index 6e5e5bc..dda4a41 100644
--- a/Lib/multiprocessing/reduction.py
+++ b/Lib/multiprocessing/reduction.py
@@ -39,19 +39,20 @@ import os
import sys
import socket
import threading
+import struct
-import _multiprocessing
from multiprocessing import current_process
from multiprocessing.forking import Popen, duplicate, close, ForkingPickler
from multiprocessing.util import register_after_fork, debug, sub_debug
-from multiprocessing.connection import Client, Listener
+from multiprocessing.connection import Client, Listener, Connection
#
#
#
-if not(sys.platform == 'win32' or hasattr(_multiprocessing, 'recvfd')):
+if not(sys.platform == 'win32' or (hasattr(socket, 'CMSG_LEN') and
+ hasattr(socket, 'SCM_RIGHTS'))):
raise ImportError('pickling of connections not supported')
#
@@ -59,7 +60,6 @@ if not(sys.platform == 'win32' or hasattr(_multiprocessing, 'recvfd')):
#
if sys.platform == 'win32':
- import _subprocess
from _multiprocessing import win32
def send_handle(conn, handle, destination_pid):
@@ -77,10 +77,23 @@ if sys.platform == 'win32':
else:
def send_handle(conn, handle, destination_pid):
- _multiprocessing.sendfd(conn.fileno(), handle)
+ with socket.fromfd(conn.fileno(), socket.AF_UNIX, socket.SOCK_STREAM) as s:
+ s.sendmsg([b'x'], [(socket.SOL_SOCKET, socket.SCM_RIGHTS,
+ struct.pack("@i", handle))])
def recv_handle(conn):
- return _multiprocessing.recvfd(conn.fileno())
+ size = struct.calcsize("@i")
+ with socket.fromfd(conn.fileno(), socket.AF_UNIX, socket.SOCK_STREAM) as s:
+ msg, ancdata, flags, addr = s.recvmsg(1, socket.CMSG_LEN(size))
+ try:
+ cmsg_level, cmsg_type, cmsg_data = ancdata[0]
+ if (cmsg_level == socket.SOL_SOCKET and
+ cmsg_type == socket.SCM_RIGHTS):
+ return struct.unpack("@i", cmsg_data[:size])[0]
+ except (ValueError, IndexError, struct.error):
+ pass
+ raise RuntimeError('Invalid data received')
+
#
# Support for a per-process server thread which caches pickled handles
@@ -159,7 +172,7 @@ def rebuild_handle(pickled_data):
return new_handle
#
-# Register `_multiprocessing.Connection` with `ForkingPickler`
+# Register `Connection` with `ForkingPickler`
#
def reduce_connection(conn):
@@ -168,11 +181,11 @@ def reduce_connection(conn):
def rebuild_connection(reduced_handle, readable, writable):
handle = rebuild_handle(reduced_handle)
- return _multiprocessing.Connection(
+ return Connection(
handle, readable=readable, writable=writable
)
-ForkingPickler.register(_multiprocessing.Connection, reduce_connection)
+ForkingPickler.register(Connection, reduce_connection)
#
# Register `socket.socket` with `ForkingPickler`
@@ -201,6 +214,7 @@ ForkingPickler.register(socket.socket, reduce_socket)
#
if sys.platform == 'win32':
+ from multiprocessing.connection import PipeConnection
def reduce_pipe_connection(conn):
rh = reduce_handle(conn.fileno())
@@ -208,8 +222,8 @@ if sys.platform == 'win32':
def rebuild_pipe_connection(reduced_handle, readable, writable):
handle = rebuild_handle(reduced_handle)
- return _multiprocessing.PipeConnection(
+ return PipeConnection(
handle, readable=readable, writable=writable
)
- ForkingPickler.register(_multiprocessing.PipeConnection, reduce_pipe_connection)
+ ForkingPickler.register(PipeConnection, reduce_pipe_connection)
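
The descriptor passing that send_handle()/recv_handle() now perform with sendmsg()/recvmsg() can be seen in isolation (a standalone, Unix-only sketch):

    import os, socket, struct

    parent, child = socket.socketpair(socket.AF_UNIX, socket.SOCK_STREAM)
    r, w = os.pipe()

    # Ship the pipe's read end as SCM_RIGHTS ancillary data.
    parent.sendmsg([b'x'], [(socket.SOL_SOCKET, socket.SCM_RIGHTS,
                             struct.pack("@i", r))])
    size = struct.calcsize("@i")
    msg, ancdata, flags, addr = child.recvmsg(1, socket.CMSG_LEN(size))
    fd = struct.unpack("@i", ancdata[0][2][:size])[0]

    os.write(w, b'ping')
    print(os.read(fd, 4))        # -> b'ping', read through the received fd
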
diff --git a/Lib/multiprocessing/sharedctypes.py b/Lib/multiprocessing/sharedctypes.py
index 1e694da..5826379 100644
--- a/Lib/multiprocessing/sharedctypes.py
+++ b/Lib/multiprocessing/sharedctypes.py
@@ -32,7 +32,6 @@
# SUCH DAMAGE.
#
-import sys
import ctypes
import weakref
diff --git a/Lib/multiprocessing/synchronize.py b/Lib/multiprocessing/synchronize.py
index 70ae825..e35bbff 100644
--- a/Lib/multiprocessing/synchronize.py
+++ b/Lib/multiprocessing/synchronize.py
@@ -37,14 +37,11 @@ __all__ = [
]
import threading
-import os
import sys
-from time import time as _time, sleep as _sleep
-
import _multiprocessing
from multiprocessing.process import current_process
-from multiprocessing.util import Finalize, register_after_fork, debug
+from multiprocessing.util import register_after_fork, debug
from multiprocessing.forking import assert_spawning, Popen
# Try to import the mp.synchronize module cleanly, if it fails
diff --git a/Lib/multiprocessing/util.py b/Lib/multiprocessing/util.py
index 30b7a85..0bbb87e 100644
--- a/Lib/multiprocessing/util.py
+++ b/Lib/multiprocessing/util.py
@@ -32,6 +32,7 @@
# SUCH DAMAGE.
#
+import functools
import itertools
import weakref
import atexit
@@ -84,7 +85,7 @@ def get_logger():
Returns logger used by multiprocessing
'''
global _logger
- import logging, atexit
+ import logging
logging._acquireLock()
try:
@@ -186,7 +187,11 @@ class Finalize(object):
_finalizer_registry[self._key] = self
- def __call__(self, wr=None):
+ def __call__(self, wr=None,
+ # Need to bind these locally because the globals can have
+ # been cleared at shutdown
+ _finalizer_registry=_finalizer_registry,
+ sub_debug=sub_debug):
'''
Run the callback unless it has already been called or cancelled
'''
@@ -315,3 +320,18 @@ class ForkAwareLocal(threading.local):
register_after_fork(self, lambda obj : obj.__dict__.clear())
def __reduce__(self):
return type(self), ()
+
+
+#
+# Automatic retry after EINTR
+#
+
+def _eintr_retry(func):
+ @functools.wraps(func)
+ def wrapped(*args, **kwargs):
+ while True:
+ try:
+ return func(*args, **kwargs)
+ except InterruptedError:
+ continue
+ return wrapped
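
_eintr_retry() is generic: any call that may fail with EINTR can be shielded the same way (Unix sketch; note the helper is private to multiprocessing):

    import select
    from multiprocessing.util import _eintr_retry

    _select = _eintr_retry(select.select)
    # Restarts transparently if a signal interrupts the call.
    readable, _, _ = _select([0], [], [], 1.0)
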
diff --git a/Lib/nntplib.py b/Lib/nntplib.py
index 32bffd8..eb16206 100644
--- a/Lib/nntplib.py
+++ b/Lib/nntplib.py
@@ -351,6 +351,20 @@ class _NNTPBase:
# Log in and encryption setup order is left to subclasses.
self.authenticated = False
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *args):
+ is_connected = lambda: hasattr(self, "file")
+ if is_connected():
+ try:
+ self.quit()
+ except (socket.error, EOFError):
+ pass
+ finally:
+ if is_connected():
+ self._close()
+
def getwelcome(self):
"""Get the welcome message from the server
(this is read and squirreled away by __init__()).
@@ -819,7 +833,7 @@ class _NNTPBase:
- list: list of (name,title) strings"""
warnings.warn("The XGTITLE extension is not actively used, "
"use descriptions() instead",
- PendingDeprecationWarning, 2)
+ DeprecationWarning, 2)
line_pat = re.compile('^([^ \t]+)[ \t]+(.*)$')
resp, raw_lines = self._longcmdstring('XGTITLE ' + group, file)
lines = []
@@ -837,7 +851,7 @@ class _NNTPBase:
path: directory path to article
"""
warnings.warn("The XPATH extension is not actively used",
- PendingDeprecationWarning, 2)
+ DeprecationWarning, 2)
resp = self._shortcmd('XPATH {0}'.format(id))
if not resp.startswith('223'):
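
With __enter__/__exit__ in place, connections can be scoped with a with statement; quit() is attempted and the socket closed even when the body raises (sketch, hostname is a placeholder):

    from nntplib import NNTP

    with NNTP('news.example.com') as n:
        print(n.getwelcome())
    # The connection has been QUIT and closed here.
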
diff --git a/Lib/numbers.py b/Lib/numbers.py
index ecfad7c..b206457 100644
--- a/Lib/numbers.py
+++ b/Lib/numbers.py
@@ -5,7 +5,7 @@
TODO: Fill out more detailed documentation on the operators."""
-from abc import ABCMeta, abstractmethod, abstractproperty
+from abc import ABCMeta, abstractmethod
__all__ = ["Number", "Complex", "Real", "Rational", "Integral"]
@@ -50,7 +50,8 @@ class Complex(Number):
"""True if self != 0. Called for bool(self)."""
return self != 0
- @abstractproperty
+ @property
+ @abstractmethod
def real(self):
"""Retrieve the real component of this number.
@@ -58,7 +59,8 @@ class Complex(Number):
"""
raise NotImplementedError
- @abstractproperty
+ @property
+ @abstractmethod
def imag(self):
"""Retrieve the imaginary component of this number.
@@ -272,11 +274,13 @@ class Rational(Real):
__slots__ = ()
- @abstractproperty
+ @property
+ @abstractmethod
def numerator(self):
raise NotImplementedError
- @abstractproperty
+ @property
+ @abstractmethod
def denominator(self):
raise NotImplementedError
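
The replacement for the deprecated abstractproperty is plain decorator stacking, which behaves identically (sketch):

    from abc import ABCMeta, abstractmethod

    class Base(metaclass=ABCMeta):
        @property
        @abstractmethod
        def value(self):
            raise NotImplementedError

    class Impl(Base):
        @property
        def value(self):
            return 42

    print(Impl().value)      # -> 42; instantiating Base raises TypeError
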
diff --git a/Lib/opcode.py b/Lib/opcode.py
index 8e15d13..6fe20c7 100644
--- a/Lib/opcode.py
+++ b/Lib/opcode.py
@@ -43,7 +43,6 @@ def jabs_op(name, op):
# Instruction opcodes for compiled code
# Blank lines correspond to available opcodes
-def_op('STOP_CODE', 0)
def_op('POP_TOP', 1)
def_op('ROT_TWO', 2)
def_op('ROT_THREE', 3)
@@ -88,6 +87,7 @@ def_op('STORE_LOCALS', 69)
def_op('PRINT_EXPR', 70)
def_op('LOAD_BUILD_CLASS', 71)
+def_op('YIELD_FROM', 72)
def_op('INPLACE_LSHIFT', 75)
def_op('INPLACE_RSHIFT', 76)
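
STOP_CODE was never emitted by the compiler and is removed; the new YIELD_FROM opcode backs the PEP 380 syntax (sketch):

    def inner():
        yield 1
        yield 2

    def outer():
        yield 0
        yield from inner()   # compiles to the new YIELD_FROM opcode

    print(list(outer()))     # -> [0, 1, 2]
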
diff --git a/Lib/optparse.py b/Lib/optparse.py
index d97a1f7..37764d3 100644
--- a/Lib/optparse.py
+++ b/Lib/optparse.py
@@ -86,10 +86,16 @@ def _repr(self):
# Id: errors.py 509 2006-04-20 00:58:24Z gward
try:
- from gettext import gettext
+ from gettext import gettext, ngettext
except ImportError:
def gettext(message):
return message
+
+ def ngettext(singular, plural, n):
+ if n == 1:
+ return singular
+ return plural
+
_ = gettext
@@ -411,11 +417,8 @@ def _parse_num(val, type):
def _parse_int(val):
return _parse_num(val, int)
-def _parse_long(val):
- return _parse_num(val, int)
-
_builtin_cvt = { "int" : (_parse_int, _("integer")),
- "long" : (_parse_long, _("long integer")),
+ "long" : (_parse_int, _("integer")),
"float" : (float, _("floating-point")),
"complex" : (complex, _("complex")) }
@@ -1483,11 +1486,10 @@ class OptionParser (OptionContainer):
if option.takes_value():
nargs = option.nargs
if len(rargs) < nargs:
- if nargs == 1:
- self.error(_("%s option requires an argument") % opt)
- else:
- self.error(_("%s option requires %d arguments")
- % (opt, nargs))
+ self.error(ngettext(
+ "%(option)s option requires %(number)d argument",
+ "%(option)s option requires %(number)d arguments",
+ nargs) % {"option": opt, "number": nargs})
elif nargs == 1:
value = rargs.pop(0)
else:
@@ -1522,11 +1524,10 @@ class OptionParser (OptionContainer):
nargs = option.nargs
if len(rargs) < nargs:
- if nargs == 1:
- self.error(_("%s option requires an argument") % opt)
- else:
- self.error(_("%s option requires %d arguments")
- % (opt, nargs))
+ self.error(ngettext(
+ "%(option)s option requires %(number)d argument",
+ "%(option)s option requires %(number)d arguments",
+ nargs) % {"option": opt, "number": nargs})
elif nargs == 1:
value = rargs.pop(0)
else:
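
The collapsed error branches rely on ngettext() choosing the singular or plural template by count; standalone (sketch):

    from gettext import ngettext

    for nargs in (1, 3):
        print(ngettext("%(option)s option requires %(number)d argument",
                       "%(option)s option requires %(number)d arguments",
                       nargs) % {"option": "-x", "number": nargs})
    # -> -x option requires 1 argument
    # -> -x option requires 3 arguments
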
diff --git a/Lib/os.py b/Lib/os.py
index 5862383..fe6cb11 100644
--- a/Lib/os.py
+++ b/Lib/os.py
@@ -24,6 +24,7 @@ and opendir), and leave all pathname manipulation to os.path
#'
import sys, errno
+import stat as st
_names = sys.builtin_module_names
@@ -32,6 +33,9 @@ __all__ = ["altsep", "curdir", "pardir", "sep", "pathsep", "linesep",
"defpath", "name", "path", "devnull",
"SEEK_SET", "SEEK_CUR", "SEEK_END"]
+def _exists(name):
+ return name in globals()
+
def _get_exports_list(module):
try:
return list(module.__all__)
@@ -120,7 +124,12 @@ def _get_masked_mode(mode):
umask(mask)
return mode & ~mask
-#'
+def _are_same_file(stat1, stat2):
+ """Helper function that checks whether two stat results refer to the same
+ file.
+ """
+ return (stat1.st_ino == stat2.st_ino and stat1.st_dev == stat2.st_dev)
+#
# Super directory utilities.
# (Inspired by Eric Raymond; the doc strings are mostly his)
@@ -151,7 +160,6 @@ def makedirs(name, mode=0o777, exist_ok=False):
try:
mkdir(name, mode)
except OSError as e:
- import stat as st
if not (e.errno == errno.EEXIST and exist_ok and path.isdir(name) and
st.S_IMODE(lstat(name).st_mode) == _get_masked_mode(mode)):
raise
@@ -298,6 +306,92 @@ def walk(top, topdown=True, onerror=None, followlinks=False):
__all__.append("walk")
+if _exists("openat"):
+
+ def fwalk(top, topdown=True, onerror=None, followlinks=False):
+ """Directory tree generator.
+
+ This behaves exactly like walk(), except that it yields a 4-tuple
+
+ dirpath, dirnames, filenames, dirfd
+
+ `dirpath`, `dirnames` and `filenames` are identical to walk() output,
+ and `dirfd` is a file descriptor referring to the directory `dirpath`.
+
+ The advantage of fwalk() over walk() is that it's safe against symlink
+ races (when followlinks is False).
+
+ Caution:
+ Since fwalk() yields file descriptors, those are only valid until the
+ next iteration step, so you should dup() them if you want to keep them
+ for a longer period.
+
+ Example:
+
+ import os
+ for root, dirs, files, rootfd in os.fwalk('python/Lib/email'):
+ print(root, "consumes", end="")
+ print(sum([os.fstatat(rootfd, name).st_size for name in files]),
+ end="")
+ print("bytes in", len(files), "non-directory files")
+ if 'CVS' in dirs:
+ dirs.remove('CVS') # don't visit CVS directories
+ """
+ # Note: To guard against symlink races, we use the standard
+ # lstat()/open()/fstat() trick.
+ orig_st = lstat(top)
+ topfd = open(top, O_RDONLY)
+ try:
+ if (followlinks or (st.S_ISDIR(orig_st.st_mode) and
+ _are_same_file(orig_st, fstat(topfd)))):
+ for x in _fwalk(topfd, top, topdown, onerror, followlinks):
+ yield x
+ finally:
+ close(topfd)
+
+ def _fwalk(topfd, toppath, topdown, onerror, followlinks):
+ # Note: This uses O(depth of the directory tree) file descriptors: if
+ # necessary, it can be adapted to only require O(1) FDs, see issue
+ # #13734.
+
+ # whether to follow symlinks
+ flag = 0 if followlinks else AT_SYMLINK_NOFOLLOW
+
+ names = flistdir(topfd)
+ dirs, nondirs = [], []
+ for name in names:
+ # Here, we don't use AT_SYMLINK_NOFOLLOW to be consistent with
+ # walk() which reports symlinks to directories as directories. We do
+ # however check for symlinks before recursing into a subdirectory.
+ if st.S_ISDIR(fstatat(topfd, name).st_mode):
+ dirs.append(name)
+ else:
+ nondirs.append(name)
+
+ if topdown:
+ yield toppath, dirs, nondirs, topfd
+
+ for name in dirs:
+ try:
+ orig_st = fstatat(topfd, name, flag)
+ dirfd = openat(topfd, name, O_RDONLY)
+ except error as err:
+ if onerror is not None:
+ onerror(err)
+ return
+ try:
+ if followlinks or _are_same_file(orig_st, fstat(dirfd)):
+ dirpath = path.join(toppath, name)
+ for x in _fwalk(dirfd, dirpath, topdown, onerror, followlinks):
+ yield x
+ finally:
+ close(dirfd)
+
+ if not topdown:
+ yield toppath, dirs, nondirs, topfd
+
+ __all__.append("fwalk")
+
# Make sure os.environ exists, at least
try:
environ
@@ -434,7 +528,7 @@ def get_exec_path(env=None):
# Change environ to automatically call putenv(), unsetenv if they exist.
-from _abcoll import MutableMapping # Can't use collections (bootstrap)
+from collections.abc import MutableMapping
class _Environ(MutableMapping):
def __init__(self, data, encodekey, decodekey, encodevalue, decodevalue, putenv, unsetenv):
@@ -598,9 +692,6 @@ def _fscodec():
fsencode, fsdecode = _fscodec()
del _fscodec
-def _exists(name):
- return name in globals()
-
# Supply spawn*() (probably only for Unix)
if _exists("fork") and not _exists("spawnv") and _exists("execv"):
diff --git a/Lib/packaging/__init__.py b/Lib/packaging/__init__.py
new file mode 100644
index 0000000..93b6117
--- /dev/null
+++ b/Lib/packaging/__init__.py
@@ -0,0 +1,17 @@
+"""Support for packaging, distribution and installation of Python projects.
+
+Third-party tools can use parts of packaging as building blocks
+without causing the other modules to be imported:
+
+ import packaging.version
+ import packaging.metadata
+ import packaging.pypi.simple
+ import packaging.tests.pypi_server
+"""
+
+from logging import getLogger
+
+__all__ = ['__version__', 'logger']
+
+__version__ = "1.0a3"
+logger = getLogger('packaging')
diff --git a/Lib/packaging/_trove.py b/Lib/packaging/_trove.py
new file mode 100644
index 0000000..f527bc4
--- /dev/null
+++ b/Lib/packaging/_trove.py
@@ -0,0 +1,571 @@
+"""Temporary helper for create."""
+
+# XXX get the list from PyPI and cache it instead of hardcoding
+
+# XXX see if it would be more useful to store it as another structure
+# than a list of strings
+
+all_classifiers = [
+'Development Status :: 1 - Planning',
+'Development Status :: 2 - Pre-Alpha',
+'Development Status :: 3 - Alpha',
+'Development Status :: 4 - Beta',
+'Development Status :: 5 - Production/Stable',
+'Development Status :: 6 - Mature',
+'Development Status :: 7 - Inactive',
+'Environment :: Console',
+'Environment :: Console :: Curses',
+'Environment :: Console :: Framebuffer',
+'Environment :: Console :: Newt',
+'Environment :: Console :: svgalib',
+"Environment :: Handhelds/PDA's",
+'Environment :: MacOS X',
+'Environment :: MacOS X :: Aqua',
+'Environment :: MacOS X :: Carbon',
+'Environment :: MacOS X :: Cocoa',
+'Environment :: No Input/Output (Daemon)',
+'Environment :: Other Environment',
+'Environment :: Plugins',
+'Environment :: Web Environment',
+'Environment :: Web Environment :: Buffet',
+'Environment :: Web Environment :: Mozilla',
+'Environment :: Web Environment :: ToscaWidgets',
+'Environment :: Win32 (MS Windows)',
+'Environment :: X11 Applications',
+'Environment :: X11 Applications :: Gnome',
+'Environment :: X11 Applications :: GTK',
+'Environment :: X11 Applications :: KDE',
+'Environment :: X11 Applications :: Qt',
+'Framework :: BFG',
+'Framework :: Buildout',
+'Framework :: Buildout :: Extension',
+'Framework :: Buildout :: Recipe',
+'Framework :: Chandler',
+'Framework :: CherryPy',
+'Framework :: CubicWeb',
+'Framework :: Django',
+'Framework :: IDLE',
+'Framework :: Paste',
+'Framework :: Plone',
+'Framework :: Plone :: 3.2',
+'Framework :: Plone :: 3.3',
+'Framework :: Plone :: 4.0',
+'Framework :: Plone :: 4.1',
+'Framework :: Plone :: 4.2',
+'Framework :: Plone :: 4.3',
+'Framework :: Pylons',
+'Framework :: Setuptools Plugin',
+'Framework :: Trac',
+'Framework :: Tryton',
+'Framework :: TurboGears',
+'Framework :: TurboGears :: Applications',
+'Framework :: TurboGears :: Widgets',
+'Framework :: Twisted',
+'Framework :: ZODB',
+'Framework :: Zope2',
+'Framework :: Zope3',
+'Intended Audience :: Customer Service',
+'Intended Audience :: Developers',
+'Intended Audience :: Education',
+'Intended Audience :: End Users/Desktop',
+'Intended Audience :: Financial and Insurance Industry',
+'Intended Audience :: Healthcare Industry',
+'Intended Audience :: Information Technology',
+'Intended Audience :: Legal Industry',
+'Intended Audience :: Manufacturing',
+'Intended Audience :: Other Audience',
+'Intended Audience :: Religion',
+'Intended Audience :: Science/Research',
+'Intended Audience :: System Administrators',
+'Intended Audience :: Telecommunications Industry',
+'License :: Aladdin Free Public License (AFPL)',
+'License :: CC0 1.0 Universal (CC0 1.0) Public Domain Dedication',
+'License :: DFSG approved',
+'License :: Eiffel Forum License (EFL)',
+'License :: Free For Educational Use',
+'License :: Free For Home Use',
+'License :: Free for non-commercial use',
+'License :: Freely Distributable',
+'License :: Free To Use But Restricted',
+'License :: Freeware',
+'License :: Netscape Public License (NPL)',
+'License :: Nokia Open Source License (NOKOS)',
+'License :: OSI Approved',
+'License :: OSI Approved :: Academic Free License (AFL)',
+'License :: OSI Approved :: Apache Software License',
+'License :: OSI Approved :: Apple Public Source License',
+'License :: OSI Approved :: Artistic License',
+'License :: OSI Approved :: Attribution Assurance License',
+'License :: OSI Approved :: BSD License',
+'License :: OSI Approved :: Common Public License',
+'License :: OSI Approved :: Eiffel Forum License',
+'License :: OSI Approved :: European Union Public Licence 1.0 (EUPL 1.0)',
+'License :: OSI Approved :: European Union Public Licence 1.1 (EUPL 1.1)',
+'License :: OSI Approved :: GNU Affero General Public License v3',
+'License :: OSI Approved :: GNU Free Documentation License (FDL)',
+'License :: OSI Approved :: GNU General Public License (GPL)',
+'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)',
+'License :: OSI Approved :: IBM Public License',
+'License :: OSI Approved :: Intel Open Source License',
+'License :: OSI Approved :: ISC License (ISCL)',
+'License :: OSI Approved :: Jabber Open Source License',
+'License :: OSI Approved :: MIT License',
+'License :: OSI Approved :: MITRE Collaborative Virtual Workspace License (CVW)',
+'License :: OSI Approved :: Motosoto License',
+'License :: OSI Approved :: Mozilla Public License 1.0 (MPL)',
+'License :: OSI Approved :: Mozilla Public License 1.1 (MPL 1.1)',
+'License :: OSI Approved :: Nethack General Public License',
+'License :: OSI Approved :: Nokia Open Source License',
+'License :: OSI Approved :: Open Group Test Suite License',
+'License :: OSI Approved :: Python License (CNRI Python License)',
+'License :: OSI Approved :: Python Software Foundation License',
+'License :: OSI Approved :: Qt Public License (QPL)',
+'License :: OSI Approved :: Ricoh Source Code Public License',
+'License :: OSI Approved :: Sleepycat License',
+'License :: OSI Approved :: Sun Industry Standards Source License (SISSL)',
+'License :: OSI Approved :: Sun Public License',
+'License :: OSI Approved :: University of Illinois/NCSA Open Source License',
+'License :: OSI Approved :: Vovida Software License 1.0',
+'License :: OSI Approved :: W3C License',
+'License :: OSI Approved :: X.Net License',
+'License :: OSI Approved :: zlib/libpng License',
+'License :: OSI Approved :: Zope Public License',
+'License :: Other/Proprietary License',
+'License :: Public Domain',
+'License :: Repoze Public License',
+'Natural Language :: Afrikaans',
+'Natural Language :: Arabic',
+'Natural Language :: Bengali',
+'Natural Language :: Bosnian',
+'Natural Language :: Bulgarian',
+'Natural Language :: Catalan',
+'Natural Language :: Chinese (Simplified)',
+'Natural Language :: Chinese (Traditional)',
+'Natural Language :: Croatian',
+'Natural Language :: Czech',
+'Natural Language :: Danish',
+'Natural Language :: Dutch',
+'Natural Language :: English',
+'Natural Language :: Esperanto',
+'Natural Language :: Finnish',
+'Natural Language :: French',
+'Natural Language :: German',
+'Natural Language :: Greek',
+'Natural Language :: Hebrew',
+'Natural Language :: Hindi',
+'Natural Language :: Hungarian',
+'Natural Language :: Icelandic',
+'Natural Language :: Indonesian',
+'Natural Language :: Italian',
+'Natural Language :: Japanese',
+'Natural Language :: Javanese',
+'Natural Language :: Korean',
+'Natural Language :: Latin',
+'Natural Language :: Latvian',
+'Natural Language :: Macedonian',
+'Natural Language :: Malay',
+'Natural Language :: Marathi',
+'Natural Language :: Norwegian',
+'Natural Language :: Panjabi',
+'Natural Language :: Persian',
+'Natural Language :: Polish',
+'Natural Language :: Portuguese',
+'Natural Language :: Portuguese (Brazilian)',
+'Natural Language :: Romanian',
+'Natural Language :: Russian',
+'Natural Language :: Serbian',
+'Natural Language :: Slovak',
+'Natural Language :: Slovenian',
+'Natural Language :: Spanish',
+'Natural Language :: Swedish',
+'Natural Language :: Tamil',
+'Natural Language :: Telugu',
+'Natural Language :: Thai',
+'Natural Language :: Turkish',
+'Natural Language :: Ukranian',
+'Natural Language :: Urdu',
+'Natural Language :: Vietnamese',
+'Operating System :: BeOS',
+'Operating System :: MacOS',
+'Operating System :: MacOS :: MacOS 9',
+'Operating System :: MacOS :: MacOS X',
+'Operating System :: Microsoft',
+'Operating System :: Microsoft :: MS-DOS',
+'Operating System :: Microsoft :: Windows',
+'Operating System :: Microsoft :: Windows :: Windows 3.1 or Earlier',
+'Operating System :: Microsoft :: Windows :: Windows 95/98/2000',
+'Operating System :: Microsoft :: Windows :: Windows CE',
+'Operating System :: Microsoft :: Windows :: Windows NT/2000',
+'Operating System :: OS/2',
+'Operating System :: OS Independent',
+'Operating System :: Other OS',
+'Operating System :: PalmOS',
+'Operating System :: PDA Systems',
+'Operating System :: POSIX',
+'Operating System :: POSIX :: AIX',
+'Operating System :: POSIX :: BSD',
+'Operating System :: POSIX :: BSD :: BSD/OS',
+'Operating System :: POSIX :: BSD :: FreeBSD',
+'Operating System :: POSIX :: BSD :: NetBSD',
+'Operating System :: POSIX :: BSD :: OpenBSD',
+'Operating System :: POSIX :: GNU Hurd',
+'Operating System :: POSIX :: HP-UX',
+'Operating System :: POSIX :: IRIX',
+'Operating System :: POSIX :: Linux',
+'Operating System :: POSIX :: Other',
+'Operating System :: POSIX :: SCO',
+'Operating System :: POSIX :: SunOS/Solaris',
+'Operating System :: Unix',
+'Programming Language :: Ada',
+'Programming Language :: APL',
+'Programming Language :: ASP',
+'Programming Language :: Assembly',
+'Programming Language :: Awk',
+'Programming Language :: Basic',
+'Programming Language :: C',
+'Programming Language :: C#',
+'Programming Language :: C++',
+'Programming Language :: Cold Fusion',
+'Programming Language :: Cython',
+'Programming Language :: Delphi/Kylix',
+'Programming Language :: Dylan',
+'Programming Language :: Eiffel',
+'Programming Language :: Emacs-Lisp',
+'Programming Language :: Erlang',
+'Programming Language :: Euler',
+'Programming Language :: Euphoria',
+'Programming Language :: Forth',
+'Programming Language :: Fortran',
+'Programming Language :: Haskell',
+'Programming Language :: Java',
+'Programming Language :: JavaScript',
+'Programming Language :: Lisp',
+'Programming Language :: Logo',
+'Programming Language :: ML',
+'Programming Language :: Modula',
+'Programming Language :: Objective C',
+'Programming Language :: Object Pascal',
+'Programming Language :: OCaml',
+'Programming Language :: Other',
+'Programming Language :: Other Scripting Engines',
+'Programming Language :: Pascal',
+'Programming Language :: Perl',
+'Programming Language :: PHP',
+'Programming Language :: Pike',
+'Programming Language :: Pliant',
+'Programming Language :: PL/SQL',
+'Programming Language :: PROGRESS',
+'Programming Language :: Prolog',
+'Programming Language :: Python',
+'Programming Language :: Python :: 2',
+'Programming Language :: Python :: 2.3',
+'Programming Language :: Python :: 2.4',
+'Programming Language :: Python :: 2.5',
+'Programming Language :: Python :: 2.6',
+'Programming Language :: Python :: 2.7',
+'Programming Language :: Python :: 3',
+'Programming Language :: Python :: 3.0',
+'Programming Language :: Python :: 3.1',
+'Programming Language :: Python :: 3.2',
+'Programming Language :: Python :: Implementation',
+'Programming Language :: Python :: Implementation :: CPython',
+'Programming Language :: Python :: Implementation :: IronPython',
+'Programming Language :: Python :: Implementation :: Jython',
+'Programming Language :: Python :: Implementation :: PyPy',
+'Programming Language :: Python :: Implementation :: Stackless',
+'Programming Language :: REBOL',
+'Programming Language :: Rexx',
+'Programming Language :: Ruby',
+'Programming Language :: Scheme',
+'Programming Language :: Simula',
+'Programming Language :: Smalltalk',
+'Programming Language :: SQL',
+'Programming Language :: Tcl',
+'Programming Language :: Unix Shell',
+'Programming Language :: Visual Basic',
+'Programming Language :: XBasic',
+'Programming Language :: YACC',
+'Programming Language :: Zope',
+'Topic :: Adaptive Technologies',
+'Topic :: Artistic Software',
+'Topic :: Communications',
+'Topic :: Communications :: BBS',
+'Topic :: Communications :: Chat',
+'Topic :: Communications :: Chat :: AOL Instant Messenger',
+'Topic :: Communications :: Chat :: ICQ',
+'Topic :: Communications :: Chat :: Internet Relay Chat',
+'Topic :: Communications :: Chat :: Unix Talk',
+'Topic :: Communications :: Conferencing',
+'Topic :: Communications :: Email',
+'Topic :: Communications :: Email :: Address Book',
+'Topic :: Communications :: Email :: Email Clients (MUA)',
+'Topic :: Communications :: Email :: Filters',
+'Topic :: Communications :: Email :: Mailing List Servers',
+'Topic :: Communications :: Email :: Mail Transport Agents',
+'Topic :: Communications :: Email :: Post-Office',
+'Topic :: Communications :: Email :: Post-Office :: IMAP',
+'Topic :: Communications :: Email :: Post-Office :: POP3',
+'Topic :: Communications :: Fax',
+'Topic :: Communications :: FIDO',
+'Topic :: Communications :: File Sharing',
+'Topic :: Communications :: File Sharing :: Gnutella',
+'Topic :: Communications :: File Sharing :: Napster',
+'Topic :: Communications :: Ham Radio',
+'Topic :: Communications :: Internet Phone',
+'Topic :: Communications :: Telephony',
+'Topic :: Communications :: Usenet News',
+'Topic :: Database',
+'Topic :: Database :: Database Engines/Servers',
+'Topic :: Database :: Front-Ends',
+'Topic :: Desktop Environment',
+'Topic :: Desktop Environment :: File Managers',
+'Topic :: Desktop Environment :: Gnome',
+'Topic :: Desktop Environment :: GNUstep',
+'Topic :: Desktop Environment :: K Desktop Environment (KDE)',
+'Topic :: Desktop Environment :: K Desktop Environment (KDE) :: Themes',
+'Topic :: Desktop Environment :: PicoGUI',
+'Topic :: Desktop Environment :: PicoGUI :: Applications',
+'Topic :: Desktop Environment :: PicoGUI :: Themes',
+'Topic :: Desktop Environment :: Screen Savers',
+'Topic :: Desktop Environment :: Window Managers',
+'Topic :: Desktop Environment :: Window Managers :: Afterstep',
+'Topic :: Desktop Environment :: Window Managers :: Afterstep :: Themes',
+'Topic :: Desktop Environment :: Window Managers :: Applets',
+'Topic :: Desktop Environment :: Window Managers :: Blackbox',
+'Topic :: Desktop Environment :: Window Managers :: Blackbox :: Themes',
+'Topic :: Desktop Environment :: Window Managers :: CTWM',
+'Topic :: Desktop Environment :: Window Managers :: CTWM :: Themes',
+'Topic :: Desktop Environment :: Window Managers :: Enlightenment',
+'Topic :: Desktop Environment :: Window Managers :: Enlightenment :: Epplets',
+'Topic :: Desktop Environment :: Window Managers :: Enlightenment :: Themes DR15',
+'Topic :: Desktop Environment :: Window Managers :: Enlightenment :: Themes DR16',
+'Topic :: Desktop Environment :: Window Managers :: Enlightenment :: Themes DR17',
+'Topic :: Desktop Environment :: Window Managers :: Fluxbox',
+'Topic :: Desktop Environment :: Window Managers :: Fluxbox :: Themes',
+'Topic :: Desktop Environment :: Window Managers :: FVWM',
+'Topic :: Desktop Environment :: Window Managers :: FVWM :: Themes',
+'Topic :: Desktop Environment :: Window Managers :: IceWM',
+'Topic :: Desktop Environment :: Window Managers :: IceWM :: Themes',
+'Topic :: Desktop Environment :: Window Managers :: MetaCity',
+'Topic :: Desktop Environment :: Window Managers :: MetaCity :: Themes',
+'Topic :: Desktop Environment :: Window Managers :: Oroborus',
+'Topic :: Desktop Environment :: Window Managers :: Oroborus :: Themes',
+'Topic :: Desktop Environment :: Window Managers :: Sawfish',
+'Topic :: Desktop Environment :: Window Managers :: Sawfish :: Themes 0.30',
+'Topic :: Desktop Environment :: Window Managers :: Sawfish :: Themes pre-0.30',
+'Topic :: Desktop Environment :: Window Managers :: Waimea',
+'Topic :: Desktop Environment :: Window Managers :: Waimea :: Themes',
+'Topic :: Desktop Environment :: Window Managers :: Window Maker',
+'Topic :: Desktop Environment :: Window Managers :: Window Maker :: Applets',
+'Topic :: Desktop Environment :: Window Managers :: Window Maker :: Themes',
+'Topic :: Desktop Environment :: Window Managers :: XFCE',
+'Topic :: Desktop Environment :: Window Managers :: XFCE :: Themes',
+'Topic :: Documentation',
+'Topic :: Education',
+'Topic :: Education :: Computer Aided Instruction (CAI)',
+'Topic :: Education :: Testing',
+'Topic :: Games/Entertainment',
+'Topic :: Games/Entertainment :: Arcade',
+'Topic :: Games/Entertainment :: Board Games',
+'Topic :: Games/Entertainment :: First Person Shooters',
+'Topic :: Games/Entertainment :: Fortune Cookies',
+'Topic :: Games/Entertainment :: Multi-User Dungeons (MUD)',
+'Topic :: Games/Entertainment :: Puzzle Games',
+'Topic :: Games/Entertainment :: Real Time Strategy',
+'Topic :: Games/Entertainment :: Role-Playing',
+'Topic :: Games/Entertainment :: Side-Scrolling/Arcade Games',
+'Topic :: Games/Entertainment :: Simulation',
+'Topic :: Games/Entertainment :: Turn Based Strategy',
+'Topic :: Home Automation',
+'Topic :: Internet',
+'Topic :: Internet :: File Transfer Protocol (FTP)',
+'Topic :: Internet :: Finger',
+'Topic :: Internet :: Log Analysis',
+'Topic :: Internet :: Name Service (DNS)',
+'Topic :: Internet :: Proxy Servers',
+'Topic :: Internet :: WAP',
+'Topic :: Internet :: WWW/HTTP',
+'Topic :: Internet :: WWW/HTTP :: Browsers',
+'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
+'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries',
+'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: Message Boards',
+'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: News/Diary',
+'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: Page Counters',
+'Topic :: Internet :: WWW/HTTP :: HTTP Servers',
+'Topic :: Internet :: WWW/HTTP :: Indexing/Search',
+'Topic :: Internet :: WWW/HTTP :: Session',
+'Topic :: Internet :: WWW/HTTP :: Site Management',
+'Topic :: Internet :: WWW/HTTP :: Site Management :: Link Checking',
+'Topic :: Internet :: WWW/HTTP :: WSGI',
+'Topic :: Internet :: WWW/HTTP :: WSGI :: Application',
+'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware',
+'Topic :: Internet :: WWW/HTTP :: WSGI :: Server',
+'Topic :: Internet :: Z39.50',
+'Topic :: Multimedia',
+'Topic :: Multimedia :: Graphics',
+'Topic :: Multimedia :: Graphics :: 3D Modeling',
+'Topic :: Multimedia :: Graphics :: 3D Rendering',
+'Topic :: Multimedia :: Graphics :: Capture',
+'Topic :: Multimedia :: Graphics :: Capture :: Digital Camera',
+'Topic :: Multimedia :: Graphics :: Capture :: Scanners',
+'Topic :: Multimedia :: Graphics :: Capture :: Screen Capture',
+'Topic :: Multimedia :: Graphics :: Editors',
+'Topic :: Multimedia :: Graphics :: Editors :: Raster-Based',
+'Topic :: Multimedia :: Graphics :: Editors :: Vector-Based',
+'Topic :: Multimedia :: Graphics :: Graphics Conversion',
+'Topic :: Multimedia :: Graphics :: Presentation',
+'Topic :: Multimedia :: Graphics :: Viewers',
+'Topic :: Multimedia :: Sound/Audio',
+'Topic :: Multimedia :: Sound/Audio :: Analysis',
+'Topic :: Multimedia :: Sound/Audio :: Capture/Recording',
+'Topic :: Multimedia :: Sound/Audio :: CD Audio',
+'Topic :: Multimedia :: Sound/Audio :: CD Audio :: CD Playing',
+'Topic :: Multimedia :: Sound/Audio :: CD Audio :: CD Ripping',
+'Topic :: Multimedia :: Sound/Audio :: CD Audio :: CD Writing',
+'Topic :: Multimedia :: Sound/Audio :: Conversion',
+'Topic :: Multimedia :: Sound/Audio :: Editors',
+'Topic :: Multimedia :: Sound/Audio :: MIDI',
+'Topic :: Multimedia :: Sound/Audio :: Mixers',
+'Topic :: Multimedia :: Sound/Audio :: Players',
+'Topic :: Multimedia :: Sound/Audio :: Players :: MP3',
+'Topic :: Multimedia :: Sound/Audio :: Sound Synthesis',
+'Topic :: Multimedia :: Sound/Audio :: Speech',
+'Topic :: Multimedia :: Video',
+'Topic :: Multimedia :: Video :: Capture',
+'Topic :: Multimedia :: Video :: Conversion',
+'Topic :: Multimedia :: Video :: Display',
+'Topic :: Multimedia :: Video :: Non-Linear Editor',
+'Topic :: Office/Business',
+'Topic :: Office/Business :: Financial',
+'Topic :: Office/Business :: Financial :: Accounting',
+'Topic :: Office/Business :: Financial :: Investment',
+'Topic :: Office/Business :: Financial :: Point-Of-Sale',
+'Topic :: Office/Business :: Financial :: Spreadsheet',
+'Topic :: Office/Business :: Groupware',
+'Topic :: Office/Business :: News/Diary',
+'Topic :: Office/Business :: Office Suites',
+'Topic :: Office/Business :: Scheduling',
+'Topic :: Other/Nonlisted Topic',
+'Topic :: Printing',
+'Topic :: Religion',
+'Topic :: Scientific/Engineering',
+'Topic :: Scientific/Engineering :: Artificial Life',
+'Topic :: Scientific/Engineering :: Artificial Intelligence',
+'Topic :: Scientific/Engineering :: Astronomy',
+'Topic :: Scientific/Engineering :: Atmospheric Science',
+'Topic :: Scientific/Engineering :: Bio-Informatics',
+'Topic :: Scientific/Engineering :: Chemistry',
+'Topic :: Scientific/Engineering :: Electronic Design Automation (EDA)',
+'Topic :: Scientific/Engineering :: GIS',
+'Topic :: Scientific/Engineering :: Human Machine Interfaces',
+'Topic :: Scientific/Engineering :: Image Recognition',
+'Topic :: Scientific/Engineering :: Information Analysis',
+'Topic :: Scientific/Engineering :: Interface Engine/Protocol Translator',
+'Topic :: Scientific/Engineering :: Mathematics',
+'Topic :: Scientific/Engineering :: Medical Science Apps.',
+'Topic :: Scientific/Engineering :: Physics',
+'Topic :: Scientific/Engineering :: Visualization',
+'Topic :: Security',
+'Topic :: Security :: Cryptography',
+'Topic :: Sociology',
+'Topic :: Sociology :: Genealogy',
+'Topic :: Sociology :: History',
+'Topic :: Software Development',
+'Topic :: Software Development :: Assemblers',
+'Topic :: Software Development :: Bug Tracking',
+'Topic :: Software Development :: Build Tools',
+'Topic :: Software Development :: Code Generators',
+'Topic :: Software Development :: Compilers',
+'Topic :: Software Development :: Debuggers',
+'Topic :: Software Development :: Disassemblers',
+'Topic :: Software Development :: Documentation',
+'Topic :: Software Development :: Embedded Systems',
+'Topic :: Software Development :: Internationalization',
+'Topic :: Software Development :: Interpreters',
+'Topic :: Software Development :: Libraries',
+'Topic :: Software Development :: Libraries :: Application Frameworks',
+'Topic :: Software Development :: Libraries :: Java Libraries',
+'Topic :: Software Development :: Libraries :: Perl Modules',
+'Topic :: Software Development :: Libraries :: PHP Classes',
+'Topic :: Software Development :: Libraries :: Pike Modules',
+'Topic :: Software Development :: Libraries :: pygame',
+'Topic :: Software Development :: Libraries :: Python Modules',
+'Topic :: Software Development :: Libraries :: Ruby Modules',
+'Topic :: Software Development :: Libraries :: Tcl Extensions',
+'Topic :: Software Development :: Localization',
+'Topic :: Software Development :: Object Brokering',
+'Topic :: Software Development :: Object Brokering :: CORBA',
+'Topic :: Software Development :: Pre-processors',
+'Topic :: Software Development :: Quality Assurance',
+'Topic :: Software Development :: Testing',
+'Topic :: Software Development :: Testing :: Traffic Generation',
+'Topic :: Software Development :: User Interfaces',
+'Topic :: Software Development :: Version Control',
+'Topic :: Software Development :: Version Control :: CVS',
+'Topic :: Software Development :: Version Control :: RCS',
+'Topic :: Software Development :: Version Control :: SCCS',
+'Topic :: Software Development :: Widget Sets',
+'Topic :: System',
+'Topic :: System :: Archiving',
+'Topic :: System :: Archiving :: Backup',
+'Topic :: System :: Archiving :: Compression',
+'Topic :: System :: Archiving :: Mirroring',
+'Topic :: System :: Archiving :: Packaging',
+'Topic :: System :: Benchmark',
+'Topic :: System :: Boot',
+'Topic :: System :: Boot :: Init',
+'Topic :: System :: Clustering',
+'Topic :: System :: Console Fonts',
+'Topic :: System :: Distributed Computing',
+'Topic :: System :: Emulators',
+'Topic :: System :: Filesystems',
+'Topic :: System :: Hardware',
+'Topic :: System :: Hardware :: Hardware Drivers',
+'Topic :: System :: Hardware :: Mainframes',
+'Topic :: System :: Hardware :: Symmetric Multi-processing',
+'Topic :: System :: Installation/Setup',
+'Topic :: System :: Logging',
+'Topic :: System :: Monitoring',
+'Topic :: System :: Networking',
+'Topic :: System :: Networking :: Firewalls',
+'Topic :: System :: Networking :: Monitoring',
+'Topic :: System :: Networking :: Monitoring :: Hardware Watchdog',
+'Topic :: System :: Networking :: Time Synchronization',
+'Topic :: System :: Operating System',
+'Topic :: System :: Operating System Kernels',
+'Topic :: System :: Operating System Kernels :: BSD',
+'Topic :: System :: Operating System Kernels :: GNU Hurd',
+'Topic :: System :: Operating System Kernels :: Linux',
+'Topic :: System :: Power (UPS)',
+'Topic :: System :: Recovery Tools',
+'Topic :: System :: Shells',
+'Topic :: System :: Software Distribution',
+'Topic :: System :: Systems Administration',
+'Topic :: System :: Systems Administration :: Authentication/Directory',
+'Topic :: System :: Systems Administration :: Authentication/Directory :: LDAP',
+'Topic :: System :: Systems Administration :: Authentication/Directory :: NIS',
+'Topic :: System :: System Shells',
+'Topic :: Terminals',
+'Topic :: Terminals :: Serial',
+'Topic :: Terminals :: Telnet',
+'Topic :: Terminals :: Terminal Emulators/X Terminals',
+'Topic :: Text Editors',
+'Topic :: Text Editors :: Documentation',
+'Topic :: Text Editors :: Emacs',
+'Topic :: Text Editors :: Integrated Development Environments (IDE)',
+'Topic :: Text Editors :: Text Processing',
+'Topic :: Text Editors :: Word Processors',
+'Topic :: Text Processing',
+'Topic :: Text Processing :: Filters',
+'Topic :: Text Processing :: Fonts',
+'Topic :: Text Processing :: General',
+'Topic :: Text Processing :: Indexing',
+'Topic :: Text Processing :: Linguistic',
+'Topic :: Text Processing :: Markup',
+'Topic :: Text Processing :: Markup :: HTML',
+'Topic :: Text Processing :: Markup :: LaTeX',
+'Topic :: Text Processing :: Markup :: SGML',
+'Topic :: Text Processing :: Markup :: VRML',
+'Topic :: Text Processing :: Markup :: XML',
+'Topic :: Utilities',
+]
diff --git a/Lib/packaging/command/__init__.py b/Lib/packaging/command/__init__.py
new file mode 100644
index 0000000..87227c0
--- /dev/null
+++ b/Lib/packaging/command/__init__.py
@@ -0,0 +1,53 @@
+"""Subpackage containing all standard commands."""
+import os
+from packaging.errors import PackagingModuleError
+from packaging.util import resolve_name
+
+__all__ = ['get_command_names', 'set_command', 'get_command_class',
+ 'STANDARD_COMMANDS']
+
+
+STANDARD_COMMANDS = [
+ # packaging
+ 'check', 'test',
+ # building
+ 'build', 'build_py', 'build_ext', 'build_clib', 'build_scripts', 'clean',
+ # installing
+ 'install_dist', 'install_lib', 'install_headers', 'install_scripts',
+ 'install_data', 'install_distinfo',
+ # distributing
+ 'sdist', 'bdist', 'bdist_dumb', 'bdist_wininst',
+ 'register', 'upload', 'upload_docs',
+ ]
+
+if os.name == 'nt':
+ STANDARD_COMMANDS.insert(STANDARD_COMMANDS.index('bdist_wininst'),
+ 'bdist_msi')
+
+# XXX maybe we need more than one registry, so that --list-commands can
+# display standard, custom and overridden standard commands differently
+_COMMANDS = dict((name, 'packaging.command.%s.%s' % (name, name))
+ for name in STANDARD_COMMANDS)
+
+
+def get_command_names():
+ """Return registered commands"""
+ return sorted(_COMMANDS)
+
+
+def set_command(location):
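+    """Register a command class given its *location* (dotted name)."""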
+ cls = resolve_name(location)
+ # XXX we want to do the duck-type checking here
+ _COMMANDS[cls.get_command_name()] = cls
+
+
+def get_command_class(name):
+ """Return the registered command"""
+ try:
+ cls = _COMMANDS[name]
+ except KeyError:
+ raise PackagingModuleError("Invalid command %s" % name)
+ if isinstance(cls, str):
+ cls = resolve_name(cls)
+ _COMMANDS[name] = cls
+ return cls
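+
+# A minimal usage sketch (editor's illustration, not part of the original
+# patch): command classes are imported on first lookup and then cached.
+#
+#   from packaging.command import get_command_class, get_command_names
+#   assert 'build' in get_command_names()
+#   cls = get_command_class('build')   # imports packaging.command.build
+#   assert get_command_class('build') is cls   # second call hits the cache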
diff --git a/Lib/packaging/command/bdist.py b/Lib/packaging/command/bdist.py
new file mode 100644
index 0000000..e390cdc
--- /dev/null
+++ b/Lib/packaging/command/bdist.py
@@ -0,0 +1,141 @@
+"""Create a built (binary) distribution.
+
+If a --formats option was given on the command line, this command will
+call the corresponding bdist_* commands; if the option was absent, a
+bdist_* command depending on the current platform will be called.
+"""
+
+import os
+
+from packaging import util
+from packaging.command.cmd import Command
+from packaging.errors import PackagingPlatformError, PackagingOptionError
+
+
+def show_formats():
+ """Print list of available formats (arguments to "--format" option).
+ """
+ from packaging.fancy_getopt import FancyGetopt
+ formats = []
+ for format in bdist.format_commands:
+ formats.append(("formats=" + format, None,
+ bdist.format_command[format][1]))
+ pretty_printer = FancyGetopt(formats)
+ pretty_printer.print_help("List of available distribution formats:")
+
+
+class bdist(Command):
+
+ description = "create a built (binary) distribution"
+
+ user_options = [('bdist-base=', 'b',
+ "temporary directory for creating built distributions"),
+ ('plat-name=', 'p',
+ "platform name to embed in generated filenames "
+ "(default: %s)" % util.get_platform()),
+ ('formats=', None,
+ "formats for distribution (comma-separated list)"),
+ ('dist-dir=', 'd',
+ "directory to put final built distributions in "
+ "[default: dist]"),
+ ('skip-build', None,
+ "skip rebuilding everything (for testing/debugging)"),
+ ('owner=', 'u',
+ "Owner name used when creating a tar file"
+ " [default: current user]"),
+ ('group=', 'g',
+ "Group name used when creating a tar file"
+ " [default: current group]"),
+ ]
+
+ boolean_options = ['skip-build']
+
+ help_options = [
+ ('help-formats', None,
+ "lists available distribution formats", show_formats),
+ ]
+
+ # This is of course very simplistic. The various UNIX family operating
+ # systems have their specific formats, but they are out of scope for us;
+ # bdist_dumb is, well, dumb; it's more a building block for other
+ # packaging tools than a real end-user binary format.
+ default_format = {'posix': 'gztar',
+ 'nt': 'zip',
+ 'os2': 'zip'}
+
+ # Establish the preferred order (for the --help-formats option).
+ format_commands = ['gztar', 'bztar', 'tar',
+ 'wininst', 'zip', 'msi']
+
+ # And the real information.
+ format_command = {'gztar': ('bdist_dumb', "gzip'ed tar file"),
+ 'bztar': ('bdist_dumb', "bzip2'ed tar file"),
+ 'tar': ('bdist_dumb', "tar file"),
+ 'wininst': ('bdist_wininst',
+ "Windows executable installer"),
+ 'zip': ('bdist_dumb', "ZIP file"),
+ 'msi': ('bdist_msi', "Microsoft Installer"),
+ }
+
+ def initialize_options(self):
+ self.bdist_base = None
+ self.plat_name = None
+ self.formats = None
+ self.dist_dir = None
+ self.skip_build = False
+ self.group = None
+ self.owner = None
+
+ def finalize_options(self):
+ # have to finalize 'plat_name' before 'bdist_base'
+ if self.plat_name is None:
+ if self.skip_build:
+ self.plat_name = util.get_platform()
+ else:
+ self.plat_name = self.get_finalized_command('build').plat_name
+
+ # 'bdist_base' -- parent of per-built-distribution-format
+ # temporary directories (eg. we'll probably have
+ # "build/bdist.<plat>/dumb", etc.)
+ if self.bdist_base is None:
+ build_base = self.get_finalized_command('build').build_base
+ self.bdist_base = os.path.join(build_base,
+ 'bdist.' + self.plat_name)
+
+ self.ensure_string_list('formats')
+ if self.formats is None:
+ try:
+ self.formats = [self.default_format[os.name]]
+ except KeyError:
+ raise PackagingPlatformError(
+ "don't know how to create built distributions "
+ "on platform %s" % os.name)
+
+ if self.dist_dir is None:
+ self.dist_dir = "dist"
+
+ def run(self):
+ # Figure out which sub-commands we need to run.
+ commands = []
+ for format in self.formats:
+ try:
+ commands.append(self.format_command[format][0])
+ except KeyError:
+ raise PackagingOptionError("invalid format '%s'" % format)
+
+ # Reinitialize and run each command.
+ for i in range(len(self.formats)):
+ cmd_name = commands[i]
+ sub_cmd = self.reinitialize_command(cmd_name)
+ sub_cmd.format = self.formats[i]
+
+ # passing the owner and group names for tar archiving
+ if cmd_name == 'bdist_dumb':
+ sub_cmd.owner = self.owner
+ sub_cmd.group = self.group
+
+ # If we're going to need to run this command again, tell it to
+ # keep its temporary files around so subsequent runs go faster.
+ if cmd_name in commands[i+1:]:
+ sub_cmd.keep_temp = True
+ self.run_command(cmd_name)
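+
+# Editor's sketch: "--formats=gztar,zip" maps both formats to bdist_dumb, so
+# run() executes it twice, setting keep_temp on the first pass so the second
+# pass can reuse the pseudo-installation tree. Assuming the pysetup front end:
+#
+#   pysetup run bdist --formats=gztar,zip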
diff --git a/Lib/packaging/command/bdist_dumb.py b/Lib/packaging/command/bdist_dumb.py
new file mode 100644
index 0000000..548e3c4
--- /dev/null
+++ b/Lib/packaging/command/bdist_dumb.py
@@ -0,0 +1,139 @@
+"""Create a "dumb" built distribution.
+
+A dumb distribution is just an archive meant to be unpacked under
+sys.prefix or sys.exec_prefix.
+"""
+
+import os
+from shutil import rmtree
+from sysconfig import get_python_version
+
+from packaging.util import get_platform
+from packaging.command.cmd import Command
+from packaging.errors import PackagingPlatformError
+from packaging import logger
+
+
+class bdist_dumb(Command):
+
+ description = 'create a "dumb" built distribution'
+
+ user_options = [('bdist-dir=', 'd',
+ "temporary directory for creating the distribution"),
+ ('plat-name=', 'p',
+ "platform name to embed in generated filenames "
+ "(default: %s)" % get_platform()),
+ ('format=', 'f',
+ "archive format to create (tar, gztar, bztar, zip)"),
+ ('keep-temp', 'k',
+ "keep the pseudo-installation tree around after " +
+ "creating the distribution archive"),
+ ('dist-dir=', 'd',
+ "directory to put final built distributions in"),
+ ('skip-build', None,
+ "skip rebuilding everything (for testing/debugging)"),
+ ('relative', None,
+                     "build the archive using relative paths "
+                     "(default: false)"),
+ ('owner=', 'u',
+ "Owner name used when creating a tar file"
+ " [default: current user]"),
+ ('group=', 'g',
+ "Group name used when creating a tar file"
+ " [default: current group]"),
+ ]
+
+ boolean_options = ['keep-temp', 'skip-build', 'relative']
+
+ default_format = {'posix': 'gztar',
+ 'nt': 'zip',
+ 'os2': 'zip'}
+
+ def initialize_options(self):
+ self.bdist_dir = None
+ self.plat_name = None
+ self.format = None
+ self.keep_temp = False
+ self.dist_dir = None
+ self.skip_build = None
+ self.relative = False
+ self.owner = None
+ self.group = None
+
+ def finalize_options(self):
+ if self.bdist_dir is None:
+ bdist_base = self.get_finalized_command('bdist').bdist_base
+ self.bdist_dir = os.path.join(bdist_base, 'dumb')
+
+ if self.format is None:
+ try:
+ self.format = self.default_format[os.name]
+ except KeyError:
+ raise PackagingPlatformError(
+ "don't know how to create dumb built distributions "
+ "on platform %s" % os.name)
+
+ self.set_undefined_options('bdist',
+ 'dist_dir', 'plat_name', 'skip_build')
+
+ def run(self):
+ if not self.skip_build:
+ self.run_command('build')
+
+ install = self.reinitialize_command('install_dist',
+ reinit_subcommands=True)
+ install.root = self.bdist_dir
+ install.skip_build = self.skip_build
+ install.warn_dir = False
+
+ logger.info("installing to %s", self.bdist_dir)
+ self.run_command('install_dist')
+
+ # And make an archive relative to the root of the
+ # pseudo-installation tree.
+ archive_basename = "%s.%s" % (self.distribution.get_fullname(),
+ self.plat_name)
+
+ # OS/2 objects to any ":" characters in a filename (such as when
+ # a timestamp is used in a version) so change them to hyphens.
+ if os.name == "os2":
+ archive_basename = archive_basename.replace(":", "-")
+
+ pseudoinstall_root = os.path.join(self.dist_dir, archive_basename)
+ if not self.relative:
+ archive_root = self.bdist_dir
+ else:
+ if (self.distribution.has_ext_modules() and
+ (install.install_base != install.install_platbase)):
+ raise PackagingPlatformError(
+ "can't make a dumb built distribution where base and "
+ "platbase are different (%r, %r)" %
+ (install.install_base, install.install_platbase))
+ else:
+ archive_root = os.path.join(
+ self.bdist_dir,
+ self._ensure_relative(install.install_base))
+
+ # Make the archive
+ filename = self.make_archive(pseudoinstall_root,
+ self.format, root_dir=archive_root,
+ owner=self.owner, group=self.group)
+ if self.distribution.has_ext_modules():
+ pyversion = get_python_version()
+ else:
+ pyversion = 'any'
+ self.distribution.dist_files.append(('bdist_dumb', pyversion,
+ filename))
+
+ if not self.keep_temp:
+ if self.dry_run:
+ logger.info('removing %s', self.bdist_dir)
+ else:
+ rmtree(self.bdist_dir)
+
+ def _ensure_relative(self, path):
+        # copied from the former dir_util module, which has been removed
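+        # e.g. (editor's illustration) '/usr/local' -> 'usr/local' on POSIX,
+        # 'C:\Temp' -> 'C:Temp' on Windows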
+ drive, path = os.path.splitdrive(path)
+ if path[0:1] == os.sep:
+ path = drive + path[1:]
+ return path
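+
+# Editor's sketch of the end result (names are illustrative): for a project
+# 'foo 1.0' built with the gztar format on linux-x86_64, run() leaves
+# dist/foo-1.0.linux-x86_64.tar.gz, meant to be unpacked under sys.prefix.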
diff --git a/Lib/packaging/command/bdist_msi.py b/Lib/packaging/command/bdist_msi.py
new file mode 100644
index 0000000..995eec5
--- /dev/null
+++ b/Lib/packaging/command/bdist_msi.py
@@ -0,0 +1,743 @@
+"""Create a Microsoft Installer (.msi) binary distribution."""
+
+# Copyright (C) 2005, 2006 Martin von Löwis
+# Licensed to PSF under a Contributor Agreement.
+
+import sys
+import os
+import msilib
+
+from shutil import rmtree
+from sysconfig import get_python_version
+from packaging.command.cmd import Command
+from packaging.version import NormalizedVersion
+from packaging.errors import PackagingOptionError
+from packaging import logger as log
+from packaging.util import get_platform
+from msilib import schema, sequence, text
+from msilib import Directory, Feature, Dialog, add_data
+
+class MSIVersion(NormalizedVersion):
+ """
+ MSI ProductVersion must be strictly numeric.
+ MSIVersion disallows prerelease and postrelease versions.
+ """
+ def __init__(self, *args, **kwargs):
+ super(MSIVersion, self).__init__(*args, **kwargs)
+ if not self.is_final:
+ raise ValueError("ProductVersion must be strictly numeric")
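+
+# Editor's illustration: MSIVersion('1.0') is accepted, while a prerelease
+# such as MSIVersion('1.0a1') raises ValueError (its is_final is False).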
+
+class PyDialog(Dialog):
+ """Dialog class with a fixed layout: controls at the top, then a ruler,
+ then a list of buttons: back, next, cancel. Optionally a bitmap at the
+ left."""
+ def __init__(self, *args, **kw):
+ """Dialog(database, name, x, y, w, h, attributes, title, first,
+ default, cancel, bitmap=true)"""
+ super(PyDialog, self).__init__(*args)
+ ruler = self.h - 36
+ #if kw.get("bitmap", True):
+ # self.bitmap("Bitmap", 0, 0, bmwidth, ruler, "PythonWin")
+ self.line("BottomLine", 0, ruler, self.w, 0)
+
+ def title(self, title):
+ "Set the title text of the dialog at the top."
+ # name, x, y, w, h, flags=Visible|Enabled|Transparent|NoPrefix,
+ # text, in VerdanaBold10
+ self.text("Title", 15, 10, 320, 60, 0x30003,
+ r"{\VerdanaBold10}%s" % title)
+
+    def back(self, title, next, name="Back", active=1):
+ """Add a back button with a given title, the tab-next button,
+ its name in the Control table, possibly initially disabled.
+
+ Return the button, so that events can be associated"""
+ if active:
+ flags = 3 # Visible|Enabled
+ else:
+ flags = 1 # Visible
+        return self.pushbutton(name, 180, self.h-27, 56, 17, flags, title, next)
+
+    def cancel(self, title, next, name="Cancel", active=1):
+ """Add a cancel button with a given title, the tab-next button,
+ its name in the Control table, possibly initially disabled.
+
+ Return the button, so that events can be associated"""
+ if active:
+ flags = 3 # Visible|Enabled
+ else:
+ flags = 1 # Visible
+ return self.pushbutton(name, 304, self.h-27, 56, 17, flags, title, next)
+
+    def next(self, title, next, name="Next", active=1):
+ """Add a Next button with a given title, the tab-next button,
+ its name in the Control table, possibly initially disabled.
+
+ Return the button, so that events can be associated"""
+ if active:
+ flags = 3 # Visible|Enabled
+ else:
+ flags = 1 # Visible
+ return self.pushbutton(name, 236, self.h-27, 56, 17, flags, title, next)
+
+ def xbutton(self, name, title, next, xpos):
+ """Add a button with a given title, the tab-next button,
+ its name in the Control table, giving its x position; the
+ y-position is aligned with the other buttons.
+
+ Return the button, so that events can be associated"""
+ return self.pushbutton(name, int(self.w*xpos - 28), self.h-27, 56, 17, 3, title, next)
+
+class bdist_msi(Command):
+
+ description = "create a Microsoft Installer (.msi) binary distribution"
+
+ user_options = [('bdist-dir=', None,
+ "temporary directory for creating the distribution"),
+ ('plat-name=', 'p',
+ "platform name to embed in generated filenames "
+ "(default: %s)" % get_platform()),
+ ('keep-temp', 'k',
+ "keep the pseudo-installation tree around after " +
+ "creating the distribution archive"),
+ ('target-version=', None,
+ "require a specific python version" +
+ " on the target system"),
+ ('no-target-compile', 'c',
+ "do not compile .py to .pyc on the target system"),
+ ('no-target-optimize', 'o',
+                     "do not compile .py to .pyo (optimized) "
+                     "on the target system"),
+ ('dist-dir=', 'd',
+ "directory to put final built distributions in"),
+ ('skip-build', None,
+ "skip rebuilding everything (for testing/debugging)"),
+ ('install-script=', None,
+                     "basename of installation script to be run after "
+                     "installation or before deinstallation"),
+ ('pre-install-script=', None,
+ "Fully qualified filename of a script to be run before "
+ "any files are installed. This script need not be in the "
+ "distribution"),
+ ]
+
+ boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize',
+ 'skip-build']
+
+ all_versions = ['2.0', '2.1', '2.2', '2.3', '2.4',
+ '2.5', '2.6', '2.7', '2.8', '2.9',
+ '3.0', '3.1', '3.2', '3.3', '3.4',
+ '3.5', '3.6', '3.7', '3.8', '3.9']
+ other_version = 'X'
+
+ def initialize_options(self):
+ self.bdist_dir = None
+ self.plat_name = None
+ self.keep_temp = False
+ self.no_target_compile = False
+ self.no_target_optimize = False
+ self.target_version = None
+ self.dist_dir = None
+ self.skip_build = None
+ self.install_script = None
+ self.pre_install_script = None
+ self.versions = None
+
+ def finalize_options(self):
+ self.set_undefined_options('bdist', 'skip_build')
+
+ if self.bdist_dir is None:
+ bdist_base = self.get_finalized_command('bdist').bdist_base
+ self.bdist_dir = os.path.join(bdist_base, 'msi')
+
+ short_version = get_python_version()
+ if (not self.target_version) and self.distribution.has_ext_modules():
+ self.target_version = short_version
+
+ if self.target_version:
+ self.versions = [self.target_version]
+ if not self.skip_build and self.distribution.has_ext_modules()\
+ and self.target_version != short_version:
+                raise PackagingOptionError(
+                    "target version can only be %s, or the '--skip-build' "
+                    "option must be specified" % (short_version,))
+ else:
+ self.versions = list(self.all_versions)
+
+ self.set_undefined_options('bdist', 'dist_dir', 'plat_name')
+
+ if self.pre_install_script:
+ raise PackagingOptionError("the pre-install-script feature is not yet implemented")
+
+ if self.install_script:
+ for script in self.distribution.scripts:
+ if self.install_script == os.path.basename(script):
+ break
+ else:
+                raise PackagingOptionError(
+                    "install_script '%s' not found in scripts" %
+                    self.install_script)
+ self.install_script_key = None
+
+ def run(self):
+ if not self.skip_build:
+ self.run_command('build')
+
+ install = self.reinitialize_command('install_dist',
+ reinit_subcommands=True)
+ install.prefix = self.bdist_dir
+ install.skip_build = self.skip_build
+ install.warn_dir = False
+
+ install_lib = self.reinitialize_command('install_lib')
+ # we do not want to include pyc or pyo files
+ install_lib.compile = False
+ install_lib.optimize = 0
+
+ if self.distribution.has_ext_modules():
+ # If we are building an installer for a Python version other
+ # than the one we are currently running, then we need to ensure
+ # our build_lib reflects the other Python version rather than ours.
+ # Note that for target_version!=sys.version, we must have skipped the
+ # build step, so there is no issue with enforcing the build of this
+ # version.
+ target_version = self.target_version
+ if not target_version:
+ assert self.skip_build, "Should have already checked this"
+ target_version = '%s.%s' % sys.version_info[:2]
+ plat_specifier = ".%s-%s" % (self.plat_name, target_version)
+ build = self.get_finalized_command('build')
+ build.build_lib = os.path.join(build.build_base,
+ 'lib' + plat_specifier)
+
+ log.info("installing to %s", self.bdist_dir)
+ install.ensure_finalized()
+
+ # avoid warning of 'install_lib' about installing
+ # into a directory not in sys.path
+ sys.path.insert(0, os.path.join(self.bdist_dir, 'PURELIB'))
+
+ install.run()
+
+ del sys.path[0]
+
+ self.mkpath(self.dist_dir)
+ fullname = self.distribution.get_fullname()
+ installer_name = self.get_installer_filename(fullname)
+ installer_name = os.path.abspath(installer_name)
+        if os.path.exists(installer_name):
+            os.unlink(installer_name)
+
+ metadata = self.distribution.metadata
+ author = metadata.author
+ if not author:
+ author = metadata.maintainer
+ if not author:
+ author = "UNKNOWN"
+ version = MSIVersion(metadata.get_version())
+ # Prefix ProductName with Python x.y, so that
+ # it sorts together with the other Python packages
+        # in Add/Remove Programs (ARP)
+ fullname = self.distribution.get_fullname()
+ if self.target_version:
+ product_name = "Python %s %s" % (self.target_version, fullname)
+ else:
+ product_name = "Python %s" % (fullname)
+ self.db = msilib.init_database(installer_name, schema,
+ product_name, msilib.gen_uuid(),
+ str(version), author)
+ msilib.add_tables(self.db, sequence)
+        props = [('DistVersion', str(version))]
+ email = metadata.author_email or metadata.maintainer_email
+ if email:
+ props.append(("ARPCONTACT", email))
+ if metadata.url:
+ props.append(("ARPURLINFOABOUT", metadata.url))
+ if props:
+ add_data(self.db, 'Property', props)
+
+ self.add_find_python()
+ self.add_files()
+ self.add_scripts()
+ self.add_ui()
+ self.db.Commit()
+
+ if hasattr(self.distribution, 'dist_files'):
+ tup = 'bdist_msi', self.target_version or 'any', fullname
+ self.distribution.dist_files.append(tup)
+
+ if not self.keep_temp:
+ log.info("removing temporary build directory %s", self.bdist_dir)
+ if not self.dry_run:
+ rmtree(self.bdist_dir)
+
+ def add_files(self):
+ db = self.db
+ cab = msilib.CAB("distfiles")
+ rootdir = os.path.abspath(self.bdist_dir)
+
+ root = Directory(db, cab, None, rootdir, "TARGETDIR", "SourceDir")
+ f = Feature(db, "Python", "Python", "Everything",
+ 0, 1, directory="TARGETDIR")
+
+ items = [(f, root, '')]
+ for version in self.versions + [self.other_version]:
+ target = "TARGETDIR" + version
+ name = default = "Python" + version
+ desc = "Everything"
+ if version is self.other_version:
+ title = "Python from another location"
+ level = 2
+ else:
+ title = "Python %s from registry" % version
+ level = 1
+ f = Feature(db, name, title, desc, 1, level, directory=target)
+ dir = Directory(db, cab, root, rootdir, target, default)
+ items.append((f, dir, version))
+ db.Commit()
+
+ seen = {}
+ for feature, dir, version in items:
+ todo = [dir]
+ while todo:
+ dir = todo.pop()
+ for file in os.listdir(dir.absolute):
+ afile = os.path.join(dir.absolute, file)
+ if os.path.isdir(afile):
+ short = "%s|%s" % (dir.make_short(file), file)
+ default = file + version
+ newdir = Directory(db, cab, dir, file, default, short)
+ todo.append(newdir)
+ else:
+ if not dir.component:
+ dir.start_component(dir.logical, feature, 0)
+ if afile not in seen:
+ key = seen[afile] = dir.add_file(file)
+                        if file == self.install_script:
+ if self.install_script_key:
+ raise PackagingOptionError(
+ "Multiple files with name %s" % file)
+ self.install_script_key = '[#%s]' % key
+ else:
+ key = seen[afile]
+ add_data(self.db, "DuplicateFile",
+ [(key + version, dir.component, key, None, dir.logical)])
+ db.Commit()
+ cab.commit(db)
+
+ def add_find_python(self):
+ """Adds code to the installer to compute the location of Python.
+
+ Properties PYTHON.MACHINE.X.Y and PYTHON.USER.X.Y will be set from the
+ registry for each version of Python.
+
+ Properties TARGETDIRX.Y will be set from PYTHON.USER.X.Y if defined,
+ else from PYTHON.MACHINE.X.Y.
+
+ Properties PYTHONX.Y will be set to TARGETDIRX.Y\\python.exe"""
+
+ start = 402
+ for ver in self.versions:
+ install_path = r"SOFTWARE\Python\PythonCore\%s\InstallPath" % ver
+ machine_reg = "python.machine." + ver
+ user_reg = "python.user." + ver
+ machine_prop = "PYTHON.MACHINE." + ver
+ user_prop = "PYTHON.USER." + ver
+ machine_action = "PythonFromMachine" + ver
+ user_action = "PythonFromUser" + ver
+ exe_action = "PythonExe" + ver
+ target_dir_prop = "TARGETDIR" + ver
+ exe_prop = "PYTHON" + ver
+ if msilib.Win64:
+ # type: msidbLocatorTypeRawValue + msidbLocatorType64bit
+ Type = 2+16
+ else:
+ Type = 2
+ add_data(self.db, "RegLocator",
+ [(machine_reg, 2, install_path, None, Type),
+ (user_reg, 1, install_path, None, Type)])
+ add_data(self.db, "AppSearch",
+ [(machine_prop, machine_reg),
+ (user_prop, user_reg)])
+ add_data(self.db, "CustomAction",
+ [(machine_action, 51+256, target_dir_prop, "[" + machine_prop + "]"),
+ (user_action, 51+256, target_dir_prop, "[" + user_prop + "]"),
+ (exe_action, 51+256, exe_prop, "[" + target_dir_prop + "]\\python.exe"),
+ ])
+ add_data(self.db, "InstallExecuteSequence",
+ [(machine_action, machine_prop, start),
+ (user_action, user_prop, start + 1),
+ (exe_action, None, start + 2),
+ ])
+ add_data(self.db, "InstallUISequence",
+ [(machine_action, machine_prop, start),
+ (user_action, user_prop, start + 1),
+ (exe_action, None, start + 2),
+ ])
+ add_data(self.db, "Condition",
+ [("Python" + ver, 0, "NOT TARGETDIR" + ver)])
+ start += 4
+ assert start < 500
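+        # Editor's note, taking ver == '3.3' as an example: the rows above
+        # fill PYTHON.MACHINE.3.3 and PYTHON.USER.3.3 via AppSearch, set
+        # TARGETDIR3.3 from whichever was found (per-user wins), and point
+        # PYTHON3.3 at [TARGETDIR3.3]\python.exe.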
+
+ def add_scripts(self):
+ if self.install_script:
+ start = 6800
+ for ver in self.versions + [self.other_version]:
+ install_action = "install_script." + ver
+ exe_prop = "PYTHON" + ver
+ add_data(self.db, "CustomAction",
+ [(install_action, 50, exe_prop, self.install_script_key)])
+ add_data(self.db, "InstallExecuteSequence",
+ [(install_action, "&Python%s=3" % ver, start)])
+ start += 1
+ # XXX pre-install scripts are currently refused in finalize_options()
+ # but if this feature is completed, it will also need to add
+ # entries for each version as the above code does
+ if self.pre_install_script:
+ scriptfn = os.path.join(self.bdist_dir, "preinstall.bat")
+ with open(scriptfn, "w") as f:
+ # The batch file will be executed with [PYTHON], so that %1
+ # is the path to the Python interpreter; %0 will be the path
+ # of the batch file.
+ # rem ="""
+ # %1 %0
+ # exit
+ # """
+ # <actual script>
+ f.write('rem ="""\n%1 %0\nexit\n"""\n')
+ with open(self.pre_install_script) as fp:
+ f.write(fp.read())
+ add_data(self.db, "Binary",
+ [("PreInstall", msilib.Binary(scriptfn)),
+ ])
+ add_data(self.db, "CustomAction",
+ [("PreInstall", 2, "PreInstall", None),
+ ])
+ add_data(self.db, "InstallExecuteSequence",
+ [("PreInstall", "NOT Installed", 450),
+ ])
+
+ def add_ui(self):
+ db = self.db
+ x = y = 50
+ w = 370
+ h = 300
+ title = "[ProductName] Setup"
+
+ # see "Dialog Style Bits"
+ modal = 3 # visible | modal
+ modeless = 1 # visible
+
+ # UI customization properties
+ add_data(db, "Property",
+ # See "DefaultUIFont Property"
+ [("DefaultUIFont", "DlgFont8"),
+ # See "ErrorDialog Style Bit"
+ ("ErrorDialog", "ErrorDlg"),
+ ("Progress1", "Install"), # modified in maintenance type dlg
+ ("Progress2", "installs"),
+ ("MaintenanceForm_Action", "Repair"),
+ # possible values: ALL, JUSTME
+ ("WhichUsers", "ALL")
+ ])
+
+ # Fonts, see "TextStyle Table"
+ add_data(db, "TextStyle",
+ [("DlgFont8", "Tahoma", 9, None, 0),
+ ("DlgFontBold8", "Tahoma", 8, None, 1), #bold
+ ("VerdanaBold10", "Verdana", 10, None, 1),
+ ("VerdanaRed9", "Verdana", 9, 255, 0),
+ ])
+
+ # UI Sequences, see "InstallUISequence Table", "Using a Sequence Table"
+        # Numbers indicate sequence; see sequence.py for how these actions
+        # integrate
+ add_data(db, "InstallUISequence",
+ [("PrepareDlg", "Not Privileged or Windows9x or Installed", 140),
+ ("WhichUsersDlg", "Privileged and not Windows9x and not Installed", 141),
+ # In the user interface, assume all-users installation if privileged.
+ ("SelectFeaturesDlg", "Not Installed", 1230),
+ # XXX no support for resume installations yet
+ #("ResumeDlg", "Installed AND (RESUME OR Preselected)", 1240),
+ ("MaintenanceTypeDlg", "Installed AND NOT RESUME AND NOT Preselected", 1250),
+ ("ProgressDlg", None, 1280)])
+
+ add_data(db, 'ActionText', text.ActionText)
+ add_data(db, 'UIText', text.UIText)
+ #####################################################################
+ # Standard dialogs: FatalError, UserExit, ExitDialog
+ fatal=PyDialog(db, "FatalError", x, y, w, h, modal, title,
+ "Finish", "Finish", "Finish")
+ fatal.title("[ProductName] Installer ended prematurely")
+ fatal.back("< Back", "Finish", active = 0)
+ fatal.cancel("Cancel", "Back", active = 0)
+ fatal.text("Description1", 15, 70, 320, 80, 0x30003,
+ "[ProductName] setup ended prematurely because of an error. Your system has not been modified. To install this program at a later time, please run the installation again.")
+ fatal.text("Description2", 15, 155, 320, 20, 0x30003,
+ "Click the Finish button to exit the Installer.")
+ c=fatal.next("Finish", "Cancel", name="Finish")
+ c.event("EndDialog", "Exit")
+
+ user_exit=PyDialog(db, "UserExit", x, y, w, h, modal, title,
+ "Finish", "Finish", "Finish")
+ user_exit.title("[ProductName] Installer was interrupted")
+ user_exit.back("< Back", "Finish", active = 0)
+ user_exit.cancel("Cancel", "Back", active = 0)
+ user_exit.text("Description1", 15, 70, 320, 80, 0x30003,
+ "[ProductName] setup was interrupted. Your system has not been modified. "
+ "To install this program at a later time, please run the installation again.")
+ user_exit.text("Description2", 15, 155, 320, 20, 0x30003,
+ "Click the Finish button to exit the Installer.")
+ c = user_exit.next("Finish", "Cancel", name="Finish")
+ c.event("EndDialog", "Exit")
+
+ exit_dialog = PyDialog(db, "ExitDialog", x, y, w, h, modal, title,
+ "Finish", "Finish", "Finish")
+ exit_dialog.title("Completing the [ProductName] Installer")
+ exit_dialog.back("< Back", "Finish", active = 0)
+ exit_dialog.cancel("Cancel", "Back", active = 0)
+ exit_dialog.text("Description", 15, 235, 320, 20, 0x30003,
+ "Click the Finish button to exit the Installer.")
+ c = exit_dialog.next("Finish", "Cancel", name="Finish")
+ c.event("EndDialog", "Return")
+
+ #####################################################################
+ # Required dialog: FilesInUse, ErrorDlg
+ inuse = PyDialog(db, "FilesInUse",
+ x, y, w, h,
+ 19, # KeepModeless|Modal|Visible
+ title,
+ "Retry", "Retry", "Retry", bitmap=False)
+ inuse.text("Title", 15, 6, 200, 15, 0x30003,
+ r"{\DlgFontBold8}Files in Use")
+ inuse.text("Description", 20, 23, 280, 20, 0x30003,
+ "Some files that need to be updated are currently in use.")
+ inuse.text("Text", 20, 55, 330, 50, 3,
+ "The following applications are using files that need to be updated by this setup. Close these applications and then click Retry to continue the installation or Cancel to exit it.")
+ inuse.control("List", "ListBox", 20, 107, 330, 130, 7, "FileInUseProcess",
+ None, None, None)
+ c=inuse.back("Exit", "Ignore", name="Exit")
+ c.event("EndDialog", "Exit")
+ c=inuse.next("Ignore", "Retry", name="Ignore")
+ c.event("EndDialog", "Ignore")
+ c=inuse.cancel("Retry", "Exit", name="Retry")
+ c.event("EndDialog","Retry")
+
+ # See "Error Dialog". See "ICE20" for the required names of the controls.
+ error = Dialog(db, "ErrorDlg",
+ 50, 10, 330, 101,
+ 65543, # Error|Minimize|Modal|Visible
+ title,
+ "ErrorText", None, None)
+ error.text("ErrorText", 50,9,280,48,3, "")
+ #error.control("ErrorIcon", "Icon", 15, 9, 24, 24, 5242881, None, "py.ico", None, None)
+ error.pushbutton("N",120,72,81,21,3,"No",None).event("EndDialog","ErrorNo")
+ error.pushbutton("Y",240,72,81,21,3,"Yes",None).event("EndDialog","ErrorYes")
+ error.pushbutton("A",0,72,81,21,3,"Abort",None).event("EndDialog","ErrorAbort")
+ error.pushbutton("C",42,72,81,21,3,"Cancel",None).event("EndDialog","ErrorCancel")
+ error.pushbutton("I",81,72,81,21,3,"Ignore",None).event("EndDialog","ErrorIgnore")
+ error.pushbutton("O",159,72,81,21,3,"Ok",None).event("EndDialog","ErrorOk")
+ error.pushbutton("R",198,72,81,21,3,"Retry",None).event("EndDialog","ErrorRetry")
+
+ #####################################################################
+ # Global "Query Cancel" dialog
+ cancel = Dialog(db, "CancelDlg", 50, 10, 260, 85, 3, title,
+ "No", "No", "No")
+ cancel.text("Text", 48, 15, 194, 30, 3,
+ "Are you sure you want to cancel [ProductName] installation?")
+ #cancel.control("Icon", "Icon", 15, 15, 24, 24, 5242881, None,
+ # "py.ico", None, None)
+ c=cancel.pushbutton("Yes", 72, 57, 56, 17, 3, "Yes", "No")
+ c.event("EndDialog", "Exit")
+
+ c=cancel.pushbutton("No", 132, 57, 56, 17, 3, "No", "Yes")
+ c.event("EndDialog", "Return")
+
+ #####################################################################
+ # Global "Wait for costing" dialog
+ costing = Dialog(db, "WaitForCostingDlg", 50, 10, 260, 85, modal, title,
+ "Return", "Return", "Return")
+ costing.text("Text", 48, 15, 194, 30, 3,
+ "Please wait while the installer finishes determining your disk space requirements.")
+ c = costing.pushbutton("Return", 102, 57, 56, 17, 3, "Return", None)
+ c.event("EndDialog", "Exit")
+
+ #####################################################################
+ # Preparation dialog: no user input except cancellation
+ prep = PyDialog(db, "PrepareDlg", x, y, w, h, modeless, title,
+ "Cancel", "Cancel", "Cancel")
+ prep.text("Description", 15, 70, 320, 40, 0x30003,
+ "Please wait while the Installer prepares to guide you through the installation.")
+ prep.title("Welcome to the [ProductName] Installer")
+ c=prep.text("ActionText", 15, 110, 320, 20, 0x30003, "Pondering...")
+ c.mapping("ActionText", "Text")
+ c=prep.text("ActionData", 15, 135, 320, 30, 0x30003, None)
+ c.mapping("ActionData", "Text")
+ prep.back("Back", None, active=0)
+ prep.next("Next", None, active=0)
+ c=prep.cancel("Cancel", None)
+ c.event("SpawnDialog", "CancelDlg")
+
+ #####################################################################
+ # Feature (Python directory) selection
+ seldlg = PyDialog(db, "SelectFeaturesDlg", x, y, w, h, modal, title,
+ "Next", "Next", "Cancel")
+ seldlg.title("Select Python Installations")
+
+ seldlg.text("Hint", 15, 30, 300, 20, 3,
+ "Select the Python locations where %s should be installed."
+ % self.distribution.get_fullname())
+
+ seldlg.back("< Back", None, active=0)
+ c = seldlg.next("Next >", "Cancel")
+ order = 1
+ c.event("[TARGETDIR]", "[SourceDir]", ordering=order)
+ for version in self.versions + [self.other_version]:
+ order += 1
+ c.event("[TARGETDIR]", "[TARGETDIR%s]" % version,
+ "FEATURE_SELECTED AND &Python%s=3" % version,
+ ordering=order)
+ c.event("SpawnWaitDialog", "WaitForCostingDlg", ordering=order + 1)
+ c.event("EndDialog", "Return", ordering=order + 2)
+ c = seldlg.cancel("Cancel", "Features")
+ c.event("SpawnDialog", "CancelDlg")
+
+ c = seldlg.control("Features", "SelectionTree", 15, 60, 300, 120, 3,
+ "FEATURE", None, "PathEdit", None)
+ c.event("[FEATURE_SELECTED]", "1")
+ ver = self.other_version
+ install_other_cond = "FEATURE_SELECTED AND &Python%s=3" % ver
+ dont_install_other_cond = "FEATURE_SELECTED AND &Python%s<>3" % ver
+
+ c = seldlg.text("Other", 15, 200, 300, 15, 3,
+ "Provide an alternate Python location")
+ c.condition("Enable", install_other_cond)
+ c.condition("Show", install_other_cond)
+ c.condition("Disable", dont_install_other_cond)
+ c.condition("Hide", dont_install_other_cond)
+
+ c = seldlg.control("PathEdit", "PathEdit", 15, 215, 300, 16, 1,
+ "TARGETDIR" + ver, None, "Next", None)
+ c.condition("Enable", install_other_cond)
+ c.condition("Show", install_other_cond)
+ c.condition("Disable", dont_install_other_cond)
+ c.condition("Hide", dont_install_other_cond)
+
+ #####################################################################
+ # Disk cost
+ cost = PyDialog(db, "DiskCostDlg", x, y, w, h, modal, title,
+ "OK", "OK", "OK", bitmap=False)
+ cost.text("Title", 15, 6, 200, 15, 0x30003,
+                  r"{\DlgFontBold8}Disk Space Requirements")
+ cost.text("Description", 20, 20, 280, 20, 0x30003,
+ "The disk space required for the installation of the selected features.")
+ cost.text("Text", 20, 53, 330, 60, 3,
+ "The highlighted volumes (if any) do not have enough disk space "
+ "available for the currently selected features. You can either "
+ "remove some files from the highlighted volumes, or choose to "
+                  "install fewer features onto local drive(s), or select different "
+ "destination drive(s).")
+ cost.control("VolumeList", "VolumeCostList", 20, 100, 330, 150, 393223,
+ None, "{120}{70}{70}{70}{70}", None, None)
+ cost.xbutton("OK", "Ok", None, 0.5).event("EndDialog", "Return")
+
+ #####################################################################
+ # WhichUsers Dialog. Only available on NT, and for privileged users.
+ # This must be run before FindRelatedProducts, because that will
+ # take into account whether the previous installation was per-user
+ # or per-machine. We currently don't support going back to this
+ # dialog after "Next" was selected; to support this, we would need to
+ # find how to reset the ALLUSERS property, and how to re-run
+ # FindRelatedProducts.
+ # On Windows9x, the ALLUSERS property is ignored on the command line
+        # and in the Property table, but according to the documentation the
+        # installer fails if a dialog attempts to set ALLUSERS.
+ whichusers = PyDialog(db, "WhichUsersDlg", x, y, w, h, modal, title,
+ "AdminInstall", "Next", "Cancel")
+ whichusers.title("Select whether to install [ProductName] for all users of this computer.")
+ # A radio group with two options: allusers, justme
+ g = whichusers.radiogroup("AdminInstall", 15, 60, 260, 50, 3,
+ "WhichUsers", "", "Next")
+ g.add("ALL", 0, 5, 150, 20, "Install for all users")
+ g.add("JUSTME", 0, 25, 150, 20, "Install just for me")
+
+ whichusers.back("Back", None, active=0)
+
+ c = whichusers.next("Next >", "Cancel")
+ c.event("[ALLUSERS]", "1", 'WhichUsers="ALL"', 1)
+ c.event("EndDialog", "Return", ordering = 2)
+
+ c = whichusers.cancel("Cancel", "AdminInstall")
+ c.event("SpawnDialog", "CancelDlg")
+
+ #####################################################################
+ # Installation Progress dialog (modeless)
+ progress = PyDialog(db, "ProgressDlg", x, y, w, h, modeless, title,
+ "Cancel", "Cancel", "Cancel", bitmap=False)
+ progress.text("Title", 20, 15, 200, 15, 0x30003,
+                      r"{\DlgFontBold8}[Progress1] [ProductName]")
+ progress.text("Text", 35, 65, 300, 30, 3,
+ "Please wait while the Installer [Progress2] [ProductName]. "
+ "This may take several minutes.")
+ progress.text("StatusLabel", 35, 100, 35, 20, 3, "Status:")
+
+ c=progress.text("ActionText", 70, 100, w-70, 20, 3, "Pondering...")
+ c.mapping("ActionText", "Text")
+
+ #c=progress.text("ActionData", 35, 140, 300, 20, 3, None)
+ #c.mapping("ActionData", "Text")
+
+ c=progress.control("ProgressBar", "ProgressBar", 35, 120, 300, 10, 65537,
+ None, "Progress done", None, None)
+ c.mapping("SetProgress", "Progress")
+
+ progress.back("< Back", "Next", active=False)
+ progress.next("Next >", "Cancel", active=False)
+ progress.cancel("Cancel", "Back").event("SpawnDialog", "CancelDlg")
+
+ ###################################################################
+ # Maintenance type: repair/uninstall
+ maint = PyDialog(db, "MaintenanceTypeDlg", x, y, w, h, modal, title,
+ "Next", "Next", "Cancel")
+ maint.title("Welcome to the [ProductName] Setup Wizard")
+ maint.text("BodyText", 15, 63, 330, 42, 3,
+ "Select whether you want to repair or remove [ProductName].")
+ g=maint.radiogroup("RepairRadioGroup", 15, 108, 330, 60, 3,
+ "MaintenanceForm_Action", "", "Next")
+ #g.add("Change", 0, 0, 200, 17, "&Change [ProductName]")
+ g.add("Repair", 0, 18, 200, 17, "&Repair [ProductName]")
+ g.add("Remove", 0, 36, 200, 17, "Re&move [ProductName]")
+
+ maint.back("< Back", None, active=False)
+ c=maint.next("Finish", "Cancel")
+ # Change installation: Change progress dialog to "Change", then ask
+ # for feature selection
+ #c.event("[Progress1]", "Change", 'MaintenanceForm_Action="Change"', 1)
+ #c.event("[Progress2]", "changes", 'MaintenanceForm_Action="Change"', 2)
+
+ # Reinstall: Change progress dialog to "Repair", then invoke reinstall
+ # Also set list of reinstalled features to "ALL"
+ c.event("[REINSTALL]", "ALL", 'MaintenanceForm_Action="Repair"', 5)
+ c.event("[Progress1]", "Repairing", 'MaintenanceForm_Action="Repair"', 6)
+ c.event("[Progress2]", "repairs", 'MaintenanceForm_Action="Repair"', 7)
+ c.event("Reinstall", "ALL", 'MaintenanceForm_Action="Repair"', 8)
+
+ # Uninstall: Change progress to "Remove", then invoke uninstall
+ # Also set list of removed features to "ALL"
+ c.event("[REMOVE]", "ALL", 'MaintenanceForm_Action="Remove"', 11)
+ c.event("[Progress1]", "Removing", 'MaintenanceForm_Action="Remove"', 12)
+ c.event("[Progress2]", "removes", 'MaintenanceForm_Action="Remove"', 13)
+ c.event("Remove", "ALL", 'MaintenanceForm_Action="Remove"', 14)
+
+ # Close dialog when maintenance action scheduled
+ c.event("EndDialog", "Return", 'MaintenanceForm_Action<>"Change"', 20)
+ #c.event("NewDialog", "SelectFeaturesDlg", 'MaintenanceForm_Action="Change"', 21)
+
+ maint.cancel("Cancel", "RepairRadioGroup").event("SpawnDialog", "CancelDlg")
+
+ def get_installer_filename(self, fullname):
+ # Factored out to allow overriding in subclasses
+ if self.target_version:
+ base_name = "%s.%s-py%s.msi" % (fullname, self.plat_name,
+ self.target_version)
+ else:
+ base_name = "%s.%s.msi" % (fullname, self.plat_name)
+ installer_name = os.path.join(self.dist_dir, base_name)
+ return installer_name
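+
+# Editor's illustration of the naming scheme: with fullname 'foo-1.0' on
+# win32, this yields dist/foo-1.0.win32-py3.3.msi when --target-version=3.3
+# is given, and dist/foo-1.0.win32.msi otherwise.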
diff --git a/Lib/packaging/command/bdist_wininst.py b/Lib/packaging/command/bdist_wininst.py
new file mode 100644
index 0000000..3c66360
--- /dev/null
+++ b/Lib/packaging/command/bdist_wininst.py
@@ -0,0 +1,345 @@
+"""Create an executable installer for Windows."""
+
+import sys
+import os
+
+from shutil import rmtree
+from sysconfig import get_python_version
+from packaging.command.cmd import Command
+from packaging.errors import PackagingOptionError, PackagingPlatformError
+from packaging import logger
+from packaging.util import get_platform
+
+
+class bdist_wininst(Command):
+
+ description = "create an executable installer for Windows"
+
+ user_options = [('bdist-dir=', None,
+ "temporary directory for creating the distribution"),
+ ('plat-name=', 'p',
+ "platform name to embed in generated filenames "
+ "(default: %s)" % get_platform()),
+ ('keep-temp', 'k',
+ "keep the pseudo-installation tree around after " +
+ "creating the distribution archive"),
+ ('target-version=', None,
+ "require a specific python version" +
+ " on the target system"),
+ ('no-target-compile', 'c',
+ "do not compile .py to .pyc on the target system"),
+ ('no-target-optimize', 'o',
+                     "do not compile .py to .pyo (optimized) "
+                     "on the target system"),
+ ('dist-dir=', 'd',
+ "directory to put final built distributions in"),
+ ('bitmap=', 'b',
+ "bitmap to use for the installer instead of python-powered logo"),
+ ('title=', 't',
+ "title to display on the installer background instead of default"),
+ ('skip-build', None,
+ "skip rebuilding everything (for testing/debugging)"),
+ ('install-script=', None,
+                     "basename of installation script to be run after "
+                     "installation or before deinstallation"),
+ ('pre-install-script=', None,
+ "Fully qualified filename of a script to be run before "
+ "any files are installed. This script need not be in the "
+ "distribution"),
+ ('user-access-control=', None,
+ "specify Vista's UAC handling - 'none'/default=no "
+ "handling, 'auto'=use UAC if target Python installed for "
+ "all users, 'force'=always use UAC"),
+ ]
+
+ boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize',
+ 'skip-build']
+
+ def initialize_options(self):
+ self.bdist_dir = None
+ self.plat_name = None
+ self.keep_temp = False
+ self.no_target_compile = False
+ self.no_target_optimize = False
+ self.target_version = None
+ self.dist_dir = None
+ self.bitmap = None
+ self.title = None
+ self.skip_build = None
+ self.install_script = None
+ self.pre_install_script = None
+ self.user_access_control = None
+
+ def finalize_options(self):
+ self.set_undefined_options('bdist', 'skip_build')
+
+ if self.bdist_dir is None:
+ if self.skip_build and self.plat_name:
+ # If build is skipped and plat_name is overridden, bdist will
+ # not see the correct 'plat_name' - so set that up manually.
+ bdist = self.distribution.get_command_obj('bdist')
+ bdist.plat_name = self.plat_name
+ # next the command will be initialized using that name
+ bdist_base = self.get_finalized_command('bdist').bdist_base
+ self.bdist_dir = os.path.join(bdist_base, 'wininst')
+
+ if not self.target_version:
+ self.target_version = ""
+
+ if not self.skip_build and self.distribution.has_ext_modules():
+ short_version = get_python_version()
+ if self.target_version and self.target_version != short_version:
+                raise PackagingOptionError(
+                    "target version can only be %s, or the '--skip-build' "
+                    "option must be specified" % (short_version,))
+ self.target_version = short_version
+
+ self.set_undefined_options('bdist', 'dist_dir', 'plat_name')
+
+ if self.install_script:
+ for script in self.distribution.scripts:
+ if self.install_script == os.path.basename(script):
+ break
+ else:
+                raise PackagingOptionError(
+                    "install_script '%s' not found in scripts" %
+                    self.install_script)
+
+ def run(self):
+ if (sys.platform != "win32" and
+ (self.distribution.has_ext_modules() or
+ self.distribution.has_c_libraries())):
+            raise PackagingPlatformError(
+                "distribution contains extensions and/or C libraries; "
+                "must be compiled on a Windows 32 platform")
+
+ if not self.skip_build:
+ self.run_command('build')
+
+        install = self.reinitialize_command('install_dist',
+                                            reinit_subcommands=True)
+ install.root = self.bdist_dir
+ install.skip_build = self.skip_build
+ install.warn_dir = False
+ install.plat_name = self.plat_name
+
+ install_lib = self.reinitialize_command('install_lib')
+ # we do not want to include pyc or pyo files
+ install_lib.compile = False
+ install_lib.optimize = 0
+
+ if self.distribution.has_ext_modules():
+ # If we are building an installer for a Python version other
+ # than the one we are currently running, then we need to ensure
+ # our build_lib reflects the other Python version rather than ours.
+ # Note that for target_version!=sys.version, we must have skipped the
+ # build step, so there is no issue with enforcing the build of this
+ # version.
+ target_version = self.target_version
+ if not target_version:
+ assert self.skip_build, "Should have already checked this"
+ target_version = '%s.%s' % sys.version_info[:2]
+ plat_specifier = ".%s-%s" % (self.plat_name, target_version)
+ build = self.get_finalized_command('build')
+ build.build_lib = os.path.join(build.build_base,
+ 'lib' + plat_specifier)
+
+ # Use a custom scheme for the zip-file, because we have to decide
+ # at installation time which scheme to use.
+ for key in ('purelib', 'platlib', 'headers', 'scripts', 'data'):
+ value = key.upper()
+ if key == 'headers':
+ value = value + '/Include/$dist_name'
+ setattr(install,
+ 'install_' + key,
+ value)
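+        # Editor's note: e.g. install_purelib becomes 'PURELIB' and
+        # install_headers 'HEADERS/Include/$dist_name'; the installer
+        # stub substitutes the real target directories at install time.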
+
+ logger.info("installing to %s", self.bdist_dir)
+ install.ensure_finalized()
+
+ # avoid warning of 'install_lib' about installing
+ # into a directory not in sys.path
+ sys.path.insert(0, os.path.join(self.bdist_dir, 'PURELIB'))
+
+ install.run()
+
+ del sys.path[0]
+
+ # And make an archive relative to the root of the
+ # pseudo-installation tree.
+ from tempfile import NamedTemporaryFile
+ archive_basename = NamedTemporaryFile().name
+ fullname = self.distribution.get_fullname()
+ arcname = self.make_archive(archive_basename, "zip",
+ root_dir=self.bdist_dir)
+ # create an exe containing the zip-file
+ self.create_exe(arcname, fullname, self.bitmap)
+ if self.distribution.has_ext_modules():
+ pyversion = get_python_version()
+ else:
+ pyversion = 'any'
+ self.distribution.dist_files.append(('bdist_wininst', pyversion,
+ self.get_installer_filename(fullname)))
+ # remove the zip-file again
+ logger.debug("removing temporary file '%s'", arcname)
+ os.remove(arcname)
+
+ if not self.keep_temp:
+ logger.info('removing %s', self.bdist_dir)
+ if not self.dry_run:
+ rmtree(self.bdist_dir)
+
+ def get_inidata(self):
+ # Return data describing the installation.
+
+ lines = []
+ metadata = self.distribution.metadata
+
+ # Write the [metadata] section.
+ lines.append("[metadata]")
+
+ # 'info' will be displayed in the installer's dialog box,
+ # describing the items to be installed.
+ info = (metadata.long_description or '') + '\n'
+
+ # Escape newline characters
+ def escape(s):
+ return s.replace("\n", "\\n")
+
+ for name in ["author", "author_email", "description", "maintainer",
+ "maintainer_email", "name", "url", "version"]:
+ data = getattr(metadata, name, "")
+ if data:
+                info = info + ("\n    %s: %s" %
+                               (name.capitalize(), escape(data)))
+ lines.append("%s=%s" % (name, escape(data)))
+
+        # The [Setup] section contains entries controlling
+        # the installer runtime.
+ lines.append("\n[Setup]")
+ if self.install_script:
+ lines.append("install_script=%s" % self.install_script)
+ lines.append("info=%s" % escape(info))
+ lines.append("target_compile=%d" % (not self.no_target_compile))
+ lines.append("target_optimize=%d" % (not self.no_target_optimize))
+ if self.target_version:
+ lines.append("target_version=%s" % self.target_version)
+ if self.user_access_control:
+ lines.append("user_access_control=%s" % self.user_access_control)
+
+ title = self.title or self.distribution.get_fullname()
+ lines.append("title=%s" % escape(title))
+ import time
+ import packaging
+ build_info = "Built %s with packaging-%s" % \
+ (time.ctime(time.time()), packaging.__version__)
+ lines.append("build_info=%s" % build_info)
+ return "\n".join(lines)
+
+ def create_exe(self, arcname, fullname, bitmap=None):
+ import struct
+
+ self.mkpath(self.dist_dir)
+
+ cfgdata = self.get_inidata()
+
+ installer_name = self.get_installer_filename(fullname)
+ logger.info("creating %s", installer_name)
+
+ if bitmap:
+ with open(bitmap, "rb") as fp:
+ bitmapdata = fp.read()
+ bitmaplen = len(bitmapdata)
+ else:
+ bitmaplen = 0
+
+ with open(installer_name, "wb") as file:
+ file.write(self.get_exe_bytes())
+ if bitmap:
+ file.write(bitmapdata)
+
+            # Encode cfgdata from str to bytes using the mbcs codec
+ if isinstance(cfgdata, str):
+ cfgdata = cfgdata.encode("mbcs")
+
+ # Append the pre-install script
+ cfgdata = cfgdata + b"\0"
+ if self.pre_install_script:
+ # We need to normalize newlines, so we open in text mode and
+ # convert back to bytes. "latin-1" simply avoids any possible
+ # failures.
+ with open(self.pre_install_script, encoding="latin-1") as fp:
+ script_data = fp.read().encode("latin-1")
+ cfgdata = cfgdata + script_data + b"\n\0"
+ else:
+ # empty pre-install script
+ cfgdata = cfgdata + b"\0"
+ file.write(cfgdata)
+
+ # The 'magic number' 0x1234567B is used to make sure that the
+ # binary layout of 'cfgdata' is what the wininst.exe binary
+ # expects. If the layout changes, increment that number, make
+ # the corresponding changes to the wininst.exe sources, and
+ # recompile them.
+ header = struct.pack("<iii",
+ 0x1234567B, # tag
+ len(cfgdata), # length
+ bitmaplen, # number of bytes in bitmap
+ )
+ file.write(header)
+ with open(arcname, "rb") as fp:
+ file.write(fp.read())
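+        # Editor's summary of the layout written above: wininst stub exe,
+        # optional bitmap, cfgdata (with optional pre-install script, NUL
+        # terminated), the 12-byte struct header, then the zip archive.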
+
+ def get_installer_filename(self, fullname):
+ # Factored out to allow overriding in subclasses
+ if self.target_version:
+ # if we create an installer for a specific python version,
+ # it's better to include this in the name
+ installer_name = os.path.join(self.dist_dir,
+ "%s.%s-py%s.exe" %
+ (fullname, self.plat_name, self.target_version))
+ else:
+ installer_name = os.path.join(self.dist_dir,
+ "%s.%s.exe" % (fullname, self.plat_name))
+ return installer_name
+
+ def get_exe_bytes(self):
+ from packaging.compiler.msvccompiler import get_build_version
+ # If a target-version other than the current version has been
+ # specified, then using the MSVC version from *this* build is no good.
+ # Without actually finding and executing the target version and parsing
+ # its sys.version, we just hard-code our knowledge of old versions.
+ # NOTE: Possible alternative is to allow "--target-version" to
+ # specify a Python executable rather than a simple version string.
+ # We can then execute this program to obtain any info we need, such
+ # as the real sys.version string for the build.
+ cur_version = get_python_version()
+ if self.target_version and self.target_version != cur_version:
+ # If the target version is *later* than us, then we assume they
+ # use what we use
+ # string compares seem wrong, but are what sysconfig.py itself uses
+ if self.target_version > cur_version:
+ bv = get_build_version()
+ else:
+ if self.target_version < "2.4":
+ bv = 6.0
+ else:
+ bv = 7.1
+ else:
+ # for current version - use authoritative check.
+ bv = get_build_version()
+
+ # wininst-x.y.exe is in the same directory as this file
+ directory = os.path.dirname(__file__)
+ # we must use a wininst-x.y.exe built with the same C compiler
+ # used for python. XXX What about mingw, borland, and so on?
+
+ # if plat_name starts with "win" but is not "win32"
+ # we want to strip "win" and leave the rest (e.g. -amd64)
+ # for all other cases, we don't want any suffix
+ if self.plat_name != 'win32' and self.plat_name[:3] == 'win':
+ sfix = self.plat_name[3:]
+ else:
+ sfix = ''
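+        # e.g. wininst-9.0.exe for a win32 build, wininst-9.0-amd64.exe when
+        # plat_name is 'win-amd64' (version numbers are illustrative)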
+
+ filename = os.path.join(directory, "wininst-%.1f%s.exe" % (bv, sfix))
+ with open(filename, "rb") as fp:
+ return fp.read()
diff --git a/Lib/packaging/command/build.py b/Lib/packaging/command/build.py
new file mode 100644
index 0000000..fcb50df
--- /dev/null
+++ b/Lib/packaging/command/build.py
@@ -0,0 +1,151 @@
+"""Main build command, which calls the other build_* commands."""
+
+import sys
+import os
+
+from packaging.util import get_platform
+from packaging.command.cmd import Command
+from packaging.errors import PackagingOptionError
+from packaging.compiler import show_compilers
+
+
+class build(Command):
+
+ description = "build everything needed to install"
+
+ user_options = [
+ ('build-base=', 'b',
+ "base directory for build library"),
+ ('build-purelib=', None,
+ "build directory for platform-neutral distributions"),
+ ('build-platlib=', None,
+ "build directory for platform-specific distributions"),
+ ('build-lib=', None,
+         "build directory for all distributions (defaults to either " +
+         "build-purelib or build-platlib)"),
+ ('build-scripts=', None,
+ "build directory for scripts"),
+ ('build-temp=', 't',
+ "temporary build directory"),
+ ('plat-name=', 'p',
+ "platform name to build for, if supported "
+ "(default: %s)" % get_platform()),
+ ('compiler=', 'c',
+ "specify the compiler type"),
+ ('debug', 'g',
+ "compile extensions and libraries with debugging information"),
+ ('force', 'f',
+ "forcibly build everything (ignore file timestamps)"),
+ ('executable=', 'e',
+ "specify final destination interpreter path (build.py)"),
+ ('use-2to3', None,
+         "use 2to3 to make the source Python 3.x compatible"),
+ ('convert-2to3-doctests', None,
+ "use 2to3 to convert doctests in separate text files"),
+ ('use-2to3-fixers', None,
+ "list additional fixers opted for during 2to3 conversion"),
+ ]
+
+ boolean_options = ['debug', 'force']
+
+ help_options = [
+ ('help-compiler', None,
+ "list available compilers", show_compilers),
+ ]
+
+ def initialize_options(self):
+ self.build_base = 'build'
+ # these are decided only after 'build_base' has its final value
+ # (unless overridden by the user or client)
+ self.build_purelib = None
+ self.build_platlib = None
+ self.build_lib = None
+ self.build_temp = None
+ self.build_scripts = None
+ self.compiler = None
+ self.plat_name = None
+ self.debug = None
+ self.force = False
+ self.executable = None
+ self.use_2to3 = False
+ self.convert_2to3_doctests = None
+ self.use_2to3_fixers = None
+
+ def finalize_options(self):
+ if self.plat_name is None:
+ self.plat_name = get_platform()
+ else:
+ # plat-name only supported for windows (other platforms are
+ # supported via ./configure flags, if at all). Avoid misleading
+ # other platforms.
+ if os.name != 'nt':
+ raise PackagingOptionError(
+ "--plat-name only supported on Windows (try "
+ "using './configure --help' on your platform)")
+ pyversion = '%s.%s' % sys.version_info[:2]
+ plat_specifier = ".%s-%s" % (self.plat_name, pyversion)
+
+        # Make it so a regular build and a --with-pydebug build of the same
+        # Python version don't share build directories, since doing so
+        # confuses the build process for C modules.
+ if hasattr(sys, 'gettotalrefcount'):
+ plat_specifier += '-pydebug'
+
+ # 'build_purelib' and 'build_platlib' just default to 'lib' and
+ # 'lib.<plat>' under the base build directory. We only use one of
+ # them for a given distribution, though --
+ if self.build_purelib is None:
+ self.build_purelib = os.path.join(self.build_base, 'lib')
+ if self.build_platlib is None:
+ self.build_platlib = os.path.join(self.build_base,
+ 'lib' + plat_specifier)
+
+ # 'build_lib' is the actual directory that we will use for this
+ # particular module distribution -- if user didn't supply it, pick
+ # one of 'build_purelib' or 'build_platlib'.
+ if self.build_lib is None:
+ if self.distribution.ext_modules:
+ self.build_lib = self.build_platlib
+ else:
+ self.build_lib = self.build_purelib
+
+ # 'build_temp' -- temporary directory for compiler turds,
+ # "build/temp.<plat>"
+ if self.build_temp is None:
+ self.build_temp = os.path.join(self.build_base,
+ 'temp' + plat_specifier)
+ if self.build_scripts is None:
+ self.build_scripts = os.path.join(self.build_base,
+ 'scripts-' + pyversion)
+
+ if self.executable is None:
+ self.executable = os.path.normpath(sys.executable)
+
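+    # Editor's illustration of the layout finalize_options() produces
+    # (assuming CPython 3.3 on linux-x86_64; names are illustrative):
+    #   build/lib                    -- pure Python build (build_purelib)
+    #   build/lib.linux-x86_64-3.3   -- platform build (build_platlib)
+    #   build/temp.linux-x86_64-3.3  -- compiler by-products (build_temp)
+    #   build/scripts-3.3            -- scripts being built (build_scripts)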
+ def run(self):
+ # Run all relevant sub-commands. This will be some subset of:
+ # - build_py - pure Python modules
+ # - build_clib - standalone C libraries
+ # - build_ext - Python extension modules
+ # - build_scripts - Python scripts
+ for cmd_name in self.get_sub_commands():
+ self.run_command(cmd_name)
+
+ # -- Predicates for the sub-command list ---------------------------
+
+ def has_pure_modules(self):
+ return self.distribution.has_pure_modules()
+
+ def has_c_libraries(self):
+ return self.distribution.has_c_libraries()
+
+ def has_ext_modules(self):
+ return self.distribution.has_ext_modules()
+
+ def has_scripts(self):
+ return self.distribution.has_scripts()
+
+ sub_commands = [('build_py', has_pure_modules),
+ ('build_clib', has_c_libraries),
+ ('build_ext', has_ext_modules),
+ ('build_scripts', has_scripts),
+ ]
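+
+ # For illustration (not part of the original patch): a distribution
+ # containing only pure Python modules and scripts would run just
+ # build_py and build_scripts here; build_clib and build_ext are
+ # skipped because their predicates return a false value.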
diff --git a/Lib/packaging/command/build_clib.py b/Lib/packaging/command/build_clib.py
new file mode 100644
index 0000000..5388ccd
--- /dev/null
+++ b/Lib/packaging/command/build_clib.py
@@ -0,0 +1,197 @@
+"""Build C/C++ libraries.
+
+This command is useful to build libraries that are included in the
+distribution and needed by extension modules.
+"""
+
+# XXX this module has *lots* of code ripped-off quite transparently from
+# build_ext.py -- not surprisingly really, as the work required to build
+# a static library from a collection of C source files is not really all
+# that different from what's required to build a shared object file from
+# a collection of C source files. Nevertheless, I haven't done the
+# necessary refactoring to account for the overlap in code between the
+# two modules, mainly because a number of subtle details changed in the
+# cut 'n paste. Sigh.
+
+import os
+from packaging.command.cmd import Command
+from packaging.errors import PackagingSetupError
+from packaging.compiler import customize_compiler, new_compiler
+from packaging import logger
+
+
+def show_compilers():
+ from packaging.compiler import show_compilers
+ show_compilers()
+
+
+class build_clib(Command):
+
+ description = "build C/C++ libraries used by extension modules"
+
+ user_options = [
+ ('build-clib=', 'b',
+ "directory to build C/C++ libraries to"),
+ ('build-temp=', 't',
+ "directory to put temporary build by-products"),
+ ('debug', 'g',
+ "compile with debugging information"),
+ ('force', 'f',
+ "forcibly build everything (ignore file timestamps)"),
+ ('compiler=', 'c',
+ "specify the compiler type"),
+ ]
+
+ boolean_options = ['debug', 'force']
+
+ help_options = [
+ ('help-compiler', None,
+ "list available compilers", show_compilers),
+ ]
+
+ def initialize_options(self):
+ self.build_clib = None
+ self.build_temp = None
+
+ # List of libraries to build
+ self.libraries = None
+
+ # Compilation options for all libraries
+ self.include_dirs = None
+ self.define = None
+ self.undef = None
+ self.debug = None
+ self.force = False
+ self.compiler = None
+
+ def finalize_options(self):
+ # This might be confusing: both build-clib and build-temp default
+ # to build-temp as defined by the "build" command. This is because
+ # I think that C libraries are really just temporary build
+ # by-products, at least from the point of view of building Python
+ # extensions -- but I want to keep my options open.
+ self.set_undefined_options('build',
+ ('build_temp', 'build_clib'),
+ ('build_temp', 'build_temp'),
+ 'compiler', 'debug', 'force')
+
+ self.libraries = self.distribution.libraries
+ if self.libraries:
+ self.check_library_list(self.libraries)
+
+ if self.include_dirs is None:
+ self.include_dirs = self.distribution.include_dirs or []
+ if isinstance(self.include_dirs, str):
+ self.include_dirs = self.include_dirs.split(os.pathsep)
+
+ # XXX same as for build_ext -- what about 'self.define' and
+ # 'self.undef' ?
+
+ def run(self):
+ if not self.libraries:
+ return
+
+ # Yech -- this is cut 'n pasted from build_ext.py!
+ self.compiler = new_compiler(compiler=self.compiler,
+ dry_run=self.dry_run,
+ force=self.force)
+ customize_compiler(self.compiler)
+
+ if self.include_dirs is not None:
+ self.compiler.set_include_dirs(self.include_dirs)
+ if self.define is not None:
+ # 'define' option is a list of (name,value) tuples
+ for name, value in self.define:
+ self.compiler.define_macro(name, value)
+ if self.undef is not None:
+ for macro in self.undef:
+ self.compiler.undefine_macro(macro)
+
+ self.build_libraries(self.libraries)
+
+ def check_library_list(self, libraries):
+ """Ensure that the list of libraries is valid.
+
+ `libraries` is presumably provided as a command option 'libraries'.
+ This method checks that it is a list of 2-tuples, where the tuples
+ are (library_name, build_info_dict).
+
+ Raise PackagingSetupError if the structure is invalid anywhere;
+ just return otherwise.
+ """
+ if not isinstance(libraries, list):
+ raise PackagingSetupError("'libraries' option must be a list of tuples")
+
+ for lib in libraries:
+ if not isinstance(lib, tuple) or len(lib) != 2:
+ raise PackagingSetupError("each element of 'libraries' must be a 2-tuple")
+
+ name, build_info = lib
+
+ if not isinstance(name, str):
+ raise PackagingSetupError("first element of each tuple in 'libraries' "
+ "must be a string (the library name)")
+ if '/' in name or (os.sep != '/' and os.sep in name):
+ raise PackagingSetupError("bad library name '%s': may not contain "
+ "directory separators" % name)
+
+ if not isinstance(build_info, dict):
+ raise PackagingSetupError("second element of each tuple in 'libraries' "
+ "must be a dictionary (build info)")
+
+ def get_library_names(self):
+ # Assume the library list is valid -- 'check_library_list()' is
+ # called from 'finalize_options()', so it should be!
+ if not self.libraries:
+ return None
+
+ lib_names = []
+ for lib_name, build_info in self.libraries:
+ lib_names.append(lib_name)
+ return lib_names
+
+ def get_source_files(self):
+ self.check_library_list(self.libraries)
+ filenames = []
+ for lib_name, build_info in self.libraries:
+ sources = build_info.get('sources')
+ if sources is None or not isinstance(sources, (list, tuple)):
+ raise PackagingSetupError(("in 'libraries' option (library '%s'), "
+ "'sources' must be present and must be "
+ "a list of source filenames") % lib_name)
+
+ filenames.extend(sources)
+ return filenames
+
+ def build_libraries(self, libraries):
+ for lib_name, build_info in libraries:
+ sources = build_info.get('sources')
+ if sources is None or not isinstance(sources, (list, tuple)):
+ raise PackagingSetupError(("in 'libraries' option (library '%s'), " +
+ "'sources' must be present and must be " +
+ "a list of source filenames") % lib_name)
+ sources = list(sources)
+
+ logger.info("building '%s' library", lib_name)
+
+ # First, compile the source code to object files in the library
+ # directory. (This should probably change to putting object
+ # files in a temporary build directory.)
+ macros = build_info.get('macros')
+ include_dirs = build_info.get('include_dirs')
+ objects = self.compiler.compile(sources,
+ output_dir=self.build_temp,
+ macros=macros,
+ include_dirs=include_dirs,
+ debug=self.debug)
+
+ # Now "link" the object files together into a static library.
+ # (On Unix at least, this isn't really linking -- it just
+ # builds an archive. Whatever.)
+ self.compiler.create_static_lib(objects, lib_name,
+ output_dir=self.build_clib,
+ debug=self.debug)
diff --git a/Lib/packaging/command/build_ext.py b/Lib/packaging/command/build_ext.py
new file mode 100644
index 0000000..4c85822
--- /dev/null
+++ b/Lib/packaging/command/build_ext.py
@@ -0,0 +1,641 @@
+"""Build extension modules."""
+
+import os
+import re
+import sys
+import site
+import sysconfig
+
+from packaging.util import get_platform
+from packaging.command.cmd import Command
+from packaging.errors import (CCompilerError, CompileError, PackagingError,
+ PackagingPlatformError, PackagingSetupError)
+from packaging.compiler import customize_compiler, show_compilers
+from packaging.util import newer_group
+from packaging.compiler.extension import Extension
+from packaging import logger
+
+if os.name == 'nt':
+ from packaging.compiler.msvccompiler import get_build_version
+ MSVC_VERSION = int(get_build_version())
+
+# An extension name is just a dot-separated list of Python NAMEs (ie.
+# the same as a fully-qualified module name).
+extension_name_re = re.compile(
+ r'^[a-zA-Z_][a-zA-Z_0-9]*(\.[a-zA-Z_][a-zA-Z_0-9]*)*$')
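+# For illustration (not part of the original patch): this accepts
+# 'foo' and 'foo.bar' but rejects 'foo-bar' and '3d'.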
+
+
+class build_ext(Command):
+
+ description = "build C/C++ extension modules (compile/link to build directory)"
+
+ # XXX thoughts on how to deal with complex command-line options like
+ # these, i.e. how to make it so fancy_getopt can suck them off the
+ # command line and turn them into the appropriate
+ # lists of tuples of what-have-you.
+ # - each command needs a callback to process its command-line options
+ # - Command.__init__() needs access to its share of the whole
+ # command line (must ultimately come from
+ # Distribution.parse_command_line())
+ # - it then calls the current command class' option-parsing
+ # callback to deal with weird options like -D, which have to
+ # parse the option text and churn out some custom data
+ # structure
+ # - that data structure (in this case, a list of 2-tuples)
+ # will then be present in the command object by the time
+ # we get to finalize_options() (i.e. the constructor
+ # takes care of both command-line and client options
+ # in between initialize_options() and finalize_options())
+
+ sep_by = " (separated by '%s')" % os.pathsep
+ user_options = [
+ ('build-lib=', 'b',
+ "directory for compiled extension modules"),
+ ('build-temp=', 't',
+ "directory for temporary files (build by-products)"),
+ ('plat-name=', 'p',
+ "platform name to cross-compile for, if supported "
+ "(default: %s)" % get_platform()),
+ ('inplace', 'i',
+ "ignore build-lib and put compiled extensions into the source " +
+ "directory alongside your pure Python modules"),
+ ('user', None,
+ "add user include, library and rpath"),
+ ('include-dirs=', 'I',
+ "list of directories to search for header files" + sep_by),
+ ('define=', 'D',
+ "C preprocessor macros to define"),
+ ('undef=', 'U',
+ "C preprocessor macros to undefine"),
+ ('libraries=', 'l',
+ "external C libraries to link with"),
+ ('library-dirs=', 'L',
+ "directories to search for external C libraries" + sep_by),
+ ('rpath=', 'R',
+ "directories to search for shared C libraries at runtime"),
+ ('link-objects=', 'O',
+ "extra explicit link objects to include in the link"),
+ ('debug', 'g',
+ "compile/link with debugging information"),
+ ('force', 'f',
+ "forcibly build everything (ignore file timestamps)"),
+ ('compiler=', 'c',
+ "specify the compiler type"),
+ ('swig-opts=', None,
+ "list of SWIG command-line options"),
+ ('swig=', None,
+ "path to the SWIG executable"),
+ ]
+
+ boolean_options = ['inplace', 'debug', 'force', 'user']
+
+ help_options = [
+ ('help-compiler', None,
+ "list available compilers", show_compilers),
+ ]
+
+ def initialize_options(self):
+ self.extensions = None
+ self.build_lib = None
+ self.plat_name = None
+ self.build_temp = None
+ self.inplace = False
+ self.package = None
+
+ self.include_dirs = None
+ self.define = None
+ self.undef = None
+ self.libraries = None
+ self.library_dirs = None
+ self.rpath = None
+ self.link_objects = None
+ self.debug = None
+ self.force = None
+ self.compiler = None
+ self.swig = None
+ self.swig_opts = None
+ self.user = None
+
+ def finalize_options(self):
+ self.set_undefined_options('build',
+ 'build_lib', 'build_temp', 'compiler',
+ 'debug', 'force', 'plat_name')
+
+ if self.package is None:
+ self.package = self.distribution.ext_package
+
+ # Ensure that the list of extensions is valid, i.e. it is a list of
+ # Extension objects.
+ self.extensions = self.distribution.ext_modules
+ if self.extensions:
+ if not isinstance(self.extensions, (list, tuple)):
+ # we're inside "if self.extensions", so it cannot be None here
+ type_name = type(self.extensions).__name__
+ raise PackagingSetupError(
+ "'ext_modules' must be a sequence of Extension instances,"
+ " not %s" % (type_name,))
+ for i, ext in enumerate(self.extensions):
+ if isinstance(ext, Extension):
+ continue # OK! (assume type-checking done by Extension constructor)
+ type_name = 'None' if ext is None else type(ext).__name__
+ raise PackagingSetupError(
+ "'ext_modules' item %d must be an Extension instance,"
+ " not %s" % (i, type_name))
+
+ # Make sure Python's include directories (for Python.h, pyconfig.h,
+ # etc.) are in the include search path.
+ py_include = sysconfig.get_path('include')
+ plat_py_include = sysconfig.get_path('platinclude')
+ if self.include_dirs is None:
+ self.include_dirs = self.distribution.include_dirs or []
+ if isinstance(self.include_dirs, str):
+ self.include_dirs = self.include_dirs.split(os.pathsep)
+
+ # Put the Python "system" include dir at the end, so that
+ # any local include dirs take precedence.
+ self.include_dirs.append(py_include)
+ if plat_py_include != py_include:
+ self.include_dirs.append(plat_py_include)
+
+ self.ensure_string_list('libraries')
+
+ # Life is easier if we're not forever checking for None, so
+ # simplify these options to empty lists if unset
+ if self.libraries is None:
+ self.libraries = []
+ if self.library_dirs is None:
+ self.library_dirs = []
+ elif isinstance(self.library_dirs, str):
+ self.library_dirs = self.library_dirs.split(os.pathsep)
+
+ if self.rpath is None:
+ self.rpath = []
+ elif isinstance(self.rpath, str):
+ self.rpath = self.rpath.split(os.pathsep)
+
+ # For extensions under Windows, use different directories
+ # for Release and Debug builds.
+ # Python's library directory must also be appended to library_dirs.
+ if os.name == 'nt':
+ # the 'libs' directory is for binary installs - we assume that
+ # must be the *native* platform. But we don't really support
+ # cross-compiling via a binary install anyway, so we let it go.
+ self.library_dirs.append(os.path.join(sys.exec_prefix, 'libs'))
+ if self.debug:
+ self.build_temp = os.path.join(self.build_temp, "Debug")
+ else:
+ self.build_temp = os.path.join(self.build_temp, "Release")
+
+ # Append the source distribution include and library directories,
+ # this allows distutils on windows to work in the source tree
+ self.include_dirs.append(os.path.join(sys.exec_prefix, 'PC'))
+ if MSVC_VERSION == 9:
+ # Use the .lib files for the correct architecture
+ if self.plat_name == 'win32':
+ suffix = ''
+ else:
+ # win-amd64 or win-ia64
+ suffix = self.plat_name[4:]
+ new_lib = os.path.join(sys.exec_prefix, 'PCbuild')
+ if suffix:
+ new_lib = os.path.join(new_lib, suffix)
+ self.library_dirs.append(new_lib)
+
+ elif MSVC_VERSION == 8:
+ self.library_dirs.append(os.path.join(sys.exec_prefix,
+ 'PC', 'VS8.0'))
+ elif MSVC_VERSION == 7:
+ self.library_dirs.append(os.path.join(sys.exec_prefix,
+ 'PC', 'VS7.1'))
+ else:
+ self.library_dirs.append(os.path.join(sys.exec_prefix,
+ 'PC', 'VC6'))
+
+ # OS/2 (EMX) doesn't support Debug vs Release builds, but has the
+ # import libraries in its "Config" subdirectory
+ if os.name == 'os2':
+ self.library_dirs.append(os.path.join(sys.exec_prefix, 'Config'))
+
+ # For extensions under Cygwin and AtheOS, Python's library directory
+ # must be appended to library_dirs
+ if sys.platform[:6] == 'cygwin' or sys.platform[:6] == 'atheos':
+ if sys.executable.startswith(os.path.join(sys.exec_prefix, "bin")):
+ # building third party extensions
+ self.library_dirs.append(os.path.join(sys.prefix, "lib",
+ "python" + sysconfig.get_python_version(),
+ "config"))
+ else:
+ # building python standard extensions
+ self.library_dirs.append(os.curdir)
+
+ # for extensions under Linux or Solaris with a shared Python library,
+ # Python's library directory must be appended to library_dirs
+ if (sys.platform.startswith(('linux', 'gnu', 'sunos'))
+ and sysconfig.get_config_var('Py_ENABLE_SHARED')):
+ if sys.executable.startswith(os.path.join(sys.exec_prefix, "bin")):
+ # building third party extensions
+ self.library_dirs.append(sysconfig.get_config_var('LIBDIR'))
+ else:
+ # building python standard extensions
+ self.library_dirs.append(os.curdir)
+
+ # The argument parsing will result in self.define being a string, but
+ # it has to be a list of 2-tuples. All the preprocessor symbols
+ # specified by the 'define' option will be set to '1'. Multiple
+ # symbols can be separated with commas.
+
+ if self.define:
+ defines = self.define.split(',')
+ self.define = [(symbol, '1') for symbol in defines]
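+ # For illustration (not part of the original patch):
+ # --define FOO,BAR yields [('FOO', '1'), ('BAR', '1')]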
+
+ # The option for macros to undefine is also a string from the
+ # option parsing, but has to be a list. Multiple symbols can also
+ # be separated with commas here.
+ if self.undef:
+ self.undef = self.undef.split(',')
+
+ if self.swig_opts is None:
+ self.swig_opts = []
+ else:
+ self.swig_opts = self.swig_opts.split(' ')
+
+ # Finally add the user include and library directories if requested
+ if self.user:
+ user_include = os.path.join(site.USER_BASE, "include")
+ user_lib = os.path.join(site.USER_BASE, "lib")
+ if os.path.isdir(user_include):
+ self.include_dirs.append(user_include)
+ if os.path.isdir(user_lib):
+ self.library_dirs.append(user_lib)
+ self.rpath.append(user_lib)
+
+ def run(self):
+ from packaging.compiler import new_compiler
+
+ if not self.extensions:
+ return
+
+ # If we were asked to build any C/C++ libraries, make sure that the
+ # directory where we put them is in the library search path for
+ # linking extensions.
+ if self.distribution.has_c_libraries():
+ build_clib = self.get_finalized_command('build_clib')
+ self.libraries.extend(build_clib.get_library_names() or [])
+ self.library_dirs.append(build_clib.build_clib)
+
+ # Setup the CCompiler object that we'll use to do all the
+ # compiling and linking
+ self.compiler_obj = new_compiler(compiler=self.compiler,
+ dry_run=self.dry_run,
+ force=self.force)
+
+ customize_compiler(self.compiler_obj)
+ # If we are cross-compiling, init the compiler now (if we are not
+ # cross-compiling, init would not hurt, but people may rely on
+ # late initialization of compiler even if they shouldn't...)
+ if os.name == 'nt' and self.plat_name != get_platform():
+ self.compiler_obj.initialize(self.plat_name)
+
+ # And make sure that any compile/link-related options (which might
+ # come from the command line or from the setup script) are set in
+ # that CCompiler object -- that way, they automatically apply to
+ # all compiling and linking done here.
+ if self.include_dirs is not None:
+ self.compiler_obj.set_include_dirs(self.include_dirs)
+ if self.define is not None:
+ # 'define' option is a list of (name,value) tuples
+ for name, value in self.define:
+ self.compiler_obj.define_macro(name, value)
+ if self.undef is not None:
+ for macro in self.undef:
+ self.compiler_obj.undefine_macro(macro)
+ if self.libraries is not None:
+ self.compiler_obj.set_libraries(self.libraries)
+ if self.library_dirs is not None:
+ self.compiler_obj.set_library_dirs(self.library_dirs)
+ if self.rpath is not None:
+ self.compiler_obj.set_runtime_library_dirs(self.rpath)
+ if self.link_objects is not None:
+ self.compiler_obj.set_link_objects(self.link_objects)
+
+ # Now actually compile and link everything.
+ self.build_extensions()
+
+ def get_source_files(self):
+ filenames = []
+
+ # Wouldn't it be neat if we knew the names of header files too...
+ for ext in self.extensions:
+ filenames.extend(ext.sources)
+
+ return filenames
+
+ def get_outputs(self):
+ # And build the list of output (built) filenames. Note that this
+ # ignores the 'inplace' flag, and assumes everything goes in the
+ # "build" tree.
+ outputs = []
+ for ext in self.extensions:
+ outputs.append(self.get_ext_fullpath(ext.name))
+ return outputs
+
+ def build_extensions(self):
+ for ext in self.extensions:
+ try:
+ self.build_extension(ext)
+ except (CCompilerError, PackagingError, CompileError) as e:
+ if not ext.optional:
+ raise
+ logger.warning('%s: building extension %r failed: %s',
+ self.get_command_name(), ext.name, e)
+
+ def build_extension(self, ext):
+ sources = ext.sources
+ if sources is None or not isinstance(sources, (list, tuple)):
+ raise PackagingSetupError(("in 'ext_modules' option (extension '%s'), " +
+ "'sources' must be present and must be " +
+ "a list of source filenames") % ext.name)
+ sources = list(sources)
+
+ ext_path = self.get_ext_fullpath(ext.name)
+ depends = sources + ext.depends
+ if not (self.force or newer_group(depends, ext_path, 'newer')):
+ logger.debug("skipping '%s' extension (up-to-date)", ext.name)
+ return
+ else:
+ logger.info("building '%s' extension", ext.name)
+
+ # First, scan the sources for SWIG definition files (.i), run
+ # SWIG on 'em to create .c files, and modify the sources list
+ # accordingly.
+ sources = self.swig_sources(sources, ext)
+
+ # Next, compile the source code to object files.
+
+ # XXX not honouring 'define_macros' or 'undef_macros' -- the
+ # CCompiler API needs to change to accommodate this, and I
+ # want to do one thing at a time!
+
+ # Two possible sources for extra compiler arguments:
+ # - 'extra_compile_args' in Extension object
+ # - CFLAGS environment variable (not particularly
+ # elegant, but people seem to expect it and I
+ # guess it's useful)
+ # The environment variable should take precedence, and
+ # any sensible compiler will give precedence to later
+ # command-line args. Hence we combine them in order:
+ extra_args = ext.extra_compile_args or []
+
+ macros = ext.define_macros[:]
+ for undef in ext.undef_macros:
+ macros.append((undef,))
+
+ objects = self.compiler_obj.compile(sources,
+ output_dir=self.build_temp,
+ macros=macros,
+ include_dirs=ext.include_dirs,
+ debug=self.debug,
+ extra_postargs=extra_args,
+ depends=ext.depends)
+
+ # XXX -- this is a Vile HACK!
+ #
+ # The setup.py script for Python on Unix needs to be able to
+ # get this list so it can perform all the clean up needed to
+ # avoid keeping object files around when cleaning out a failed
+ # build of an extension module. Since Packaging does not
+ # track dependencies, we have to get rid of intermediates to
+ # ensure all the intermediates will be properly re-built.
+ #
+ self._built_objects = objects[:]
+
+ # Now link the object files together into a "shared object" --
+ # of course, first we have to figure out all the other things
+ # that go into the mix.
+ if ext.extra_objects:
+ objects.extend(ext.extra_objects)
+ extra_args = ext.extra_link_args or []
+
+ # Detect target language, if not provided
+ language = ext.language or self.compiler_obj.detect_language(sources)
+
+ self.compiler_obj.link_shared_object(
+ objects, ext_path,
+ libraries=self.get_libraries(ext),
+ library_dirs=ext.library_dirs,
+ runtime_library_dirs=ext.runtime_library_dirs,
+ extra_postargs=extra_args,
+ export_symbols=self.get_export_symbols(ext),
+ debug=self.debug,
+ build_temp=self.build_temp,
+ target_lang=language)
+
+ def swig_sources(self, sources, extension):
+ """Walk the list of source files in 'sources', looking for SWIG
+ interface (.i) files. Run SWIG on all that are found, and
+ return a modified 'sources' list with SWIG source files replaced
+ by the generated C (or C++) files.
+ """
+ new_sources = []
+ swig_sources = []
+ swig_targets = {}
+
+ # XXX this drops generated C/C++ files into the source tree, which
+ # is fine for developers who want to distribute the generated
+ # source -- but there should be an option to put SWIG output in
+ # the temp dir.
+
+ if ('-c++' in self.swig_opts or '-c++' in extension.swig_opts):
+ target_ext = '.cpp'
+ else:
+ target_ext = '.c'
+
+ for source in sources:
+ base, ext = os.path.splitext(source)
+ if ext == ".i": # SWIG interface file
+ new_sources.append(base + '_wrap' + target_ext)
+ swig_sources.append(source)
+ swig_targets[source] = new_sources[-1]
+ else:
+ new_sources.append(source)
+
+ if not swig_sources:
+ return new_sources
+
+ swig = self.swig or self.find_swig()
+ swig_cmd = [swig, "-python"]
+ swig_cmd.extend(self.swig_opts)
+
+ # Do not override command-line arguments
+ if not self.swig_opts:
+ for o in extension.swig_opts:
+ swig_cmd.append(o)
+
+ for source in swig_sources:
+ target = swig_targets[source]
+ logger.info("swigging %s to %s", source, target)
+ self.spawn(swig_cmd + ["-o", target, source])
+
+ return new_sources
+
+ def find_swig(self):
+ """Return the name of the SWIG executable. On Unix, this is
+ just "swig" -- it should be in the PATH. Tries a bit harder on
+ Windows.
+ """
+
+ if os.name == "posix":
+ return "swig"
+ elif os.name == "nt":
+
+ # Look for SWIG in its standard installation directory on
+ # Windows (or so I presume!). If we find it there, great;
+ # if not, act like Unix and assume it's in the PATH.
+ for vers in ("1.3", "1.2", "1.1"):
+ fn = os.path.join("c:\\swig%s" % vers, "swig.exe")
+ if os.path.isfile(fn):
+ return fn
+ else:
+ return "swig.exe"
+
+ elif os.name == "os2":
+ # assume swig available in the PATH.
+ return "swig.exe"
+
+ else:
+ raise PackagingPlatformError(("I don't know how to find (much less run) SWIG "
+ "on platform '%s'") % os.name)
+
+ # -- Name generators -----------------------------------------------
+ # (extension names, filenames, whatever)
+ def get_ext_fullpath(self, ext_name):
+ """Returns the path of the filename for a given extension.
+
+ The file is located in `build_lib` or directly in the package
+ (inplace option).
+ """
+ fullname = self.get_ext_fullname(ext_name)
+ modpath = fullname.split('.')
+ filename = self.get_ext_filename(modpath[-1])
+
+ if not self.inplace:
+ # no further work needed; return
+ # build_dir/package/path/filename
+ filename = os.path.join(*(modpath[:-1] + [filename]))
+ return os.path.join(self.build_lib, filename)
+
+ # the inplace option requires finding the package directory;
+ # use the build_py command for that
+ package = '.'.join(modpath[:-1])
+ build_py = self.get_finalized_command('build_py')
+ package_dir = os.path.abspath(build_py.get_package_dir(package))
+
+ # returning
+ # package_dir/filename
+ return os.path.join(package_dir, filename)
+
+ def get_ext_fullname(self, ext_name):
+ """Returns the fullname of a given extension name.
+
+ Adds the `package.` prefix"""
+ if self.package is None:
+ return ext_name
+ else:
+ return self.package + '.' + ext_name
+
+ def get_ext_filename(self, ext_name):
+ r"""Convert the name of an extension (eg. "foo.bar") into the name
+ of the file from which it will be loaded (eg. "foo/bar.so", or
+ "foo\bar.pyd").
+ """
+ ext_path = ext_name.split('.')
+ # OS/2 has an 8 character module (extension) limit :-(
+ if os.name == "os2":
+ ext_path[-1] = ext_path[-1][:8]
+ # extensions in debug_mode are named 'module_d.pyd' under windows
+ so_ext = sysconfig.get_config_var('SO')
+ if os.name == 'nt' and self.debug:
+ return os.path.join(*ext_path) + '_d' + so_ext
+ return os.path.join(*ext_path) + so_ext
+
+ def get_export_symbols(self, ext):
+ """Return the list of symbols that a shared extension has to
+ export. This either uses 'ext.export_symbols' or, if it's not
+ provided, "init" + module_name. Only relevant on Windows, where
+ the .pyd file (DLL) must export the module "init" function.
+ """
+ initfunc_name = "PyInit_" + ext.name.split('.')[-1]
+ if initfunc_name not in ext.export_symbols:
+ ext.export_symbols.append(initfunc_name)
+ return ext.export_symbols
+
+ def get_libraries(self, ext):
+ """Return the list of libraries to link against when building a
+ shared extension. On most platforms, this is just 'ext.libraries';
+ on Windows and OS/2, we add the Python library (eg. pythonXY.dll).
+ """
+ # The python library is always needed on Windows. For MSVC, this
+ # is redundant, since the library is mentioned in a pragma in
+ # pyconfig.h that MSVC groks. The other Windows compilers all seem
+ # to need it mentioned explicitly, though, so that's what we do.
+ # Append '_d' to the python import library on debug builds.
+ if sys.platform == "win32":
+ from packaging.compiler.msvccompiler import MSVCCompiler
+ if not isinstance(self.compiler_obj, MSVCCompiler):
+ template = "python%d%d"
+ if self.debug:
+ template = template + '_d'
+ pythonlib = template % sys.version_info[:2]
+ # don't extend ext.libraries, it may be shared with other
+ # extensions, it is a reference to the original list
+ return ext.libraries + [pythonlib]
+ else:
+ return ext.libraries
+ elif sys.platform == "os2emx":
+ # EMX/GCC requires the python library explicitly, and I
+ # believe VACPP does as well (though not confirmed) - AIM Apr01
+ template = "python%d%d"
+ # debug versions of the main DLL aren't supported, at least
+ # not at this time - AIM Apr01
+ #if self.debug:
+ # template = template + '_d'
+ pythonlib = template % sys.version_info[:2]
+ # don't extend ext.libraries, it may be shared with other
+ # extensions, it is a reference to the original list
+ return ext.libraries + [pythonlib]
+ elif sys.platform[:6] == "cygwin":
+ template = "python%d.%d"
+ pythonlib = template % sys.version_info[:2]
+ # don't extend ext.libraries, it may be shared with other
+ # extensions, it is a reference to the original list
+ return ext.libraries + [pythonlib]
+ elif sys.platform[:6] == "atheos":
+ template = "python%d.%d"
+ pythonlib = template % sys.version_info[:2]
+ # Get SHLIBS from Makefile
+ extra = []
+ for lib in sysconfig.get_config_var('SHLIBS').split():
+ if lib.startswith('-l'):
+ extra.append(lib[2:])
+ else:
+ extra.append(lib)
+ # don't extend ext.libraries, it may be shared with other
+ # extensions, it is a reference to the original list
+ return ext.libraries + [pythonlib, "m"] + extra
+
+ elif sys.platform == 'darwin':
+ # Don't use the default code below
+ return ext.libraries
+
+ else:
+ if sysconfig.get_config_var('Py_ENABLE_SHARED'):
+ template = 'python%d.%d' + sys.abiflags
+ pythonlib = template % sys.version_info[:2]
+ return ext.libraries + [pythonlib]
+ else:
+ return ext.libraries
diff --git a/Lib/packaging/command/build_py.py b/Lib/packaging/command/build_py.py
new file mode 100644
index 0000000..0062140
--- /dev/null
+++ b/Lib/packaging/command/build_py.py
@@ -0,0 +1,392 @@
+"""Build pure Python modules (just copy to build directory)."""
+
+import os
+import imp
+from glob import glob
+
+from packaging import logger
+from packaging.command.cmd import Command
+from packaging.errors import PackagingOptionError, PackagingFileError
+from packaging.util import convert_path
+from packaging.compat import Mixin2to3
+
+# marking public APIs
+__all__ = ['build_py']
+
+
+class build_py(Command, Mixin2to3):
+
+ description = "build pure Python modules (copy to build directory)"
+
+ # Byte compilation is controlled by two independent sets of options;
+ # more info in install_lib or the reST docs
+
+ user_options = [
+ ('build-lib=', 'd', "directory to build (copy) to"),
+ ('compile', 'c', "compile .py to .pyc"),
+ ('no-compile', None, "don't compile .py files [default]"),
+ ('optimize=', 'O',
+ "also compile with optimization: -O1 for \"python -O\", "
+ "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
+ ('force', 'f', "forcibly build everything (ignore file timestamps)"),
+ ('use-2to3', None,
+ "use 2to3 to make source python 3.x compatible"),
+ ('convert-2to3-doctests', None,
+ "use 2to3 to convert doctests in separate text files"),
+ ('use-2to3-fixers', None,
+ "list additional fixers opted for during 2to3 conversion"),
+ ]
+
+ boolean_options = ['compile', 'force']
+
+ negative_opt = {'no-compile': 'compile'}
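+ # For illustration (not part of the original patch): passing
+ # --no-compile on the command line sets self.compile to False.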
+
+ def initialize_options(self):
+ self.build_lib = None
+ self.py_modules = None
+ self.package = None
+ self.package_data = None
+ self.package_dir = None
+ self.compile = False
+ self.optimize = 0
+ self.force = None
+ self._updated_files = []
+ self._doctests_2to3 = []
+ self.use_2to3 = False
+ self.convert_2to3_doctests = None
+ self.use_2to3_fixers = None
+
+ def finalize_options(self):
+ self.set_undefined_options('build',
+ 'use_2to3', 'use_2to3_fixers',
+ 'convert_2to3_doctests', 'build_lib',
+ 'force')
+
+ # Get the distribution options that are aliases for build_py
+ # options -- list of packages and list of modules.
+ self.packages = self.distribution.packages
+ self.py_modules = self.distribution.py_modules
+ self.package_data = self.distribution.package_data
+ self.package_dir = None
+ if self.distribution.package_dir is not None:
+ self.package_dir = convert_path(self.distribution.package_dir)
+ self.data_files = self.get_data_files()
+
+ # Ick, copied straight from install_lib.py (fancy_getopt needs a
+ # type system! Hell, *everything* needs a type system!!!)
+ if not isinstance(self.optimize, int):
+ try:
+ self.optimize = int(self.optimize)
+ assert 0 <= self.optimize <= 2
+ except (ValueError, AssertionError):
+ raise PackagingOptionError("optimize must be 0, 1, or 2")
+
+ def run(self):
+ # XXX copy_file by default preserves atime and mtime. IMHO this is
+ # the right thing to do, but perhaps it should be an option -- in
+ # particular, a site administrator might want installed files to
+ # reflect the time of installation rather than the last
+ # modification time before the installed release.
+
+ # XXX copy_file by default preserves mode, which appears to be the
+ # wrong thing to do: if a file is read-only in the working
+ # directory, we want it to be installed read/write so that the next
+ # installation of the same module distribution can overwrite it
+ # without problems. (This might be a Unix-specific issue.) Thus
+ # we turn off 'preserve_mode' when copying to the build directory,
+ # since the build directory is supposed to be exactly what the
+ # installation will look like (ie. we preserve mode when
+ # installing).
+
+ # Two options control which modules will be installed: 'packages'
+ # and 'py_modules'. The former lets us work with whole packages, not
+ # specifying individual modules at all; the latter is for
+ # specifying modules one-at-a-time.
+
+ if self.py_modules:
+ self.build_modules()
+ if self.packages:
+ self.build_packages()
+ self.build_package_data()
+
+ if self.use_2to3 and self._updated_files:
+ self.run_2to3(self._updated_files, self._doctests_2to3,
+ self.use_2to3_fixers)
+
+ self.byte_compile(self.get_outputs(include_bytecode=False),
+ prefix=self.build_lib)
+
+ # -- Top-level worker functions ------------------------------------
+
+ def get_data_files(self):
+ """Generate list of '(package,src_dir,build_dir,filenames)' tuples.
+
+ Helper function for finalize_options.
+ """
+ data = []
+ if not self.packages:
+ return data
+ for package in self.packages:
+ # Locate package source directory
+ src_dir = self.get_package_dir(package)
+
+ # Compute package build directory
+ build_dir = os.path.join(*([self.build_lib] + package.split('.')))
+
+ # Length of path to strip from found files
+ plen = 0
+ if src_dir:
+ plen = len(src_dir) + 1
+
+ # Strip directory from globbed filenames
+ filenames = [
+ file[plen:] for file in self.find_data_files(package, src_dir)
+ ]
+ data.append((package, src_dir, build_dir, filenames))
+ return data
+
+ def find_data_files(self, package, src_dir):
+ """Return filenames for package's data files in 'src_dir'.
+
+ Helper function for get_data_files.
+ """
+ globs = (self.package_data.get('', [])
+ + self.package_data.get(package, []))
+ files = []
+ for pattern in globs:
+ # Each pattern has to be converted to a platform-specific path
+ filelist = glob(os.path.join(src_dir, convert_path(pattern)))
+ # Files that match more than one pattern are only added once
+ files.extend(fn for fn in filelist if fn not in files)
+ return files
+
+ def build_package_data(self):
+ """Copy data files into build directory.
+
+ Helper function for run.
+ """
+ # FIXME add tests for this method
+ for package, src_dir, build_dir, filenames in self.data_files:
+ for filename in filenames:
+ target = os.path.join(build_dir, filename)
+ srcfile = os.path.join(src_dir, filename)
+ self.mkpath(os.path.dirname(target))
+ outf, copied = self.copy_file(srcfile,
+ target, preserve_mode=False)
+ doctests = self.distribution.convert_2to3_doctests
+ if copied and srcfile in doctests:
+ self._doctests_2to3.append(outf)
+
+ # XXX - this should be moved to the Distribution class as it is not
+ # only needed for build_py. It also has no dependencies on this class.
+ def get_package_dir(self, package):
+ """Return the directory, relative to the top of the source
+ distribution, where package 'package' should be found
+ (at least according to the 'package_dir' option, if any).
+ """
+ path = package.split('.')
+ if self.package_dir is not None:
+ path.insert(0, self.package_dir)
+
+ if len(path) > 0:
+ return os.path.join(*path)
+
+ return ''
+
+ def check_package(self, package, package_dir):
+ """Helper function for find_package_modules and find_modules."""
+ # Empty dir name means current directory, which we can probably
+ # assume exists. Also, os.path.exists and isdir don't know about
+ # my "empty string means current dir" convention, so we have to
+ # circumvent them.
+ if package_dir != "":
+ if not os.path.exists(package_dir):
+ raise PackagingFileError(
+ "package directory '%s' does not exist" % package_dir)
+ if not os.path.isdir(package_dir):
+ raise PackagingFileError(
+ "supposed package directory '%s' exists, "
+ "but is not a directory" % package_dir)
+
+ # Require __init__.py for all but the "root package"
+ if package:
+ init_py = os.path.join(package_dir, "__init__.py")
+ if os.path.isfile(init_py):
+ return init_py
+ else:
+ logger.warning("package init file %r not found "
+ "(or not a regular file)", init_py)
+
+ # Either not in a package at all (__init__.py not expected), or
+ # __init__.py doesn't exist -- so don't return the filename.
+ return None
+
+ def check_module(self, module, module_file):
+ if not os.path.isfile(module_file):
+ logger.warning("file %r (for module %r) not found",
+ module_file, module)
+ return False
+ else:
+ return True
+
+ def find_package_modules(self, package, package_dir):
+ self.check_package(package, package_dir)
+ module_files = glob(os.path.join(package_dir, "*.py"))
+ modules = []
+ if self.distribution.script_name is not None:
+ setup_script = os.path.abspath(self.distribution.script_name)
+ else:
+ setup_script = None
+
+ for f in module_files:
+ abs_f = os.path.abspath(f)
+ if abs_f != setup_script:
+ module = os.path.splitext(os.path.basename(f))[0]
+ modules.append((package, module, f))
+ else:
+ logger.debug("excluding %r", setup_script)
+ return modules
+
+ def find_modules(self):
+ """Finds individually-specified Python modules, ie. those listed by
+ module name in 'self.py_modules'. Returns a list of tuples (package,
+ module_base, filename): 'package' is a tuple of the path through
+ package-space to the module; 'module_base' is the bare (no
+ packages, no dots) module name, and 'filename' is the path to the
+ ".py" file (relative to the distribution root) that implements the
+ module.
+ """
+ # Map package names to tuples of useful info about the package:
+ # (package_dir, checked)
+ # package_dir - the directory where we'll find source files for
+ # this package
+ # checked - true if we have checked that the package directory
+ # is valid (exists, contains __init__.py, ... ?)
+ packages = {}
+
+ # List of (package, module, filename) tuples to return
+ modules = []
+
+ # We treat modules-in-packages almost the same as toplevel modules,
+ # just the "package" for a toplevel is empty (either an empty
+ # string or empty list, depending on context). Differences:
+ # - don't check for __init__.py in directory for empty package
+ for module in self.py_modules:
+ path = module.split('.')
+ package = '.'.join(path[0:-1])
+ module_base = path[-1]
+
+ try:
+ package_dir, checked = packages[package]
+ except KeyError:
+ package_dir = self.get_package_dir(package)
+ checked = False
+
+ if not checked:
+ init_py = self.check_package(package, package_dir)
+ packages[package] = (package_dir, True)
+ if init_py:
+ modules.append((package, "__init__", init_py))
+
+ # XXX perhaps we should also check for just .pyc files
+ # (so greedy closed-source bastards can distribute Python
+ # modules too)
+ module_file = os.path.join(package_dir, module_base + ".py")
+ if not self.check_module(module, module_file):
+ continue
+
+ modules.append((package, module_base, module_file))
+
+ return modules
+
+ def find_all_modules(self):
+ """Compute the list of all modules that will be built, whether
+ they are specified one-module-at-a-time ('self.py_modules') or
+ by whole packages ('self.packages'). Return a list of tuples
+ (package, module, module_file), just like 'find_modules()' and
+ 'find_package_modules()' do."""
+ modules = []
+ if self.py_modules:
+ modules.extend(self.find_modules())
+ if self.packages:
+ for package in self.packages:
+ package_dir = self.get_package_dir(package)
+ m = self.find_package_modules(package, package_dir)
+ modules.extend(m)
+ return modules
+
+ def get_source_files(self):
+ sources = [module[-1] for module in self.find_all_modules()]
+ sources += [
+ os.path.join(src_dir, filename)
+ for package, src_dir, build_dir, filenames in self.data_files
+ for filename in filenames]
+ return sources
+
+ def get_module_outfile(self, build_dir, package, module):
+ outfile_path = [build_dir] + list(package) + [module + ".py"]
+ return os.path.join(*outfile_path)
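+
+ # For illustration (not part of the original patch):
+ # get_module_outfile('build/lib', ['pkg'], 'mod') returns
+ # os.path.join('build/lib', 'pkg', 'mod.py').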
+
+ def get_outputs(self, include_bytecode=True):
+ modules = self.find_all_modules()
+ outputs = []
+ for package, module, module_file in modules:
+ package = package.split('.')
+ filename = self.get_module_outfile(self.build_lib, package, module)
+ outputs.append(filename)
+ if include_bytecode:
+ if self.compile:
+ outputs.append(imp.cache_from_source(filename, True))
+ if self.optimize:
+ outputs.append(imp.cache_from_source(filename, False))
+
+ outputs += [
+ os.path.join(build_dir, filename)
+ for package, src_dir, build_dir, filenames in self.data_files
+ for filename in filenames]
+
+ return outputs
+
+ def build_module(self, module, module_file, package):
+ if isinstance(package, str):
+ package = package.split('.')
+ elif not isinstance(package, (list, tuple)):
+ raise TypeError(
+ "'package' must be a string (dot-separated), list, or tuple")
+
+ # Now put the module source file into the "build" area -- this is
+ # easy, we just copy it somewhere under self.build_lib (the build
+ # directory for Python source).
+ outfile = self.get_module_outfile(self.build_lib, package, module)
+ dir = os.path.dirname(outfile)
+ self.mkpath(dir)
+ return self.copy_file(module_file, outfile, preserve_mode=False)
+
+ def build_modules(self):
+ modules = self.find_modules()
+ for package, module, module_file in modules:
+ # Now "build" the module -- ie. copy the source file to
+ # self.build_lib (the build directory for Python source).
+ # (Actually, it gets copied to the directory for this package
+ # under self.build_lib.)
+ self.build_module(module, module_file, package)
+
+ def build_packages(self):
+ for package in self.packages:
+ # Get list of (package, module, module_file) tuples based on
+ # scanning the package directory. 'package' is only included
+ # in the tuple so that 'find_modules()' and
+ # 'find_package_modules()' have a consistent interface; it's
+ # ignored here (apart from a sanity check). Also, 'module' is
+ # the *unqualified* module name (ie. no dots, no package -- we
+ # already know its package!), and 'module_file' is the path to
+ # the .py file, relative to the current directory
+ # (ie. including 'package_dir').
+ package_dir = self.get_package_dir(package)
+ modules = self.find_package_modules(package, package_dir)
+
+ # Now loop over the modules we found, "building" each one (just
+ # copy it to self.build_lib).
+ for package_, module, module_file in modules:
+ assert package == package_
+ self.build_module(module, module_file, package)
diff --git a/Lib/packaging/command/build_scripts.py b/Lib/packaging/command/build_scripts.py
new file mode 100644
index 0000000..d651ae0
--- /dev/null
+++ b/Lib/packaging/command/build_scripts.py
@@ -0,0 +1,154 @@
+"""Build scripts (copy to build dir and fix up shebang line)."""
+
+import os
+import re
+import sysconfig
+from tokenize import detect_encoding
+
+from packaging.command.cmd import Command
+from packaging.util import convert_path, newer
+from packaging import logger
+from packaging.compat import Mixin2to3
+
+
+# regex matching a shebang line that invokes Python
+first_line_re = re.compile(b'^#!.*python[0-9.]*([ \t].*)?$')
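+# For illustration (not part of the original patch): this matches
+# b"#!/usr/bin/python3 -u" with group(1) == b" -u", but not b"#!/bin/sh".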
+
+class build_scripts(Command, Mixin2to3):
+
+ description = "build scripts (copy and fix up shebang line)"
+
+ user_options = [
+ ('build-dir=', 'd', "directory to build (copy) to"),
+ ('force', 'f', "forcibly build everything (ignore file timestamps"),
+ ('executable=', 'e', "specify final destination interpreter path"),
+ ]
+
+ boolean_options = ['force']
+
+ def initialize_options(self):
+ self.build_dir = None
+ self.scripts = None
+ self.force = None
+ self.executable = None
+ self.outfiles = None
+ self.use_2to3 = False
+ self.convert_2to3_doctests = None
+ self.use_2to3_fixers = None
+
+ def finalize_options(self):
+ self.set_undefined_options('build',
+ ('build_scripts', 'build_dir'),
+ 'use_2to3', 'use_2to3_fixers',
+ 'convert_2to3_doctests', 'force',
+ 'executable')
+ self.scripts = self.distribution.scripts
+
+ def get_source_files(self):
+ return self.scripts
+
+ def run(self):
+ if not self.scripts:
+ return
+ copied_files = self.copy_scripts()
+ if self.use_2to3 and copied_files:
+ self._run_2to3(copied_files, fixers=self.use_2to3_fixers)
+
+ def copy_scripts(self):
+ """Copy each script listed in 'self.scripts'; if it's marked as a
+ Python script in the Unix way (first line matches 'first_line_re',
+ ie. starts with "\#!" and contains "python"), then adjust the first
+ line to refer to the current Python interpreter as we copy.
+ """
+ self.mkpath(self.build_dir)
+ outfiles = []
+ for script in self.scripts:
+ adjust = False
+ script = convert_path(script)
+ outfile = os.path.join(self.build_dir, os.path.basename(script))
+ outfiles.append(outfile)
+
+ if not self.force and not newer(script, outfile):
+ logger.debug("not copying %s (up-to-date)", script)
+ continue
+
+ # Always open the file, but ignore failures in dry-run mode --
+ # that way, we'll get accurate feedback if we can read the
+ # script.
+ try:
+ f = open(script, "rb")
+ except IOError:
+ if not self.dry_run:
+ raise
+ f = None
+ else:
+ encoding, lines = detect_encoding(f.readline)
+ f.seek(0)
+ first_line = f.readline()
+ if not first_line:
+ logger.warning('%s: %s is an empty file (skipping)',
+ self.get_command_name(), script)
+ continue
+
+ match = first_line_re.match(first_line)
+ if match:
+ adjust = True
+ post_interp = match.group(1) or b''
+
+ if adjust:
+ logger.info("copying and adjusting %s -> %s", script,
+ self.build_dir)
+ if not self.dry_run:
+ if not sysconfig.is_python_build():
+ executable = self.executable
+ else:
+ executable = os.path.join(
+ sysconfig.get_config_var("BINDIR"),
+ "python%s%s" % (sysconfig.get_config_var("VERSION"),
+ sysconfig.get_config_var("EXE")))
+ executable = os.fsencode(executable)
+ shebang = b"#!" + executable + post_interp + b"\n"
+ # Python parser starts to read a script using UTF-8 until
+ # it gets a #coding:xxx cookie. The shebang has to be the
+ # first line of a file, the #coding:xxx cookie cannot be
+ # written before. So the shebang has to be decodable from
+ # UTF-8.
+ try:
+ shebang.decode('utf-8')
+ except UnicodeDecodeError:
+ raise ValueError(
+ "The shebang ({!r}) is not decodable "
+ "from utf-8".format(shebang))
+ # If the script is encoded to a custom encoding (use a
+ # #coding:xxx cookie), the shebang has to be decodable from
+ # the script encoding too.
+ try:
+ shebang.decode(encoding)
+ except UnicodeDecodeError:
+ raise ValueError(
+ "The shebang ({!r}) is not decodable "
+ "from the script encoding ({})"
+ .format(shebang, encoding))
+ with open(outfile, "wb") as outf:
+ outf.write(shebang)
+ outf.writelines(f.readlines())
+ if f:
+ f.close()
+ else:
+ if f:
+ f.close()
+ self.copy_file(script, outfile)
+
+ if os.name == 'posix':
+ for file in outfiles:
+ if self.dry_run:
+ logger.info("changing mode of %s", file)
+ else:
+ oldmode = os.stat(file).st_mode & 0o7777
+ newmode = (oldmode | 0o555) & 0o7777
+ if newmode != oldmode:
+ logger.info("changing mode of %s from %o to %o",
+ file, oldmode, newmode)
+ os.chmod(file, newmode)
+ return outfiles
diff --git a/Lib/packaging/command/check.py b/Lib/packaging/command/check.py
new file mode 100644
index 0000000..6715db9
--- /dev/null
+++ b/Lib/packaging/command/check.py
@@ -0,0 +1,88 @@
+"""Check PEP compliance of metadata."""
+
+from packaging import logger
+from packaging.command.cmd import Command
+from packaging.errors import PackagingSetupError
+from packaging.util import resolve_name
+
+class check(Command):
+
+ description = "check PEP compliance of metadata"
+
+ user_options = [('metadata', 'm', 'verify metadata'),
+ ('all', 'a',
+ 'run extended set of checks'),
+ ('strict', 's',
+ 'exit with an error if a check fails')]
+
+ boolean_options = ['metadata', 'all', 'strict']
+
+ def initialize_options(self):
+ """Sets default values for options."""
+ self.all = False
+ self.metadata = True
+ self.strict = False
+ self._warnings = []
+
+ def finalize_options(self):
+ pass
+
+ def warn(self, msg, *args):
+ """Wrapper around logging that also remembers messages."""
+ # XXX we could use a special handler for this, but would need to test
+ # if it works even if the logger's level is set too high
+ self._warnings.append((msg, args))
+ return logger.warning('%s: %s' % (self.get_command_name(), msg), *args)
+
+ def run(self):
+ """Runs the command."""
+ # perform the various tests
+ if self.metadata:
+ self.check_metadata()
+ if self.all:
+ self.check_restructuredtext()
+ self.check_hooks_resolvable()
+
+ # let's raise an error in strict mode, if we have at least
+ # one warning
+ if self.strict and len(self._warnings) > 0:
+ msg = '\n'.join(msg % args for msg, args in self._warnings)
+ raise PackagingSetupError(msg)
+
+ def check_metadata(self):
+ """Ensures that all required elements of metadata are supplied.
+
+ name, version, URL, author
+
+ Warns if any are missing.
+ """
+ missing, warnings = self.distribution.metadata.check(strict=True)
+ if missing != []:
+ self.warn('missing required metadata: %s', ', '.join(missing))
+ for warning in warnings:
+ self.warn(warning)
+
+ def check_restructuredtext(self):
+ """Checks if the long string fields are reST-compliant."""
+ missing, warnings = self.distribution.metadata.check(restructuredtext=True)
+ if self.distribution.metadata.docutils_support:
+ for warning in warnings:
+ line = warning[-1].get('line')
+ if line is None:
+ warning = warning[1]
+ else:
+ warning = '%s (line %s)' % (warning[1], line)
+ self.warn(warning)
+ elif self.strict:
+ raise PackagingSetupError('The docutils package is needed.')
+
+ def check_hooks_resolvable(self):
+ for options in self.distribution.command_options.values():
+ for hook_kind in ("pre_hook", "post_hook"):
+ if hook_kind not in options:
+ continue
+ for hook_name in options[hook_kind][1].values():
+ try:
+ resolve_name(hook_name)
+ except ImportError:
+ self.warn('name %r cannot be resolved', hook_name)
diff --git a/Lib/packaging/command/clean.py b/Lib/packaging/command/clean.py
new file mode 100644
index 0000000..4f60f4e
--- /dev/null
+++ b/Lib/packaging/command/clean.py
@@ -0,0 +1,76 @@
+"""Clean up temporary files created by the build command."""
+
+# Contributed by Bastian Kleineidam <calvin@cs.uni-sb.de>
+
+import os
+from shutil import rmtree
+from packaging.command.cmd import Command
+from packaging import logger
+
+class clean(Command):
+
+ description = "clean up temporary files from 'build' command"
+ user_options = [
+ ('build-base=', 'b',
+ "base build directory (default: 'build.build-base')"),
+ ('build-lib=', None,
+ "build directory for all modules (default: 'build.build-lib')"),
+ ('build-temp=', 't',
+ "temporary build directory (default: 'build.build-temp')"),
+ ('build-scripts=', None,
+ "build directory for scripts (default: 'build.build-scripts')"),
+ ('bdist-base=', None,
+ "temporary directory for built distributions"),
+ ('all', 'a',
+ "remove all build output, not just temporary by-products")
+ ]
+
+ boolean_options = ['all']
+
+ def initialize_options(self):
+ self.build_base = None
+ self.build_lib = None
+ self.build_temp = None
+ self.build_scripts = None
+ self.bdist_base = None
+ self.all = None
+
+ def finalize_options(self):
+ self.set_undefined_options('build', 'build_base', 'build_lib',
+ 'build_scripts', 'build_temp')
+ self.set_undefined_options('bdist', 'bdist_base')
+
+ def run(self):
+ # remove the build/temp.<plat> directory (unless it's already
+ # gone)
+ if os.path.exists(self.build_temp):
+ if self.dry_run:
+ logger.info('removing %s', self.build_temp)
+ else:
+ rmtree(self.build_temp)
+ else:
+ logger.debug("'%s' does not exist -- can't clean it",
+ self.build_temp)
+
+ if self.all:
+ # remove build directories
+ for directory in (self.build_lib,
+ self.bdist_base,
+ self.build_scripts):
+ if os.path.exists(directory):
+ if self.dry_run:
+ logger.info('removing %s', directory)
+ else:
+ rmtree(directory)
+ else:
+ logger.warning("'%s' does not exist -- can't clean it",
+ directory)
+
+ # just for the heck of it, try to remove the base build directory:
+ # we might have emptied it right now, but if not we don't care
+ if not self.dry_run:
+ try:
+ os.rmdir(self.build_base)
+ logger.info("removing '%s'", self.build_base)
+ except OSError:
+ pass
diff --git a/Lib/packaging/command/cmd.py b/Lib/packaging/command/cmd.py
new file mode 100644
index 0000000..25e6a72
--- /dev/null
+++ b/Lib/packaging/command/cmd.py
@@ -0,0 +1,461 @@
+"""Base class for commands."""
+
+import os
+import re
+from shutil import copyfile, move, make_archive
+from packaging import util
+from packaging import logger
+from packaging.errors import PackagingOptionError
+
+
+class Command:
+ """Abstract base class for defining command classes, the "worker bees"
+ of Packaging. A useful analogy for command classes is to think of
+ them as subroutines with local variables called "options". The options
+ are "declared" in 'initialize_options()' and "defined" (given their
+ final values, aka "finalized") in 'finalize_options()', both of which
+ must be defined by every command class. The distinction between the
+ two is necessary because option values might come from the outside
+ world (command line, config file, ...), and any options dependent on
+ other options must be computed *after* these outside influences have
+ been processed -- hence 'finalize_options()'. The "body" of the
+ subroutine, where it does all its work based on the values of its
+ options, is the 'run()' method, which must also be implemented by every
+ command class.
+ """
+
+ # 'sub_commands' formalizes the notion of a "family" of commands,
+ # eg. "install_dist" as the parent with sub-commands "install_lib",
+ # "install_headers", etc. The parent of a family of commands
+ # defines 'sub_commands' as a class attribute; it's a list of
+ # (command_name : string, predicate : unbound_method | string | None)
+ # tuples, where 'predicate' is a method of the parent command that
+ # determines whether the corresponding command is applicable in the
+ # current situation. (Eg. we "install_headers" is only applicable if
+ # we have any C header files to install.) If 'predicate' is None,
+ # that command is always applicable.
+ #
+ # 'sub_commands' is usually defined at the *end* of a class, because
+ # predicates can be unbound methods, so they must already have been
+ # defined. The canonical example is the "install_dist" command.
+ sub_commands = []
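+ # For example, the 'build' command earlier in this patch declares:
+ # sub_commands = [('build_py', has_pure_modules), ...]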
+
+ # Pre and post command hooks are run just before or just after the command
+ # itself. They are simple functions that receive the command instance. They
+ # are specified as callable objects or dotted strings (for lazy loading).
+ pre_hook = None
+ post_hook = None
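+ # For illustration (not part of the original patch): a hook could be
+ # given as the dotted string 'mypackage.hooks.pre_build' (hypothetical
+ # name), which is resolved lazily.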
+
+ # -- Creation/initialization methods -------------------------------
+
+ def __init__(self, dist):
+ """Create and initialize a new Command object. Most importantly,
+ invokes the 'initialize_options()' method, which is the real
+ initializer and depends on the actual command being instantiated.
+ """
+ # late import because of mutual dependence between these classes
+ from packaging.dist import Distribution
+
+ if not isinstance(dist, Distribution):
+ raise TypeError("dist must be an instance of Distribution, not %r"
+ % type(dist))
+ if self.__class__ is Command:
+ raise RuntimeError("Command is an abstract class")
+
+ self.distribution = dist
+ self.initialize_options()
+
+ # Per-command versions of the global flags, so that the user can
+ # customize Packaging' behaviour command-by-command and let some
+ # commands fall back on the Distribution's behaviour. None means
+ # "not defined, check self.distribution's copy", while 0 or 1 mean
+ # false and true (duh). Note that this means figuring out the real
+ # value of each flag is a touch complicated -- hence "self._dry_run"
+ # will be handled by a property, below.
+ # XXX This needs to be fixed. [I changed it to a property--does that
+ # "fix" it?]
+ self._dry_run = None
+
+ # Some commands define a 'self.force' option to ignore file
+ # timestamps, but methods defined *here* assume that
+ # 'self.force' exists for all commands. So define it here
+ # just to be safe.
+ self.force = None
+
+ # The 'help' flag is just used for command line parsing, so
+ # none of that complicated bureaucracy is needed.
+ self.help = False
+
+ # 'finalized' records whether or not 'finalize_options()' has been
+ # called. 'finalize_options()' itself should not pay attention to
+ # this flag: it is the business of 'ensure_finalized()', which
+ # always calls 'finalize_options()', to respect/update it.
+ self.finalized = False
+
+ # XXX A more explicit way to customize dry_run would be better.
+ @property
+ def dry_run(self):
+ if self._dry_run is None:
+ return getattr(self.distribution, 'dry_run')
+ else:
+ return self._dry_run
+
+ def ensure_finalized(self):
+ if not self.finalized:
+ self.finalize_options()
+ self.finalized = True
+
+ # Subclasses must define:
+ # initialize_options()
+ # provide default values for all options; may be customized by
+ # setup script, by options from config file(s), or by command-line
+ # options
+ # finalize_options()
+ # decide on the final values for all options; this is called
+ # after all possible intervention from the outside world
+ # (command line, option file, etc.) has been processed
+ # run()
+ # run the command: do whatever it is we're here to do,
+ # controlled by the command's various option values
+
+ def initialize_options(self):
+ """Set default values for all the options that this command
+ supports. Note that these defaults may be overridden by other
+ commands, by the setup script, by config files, or by the
+ command line. Thus, this is not the place to code dependencies
+ between options; generally, 'initialize_options()' implementations
+ are just a bunch of "self.foo = None" assignments.
+
+ This method must be implemented by all command classes.
+ """
+ raise RuntimeError(
+ "abstract method -- subclass %s must override" % self.__class__)
+
+ def finalize_options(self):
+ """Set final values for all the options that this command supports.
+ This is always called as late as possible, ie. after any option
+ assignments from the command line or from other commands have been
+ done. Thus, this is the place to code option dependencies: if
+ 'foo' depends on 'bar', then it is safe to set 'foo' from 'bar' as
+ long as 'foo' still has the same value it was assigned in
+ 'initialize_options()'.
+
+ This method must be implemented by all command classes.
+ """
+ raise RuntimeError(
+ "abstract method -- subclass %s must override" % self.__class__)
+
+ def dump_options(self, header=None, indent=""):
+ if header is None:
+ header = "command options for '%s':" % self.get_command_name()
+ logger.info(indent + header)
+ indent = indent + " "
+ negative_opt = getattr(self, 'negative_opt', ())
+ for option, _, _ in self.user_options:
+ if option in negative_opt:
+ continue
+ option = option.replace('-', '_')
+ if option[-1] == "=":
+ option = option[:-1]
+ value = getattr(self, option)
+ logger.info(indent + "%s = %s", option, value)
+
+ def run(self):
+ """A command's raison d'etre: carry out the action it exists to
+ perform, controlled by the options initialized in
+ 'initialize_options()', customized by other commands, the setup
+ script, the command line and config files, and finalized in
+ 'finalize_options()'. All terminal output and filesystem
+ interaction should be done by 'run()'.
+
+ This method must be implemented by all command classes.
+ """
+ raise RuntimeError(
+ "abstract method -- subclass %s must override" % self.__class__)
+
+ # -- External interface --------------------------------------------
+ # (called by outsiders)
+
+ def get_source_files(self):
+ """Return the list of files that are used as inputs to this command,
+ i.e. the files used to generate the output files. The result is used
+ by the `sdist` command in determining the set of default files.
+
+ Command classes should implement this method if they operate on files
+ from the source tree.
+ """
+ return []
+
+ def get_outputs(self):
+ """Return the list of files that would be produced if this command
+ were actually run. Not affected by the "dry-run" flag or whether
+ any other commands have been run.
+
+ Command classes should implement this method if they produce any
+        output files that get consumed by another command; e.g., `build_ext`
+ returns the list of built extension modules, but not any temporary
+ files used in the compilation process.
+ """
+ return []
+
+ # -- Option validation methods -------------------------------------
+ # (these are very handy in writing the 'finalize_options()' method)
+ #
+ # NB. the general philosophy here is to ensure that a particular option
+ # value meets certain type and value constraints. If not, we try to
+ # force it into conformance (eg. if we expect a list but have a string,
+ # split the string on comma and/or whitespace). If we can't force the
+ # option into conformance, raise PackagingOptionError. Thus, command
+ # classes need do nothing more than (eg.)
+ # self.ensure_string_list('foo')
+ # and they can be guaranteed that thereafter, self.foo will be
+ # a list of strings.
+
+ def _ensure_stringlike(self, option, what, default=None):
+ val = getattr(self, option)
+ if val is None:
+ setattr(self, option, default)
+ return default
+ elif not isinstance(val, str):
+ raise PackagingOptionError("'%s' must be a %s (got `%s`)" %
+ (option, what, val))
+ return val
+
+ def ensure_string(self, option, default=None):
+ """Ensure that 'option' is a string; if not defined, set it to
+ 'default'.
+ """
+ self._ensure_stringlike(option, "string", default)
+
+ def ensure_string_list(self, option):
+ r"""Ensure that 'option' is a list of strings. If 'option' is
+ currently a string, we split it either on /,\s*/ or /\s+/, so
+ "foo bar baz", "foo,bar,baz", and "foo, bar baz" all become
+ ["foo", "bar", "baz"].
+ """
+ val = getattr(self, option)
+ if val is None:
+ return
+ elif isinstance(val, str):
+ setattr(self, option, re.split(r',\s*|\s+', val))
+        else:
+            # only a list whose elements are all strings is acceptable
+            ok = (isinstance(val, list) and
+                  all(isinstance(element, str) for element in val))
+            if not ok:
+                raise PackagingOptionError(
+                    "'%s' must be a list of strings (got %r)" % (option, val))
+
+ def _ensure_tested_string(self, option, tester,
+ what, error_fmt, default=None):
+ val = self._ensure_stringlike(option, what, default)
+ if val is not None and not tester(val):
+ raise PackagingOptionError(
+ ("error in '%s' option: " + error_fmt) % (option, val))
+
+ def ensure_filename(self, option):
+ """Ensure that 'option' is the name of an existing file."""
+ self._ensure_tested_string(option, os.path.isfile,
+ "filename",
+ "'%s' does not exist or is not a file")
+
+ def ensure_dirname(self, option):
+ self._ensure_tested_string(option, os.path.isdir,
+ "directory name",
+ "'%s' does not exist or is not a directory")
+
+ # -- Convenience methods for commands ------------------------------
+
+ @classmethod
+ def get_command_name(cls):
+ if hasattr(cls, 'command_name'):
+ return cls.command_name
+ else:
+ return cls.__name__
+
+ def set_undefined_options(self, src_cmd, *options):
+ """Set values of undefined options from another command.
+
+ Undefined options are options set to None, which is the convention
+ used to indicate that an option has not been changed between
+ 'initialize_options()' and 'finalize_options()'. This method is
+ usually called from 'finalize_options()' for options that depend on
+ some other command rather than another option of the same command,
+ typically subcommands.
+
+ The 'src_cmd' argument is the other command from which option values
+ will be taken (a command object will be created for it if necessary);
+ the remaining positional arguments are strings that give the name of
+ the option to set. If the name is different on the source and target
+ command, you can pass a tuple with '(name_on_source, name_on_dest)' so
+ that 'self.name_on_dest' will be set from 'src_cmd.name_on_source'.
+ """
+ src_cmd_obj = self.distribution.get_command_obj(src_cmd)
+ src_cmd_obj.ensure_finalized()
+ for obj in options:
+ if isinstance(obj, tuple):
+ src_option, dst_option = obj
+ else:
+ src_option, dst_option = obj, obj
+ if getattr(self, dst_option) is None:
+ setattr(self, dst_option,
+ getattr(src_cmd_obj, src_option))
+
+ def get_finalized_command(self, command, create=True):
+ """Wrapper around Distribution's 'get_command_obj()' method: find
+ (create if necessary and 'create' is true) the command object for
+ 'command', call its 'ensure_finalized()' method, and return the
+ finalized command object.
+ """
+ cmd_obj = self.distribution.get_command_obj(command, create)
+ cmd_obj.ensure_finalized()
+ return cmd_obj
+
+ def reinitialize_command(self, command, reinit_subcommands=False):
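+        """Reinitialize 'command' (and optionally its sub-commands) by
+        delegating to the Distribution object."""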
+ return self.distribution.reinitialize_command(
+ command, reinit_subcommands)
+
+ def run_command(self, command):
+ """Run some other command: uses the 'run_command()' method of
+ Distribution, which creates and finalizes the command object if
+ necessary and then invokes its 'run()' method.
+ """
+ self.distribution.run_command(command)
+
+ def get_sub_commands(self):
+ """Determine the sub-commands that are relevant in the current
+ distribution (ie., that need to be run). This is based on the
+ 'sub_commands' class attribute: each tuple in that list may include
+ a method that we call to determine if the subcommand needs to be
+ run for the current distribution. Return a list of command names.
+ """
+ commands = []
+ for sub_command in self.sub_commands:
+ if len(sub_command) == 2:
+ cmd_name, method = sub_command
+ if method is None or method(self):
+ commands.append(cmd_name)
+ else:
+ commands.append(sub_command)
+ return commands
+
+ # -- External world manipulation -----------------------------------
+
+ def execute(self, func, args, msg=None, level=1):
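+        """Run 'func(*args)' through 'util.execute', which logs 'msg'
+        and skips the actual call in dry-run mode."""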
+ util.execute(func, args, msg, dry_run=self.dry_run)
+
+ def mkpath(self, name, mode=0o777, dry_run=None):
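+        """Create a directory and any missing ancestors, or just log
+        what would be created when in dry-run mode."""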
+ if dry_run is None:
+ dry_run = self.dry_run
+ name = os.path.normpath(name)
+ if os.path.isdir(name) or name == '':
+ return
+ if dry_run:
+ head = ''
+ for part in name.split(os.sep):
+ logger.info("created directory %s%s", head, part)
+ head += part + os.sep
+ return
+ os.makedirs(name, mode)
+
+ def copy_file(self, infile, outfile,
+ preserve_mode=True, preserve_times=True, link=None, level=1):
+ """Copy a file respecting dry-run and force flags.
+
+ (dry-run defaults to whatever is in the Distribution object, and
+ force to false for commands that don't define it.)
+ """
+ if self.dry_run:
+ # XXX add a comment
+ return
+ if os.path.isdir(outfile):
+ outfile = os.path.join(outfile, os.path.split(infile)[-1])
+ copyfile(infile, outfile)
+ return outfile, None # XXX
+
+ def copy_tree(self, infile, outfile, preserve_mode=True,
+ preserve_times=True, preserve_symlinks=False, level=1):
+ """Copy an entire directory tree respecting dry-run
+ and force flags.
+ """
+ if self.dry_run:
+ # XXX should not return but let copy_tree log and decide to execute
+ # or not based on its dry_run argument
+ return
+
+ return util.copy_tree(infile, outfile, preserve_mode, preserve_times,
+ preserve_symlinks, not self.force, dry_run=self.dry_run)
+
+ def move_file(self, src, dst, level=1):
+ """Move a file respecting the dry-run flag."""
+ if self.dry_run:
+ return # XXX same thing
+ return move(src, dst)
+
+ def spawn(self, cmd, search_path=True, level=1):
+ """Spawn an external command respecting dry-run flag."""
+ from packaging.util import spawn
+ spawn(cmd, search_path, dry_run=self.dry_run)
+
+ def make_archive(self, base_name, format, root_dir=None, base_dir=None,
+ owner=None, group=None):
+ return make_archive(base_name, format, root_dir,
+ base_dir, dry_run=self.dry_run,
+ owner=owner, group=group)
+
+ def make_file(self, infiles, outfile, func, args,
+ exec_msg=None, skip_msg=None, level=1):
+ """Special case of 'execute()' for operations that process one or
+ more input files and generate one output file. Works just like
+ 'execute()', except the operation is skipped and a different
+ message printed if 'outfile' already exists and is newer than all
+        files listed in 'infiles'.  If the command defined 'self.force',
+        and it is true, then the function is run unconditionally and no
+        timestamp checks are done.
+ """
+ if skip_msg is None:
+ skip_msg = "skipping %s (inputs unchanged)" % outfile
+
+ # Allow 'infiles' to be a single string
+ if isinstance(infiles, str):
+ infiles = (infiles,)
+ elif not isinstance(infiles, (list, tuple)):
+ raise TypeError(
+ "'infiles' must be a string, or a list or tuple of strings")
+
+ if exec_msg is None:
+ exec_msg = "generating %s from %s" % (outfile, ', '.join(infiles))
+
+ # If 'outfile' must be regenerated (either because it doesn't
+ # exist, is out-of-date, or the 'force' flag is true) then
+ # perform the action that presumably regenerates it
+ if self.force or util.newer_group(infiles, outfile):
+ self.execute(func, args, exec_msg, level)
+
+ # Otherwise, print the "skip" message
+ else:
+ logger.debug(skip_msg)
+
+ def byte_compile(self, files, prefix=None):
+ """Byte-compile files to pyc and/or pyo files.
+
+ This method requires that the calling class define compile and
+ optimize options, like build_py and install_lib. It also
+ automatically respects the force and dry-run options.
+
+ prefix, if given, is a string that will be stripped off the
+ filenames encoded in bytecode files.
+ """
+ if self.compile:
+ util.byte_compile(files, optimize=False, prefix=prefix,
+ force=self.force, dry_run=self.dry_run)
+ if self.optimize:
+ util.byte_compile(files, optimize=self.optimize, prefix=prefix,
+ force=self.force, dry_run=self.dry_run)
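
As an illustration of the initialize/finalize/run lifecycle described in the
class docstring, a subclass might look like this minimal sketch; the
'count_modules' command, its option and its attribute are invented for the
example, and it assumes the Distribution object exposes 'py_modules' as in
distutils:

    from packaging import logger
    from packaging.command.cmd import Command

    class count_modules(Command):

        description = "print how many pure Python modules will be installed"

        user_options = [('include=', None,
                         "comma- or space-separated names to count")]

        def initialize_options(self):
            # "declare" the options; real values arrive later
            self.include = None

        def finalize_options(self):
            # "define" the options once all outside input has been seen;
            # ensure_string_list turns "a,b" or "a b" into ['a', 'b']
            self.ensure_string_list('include')

        def run(self):
            modules = self.distribution.py_modules or []
            if self.include:
                modules = [m for m in modules if m in self.include]
            logger.info("%d pure modules", len(modules))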
diff --git a/Lib/packaging/command/command_template b/Lib/packaging/command/command_template
new file mode 100644
index 0000000..a12d32b
--- /dev/null
+++ b/Lib/packaging/command/command_template
@@ -0,0 +1,35 @@
+"""Do X and Y."""
+
+from packaging import logger
+from packaging.command.cmd import Command
+
+
+class x(Command):
+
+ # Brief (40-50 characters) description of the command
+ description = ""
+
+ # List of option tuples: long name, short name (None if no short
+ # name), and help string.
+ user_options = [
+ ('', '', # long option, short option (one letter) or None
+ ""), # help text
+ ]
+
+ def initialize_options(self):
+ self. = None
+ self. = None
+ self. = None
+
+ def finalize_options(self):
+ if self.x is None:
+ self.x = ...
+
+ def run(self):
+ ...
+ logger.info(...)
+
+ if not self.dry_run:
+ ...
+
+        # 'execute' already honours self.dry_run, so it isn't passed here
+        self.execute(..., ..., ...)
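
Filled in, the template might read like the following hypothetical 'purge'
command (name and behaviour invented for the example; 'build_base' is pulled
from the 'build' command with set_undefined_options() as described in cmd.py):

    """Remove the base build directory."""

    from shutil import rmtree

    from packaging import logger
    from packaging.command.cmd import Command


    class purge(Command):

        description = "remove the base build directory"

        user_options = [
            ('build-base=', 'b',
             "base build directory to remove"),
        ]

        def initialize_options(self):
            self.build_base = None

        def finalize_options(self):
            # inherit the default from the 'build' command if not given
            self.set_undefined_options('build', 'build_base')

        def run(self):
            logger.info("removing %s", self.build_base)
            if not self.dry_run:
                rmtree(self.build_base, ignore_errors=True)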
diff --git a/Lib/packaging/command/config.py b/Lib/packaging/command/config.py
new file mode 100644
index 0000000..264c139
--- /dev/null
+++ b/Lib/packaging/command/config.py
@@ -0,0 +1,349 @@
+"""Prepare the build.
+
+This module provides config, a (mostly) empty command class
+that exists mainly to be sub-classed by specific module distributions and
+applications. The idea is that while every "config" command is different,
+at least they're all named the same, and users always see "config" in the
+list of standard commands. Also, this is a good place to put common
+configure-like tasks: "try to compile this C code", or "figure out where
+this header file lives".
+"""
+
+import os
+import re
+
+from packaging.command.cmd import Command
+from packaging.errors import PackagingExecError
+from packaging.compiler import customize_compiler
+from packaging import logger
+
+LANG_EXT = {'c': '.c', 'c++': '.cxx'}
+
+class config(Command):
+
+ description = "prepare the build"
+
+ user_options = [
+ ('compiler=', None,
+ "specify the compiler type"),
+ ('cc=', None,
+ "specify the compiler executable"),
+ ('include-dirs=', 'I',
+ "list of directories to search for header files"),
+ ('define=', 'D',
+ "C preprocessor macros to define"),
+ ('undef=', 'U',
+ "C preprocessor macros to undefine"),
+ ('libraries=', 'l',
+ "external C libraries to link with"),
+ ('library-dirs=', 'L',
+ "directories to search for external C libraries"),
+
+ ('noisy', None,
+ "show every action (compile, link, run, ...) taken"),
+ ('dump-source', None,
+ "dump generated source files before attempting to compile them"),
+ ]
+
+
+ # The three standard command methods: since the "config" command
+ # does nothing by default, these are empty.
+
+    def initialize_options(self):
+        self.compiler = None
+        self.cc = None
+        self.include_dirs = None
+        # 'define' and 'undef' appear in user_options above, so they
+        # need defaults here too
+        self.define = None
+        self.undef = None
+        self.libraries = None
+        self.library_dirs = None
+
+ # maximal output for now
+ self.noisy = True
+ self.dump_source = True
+
+        # list of temporary files generated along the way that we have
+        # to clean up at some point
+ self.temp_files = []
+
+ def finalize_options(self):
+ if self.include_dirs is None:
+ self.include_dirs = self.distribution.include_dirs or []
+ elif isinstance(self.include_dirs, str):
+ self.include_dirs = self.include_dirs.split(os.pathsep)
+
+ if self.libraries is None:
+ self.libraries = []
+ elif isinstance(self.libraries, str):
+ self.libraries = [self.libraries]
+
+ if self.library_dirs is None:
+ self.library_dirs = []
+ elif isinstance(self.library_dirs, str):
+ self.library_dirs = self.library_dirs.split(os.pathsep)
+
+ def run(self):
+ pass
+
+
+ # Utility methods for actual "config" commands. The interfaces are
+ # loosely based on Autoconf macros of similar names. Sub-classes
+ # may use these freely.
+
+ def _check_compiler(self):
+ """Check that 'self.compiler' really is a CCompiler object;
+ if not, make it one.
+ """
+ # We do this late, and only on-demand, because this is an expensive
+ # import.
+ from packaging.compiler.ccompiler import CCompiler
+ from packaging.compiler import new_compiler
+ if not isinstance(self.compiler, CCompiler):
+ self.compiler = new_compiler(compiler=self.compiler,
+ dry_run=self.dry_run, force=True)
+ customize_compiler(self.compiler)
+ if self.include_dirs:
+ self.compiler.set_include_dirs(self.include_dirs)
+ if self.libraries:
+ self.compiler.set_libraries(self.libraries)
+ if self.library_dirs:
+ self.compiler.set_library_dirs(self.library_dirs)
+
+
+ def _gen_temp_sourcefile(self, body, headers, lang):
+ filename = "_configtest" + LANG_EXT[lang]
+ with open(filename, "w") as file:
+ if headers:
+ for header in headers:
+ file.write("#include <%s>\n" % header)
+ file.write("\n")
+            # 'body' may be None (e.g. try_cpp with headers only)
+            if body:
+                file.write(body)
+                if body[-1] != "\n":
+                    file.write("\n")
+ return filename
+
+ def _preprocess(self, body, headers, include_dirs, lang):
+ src = self._gen_temp_sourcefile(body, headers, lang)
+ out = "_configtest.i"
+ self.temp_files.extend((src, out))
+ self.compiler.preprocess(src, out, include_dirs=include_dirs)
+ return src, out
+
+ def _compile(self, body, headers, include_dirs, lang):
+ src = self._gen_temp_sourcefile(body, headers, lang)
+ if self.dump_source:
+ dump_file(src, "compiling '%s':" % src)
+ obj = self.compiler.object_filenames([src])[0]
+ self.temp_files.extend((src, obj))
+ self.compiler.compile([src], include_dirs=include_dirs)
+ return src, obj
+
+ def _link(self, body, headers, include_dirs, libraries, library_dirs,
+ lang):
+ src, obj = self._compile(body, headers, include_dirs, lang)
+ prog = os.path.splitext(os.path.basename(src))[0]
+ self.compiler.link_executable([obj], prog,
+ libraries=libraries,
+ library_dirs=library_dirs,
+ target_lang=lang)
+
+ if self.compiler.exe_extension is not None:
+ prog = prog + self.compiler.exe_extension
+ self.temp_files.append(prog)
+
+ return src, obj, prog
+
+ def _clean(self, *filenames):
+ if not filenames:
+ filenames = self.temp_files
+ self.temp_files = []
+ logger.info("removing: %s", ' '.join(filenames))
+ for filename in filenames:
+ try:
+ os.remove(filename)
+ except OSError:
+ pass
+
+
+ # XXX these ignore the dry-run flag: what to do, what to do? even if
+ # you want a dry-run build, you still need some sort of configuration
+ # info. My inclination is to make it up to the real config command to
+ # consult 'dry_run', and assume a default (minimal) configuration if
+ # true. The problem with trying to do it here is that you'd have to
+ # return either true or false from all the 'try' methods, neither of
+ # which is correct.
+
+ # XXX need access to the header search path and maybe default macros.
+
+ def try_cpp(self, body=None, headers=None, include_dirs=None, lang="c"):
+ """Construct a source file from 'body' (a string containing lines
+ of C/C++ code) and 'headers' (a list of header files to include)
+ and run it through the preprocessor. Return true if the
+ preprocessor succeeded, false if there were any errors.
+ ('body' probably isn't of much use, but what the heck.)
+ """
+ from packaging.compiler.ccompiler import CompileError
+ self._check_compiler()
+ ok = True
+ try:
+ self._preprocess(body, headers, include_dirs, lang)
+ except CompileError:
+ ok = False
+
+ self._clean()
+ return ok
+
+ def search_cpp(self, pattern, body=None, headers=None, include_dirs=None,
+ lang="c"):
+ """Construct a source file (just like 'try_cpp()'), run it through
+ the preprocessor, and return true if any line of the output matches
+ 'pattern'. 'pattern' should either be a compiled regex object or a
+ string containing a regex. If both 'body' and 'headers' are None,
+ preprocesses an empty file -- which can be useful to determine the
+ symbols the preprocessor and compiler set by default.
+ """
+ self._check_compiler()
+ src, out = self._preprocess(body, headers, include_dirs, lang)
+
+ if isinstance(pattern, str):
+ pattern = re.compile(pattern)
+
+        with open(out) as file:
+            match = False
+            for line in file:
+                if pattern.search(line):
+                    match = True
+                    break
+
+ self._clean()
+ return match
+
+ def try_compile(self, body, headers=None, include_dirs=None, lang="c"):
+ """Try to compile a source file built from 'body' and 'headers'.
+ Return true on success, false otherwise.
+ """
+ from packaging.compiler.ccompiler import CompileError
+ self._check_compiler()
+ try:
+ self._compile(body, headers, include_dirs, lang)
+ ok = True
+ except CompileError:
+ ok = False
+
+        logger.info("success!" if ok else "failure.")
+ self._clean()
+ return ok
+
+ def try_link(self, body, headers=None, include_dirs=None, libraries=None,
+ library_dirs=None, lang="c"):
+ """Try to compile and link a source file, built from 'body' and
+ 'headers', to executable form. Return true on success, false
+ otherwise.
+ """
+ from packaging.compiler.ccompiler import CompileError, LinkError
+ self._check_compiler()
+ try:
+ self._link(body, headers, include_dirs,
+ libraries, library_dirs, lang)
+ ok = True
+ except (CompileError, LinkError):
+ ok = False
+
+        logger.info("success!" if ok else "failure.")
+ self._clean()
+ return ok
+
+ def try_run(self, body, headers=None, include_dirs=None, libraries=None,
+ library_dirs=None, lang="c"):
+ """Try to compile, link to an executable, and run a program
+ built from 'body' and 'headers'. Return true on success, false
+ otherwise.
+ """
+ from packaging.compiler.ccompiler import CompileError, LinkError
+ self._check_compiler()
+ try:
+ src, obj, exe = self._link(body, headers, include_dirs,
+ libraries, library_dirs, lang)
+ self.spawn([exe])
+ ok = True
+ except (CompileError, LinkError, PackagingExecError):
+ ok = False
+
+        logger.info("success!" if ok else "failure.")
+ self._clean()
+ return ok
+
+
+ # -- High-level methods --------------------------------------------
+ # (these are the ones that are actually likely to be useful
+ # when implementing a real-world config command!)
+
+ def check_func(self, func, headers=None, include_dirs=None,
+ libraries=None, library_dirs=None, decl=False, call=False):
+
+ """Determine if function 'func' is available by constructing a
+ source file that refers to 'func', and compiles and links it.
+ If everything succeeds, returns true; otherwise returns false.
+
+ The constructed source file starts out by including the header
+ files listed in 'headers'. If 'decl' is true, it then declares
+        'func' (as "int func()"); you probably shouldn't supply 'headers'
+        and set 'decl' true in the same call, or you might get errors
+        about conflicting declarations for 'func'.  Finally, the constructed
+ 'main()' function either references 'func' or (if 'call' is true)
+ calls it. 'libraries' and 'library_dirs' are used when
+ linking.
+ """
+
+ self._check_compiler()
+ body = []
+ if decl:
+ body.append("int %s ();" % func)
+ body.append("int main () {")
+ if call:
+ body.append(" %s();" % func)
+ else:
+ body.append(" %s;" % func)
+ body.append("}")
+ body = "\n".join(body) + "\n"
+
+ return self.try_link(body, headers, include_dirs,
+ libraries, library_dirs)
+
+    def check_lib(self, library, library_dirs=None, headers=None,
+                  include_dirs=None, other_libraries=()):
+ """Determine if 'library' is available to be linked against,
+ without actually checking that any particular symbols are provided
+ by it. 'headers' will be used in constructing the source file to
+ be compiled, but the only effect of this is to check if all the
+ header files listed are available. Any libraries listed in
+ 'other_libraries' will be included in the link, in case 'library'
+ has symbols that depend on other libraries.
+ """
+ self._check_compiler()
+ return self.try_link("int main (void) { }",
+ headers, include_dirs,
+                             [library] + list(other_libraries), library_dirs)
+
+ def check_header(self, header, include_dirs=None, library_dirs=None,
+ lang="c"):
+ """Determine if the system header file named by 'header_file'
+ exists and can be found by the preprocessor; return true if so,
+ false otherwise.
+ """
+ return self.try_cpp(body="/* No body */", headers=[header],
+ include_dirs=include_dirs)
+
+
+def dump_file(filename, head=None):
+ """Dumps a file content into log.info.
+
+ If head is not None, will be dumped before the file content.
+ """
+ if head is None:
+ logger.info(filename)
+ else:
+ logger.info(head)
+ with open(filename) as file:
+ logger.info(file.read())
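
The check_* methods above are meant to be driven from a project-specific
subclass; a minimal sketch, assuming a project that wants to probe for zlib
(the subclass and the probe are hypothetical):

    from packaging import logger
    from packaging.command.config import config

    class config_zlib(config):

        def run(self):
            have_header = self.check_header('zlib.h')
            have_deflate = self.check_func('deflate', headers=['zlib.h'],
                                           libraries=['z'], call=True)
            logger.info("zlib.h: %s, deflate(): %s",
                        have_header, have_deflate)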
diff --git a/Lib/packaging/command/install_data.py b/Lib/packaging/command/install_data.py
new file mode 100644
index 0000000..9ca6279
--- /dev/null
+++ b/Lib/packaging/command/install_data.py
@@ -0,0 +1,79 @@
+"""Install platform-independent data files."""
+
+# Contributed by Bastian Kleineidam
+
+import os
+from shutil import Error
+from sysconfig import get_paths, format_value
+from packaging import logger
+from packaging.util import convert_path
+from packaging.command.cmd import Command
+
+
+class install_data(Command):
+
+ description = "install platform-independent data files"
+
+ user_options = [
+ ('install-dir=', 'd',
+ "base directory for installing data files "
+ "(default: installation base dir)"),
+ ('root=', None,
+ "install everything relative to this alternate root directory"),
+ ('force', 'f', "force installation (overwrite existing files)"),
+ ]
+
+ boolean_options = ['force']
+
+ def initialize_options(self):
+ self.install_dir = None
+ self.outfiles = []
+ self.data_files_out = []
+ self.root = None
+ self.force = False
+ self.data_files = self.distribution.data_files
+ self.warn_dir = True
+
+ def finalize_options(self):
+ self.set_undefined_options('install_dist',
+ ('install_data', 'install_dir'),
+ 'root', 'force')
+
+ def run(self):
+ self.mkpath(self.install_dir)
+        for source, spec in self.data_files.items():
+            destination = convert_path(self.expand_categories(spec))
+            dir_dest = os.path.abspath(os.path.dirname(destination))
+
+            self.mkpath(dir_dest)
+            try:
+                out = self.copy_file(source, dir_dest)[0]
+            except Error as e:
+                logger.warning('%s: %s', self.get_command_name(), e)
+                out = destination
+
+            self.outfiles.append(out)
+            self.data_files_out.append((source, destination))
+
+ def expand_categories(self, path_with_categories):
+ local_vars = get_paths()
+ local_vars['distribution.name'] = self.distribution.metadata['Name']
+ expanded_path = format_value(path_with_categories, local_vars)
+ expanded_path = format_value(expanded_path, local_vars)
+ if '{' in expanded_path and '}' in expanded_path:
+ logger.warning(
+ '%s: unable to expand %s, some categories may be missing',
+ self.get_command_name(), path_with_categories)
+ return expanded_path
+
+ def get_source_files(self):
+ return list(self.data_files)
+
+ def get_inputs(self):
+ return list(self.data_files)
+
+ def get_outputs(self):
+ return self.outfiles
+
+ def get_resources_out(self):
+ return self.data_files_out
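
Judging from run() and expand_categories() above, 'data_files' maps source
paths to destination templates whose braced placeholders are sysconfig path
names plus '{distribution.name}'; a hypothetical mapping (file names
invented) might be:

    data_files = {
        'conf/sample.cfg': '{purelib}/{distribution.name}/sample.cfg',
        'man/spam.1': '{data}/share/man/man1/spam.1',
    }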
diff --git a/Lib/packaging/command/install_dist.py b/Lib/packaging/command/install_dist.py
new file mode 100644
index 0000000..8388dc9
--- /dev/null
+++ b/Lib/packaging/command/install_dist.py
@@ -0,0 +1,605 @@
+"""Main install command, which calls the other install_* commands."""
+
+import sys
+import os
+
+import sysconfig
+from sysconfig import get_config_vars, get_paths, get_path, get_config_var
+
+from packaging import logger
+from packaging.command.cmd import Command
+from packaging.errors import PackagingPlatformError
+from packaging.util import write_file
+from packaging.util import convert_path, change_root, get_platform
+from packaging.errors import PackagingOptionError
+
+
+class install_dist(Command):
+
+ description = "install everything from build directory"
+
+ user_options = [
+ # Select installation scheme and set base director(y|ies)
+ ('prefix=', None,
+ "installation prefix"),
+ ('exec-prefix=', None,
+ "(Unix only) prefix for platform-specific files"),
+ ('user', None,
+ "install in user site-packages directory [%s]" %
+ get_path('purelib', '%s_user' % os.name)),
+ ('home=', None,
+ "(Unix only) home directory to install under"),
+
+ # Or just set the base director(y|ies)
+ ('install-base=', None,
+ "base installation directory (instead of --prefix or --home)"),
+ ('install-platbase=', None,
+ "base installation directory for platform-specific files " +
+ "(instead of --exec-prefix or --home)"),
+ ('root=', None,
+ "install everything relative to this alternate root directory"),
+
+ # Or explicitly set the installation scheme
+ ('install-purelib=', None,
+ "installation directory for pure Python module distributions"),
+ ('install-platlib=', None,
+ "installation directory for non-pure module distributions"),
+ ('install-lib=', None,
+ "installation directory for all module distributions " +
+ "(overrides --install-purelib and --install-platlib)"),
+
+ ('install-headers=', None,
+ "installation directory for C/C++ headers"),
+ ('install-scripts=', None,
+ "installation directory for Python scripts"),
+ ('install-data=', None,
+ "installation directory for data files"),
+
+ # Byte-compilation options -- see install_lib for details
+ ('compile', 'c', "compile .py to .pyc [default]"),
+ ('no-compile', None, "don't compile .py files"),
+ ('optimize=', 'O',
+ 'also compile with optimization: -O1 for "python -O", '
+ '-O2 for "python -OO", and -O0 to disable [default: -O0]'),
+
+ # Miscellaneous control options
+ ('force', 'f',
+ "force installation (overwrite any existing files)"),
+ ('skip-build', None,
+ "skip rebuilding everything (for testing/debugging)"),
+
+ # Where to install documentation (eventually!)
+ #('doc-format=', None, "format of documentation to generate"),
+ #('install-man=', None, "directory for Unix man pages"),
+ #('install-html=', None, "directory for HTML documentation"),
+ #('install-info=', None, "directory for GNU info files"),
+
+ # XXX use a name that makes clear this is the old format
+ ('record=', None,
+ "filename in which to record a list of installed files "
+ "(not PEP 376-compliant)"),
+ ('resources=', None,
+ "data files mapping"),
+
+ # .dist-info related arguments, read by install_dist_info
+ ('no-distinfo', None,
+ "do not create a .dist-info directory"),
+ ('installer=', None,
+ "the name of the installer"),
+        ('requested', None,
+         "generate a REQUESTED file (i.e. mark the distribution as "
+         "explicitly requested, not installed only as a dependency)"),
+ ('no-requested', None,
+ "do not generate a REQUESTED file"),
+ ('no-record', None,
+ "do not generate a RECORD file"),
+ ]
+
+ boolean_options = ['compile', 'force', 'skip-build', 'no-distinfo',
+ 'requested', 'no-record', 'user']
+
+ negative_opt = {'no-compile': 'compile', 'no-requested': 'requested'}
+
+ def initialize_options(self):
+ # High-level options: these select both an installation base
+ # and scheme.
+ self.prefix = None
+ self.exec_prefix = None
+ self.home = None
+ self.user = False
+
+ # These select only the installation base; it's up to the user to
+ # specify the installation scheme (currently, that means supplying
+ # the --install-{platlib,purelib,scripts,data} options).
+ self.install_base = None
+ self.install_platbase = None
+ self.root = None
+
+ # These options are the actual installation directories; if not
+ # supplied by the user, they are filled in using the installation
+ # scheme implied by prefix/exec-prefix/home and the contents of
+ # that installation scheme.
+ self.install_purelib = None # for pure module distributions
+ self.install_platlib = None # non-pure (dists w/ extensions)
+ self.install_headers = None # for C/C++ headers
+ self.install_lib = None # set to either purelib or platlib
+ self.install_scripts = None
+ self.install_data = None
+ self.install_userbase = get_config_var('userbase')
+ self.install_usersite = get_path('purelib', '%s_user' % os.name)
+
+ self.compile = None
+ self.optimize = None
+
+ # These two are for putting non-packagized distributions into their
+ # own directory and creating a .pth file if it makes sense.
+ # 'extra_path' comes from the setup file; 'install_path_file' can
+ # be turned off if it makes no sense to install a .pth file. (But
+ # better to install it uselessly than to guess wrong and not
+ # install it when it's necessary and would be used!) Currently,
+ # 'install_path_file' is always true unless some outsider meddles
+ # with it.
+ self.extra_path = None
+ self.install_path_file = True
+
+ # 'force' forces installation, even if target files are not
+ # out-of-date. 'skip_build' skips running the "build" command,
+ # handy if you know it's not necessary. 'warn_dir' (which is *not*
+ # a user option, it's just there so the bdist_* commands can turn
+ # it off) determines whether we warn about installing to a
+ # directory not in sys.path.
+ self.force = False
+ self.skip_build = False
+ self.warn_dir = True
+
+ # These are only here as a conduit from the 'build' command to the
+ # 'install_*' commands that do the real work. ('build_base' isn't
+ # actually used anywhere, but it might be useful in future.) They
+ # are not user options, because if the user told the install
+ # command where the build directory is, that wouldn't affect the
+ # build command.
+ self.build_base = None
+ self.build_lib = None
+
+ # Not defined yet because we don't know anything about
+ # documentation yet.
+ #self.install_man = None
+ #self.install_html = None
+ #self.install_info = None
+
+ self.record = None
+ self.resources = None
+
+ # .dist-info related options
+ self.no_distinfo = None
+ self.installer = None
+ self.requested = None
+ self.no_record = None
+
+ # -- Option finalizing methods -------------------------------------
+ # (This is rather more involved than for most commands,
+ # because this is where the policy for installing third-
+ # party Python modules on various platforms given a wide
+ # array of user input is decided. Yes, it's quite complex!)
+
+ def finalize_options(self):
+        # This method (and its helpers 'finalize_unix()',
+        # 'finalize_other()' and 'select_scheme()') is where the default
+        # installation directories for modules, extension modules, and
+        # anything else we care to install from a Python module
+        # distribution are decided.  Thus, this code makes a pretty important policy
+ # statement about how third-party stuff is added to a Python
+ # installation! Note that the actual work of installation is done
+ # by the relatively simple 'install_*' commands; they just take
+ # their orders from the installation directory options determined
+ # here.
+
+ # Check for errors/inconsistencies in the options; first, stuff
+ # that's wrong on any platform.
+
+ if ((self.prefix or self.exec_prefix or self.home) and
+ (self.install_base or self.install_platbase)):
+ raise PackagingOptionError(
+ "must supply either prefix/exec-prefix/home or "
+ "install-base/install-platbase -- not both")
+
+ if self.home and (self.prefix or self.exec_prefix):
+ raise PackagingOptionError(
+ "must supply either home or prefix/exec-prefix -- not both")
+
+ if self.user and (self.prefix or self.exec_prefix or self.home or
+ self.install_base or self.install_platbase):
+ raise PackagingOptionError(
+ "can't combine user with prefix/exec_prefix/home or "
+ "install_base/install_platbase")
+
+ # Next, stuff that's wrong (or dubious) only on certain platforms.
+ if os.name != "posix":
+ if self.exec_prefix:
+ logger.warning(
+ '%s: exec-prefix option ignored on this platform',
+ self.get_command_name())
+ self.exec_prefix = None
+
+ # Now the interesting logic -- so interesting that we farm it out
+ # to other methods. The goal of these methods is to set the final
+ # values for the install_{lib,scripts,data,...} options, using as
+ # input a heady brew of prefix, exec_prefix, home, install_base,
+ # install_platbase, user-supplied versions of
+ # install_{purelib,platlib,lib,scripts,data,...}, and the
+ # INSTALL_SCHEME dictionary above. Phew!
+
+ self.dump_dirs("pre-finalize_{unix,other}")
+
+ if os.name == 'posix':
+ self.finalize_unix()
+ else:
+ self.finalize_other()
+
+ self.dump_dirs("post-finalize_{unix,other}()")
+
+ # Expand configuration variables, tilde, etc. in self.install_base
+ # and self.install_platbase -- that way, we can use $base or
+ # $platbase in the other installation directories and not worry
+ # about needing recursive variable expansion (shudder).
+
+ py_version = '%s.%s' % sys.version_info[:2]
+ prefix, exec_prefix, srcdir, projectbase = get_config_vars(
+ 'prefix', 'exec_prefix', 'srcdir', 'projectbase')
+
+ metadata = self.distribution.metadata
+ self.config_vars = {
+ 'dist_name': metadata['Name'],
+ 'dist_version': metadata['Version'],
+ 'dist_fullname': metadata.get_fullname(),
+ 'py_version': py_version,
+ 'py_version_short': py_version[:3],
+ 'py_version_nodot': py_version[:3:2],
+ 'sys_prefix': prefix,
+ 'prefix': prefix,
+ 'sys_exec_prefix': exec_prefix,
+ 'exec_prefix': exec_prefix,
+ 'srcdir': srcdir,
+ 'projectbase': projectbase,
+ 'userbase': self.install_userbase,
+ 'usersite': self.install_usersite,
+ }
+
+ self.expand_basedirs()
+
+ self.dump_dirs("post-expand_basedirs()")
+
+ # Now define config vars for the base directories so we can expand
+ # everything else.
+ self.config_vars['base'] = self.install_base
+ self.config_vars['platbase'] = self.install_platbase
+
+ # Expand "~" and configuration variables in the installation
+ # directories.
+ self.expand_dirs()
+
+ self.dump_dirs("post-expand_dirs()")
+
+ # Create directories under USERBASE
+ if self.user:
+ self.create_user_dirs()
+
+ # Pick the actual directory to install all modules to: either
+ # install_purelib or install_platlib, depending on whether this
+ # module distribution is pure or not. Of course, if the user
+ # already specified install_lib, use their selection.
+ if self.install_lib is None:
+ if self.distribution.ext_modules: # has extensions: non-pure
+ self.install_lib = self.install_platlib
+ else:
+ self.install_lib = self.install_purelib
+
+ # Convert directories from Unix /-separated syntax to the local
+ # convention.
+ self.convert_paths('lib', 'purelib', 'platlib', 'scripts',
+ 'data', 'headers', 'userbase', 'usersite')
+
+ # Well, we're not actually fully completely finalized yet: we still
+ # have to deal with 'extra_path', which is the hack for allowing
+ # non-packagized module distributions (hello, Numerical Python!) to
+ # get their own directories.
+ self.handle_extra_path()
+ self.install_libbase = self.install_lib # needed for .pth file
+ self.install_lib = os.path.join(self.install_lib, self.extra_dirs)
+
+ # If a new root directory was supplied, make all the installation
+ # dirs relative to it.
+ if self.root is not None:
+ self.change_roots('libbase', 'lib', 'purelib', 'platlib',
+ 'scripts', 'data', 'headers')
+
+ self.dump_dirs("after prepending root")
+
+ # Find out the build directories, ie. where to install from.
+ self.set_undefined_options('build', 'build_base', 'build_lib')
+
+ # Punt on doc directories for now -- after all, we're punting on
+ # documentation completely!
+
+ if self.no_distinfo is None:
+ self.no_distinfo = False
+
+ def finalize_unix(self):
+ """Finalize options for posix platforms."""
+ if self.install_base is not None or self.install_platbase is not None:
+ if ((self.install_lib is None and
+ self.install_purelib is None and
+ self.install_platlib is None) or
+ self.install_headers is None or
+ self.install_scripts is None or
+ self.install_data is None):
+ raise PackagingOptionError(
+ "install-base or install-platbase supplied, but "
+ "installation scheme is incomplete")
+ return
+
+ if self.user:
+ if self.install_userbase is None:
+ raise PackagingPlatformError(
+ "user base directory is not specified")
+ self.install_base = self.install_platbase = self.install_userbase
+ self.select_scheme("posix_user")
+ elif self.home is not None:
+ self.install_base = self.install_platbase = self.home
+ self.select_scheme("posix_home")
+ else:
+ if self.prefix is None:
+ if self.exec_prefix is not None:
+ raise PackagingOptionError(
+ "must not supply exec-prefix without prefix")
+
+ self.prefix = os.path.normpath(sys.prefix)
+ self.exec_prefix = os.path.normpath(sys.exec_prefix)
+
+ else:
+ if self.exec_prefix is None:
+ self.exec_prefix = self.prefix
+
+ self.install_base = self.prefix
+ self.install_platbase = self.exec_prefix
+ self.select_scheme("posix_prefix")
+
+ def finalize_other(self):
+ """Finalize options for non-posix platforms"""
+ if self.user:
+ if self.install_userbase is None:
+ raise PackagingPlatformError(
+ "user base directory is not specified")
+ self.install_base = self.install_platbase = self.install_userbase
+ self.select_scheme(os.name + "_user")
+ elif self.home is not None:
+ self.install_base = self.install_platbase = self.home
+ self.select_scheme("posix_home")
+ else:
+ if self.prefix is None:
+ self.prefix = os.path.normpath(sys.prefix)
+
+ self.install_base = self.install_platbase = self.prefix
+ try:
+ self.select_scheme(os.name)
+ except KeyError:
+ raise PackagingPlatformError(
+ "no support for installation on '%s'" % os.name)
+
+ def dump_dirs(self, msg):
+ """Dump the list of user options."""
+ logger.debug(msg + ":")
+ for opt in self.user_options:
+ opt_name = opt[0]
+ if opt_name[-1] == "=":
+ opt_name = opt_name[0:-1]
+ if opt_name in self.negative_opt:
+ opt_name = self.negative_opt[opt_name]
+ opt_name = opt_name.replace('-', '_')
+ val = not getattr(self, opt_name)
+ else:
+ opt_name = opt_name.replace('-', '_')
+ val = getattr(self, opt_name)
+ logger.debug(" %s: %s", opt_name, val)
+
+ def select_scheme(self, name):
+ """Set the install directories by applying the install schemes."""
+ # it's the caller's problem if they supply a bad name!
+ scheme = get_paths(name, expand=False)
+ for key, value in scheme.items():
+ if key == 'platinclude':
+ key = 'headers'
+ value = os.path.join(value, self.distribution.metadata['Name'])
+ attrname = 'install_' + key
+ if hasattr(self, attrname):
+ if getattr(self, attrname) is None:
+ setattr(self, attrname, value)
+
+ def _expand_attrs(self, attrs):
+ for attr in attrs:
+ val = getattr(self, attr)
+ if val is not None:
+                if os.name in ('posix', 'nt'):
+ val = os.path.expanduser(val)
+ # see if we want to push this work in sysconfig XXX
+ val = sysconfig._subst_vars(val, self.config_vars)
+ setattr(self, attr, val)
+
+ def expand_basedirs(self):
+ """Call `os.path.expanduser` on install_{base,platbase} and root."""
+ self._expand_attrs(['install_base', 'install_platbase', 'root'])
+
+ def expand_dirs(self):
+ """Call `os.path.expanduser` on install dirs."""
+ self._expand_attrs(['install_purelib', 'install_platlib',
+ 'install_lib', 'install_headers',
+ 'install_scripts', 'install_data'])
+
+ def convert_paths(self, *names):
+ """Call `convert_path` over `names`."""
+ for name in names:
+ attr = "install_" + name
+ setattr(self, attr, convert_path(getattr(self, attr)))
+
+ def handle_extra_path(self):
+ """Set `path_file` and `extra_dirs` using `extra_path`."""
+ if self.extra_path is None:
+ self.extra_path = self.distribution.extra_path
+
+ if self.extra_path is not None:
+ if isinstance(self.extra_path, str):
+ self.extra_path = self.extra_path.split(',')
+
+ if len(self.extra_path) == 1:
+ path_file = extra_dirs = self.extra_path[0]
+ elif len(self.extra_path) == 2:
+ path_file, extra_dirs = self.extra_path
+ else:
+ raise PackagingOptionError(
+ "'extra_path' option must be a list, tuple, or "
+ "comma-separated string with 1 or 2 elements")
+
+ # convert to local form in case Unix notation used (as it
+ # should be in setup scripts)
+ extra_dirs = convert_path(extra_dirs)
+ else:
+ path_file = None
+ extra_dirs = ''
+
+ # XXX should we warn if path_file and not extra_dirs? (in which
+ # case the path file would be harmless but pointless)
+ self.path_file = path_file
+ self.extra_dirs = extra_dirs
+
+ def change_roots(self, *names):
+ """Change the install direcories pointed by name using root."""
+ for name in names:
+ attr = "install_" + name
+ setattr(self, attr, change_root(self.root, getattr(self, attr)))
+
+ def create_user_dirs(self):
+ """Create directories under USERBASE as needed."""
+ home = convert_path(os.path.expanduser("~"))
+ for name, path in self.config_vars.items():
+ if path.startswith(home) and not os.path.isdir(path):
+ os.makedirs(path, 0o700)
+
+ # -- Command execution methods -------------------------------------
+
+ def run(self):
+ """Runs the command."""
+ # Obviously have to build before we can install
+ if not self.skip_build:
+ self.run_command('build')
+ # If we built for any other platform, we can't install.
+ build_plat = self.distribution.get_command_obj('build').plat_name
+ # check warn_dir - it is a clue that the 'install_dist' is happening
+ # internally, and not to sys.path, so we don't check the platform
+ # matches what we are running.
+ if self.warn_dir and build_plat != get_platform():
+ raise PackagingPlatformError("Can't install when "
+ "cross-compiling")
+
+ # Run all sub-commands (at least those that need to be run)
+ for cmd_name in self.get_sub_commands():
+ self.run_command(cmd_name)
+
+ if self.path_file:
+ self.create_path_file()
+
+ # write list of installed files, if requested.
+ if self.record:
+ outputs = self.get_outputs()
+            if self.root:  # strip any package prefix
+                root_len = len(self.root)
+                outputs = [output[root_len:] for output in outputs]
+ self.execute(write_file,
+ (self.record, outputs),
+ "writing list of installed files to '%s'" %
+ self.record)
+
+ normpath, normcase = os.path.normpath, os.path.normcase
+ sys_path = [normcase(normpath(p)) for p in sys.path]
+ install_lib = normcase(normpath(self.install_lib))
+ if (self.warn_dir and
+ not (self.path_file and self.install_path_file) and
+ install_lib not in sys_path):
+ logger.debug(("modules installed to '%s', which is not in "
+ "Python's module search path (sys.path) -- "
+ "you'll have to change the search path yourself"),
+ self.install_lib)
+
+ def create_path_file(self):
+ """Creates the .pth file"""
+ filename = os.path.join(self.install_libbase,
+ self.path_file + ".pth")
+ if self.install_path_file:
+ self.execute(write_file,
+ (filename, [self.extra_dirs]),
+ "creating %s" % filename)
+ else:
+ logger.warning('%s: path file %r not created',
+ self.get_command_name(), filename)
+
+ # -- Reporting methods ---------------------------------------------
+
+ def get_outputs(self):
+ """Assembles the outputs of all the sub-commands."""
+ outputs = []
+ for cmd_name in self.get_sub_commands():
+ cmd = self.get_finalized_command(cmd_name)
+ # Add the contents of cmd.get_outputs(), ensuring
+ # that outputs doesn't contain duplicate entries
+ for filename in cmd.get_outputs():
+ if filename not in outputs:
+ outputs.append(filename)
+
+ if self.path_file and self.install_path_file:
+ outputs.append(os.path.join(self.install_libbase,
+ self.path_file + ".pth"))
+
+ return outputs
+
+ def get_inputs(self):
+ """Returns the inputs of all the sub-commands"""
+ # XXX gee, this looks familiar ;-(
+ inputs = []
+ for cmd_name in self.get_sub_commands():
+ cmd = self.get_finalized_command(cmd_name)
+ inputs.extend(cmd.get_inputs())
+
+ return inputs
+
+ # -- Predicates for sub-command list -------------------------------
+
+ def has_lib(self):
+ """Returns true if the current distribution has any Python
+ modules to install."""
+ return (self.distribution.has_pure_modules() or
+ self.distribution.has_ext_modules())
+
+ def has_headers(self):
+ """Returns true if the current distribution has any headers to
+ install."""
+ return self.distribution.has_headers()
+
+ def has_scripts(self):
+ """Returns true if the current distribution has any scripts to.
+ install."""
+ return self.distribution.has_scripts()
+
+ def has_data(self):
+ """Returns true if the current distribution has any data to.
+ install."""
+ return self.distribution.has_data_files()
+
+ # 'sub_commands': a list of commands this command might have to run to
+ # get its work done. See cmd.py for more info.
+ sub_commands = [('install_lib', has_lib),
+ ('install_headers', has_headers),
+ ('install_scripts', has_scripts),
+ ('install_data', has_data),
+ # keep install_distinfo last, as it needs the record
+ # with files to be completely generated
+ ('install_distinfo', lambda self: not self.no_distinfo),
+ ]
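
The three branches of finalize_unix() correspond directly to sysconfig
install schemes, so the directories each mode would select can be previewed
with sysconfig alone (a sketch; actual paths vary per system):

    from sysconfig import get_paths

    # --user           -> posix_user scheme
    print(get_paths('posix_user')['purelib'])
    # --home=/opt/app  -> posix_home scheme, with {base} overridden
    print(get_paths('posix_home', vars={'base': '/opt/app'})['purelib'])
    # default/--prefix -> posix_prefix scheme
    print(get_paths('posix_prefix')['purelib'])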
diff --git a/Lib/packaging/command/install_distinfo.py b/Lib/packaging/command/install_distinfo.py
new file mode 100644
index 0000000..b49729f
--- /dev/null
+++ b/Lib/packaging/command/install_distinfo.py
@@ -0,0 +1,143 @@
+"""Create the PEP 376-compliant .dist-info directory."""
+
+# Forked from the former install_egg_info command by Josip Djolonga
+
+import os
+import csv
+import hashlib
+from shutil import rmtree
+
+from packaging import logger
+from packaging.command.cmd import Command
+
+
+class install_distinfo(Command):
+
+ description = 'create a .dist-info directory for the distribution'
+
+ user_options = [
+ ('install-dir=', None,
+ "directory where the the .dist-info directory will be created"),
+ ('installer=', None,
+ "the name of the installer"),
+ ('requested', None,
+ "generate a REQUESTED file"),
+ ('no-requested', None,
+ "do not generate a REQUESTED file"),
+ ('no-record', None,
+ "do not generate a RECORD file"),
+ ('no-resources', None,
+ "do not generate a RESOURCES file"),
+ ]
+
+ boolean_options = ['requested', 'no-record', 'no-resources']
+
+ negative_opt = {'no-requested': 'requested'}
+
+ def initialize_options(self):
+ self.install_dir = None
+ self.installer = None
+ self.requested = None
+ self.no_record = None
+ self.no_resources = None
+ self.outfiles = []
+
+ def finalize_options(self):
+ self.set_undefined_options('install_dist',
+ 'installer', 'requested', 'no_record')
+
+ self.set_undefined_options('install_lib', 'install_dir')
+
+ if self.installer is None:
+ # FIXME distutils or packaging?
+ # + document default in the option help text above and in install
+ self.installer = 'distutils'
+ if self.requested is None:
+ self.requested = True
+ if self.no_record is None:
+ self.no_record = False
+ if self.no_resources is None:
+ self.no_resources = False
+
+ metadata = self.distribution.metadata
+
+ basename = metadata.get_fullname(filesafe=True) + ".dist-info"
+
+ self.install_dir = os.path.join(self.install_dir, basename)
+
+ def run(self):
+ target = self.install_dir
+
+ if os.path.isdir(target) and not os.path.islink(target):
+ if not self.dry_run:
+ rmtree(target)
+ elif os.path.exists(target):
+ self.execute(os.unlink, (self.install_dir,),
+ "removing " + target)
+
+ self.execute(os.makedirs, (target,), "creating " + target)
+
+ metadata_path = os.path.join(self.install_dir, 'METADATA')
+ self.execute(self.distribution.metadata.write, (metadata_path,),
+ "creating " + metadata_path)
+ self.outfiles.append(metadata_path)
+
+ installer_path = os.path.join(self.install_dir, 'INSTALLER')
+ logger.info('creating %s', installer_path)
+ if not self.dry_run:
+ with open(installer_path, 'w') as f:
+ f.write(self.installer)
+ self.outfiles.append(installer_path)
+
+ if self.requested:
+ requested_path = os.path.join(self.install_dir, 'REQUESTED')
+ logger.info('creating %s', requested_path)
+ if not self.dry_run:
+ open(requested_path, 'wb').close()
+ self.outfiles.append(requested_path)
+
+ if not self.no_resources:
+ install_data = self.get_finalized_command('install_data')
+ if install_data.get_resources_out() != []:
+ resources_path = os.path.join(self.install_dir,
+ 'RESOURCES')
+ logger.info('creating %s', resources_path)
+ if not self.dry_run:
+ with open(resources_path, 'w') as f:
+ writer = csv.writer(f, delimiter=',',
+ lineterminator='\n',
+ quotechar='"')
+ for row in install_data.get_resources_out():
+ writer.writerow(row)
+
+ self.outfiles.append(resources_path)
+
+ if not self.no_record:
+ record_path = os.path.join(self.install_dir, 'RECORD')
+ logger.info('creating %s', record_path)
+ if not self.dry_run:
+ with open(record_path, 'w', encoding='utf-8') as f:
+ writer = csv.writer(f, delimiter=',',
+ lineterminator='\n',
+ quotechar='"')
+
+ install = self.get_finalized_command('install_dist')
+
+ for fpath in install.get_outputs():
+ if fpath.endswith('.pyc') or fpath.endswith('.pyo'):
+ # do not put size and md5 hash, as in PEP-376
+ writer.writerow((fpath, '', ''))
+ else:
+ size = os.path.getsize(fpath)
+                            with open(fpath, 'rb') as fp:
+                                # avoid shadowing the built-in 'hash'
+                                md5sum = hashlib.md5(fp.read()).hexdigest()
+                            writer.writerow((fpath, md5sum, size))
+
+ # add the RECORD file itself
+ writer.writerow((record_path, '', ''))
+ self.outfiles.append(record_path)
+
+ def get_outputs(self):
+ return self.outfiles
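
Since RECORD rows are written above as (path, md5, size) CSV, an integrity
check on an installed distribution is straightforward; a sketch (the
check_record() helper is not part of this module):

    import csv
    import hashlib

    def check_record(record_path):
        """Recompute and compare the sizes and md5 digests in RECORD."""
        with open(record_path, encoding='utf-8') as f:
            for fpath, md5sum, size in csv.reader(f):
                if not md5sum:      # pyc/pyo files and RECORD itself
                    continue
                with open(fpath, 'rb') as fp:
                    data = fp.read()
                if len(data) != int(size):
                    print('size mismatch:', fpath)
                elif hashlib.md5(data).hexdigest() != md5sum:
                    print('checksum mismatch:', fpath)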
diff --git a/Lib/packaging/command/install_headers.py b/Lib/packaging/command/install_headers.py
new file mode 100644
index 0000000..e043d6b
--- /dev/null
+++ b/Lib/packaging/command/install_headers.py
@@ -0,0 +1,43 @@
+"""Install C/C++ header files to the Python include directory."""
+
+from packaging.command.cmd import Command
+
+
+# XXX force is never used
+class install_headers(Command):
+
+ description = "install C/C++ header files"
+
+ user_options = [('install-dir=', 'd',
+ "directory to install header files to"),
+ ('force', 'f',
+ "force installation (overwrite existing files)"),
+ ]
+
+ boolean_options = ['force']
+
+ def initialize_options(self):
+ self.install_dir = None
+ self.force = False
+ self.outfiles = []
+
+ def finalize_options(self):
+ self.set_undefined_options('install_dist',
+ ('install_headers', 'install_dir'),
+ 'force')
+
+ def run(self):
+ headers = self.distribution.headers
+ if not headers:
+ return
+
+ self.mkpath(self.install_dir)
+ for header in headers:
+ out = self.copy_file(header, self.install_dir)[0]
+ self.outfiles.append(out)
+
+ def get_inputs(self):
+ return self.distribution.headers or []
+
+ def get_outputs(self):
+ return self.outfiles
diff --git a/Lib/packaging/command/install_lib.py b/Lib/packaging/command/install_lib.py
new file mode 100644
index 0000000..ffc5d45
--- /dev/null
+++ b/Lib/packaging/command/install_lib.py
@@ -0,0 +1,188 @@
+"""Install all modules (extensions and pure Python)."""
+
+import os
+import imp
+
+from packaging import logger
+from packaging.command.cmd import Command
+from packaging.errors import PackagingOptionError
+
+
+# Extension for Python source files.
+# XXX dead code? most of the codebase checks for literal '.py'
+if hasattr(os, 'extsep'):
+ PYTHON_SOURCE_EXTENSION = os.extsep + "py"
+else:
+ PYTHON_SOURCE_EXTENSION = ".py"
+
+
+class install_lib(Command):
+
+ description = "install all modules (extensions and pure Python)"
+
+ # The options for controlling byte compilation are two independent sets:
+ # 'compile' is strictly boolean, and only decides whether to
+ # generate .pyc files. 'optimize' is three-way (0, 1, or 2), and
+ # decides both whether to generate .pyo files and what level of
+ # optimization to use.
+
+ user_options = [
+ ('install-dir=', 'd', "directory to install to"),
+ ('build-dir=', 'b', "build directory (where to install from)"),
+ ('force', 'f', "force installation (overwrite existing files)"),
+ ('compile', 'c', "compile .py to .pyc [default]"),
+ ('no-compile', None, "don't compile .py files"),
+ ('optimize=', 'O',
+ "also compile with optimization: -O1 for \"python -O\", "
+ "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
+ ('skip-build', None, "skip the build steps"),
+ ]
+
+ boolean_options = ['force', 'compile', 'skip-build']
+
+ negative_opt = {'no-compile': 'compile'}
+
+ def initialize_options(self):
+ # let the 'install_dist' command dictate our installation directory
+ self.install_dir = None
+ self.build_dir = None
+ self.force = False
+ self.compile = None
+ self.optimize = None
+ self.skip_build = None
+
+ def finalize_options(self):
+ # Get all the information we need to install pure Python modules
+ # from the umbrella 'install_dist' command -- build (source) directory,
+ # install (target) directory, and whether to compile .py files.
+ self.set_undefined_options('install_dist',
+ ('build_lib', 'build_dir'),
+ ('install_lib', 'install_dir'),
+ 'force', 'compile', 'optimize',
+ 'skip_build')
+
+ if self.compile is None:
+ self.compile = True
+ if self.optimize is None:
+ self.optimize = 0
+
+ if not isinstance(self.optimize, int):
+ try:
+ self.optimize = int(self.optimize)
+ if self.optimize not in (0, 1, 2):
+ raise AssertionError
+ except (ValueError, AssertionError):
+ raise PackagingOptionError("optimize must be 0, 1, or 2")
+
+ def run(self):
+ # Make sure we have built everything we need first
+ self.build()
+
+ # Install everything: simply dump the entire contents of the build
+ # directory to the installation directory (that's the beauty of
+ # having a build directory!)
+ outfiles = self.install()
+
+ # (Optionally) compile .py to .pyc and/or .pyo
+ if outfiles is not None and self.distribution.has_pure_modules():
+ # XXX comment from distutils: "This [prefix stripping] is far from
+ # complete, but it should at least generate usable bytecode in RPM
+ # distributions." -> need to find exact requirements for
+ # byte-compiled files and fix it
+ install_root = self.get_finalized_command('install_dist').root
+ self.byte_compile(outfiles, prefix=install_root)
+
+ # -- Top-level worker functions ------------------------------------
+ # (called from 'run()')
+
+ def build(self):
+ if not self.skip_build:
+ if self.distribution.has_pure_modules():
+ self.run_command('build_py')
+ if self.distribution.has_ext_modules():
+ self.run_command('build_ext')
+
+ def install(self):
+ if os.path.isdir(self.build_dir):
+ outfiles = self.copy_tree(self.build_dir, self.install_dir)
+ else:
+ logger.warning(
+ '%s: %r does not exist -- no Python modules to install',
+ self.get_command_name(), self.build_dir)
+ return
+ return outfiles
+
+ # -- Utility methods -----------------------------------------------
+
+ def _mutate_outputs(self, has_any, build_cmd, cmd_option, output_dir):
+ if not has_any:
+ return []
+
+ build_cmd = self.get_finalized_command(build_cmd)
+ build_files = build_cmd.get_outputs()
+ build_dir = getattr(build_cmd, cmd_option)
+
+ prefix_len = len(build_dir) + len(os.sep)
+ outputs = []
+ for file in build_files:
+ outputs.append(os.path.join(output_dir, file[prefix_len:]))
+
+ return outputs
+
+ def _bytecode_filenames(self, py_filenames):
+ bytecode_files = []
+ for py_file in py_filenames:
+ # Since build_py handles package data installation, the
+ # list of outputs can contain more than just .py files.
+ # Make sure we only report bytecode for the .py files.
+ ext = os.path.splitext(os.path.normcase(py_file))[1]
+ if ext != PYTHON_SOURCE_EXTENSION:
+ continue
+ if self.compile:
+ bytecode_files.append(imp.cache_from_source(py_file, True))
+ if self.optimize:
+ bytecode_files.append(imp.cache_from_source(py_file, False))
+
+ return bytecode_files
+
+ # -- External interface --------------------------------------------
+ # (called by outsiders)
+
+ def get_outputs(self):
+ """Return the list of files that would be installed if this command
+ were actually run. Not affected by the "dry-run" flag or whether
+ modules have actually been built yet.
+ """
+ pure_outputs = \
+ self._mutate_outputs(self.distribution.has_pure_modules(),
+ 'build_py', 'build_lib',
+ self.install_dir)
+ if self.compile:
+ bytecode_outputs = self._bytecode_filenames(pure_outputs)
+ else:
+ bytecode_outputs = []
+
+ ext_outputs = \
+ self._mutate_outputs(self.distribution.has_ext_modules(),
+ 'build_ext', 'build_lib',
+ self.install_dir)
+
+ return pure_outputs + bytecode_outputs + ext_outputs
+
+ def get_inputs(self):
+ """Get the list of files that are input to this command, ie. the
+ files that get installed as they are named in the build tree.
+ The files in this list correspond one-to-one to the output
+ filenames returned by 'get_outputs()'.
+ """
+ inputs = []
+
+ if self.distribution.has_pure_modules():
+ build_py = self.get_finalized_command('build_py')
+ inputs.extend(build_py.get_outputs())
+
+ if self.distribution.has_ext_modules():
+ build_ext = self.get_finalized_command('build_ext')
+ inputs.extend(build_ext.get_outputs())
+
+ return inputs
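
The _bytecode_filenames() mapping above relies on imp.cache_from_source,
which places compiled files under __pycache__; a quick illustration (the
module path is hypothetical, and importlib.util.cache_from_source is the
later spelling of the same helper):

    import imp

    py_file = 'pkg/module.py'
    print(imp.cache_from_source(py_file, True))
    # e.g. pkg/__pycache__/module.cpython-33.pyc
    print(imp.cache_from_source(py_file, False))
    # e.g. pkg/__pycache__/module.cpython-33.pyo
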
diff --git a/Lib/packaging/command/install_scripts.py b/Lib/packaging/command/install_scripts.py
new file mode 100644
index 0000000..cfacbe2
--- /dev/null
+++ b/Lib/packaging/command/install_scripts.py
@@ -0,0 +1,59 @@
+"""Install scripts."""
+
+# Contributed by Bastian Kleineidam
+
+import os
+from packaging.command.cmd import Command
+from packaging import logger
+
+class install_scripts(Command):
+
+ description = "install scripts (Python or otherwise)"
+
+ user_options = [
+ ('install-dir=', 'd', "directory to install scripts to"),
+        ('build-dir=', 'b', "build directory (where to install from)"),
+ ('force', 'f', "force installation (overwrite existing files)"),
+ ('skip-build', None, "skip the build steps"),
+ ]
+
+ boolean_options = ['force', 'skip-build']
+
+
+ def initialize_options(self):
+ self.install_dir = None
+ self.force = False
+ self.build_dir = None
+ self.skip_build = None
+
+ def finalize_options(self):
+ self.set_undefined_options('build', ('build_scripts', 'build_dir'))
+ self.set_undefined_options('install_dist',
+ ('install_scripts', 'install_dir'),
+ 'force', 'skip_build')
+
+ def run(self):
+ if not self.skip_build:
+ self.run_command('build_scripts')
+
+ if not os.path.exists(self.build_dir):
+ self.outfiles = []
+ return
+
+ self.outfiles = self.copy_tree(self.build_dir, self.install_dir)
+ if os.name == 'posix':
+ # Set the executable bits (owner, group, and world) on
+ # all the scripts we just installed.
+ for file in self.get_outputs():
+ if self.dry_run:
+ logger.info("changing mode of %s", file)
+ else:
+ mode = (os.stat(file).st_mode | 0o555) & 0o7777
+ logger.info("changing mode of %s to %o", file, mode)
+ os.chmod(file, mode)
+
+ def get_inputs(self):
+ return self.distribution.scripts or []
+
+ def get_outputs(self):
+ return self.outfiles or []
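
How the chmod above derives the final mode: r-x bits are added for
owner, group and other, then the result is masked down to the plain
permission bits. A worked example with a typical file mode:

    st_mode = 0o100644                   # regular file, rw-r--r--
    mode = (st_mode | 0o555) & 0o7777    # add r-x for everyone
    print(oct(mode))                     # -> 0o755
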
diff --git a/Lib/packaging/command/register.py b/Lib/packaging/command/register.py
new file mode 100644
index 0000000..59805f7
--- /dev/null
+++ b/Lib/packaging/command/register.py
@@ -0,0 +1,263 @@
+"""Register a release with a project index."""
+
+# Contributed by Richard Jones
+
+import getpass
+import urllib.error
+import urllib.parse
+import urllib.request
+
+from packaging import logger
+from packaging.util import (read_pypirc, generate_pypirc, DEFAULT_REPOSITORY,
+ DEFAULT_REALM, get_pypirc_path, encode_multipart)
+from packaging.command.cmd import Command
+
+class register(Command):
+
+ description = "register a release with PyPI"
+ user_options = [
+ ('repository=', 'r',
+ "repository URL [default: %s]" % DEFAULT_REPOSITORY),
+ ('show-response', None,
+ "display full response text from server"),
+ ('list-classifiers', None,
+ "list valid Trove classifiers"),
+        ('strict', None,
+         "stop the registration if the metadata is not fully compliant"),
+ ]
+
+ boolean_options = ['show-response', 'list-classifiers', 'strict']
+
+ def initialize_options(self):
+ self.repository = None
+ self.realm = None
+ self.show_response = False
+ self.list_classifiers = False
+ self.strict = False
+
+ def finalize_options(self):
+ if self.repository is None:
+ self.repository = DEFAULT_REPOSITORY
+ if self.realm is None:
+ self.realm = DEFAULT_REALM
+
+ def run(self):
+ self._set_config()
+
+ # Check the package metadata
+ check = self.distribution.get_command_obj('check')
+ if check.strict != self.strict and not check.all:
+ # If check was already run but with different options,
+ # re-run it
+ check.strict = self.strict
+ check.all = True
+ self.distribution.have_run.pop('check', None)
+ self.run_command('check')
+
+ if self.dry_run:
+ self.verify_metadata()
+ elif self.list_classifiers:
+ self.classifiers()
+ else:
+ self.send_metadata()
+
+ def _set_config(self):
+        '''Read the configuration file and set attributes.
+        '''
+ config = read_pypirc(self.repository, self.realm)
+ if config != {}:
+ self.username = config['username']
+ self.password = config['password']
+ self.repository = config['repository']
+ self.realm = config['realm']
+ self.has_config = True
+ else:
+ if self.repository not in ('pypi', DEFAULT_REPOSITORY):
+ raise ValueError('%s not found in .pypirc' % self.repository)
+ if self.repository == 'pypi':
+ self.repository = DEFAULT_REPOSITORY
+ self.has_config = False
+
+ def classifiers(self):
+ ''' Fetch the list of classifiers from the server.
+ '''
+        url = self.repository + '?:action=list_classifiers'
+        response = urllib.request.urlopen(url)
+ logger.info(response.read())
+
+ def verify_metadata(self):
+ ''' Send the metadata to the package index server to be checked.
+ '''
+ # send the info to the server and report the result
+ code, result = self.post_to_server(self.build_post_data('verify'))
+ logger.info('server response (%s): %s', code, result)
+
+
+ def send_metadata(self):
+ ''' Send the metadata to the package index server.
+
+ Well, do the following:
+ 1. figure who the user is, and then
+ 2. send the data as a Basic auth'ed POST.
+
+ First we try to read the username/password from $HOME/.pypirc,
+ which is a ConfigParser-formatted file with a section
+ [distutils] containing username and password entries (both
+ in clear text). Eg:
+
+ [distutils]
+ index-servers =
+ pypi
+
+ [pypi]
+ username: fred
+ password: sekrit
+
+ Otherwise, to figure who the user is, we offer the user three
+ choices:
+
+ 1. use existing login,
+ 2. register as a new user, or
+ 3. set the password to a random string and email the user.
+
+ '''
+ # TODO factor registration out into another method
+ # TODO use print to print, not logging
+
+ # see if we can short-cut and get the username/password from the
+ # config
+ if self.has_config:
+ choice = '1'
+ username = self.username
+ password = self.password
+ else:
+ choice = 'x'
+ username = password = ''
+
+ # get the user's login info
+ choices = '1 2 3 4'.split()
+ while choice not in choices:
+ logger.info('''\
+We need to know who you are, so please choose either:
+ 1. use your existing login,
+ 2. register as a new user,
+ 3. have the server generate a new password for you (and email it to you), or
+ 4. quit
+Your selection [default 1]: ''')
+
+ choice = input()
+ if not choice:
+ choice = '1'
+ elif choice not in choices:
+ print('Please choose one of the four options!')
+
+ if choice == '1':
+ # get the username and password
+ while not username:
+ username = input('Username: ')
+ while not password:
+ password = getpass.getpass('Password: ')
+
+ # set up the authentication
+ auth = urllib.request.HTTPPasswordMgr()
+ host = urllib.parse.urlparse(self.repository)[1]
+ auth.add_password(self.realm, host, username, password)
+ # send the info to the server and report the result
+ code, result = self.post_to_server(self.build_post_data('submit'),
+ auth)
+ logger.info('Server response (%s): %s', code, result)
+
+ # possibly save the login
+ if code == 200:
+ if self.has_config:
+ # sharing the password in the distribution instance
+ # so the upload command can reuse it
+ self.distribution.password = password
+ else:
+ logger.info(
+ 'I can store your PyPI login so future submissions '
+ 'will be faster.\n(the login will be stored in %s)',
+ get_pypirc_path())
+ choice = 'X'
+ while choice.lower() not in ('y', 'n'):
+ choice = input('Save your login (y/N)?')
+ if not choice:
+ choice = 'n'
+ if choice.lower() == 'y':
+ generate_pypirc(username, password)
+
+ elif choice == '2':
+ data = {':action': 'user'}
+ data['name'] = data['password'] = data['email'] = ''
+ data['confirm'] = None
+ while not data['name']:
+ data['name'] = input('Username: ')
+ while data['password'] != data['confirm']:
+ while not data['password']:
+ data['password'] = getpass.getpass('Password: ')
+ while not data['confirm']:
+ data['confirm'] = getpass.getpass(' Confirm: ')
+ if data['password'] != data['confirm']:
+ data['password'] = ''
+ data['confirm'] = None
+ print("Password and confirm don't match!")
+ while not data['email']:
+ data['email'] = input(' EMail: ')
+ code, result = self.post_to_server(data)
+ if code != 200:
+ logger.info('server response (%s): %s', code, result)
+ else:
+ logger.info('you will receive an email shortly; follow the '
+ 'instructions in it to complete registration.')
+ elif choice == '3':
+ data = {':action': 'password_reset'}
+ data['email'] = ''
+ while not data['email']:
+ data['email'] = input('Your email address: ')
+ code, result = self.post_to_server(data)
+ logger.info('server response (%s): %s', code, result)
+
+ def build_post_data(self, action):
+ # figure the data to send - the metadata plus some additional
+ # information used by the package server
+ data = self.distribution.metadata.todict()
+ data[':action'] = action
+ return data
+
+ # XXX to be refactored with upload.upload_file
+ def post_to_server(self, data, auth=None):
+ ''' Post a query to the server, and return a string response.
+ '''
+ if 'name' in data:
+ logger.info('Registering %s to %s', data['name'], self.repository)
+ # Build up the MIME payload for the urllib2 POST data
+ content_type, body = encode_multipart(data.items(), [])
+
+ # build the Request
+ headers = {
+ 'Content-type': content_type,
+ 'Content-length': str(len(body))
+ }
+ req = urllib.request.Request(self.repository, body, headers)
+
+ # handle HTTP and include the Basic Auth handler
+ opener = urllib.request.build_opener(
+ urllib.request.HTTPBasicAuthHandler(password_mgr=auth)
+ )
+ data = ''
+ try:
+ result = opener.open(req)
+ except urllib.error.HTTPError as e:
+ if self.show_response:
+ data = e.fp.read()
+ result = e.code, e.msg
+ except urllib.error.URLError as e:
+ result = 500, str(e)
+ else:
+ if self.show_response:
+ data = result.read()
+ result = 200, 'OK'
+ if self.show_response:
+ dashes = '-' * 75
+            logger.info('%s\n%s\n%s', dashes, data, dashes)
+
+ return result
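
A sketch of the Basic-auth wiring that send_metadata() and
post_to_server() use together, with placeholder credentials and a
hypothetical repository URL; no request is actually sent here:

    import urllib.parse
    import urllib.request

    repository = 'https://example.org/pypi'
    auth = urllib.request.HTTPPasswordMgr()
    host = urllib.parse.urlparse(repository)[1]
    auth.add_password('pypi', host, 'fred', 'sekrit')
    opener = urllib.request.build_opener(
        urllib.request.HTTPBasicAuthHandler(password_mgr=auth))
    # opener.open(request) would now answer 401 challenges for that realm
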
diff --git a/Lib/packaging/command/sdist.py b/Lib/packaging/command/sdist.py
new file mode 100644
index 0000000..d399981
--- /dev/null
+++ b/Lib/packaging/command/sdist.py
@@ -0,0 +1,347 @@
+"""Create a source distribution."""
+
+import os
+import re
+import sys
+from io import StringIO
+from shutil import get_archive_formats, rmtree
+
+from packaging import logger
+from packaging.util import resolve_name
+from packaging.errors import (PackagingPlatformError, PackagingOptionError,
+ PackagingModuleError, PackagingFileError)
+from packaging.command import get_command_names
+from packaging.command.cmd import Command
+from packaging.manifest import Manifest
+
+
+def show_formats():
+ """Print all possible values for the 'formats' option (used by
+ the "--help-formats" command-line option).
+ """
+ from packaging.fancy_getopt import FancyGetopt
+ formats = sorted(('formats=' + name, None, desc)
+ for name, desc in get_archive_formats())
+ FancyGetopt(formats).print_help(
+ "List of available source distribution formats:")
+
+# a \ followed by some spaces + EOL
+_COLLAPSE_PATTERN = re.compile(r'\\\s*\n', re.M)
+# a comment line or a blank line
+_COMMENTED_LINE = re.compile(r'^#.*\n$|^\s*\n$', re.M)
+
+
+class sdist(Command):
+
+ description = "create a source distribution (tarball, zip file, etc.)"
+
+ user_options = [
+ ('manifest=', 'm',
+ "name of manifest file [default: MANIFEST]"),
+ ('use-defaults', None,
+ "include the default file set in the manifest "
+ "[default; disable with --no-defaults]"),
+ ('no-defaults', None,
+ "don't include the default file set"),
+ ('prune', None,
+ "specifically exclude files/directories that should not be "
+ "distributed (build tree, RCS/CVS dirs, etc.) "
+ "[default; disable with --no-prune]"),
+ ('no-prune', None,
+ "don't automatically exclude anything"),
+ ('manifest-only', 'o',
+         "just regenerate the manifest and then stop"),
+ ('formats=', None,
+ "formats for source distribution (comma-separated list)"),
+ ('keep-temp', 'k',
+ "keep the distribution tree around after creating " +
+ "archive file(s)"),
+ ('dist-dir=', 'd',
+ "directory to put the source distribution archive(s) in "
+ "[default: dist]"),
+ ('check-metadata', None,
+ "Ensure that all required elements of metadata "
+ "are supplied. Warn if any missing. [default]"),
+ ('owner=', 'u',
+ "Owner name used when creating a tar file [default: current user]"),
+ ('group=', 'g',
+ "Group name used when creating a tar file [default: current group]"),
+ ('manifest-builders=', None,
+ "manifest builders (comma-separated list)"),
+ ]
+
+ boolean_options = ['use-defaults', 'prune',
+ 'manifest-only', 'keep-temp', 'check-metadata']
+
+ help_options = [
+ ('help-formats', None,
+ "list available distribution formats", show_formats),
+ ]
+
+ negative_opt = {'no-defaults': 'use-defaults',
+ 'no-prune': 'prune'}
+
+ default_format = {'posix': 'gztar',
+ 'nt': 'zip'}
+
+ def initialize_options(self):
+ self.manifest = None
+ # 'use_defaults': if true, we will include the default file set
+ # in the manifest
+ self.use_defaults = True
+ self.prune = True
+ self.manifest_only = False
+ self.formats = None
+ self.keep_temp = False
+ self.dist_dir = None
+
+ self.archive_files = None
+ self.metadata_check = True
+ self.owner = None
+ self.group = None
+ self.filelist = None
+ self.manifest_builders = None
+
+ def _check_archive_formats(self, formats):
+ supported_formats = [name for name, desc in get_archive_formats()]
+ for format in formats:
+ if format not in supported_formats:
+ return format
+ return None
+
+ def finalize_options(self):
+ if self.manifest is None:
+ self.manifest = "MANIFEST"
+
+ self.ensure_string_list('formats')
+ if self.formats is None:
+ try:
+ self.formats = [self.default_format[os.name]]
+ except KeyError:
+ raise PackagingPlatformError("don't know how to create source "
+ "distributions on platform %s" % os.name)
+
+ bad_format = self._check_archive_formats(self.formats)
+ if bad_format:
+            raise PackagingOptionError("unknown archive format '%s'"
+                                       % bad_format)
+
+ if self.dist_dir is None:
+ self.dist_dir = "dist"
+
+ if self.filelist is None:
+ self.filelist = Manifest()
+
+ if self.manifest_builders is None:
+ self.manifest_builders = []
+ else:
+ if isinstance(self.manifest_builders, str):
+ self.manifest_builders = self.manifest_builders.split(',')
+ builders = []
+ for builder in self.manifest_builders:
+ builder = builder.strip()
+ if builder == '':
+ continue
+ try:
+ builder = resolve_name(builder)
+ except ImportError as e:
+ raise PackagingModuleError(e)
+
+ builders.append(builder)
+
+ self.manifest_builders = builders
+
+ def run(self):
+ # 'filelist' contains the list of files that will make up the
+ # manifest
+ self.filelist.clear()
+
+ # Check the package metadata
+ if self.metadata_check:
+ self.run_command('check')
+
+ # Do whatever it takes to get the list of files to process
+ # (process the manifest template, read an existing manifest,
+ # whatever). File list is accumulated in 'self.filelist'.
+ self.get_file_list()
+
+ # If user just wanted us to regenerate the manifest, stop now.
+ if self.manifest_only:
+ return
+
+ # Otherwise, go ahead and create the source distribution tarball,
+ # or zipfile, or whatever.
+ self.make_distribution()
+
+ def get_file_list(self):
+ """Figure out the list of files to include in the source
+ distribution, and put it in 'self.filelist'. This might involve
+ reading the manifest template (and writing the manifest), or just
+ reading the manifest, or just using the default file set -- it all
+ depends on the user's options.
+ """
+ template_exists = len(self.distribution.extra_files) > 0
+ if not template_exists:
+ logger.warning('%s: using default file list',
+ self.get_command_name())
+ self.filelist.findall()
+
+ if self.use_defaults:
+ self.add_defaults()
+ if template_exists:
+ template = '\n'.join(self.distribution.extra_files)
+ self.filelist.read_template(StringIO(template))
+
+ # call manifest builders, if any.
+ for builder in self.manifest_builders:
+ builder(self.distribution, self.filelist)
+
+ if self.prune:
+ self.prune_file_list()
+
+ self.filelist.write(self.manifest)
+
+ def add_defaults(self):
+ """Add all default files to self.filelist.
+
+        In addition to the setup.cfg file, this will include all files
+        returned by the get_source_files method of every registered
+        command.  This will find Python modules and packages, data files
+        listed in package_data, data_files and extra_files, scripts, and
+        C sources of extension modules or C libraries (headers are
+        currently not included).
+ """
+ if os.path.exists('setup.cfg'):
+ self.filelist.append('setup.cfg')
+ else:
+ logger.warning("%s: standard 'setup.cfg' file not found",
+ self.get_command_name())
+
+ for cmd_name in get_command_names():
+ try:
+ cmd_obj = self.get_finalized_command(cmd_name)
+ except PackagingOptionError:
+ pass
+ else:
+ self.filelist.extend(cmd_obj.get_source_files())
+
+ def prune_file_list(self):
+ """Prune off branches that might slip into the file list as created
+ by 'read_template()', but really don't belong there:
+ * the build tree (typically "build")
+ * the release tree itself (only an issue if we ran "sdist"
+ previously with --keep-temp, or it aborted)
+ * any RCS, CVS, .svn, .hg, .git, .bzr, _darcs directories
+ """
+ build = self.get_finalized_command('build')
+ base_dir = self.distribution.get_fullname()
+
+ self.filelist.exclude_pattern(None, prefix=build.build_base)
+ self.filelist.exclude_pattern(None, prefix=base_dir)
+
+ # pruning out vcs directories
+ # both separators are used under win32
+ if sys.platform == 'win32':
+ seps = r'/|\\'
+ else:
+ seps = '/'
+
+ vcs_dirs = ['RCS', 'CVS', r'\.svn', r'\.hg', r'\.git', r'\.bzr',
+ '_darcs']
+ vcs_ptrn = r'(^|%s)(%s)(%s).*' % (seps, '|'.join(vcs_dirs), seps)
+ self.filelist.exclude_pattern(vcs_ptrn, is_regex=True)
+
+ def make_release_tree(self, base_dir, files):
+ """Create the directory tree that will become the source
+ distribution archive. All directories implied by the filenames in
+ 'files' are created under 'base_dir', and then we hard link or copy
+ (if hard linking is unavailable) those files into place.
+ Essentially, this duplicates the developer's source tree, but in a
+ directory named after the distribution, containing only the files
+ to be distributed.
+ """
+ # Create all the directories under 'base_dir' necessary to
+ # put 'files' there; the 'mkpath()' is just so we don't die
+ # if the manifest happens to be empty.
+ self.mkpath(base_dir)
+ self.create_tree(base_dir, files, dry_run=self.dry_run)
+
+ # And walk over the list of files, either making a hard link (if
+ # os.link exists) to each one that doesn't already exist in its
+ # corresponding location under 'base_dir', or copying each file
+ # that's out-of-date in 'base_dir'. (Usually, all files will be
+ # out-of-date, because by default we blow away 'base_dir' when
+ # we're done making the distribution archives.)
+
+ if hasattr(os, 'link'): # can make hard links on this system
+ link = 'hard'
+ msg = "making hard links in %s..." % base_dir
+ else: # nope, have to copy
+ link = None
+ msg = "copying files to %s..." % base_dir
+
+ if not files:
+ logger.warning("no files to distribute -- empty manifest?")
+ else:
+ logger.info(msg)
+
+ for file in self.distribution.metadata.requires_files:
+ if file not in files:
+ msg = "'%s' must be included explicitly in 'extra_files'" \
+ % file
+ raise PackagingFileError(msg)
+
+ for file in files:
+ if not os.path.isfile(file):
+ logger.warning("'%s' not a regular file -- skipping", file)
+ else:
+ dest = os.path.join(base_dir, file)
+ self.copy_file(file, dest, link=link)
+
+ self.distribution.metadata.write(os.path.join(base_dir, 'PKG-INFO'))
+
+ def make_distribution(self):
+ """Create the source distribution(s). First, we create the release
+ tree with 'make_release_tree()'; then, we create all required
+ archive files (according to 'self.formats') from the release tree.
+ Finally, we clean up by blowing away the release tree (unless
+ 'self.keep_temp' is true). The list of archive files created is
+ stored so it can be retrieved later by 'get_archive_files()'.
+ """
+ # Don't warn about missing metadata here -- should be (and is!)
+ # done elsewhere.
+ base_dir = self.distribution.get_fullname()
+ base_name = os.path.join(self.dist_dir, base_dir)
+
+ self.make_release_tree(base_dir, self.filelist.files)
+ archive_files = [] # remember names of files we create
+ # tar archive must be created last to avoid overwrite and remove
+ if 'tar' in self.formats:
+ self.formats.append(self.formats.pop(self.formats.index('tar')))
+
+ for fmt in self.formats:
+ file = self.make_archive(base_name, fmt, base_dir=base_dir,
+ owner=self.owner, group=self.group)
+ archive_files.append(file)
+ self.distribution.dist_files.append(('sdist', '', file))
+
+ self.archive_files = archive_files
+
+ if not self.keep_temp:
+ if self.dry_run:
+ logger.info('removing %s', base_dir)
+ else:
+ rmtree(base_dir)
+
+ def get_archive_files(self):
+ """Return the list of archive files created when the command
+ was run, or None if the command hasn't run yet.
+ """
+ return self.archive_files
+
+ def create_tree(self, base_dir, files, mode=0o777, dry_run=False):
+ need_dir = set()
+ for file in files:
+ need_dir.add(os.path.join(base_dir, os.path.dirname(file)))
+
+ # Now create them
+ for dir in sorted(need_dir):
+ self.mkpath(dir, mode, dry_run=dry_run)
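
The VCS pruning regex built in prune_file_list() can be exercised on its
own; exclude_pattern() applies it with re.search in the distutils
lineage, so a match anywhere in the path excludes the file (the sample
paths are hypothetical):

    import re

    seps = '/'
    vcs_dirs = ['RCS', 'CVS', r'\.svn', r'\.hg', r'\.git', r'\.bzr',
                '_darcs']
    vcs_ptrn = r'(^|%s)(%s)(%s).*' % (seps, '|'.join(vcs_dirs), seps)
    for path in ('src/module.py', '.git/config', 'pkg/.svn/entries'):
        print(path, bool(re.search(vcs_ptrn, path)))
    # -> False, True, True
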
diff --git a/Lib/packaging/command/test.py b/Lib/packaging/command/test.py
new file mode 100644
index 0000000..4d5348f
--- /dev/null
+++ b/Lib/packaging/command/test.py
@@ -0,0 +1,80 @@
+"""Run the project's test suite."""
+
+import os
+import sys
+import logging
+import unittest
+
+from packaging import logger
+from packaging.command.cmd import Command
+from packaging.database import get_distribution
+from packaging.errors import PackagingOptionError
+from packaging.util import resolve_name
+
+
+class test(Command):
+
+ description = "run the project's test suite"
+
+ user_options = [
+ ('suite=', 's',
+ "test suite to run (for example: 'some_module.test_suite')"),
+ ('runner=', None,
+ "test runner to be called."),
+ ('tests-require=', None,
+ "list of distributions required to run the test suite."),
+ ]
+
+ def initialize_options(self):
+ self.suite = None
+ self.runner = None
+ self.tests_require = []
+
+ def finalize_options(self):
+ self.build_lib = self.get_finalized_command("build").build_lib
+ for requirement in self.tests_require:
+ if get_distribution(requirement) is None:
+ logger.warning("test dependency %s is not installed, "
+ "tests may fail", requirement)
+ if (not self.suite and not self.runner and
+ self.get_ut_with_discovery() is None):
+ raise PackagingOptionError(
+ "no test discovery available, please give a 'suite' or "
+ "'runner' option or install unittest2")
+
+ def get_ut_with_discovery(self):
+ if hasattr(unittest.TestLoader, "discover"):
+ return unittest
+ else:
+ try:
+ import unittest2
+ return unittest2
+ except ImportError:
+ return None
+
+ def run(self):
+ prev_syspath = sys.path[:]
+ try:
+ # build release
+ build = self.reinitialize_command('build')
+ self.run_command('build')
+ sys.path.insert(0, build.build_lib)
+
+ # XXX maybe we could pass the verbose argument of pysetup here
+ logger = logging.getLogger('packaging')
+            verbose = logger.getEffectiveLevel() <= logging.DEBUG
+ verbosity = verbose + 1
+
+ # run the tests
+ if self.runner:
+ resolve_name(self.runner)()
+ elif self.suite:
+ runner = unittest.TextTestRunner(verbosity=verbosity)
+ runner.run(resolve_name(self.suite)())
+ elif self.get_ut_with_discovery():
+ ut = self.get_ut_with_discovery()
+ test_suite = ut.TestLoader().discover(os.curdir)
+ runner = ut.TextTestRunner(verbosity=verbosity)
+ runner.run(test_suite)
+ finally:
+ sys.path[:] = prev_syspath
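
The discovery fallback in run() amounts to the stock unittest protocol;
run from a project root, this is roughly what the command does once the
build tree is on sys.path:

    import os
    import unittest

    suite = unittest.TestLoader().discover(os.curdir)
    unittest.TextTestRunner(verbosity=1).run(suite)
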
diff --git a/Lib/packaging/command/upload.py b/Lib/packaging/command/upload.py
new file mode 100644
index 0000000..f56d2c6
--- /dev/null
+++ b/Lib/packaging/command/upload.py
@@ -0,0 +1,168 @@
+"""Upload a distribution to a project index."""
+
+import os
+import socket
+import logging
+import platform
+import urllib.parse
+from base64 import standard_b64encode
+from hashlib import md5
+from urllib.error import HTTPError
+from urllib.request import urlopen, Request
+
+from packaging import logger
+from packaging.errors import PackagingOptionError
+from packaging.util import (spawn, read_pypirc, DEFAULT_REPOSITORY,
+ DEFAULT_REALM, encode_multipart)
+from packaging.command.cmd import Command
+
+
+class upload(Command):
+
+ description = "upload distribution to PyPI"
+
+ user_options = [
+ ('repository=', 'r',
+ "repository URL [default: %s]" % DEFAULT_REPOSITORY),
+ ('show-response', None,
+ "display full response text from server"),
+ ('sign', 's',
+ "sign files to upload using gpg"),
+ ('identity=', 'i',
+ "GPG identity used to sign files"),
+ ('upload-docs', None,
+ "upload documentation too"),
+ ]
+
+ boolean_options = ['show-response', 'sign']
+
+ def initialize_options(self):
+ self.repository = None
+ self.realm = None
+ self.show_response = False
+ self.username = ''
+ self.password = ''
+ self.sign = False
+ self.identity = None
+ self.upload_docs = False
+
+ def finalize_options(self):
+ if self.repository is None:
+ self.repository = DEFAULT_REPOSITORY
+ if self.realm is None:
+ self.realm = DEFAULT_REALM
+ if self.identity and not self.sign:
+ raise PackagingOptionError(
+ "Must use --sign for --identity to have meaning")
+ config = read_pypirc(self.repository, self.realm)
+ if config != {}:
+ self.username = config['username']
+ self.password = config['password']
+ self.repository = config['repository']
+ self.realm = config['realm']
+
+ # getting the password from the distribution
+ # if previously set by the register command
+ if not self.password and self.distribution.password:
+ self.password = self.distribution.password
+
+ def run(self):
+ if not self.distribution.dist_files:
+ raise PackagingOptionError(
+ "No dist file created in earlier command")
+ for command, pyversion, filename in self.distribution.dist_files:
+ self.upload_file(command, pyversion, filename)
+ if self.upload_docs:
+ upload_docs = self.get_finalized_command("upload_docs")
+ upload_docs.repository = self.repository
+ upload_docs.username = self.username
+ upload_docs.password = self.password
+ upload_docs.run()
+
+ # XXX to be refactored with register.post_to_server
+ def upload_file(self, command, pyversion, filename):
+ # Makes sure the repository URL is compliant
+ scheme, netloc, url, params, query, fragments = \
+ urllib.parse.urlparse(self.repository)
+ if params or query or fragments:
+ raise AssertionError("Incompatible url %s" % self.repository)
+
+ if scheme not in ('http', 'https'):
+ raise AssertionError("unsupported scheme " + scheme)
+
+ # Sign if requested
+ if self.sign:
+ gpg_args = ["gpg", "--detach-sign", "-a", filename]
+ if self.identity:
+ gpg_args[2:2] = ["--local-user", self.identity]
+ spawn(gpg_args,
+ dry_run=self.dry_run)
+
+ # Fill in the data - send all the metadata in case we need to
+ # register a new release
+ with open(filename, 'rb') as f:
+ content = f.read()
+
+ data = self.distribution.metadata.todict()
+
+ # extra upload infos
+ data[':action'] = 'file_upload'
+        data['protocol_version'] = '1'
+ data['content'] = (os.path.basename(filename), content)
+ data['filetype'] = command
+ data['pyversion'] = pyversion
+ data['md5_digest'] = md5(content).hexdigest()
+
+ if command == 'bdist_dumb':
+ data['comment'] = 'built for %s' % platform.platform(terse=True)
+
+ if self.sign:
+ with open(filename + '.asc') as fp:
+ sig = fp.read()
+ data['gpg_signature'] = [
+ (os.path.basename(filename) + ".asc", sig)]
+
+ # set up the authentication
+ # The exact encoding of the authentication string is debated.
+ # Anyway PyPI only accepts ascii for both username or password.
+ user_pass = (self.username + ":" + self.password).encode('ascii')
+ auth = b"Basic " + standard_b64encode(user_pass)
+
+ # Build up the MIME payload for the POST data
+ files = []
+ for key in ('content', 'gpg_signature'):
+ if key in data:
+ filename_, value = data.pop(key)
+ files.append((key, filename_, value))
+
+ content_type, body = encode_multipart(data.items(), files)
+
+ logger.info("Submitting %s to %s", filename, self.repository)
+
+ # build the Request
+ headers = {'Content-type': content_type,
+ 'Content-length': str(len(body)),
+ 'Authorization': auth}
+
+ request = Request(self.repository, body, headers)
+ # send the data
+ try:
+ result = urlopen(request)
+ status = result.code
+ reason = result.msg
+        except HTTPError as e:
+            # catch HTTPError before socket.error: since Python 3.3
+            # socket.error is OSError, from which HTTPError derives, so
+            # the more general clause would swallow it; keep the
+            # file-like error object so show_response can read the body
+            status = e.code
+            reason = e.msg
+            result = e
+        except socket.error as e:
+            logger.error(e)
+            return
+
+ if status == 200:
+ logger.info('Server response (%s): %s', status, reason)
+ else:
+ logger.error('Upload failed (%s): %s', status, reason)
+
+ if self.show_response and logger.isEnabledFor(logging.INFO):
+ sep = '-' * 75
+ logger.info('%s\n%s\n%s', sep, result.read().decode(), sep)
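
The gpg invocation built in upload_file() splices --local-user in front
of the -a flag; a quick check of the argument list it produces (filename
and identity are placeholders):

    filename = 'dist/example-1.0.tar.gz'
    identity = 'fred@example.org'
    gpg_args = ["gpg", "--detach-sign", "-a", filename]
    if identity:
        gpg_args[2:2] = ["--local-user", identity]
    print(gpg_args)
    # -> ['gpg', '--detach-sign', '--local-user', 'fred@example.org',
    #     '-a', 'dist/example-1.0.tar.gz']
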
diff --git a/Lib/packaging/command/upload_docs.py b/Lib/packaging/command/upload_docs.py
new file mode 100644
index 0000000..30e37b5
--- /dev/null
+++ b/Lib/packaging/command/upload_docs.py
@@ -0,0 +1,131 @@
+"""Upload HTML documentation to a project index."""
+
+import os
+import base64
+import socket
+import zipfile
+import logging
+import http.client
+import urllib.parse
+from io import BytesIO
+
+from packaging import logger
+from packaging.util import (read_pypirc, DEFAULT_REPOSITORY, DEFAULT_REALM,
+ encode_multipart)
+from packaging.errors import PackagingFileError
+from packaging.command.cmd import Command
+
+
+def zip_dir(directory):
+    """Recursively compress the contents of 'directory' into a BytesIO
+    object."""
+ destination = BytesIO()
+ with zipfile.ZipFile(destination, "w") as zip_file:
+ for root, dirs, files in os.walk(directory):
+ for name in files:
+ full = os.path.join(root, name)
+ relative = root[len(directory):].lstrip(os.path.sep)
+ dest = os.path.join(relative, name)
+ zip_file.write(full, dest)
+ return destination
+
+
+class upload_docs(Command):
+
+ description = "upload HTML documentation to PyPI"
+
+ user_options = [
+ ('repository=', 'r',
+ "repository URL [default: %s]" % DEFAULT_REPOSITORY),
+ ('show-response', None,
+ "display full response text from server"),
+ ('upload-dir=', None,
+ "directory to upload"),
+ ]
+
+ def initialize_options(self):
+ self.repository = None
+ self.realm = None
+ self.show_response = False
+ self.upload_dir = None
+ self.username = ''
+ self.password = ''
+
+ def finalize_options(self):
+ if self.repository is None:
+ self.repository = DEFAULT_REPOSITORY
+ if self.realm is None:
+ self.realm = DEFAULT_REALM
+ if self.upload_dir is None:
+ build = self.get_finalized_command('build')
+ self.upload_dir = os.path.join(build.build_base, "docs")
+ if not os.path.isdir(self.upload_dir):
+ self.upload_dir = os.path.join(build.build_base, "doc")
+ logger.info('Using upload directory %s', self.upload_dir)
+ self.verify_upload_dir(self.upload_dir)
+ config = read_pypirc(self.repository, self.realm)
+ if config != {}:
+ self.username = config['username']
+ self.password = config['password']
+ self.repository = config['repository']
+ self.realm = config['realm']
+
+ def verify_upload_dir(self, upload_dir):
+ self.ensure_dirname('upload_dir')
+ index_location = os.path.join(upload_dir, "index.html")
+ if not os.path.exists(index_location):
+            mesg = "No 'index.html' found in docs directory (%s)"
+ raise PackagingFileError(mesg % upload_dir)
+
+ def run(self):
+ name = self.distribution.metadata['Name']
+ version = self.distribution.metadata['Version']
+ zip_file = zip_dir(self.upload_dir)
+
+ fields = [(':action', 'doc_upload'),
+ ('name', name), ('version', version)]
+ files = [('content', name, zip_file.getvalue())]
+ content_type, body = encode_multipart(fields, files)
+
+ credentials = self.username + ':' + self.password
+ # FIXME should use explicit encoding
+ auth = b"Basic " + base64.encodebytes(credentials.encode()).strip()
+
+ logger.info("Submitting documentation to %s", self.repository)
+
+ scheme, netloc, url, params, query, fragments = urllib.parse.urlparse(
+ self.repository)
+ if scheme == "http":
+ conn = http.client.HTTPConnection(netloc)
+ elif scheme == "https":
+ conn = http.client.HTTPSConnection(netloc)
+ else:
+ raise AssertionError("unsupported scheme %r" % scheme)
+
+ try:
+ conn.connect()
+ conn.putrequest("POST", url)
+ conn.putheader('Content-type', content_type)
+ conn.putheader('Content-length', str(len(body)))
+ conn.putheader('Authorization', auth)
+ conn.endheaders()
+ conn.send(body)
+
+ except socket.error as e:
+ logger.error(e)
+ return
+
+ r = conn.getresponse()
+
+ if r.status == 200:
+ logger.info('Server response (%s): %s', r.status, r.reason)
+ elif r.status == 301:
+ location = r.getheader('Location')
+ if location is None:
+ location = 'http://packages.python.org/%s/' % name
+ logger.info('Upload successful. Visit %s', location)
+ else:
+ logger.error('Upload failed (%s): %s', r.status, r.reason)
+
+ if self.show_response and logger.isEnabledFor(logging.INFO):
+ sep = '-' * 75
+ logger.info('%s\n%s\n%s', sep, r.read().decode('utf-8'), sep)
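
Once this patch is applied, zip_dir() can be used on its own; the
returned BytesIO holds a zip archive whose getvalue() is exactly the
payload sent as the 'content' field (the docs path is hypothetical):

    import zipfile
    from packaging.command.upload_docs import zip_dir

    buf = zip_dir('build/docs')
    with zipfile.ZipFile(buf) as archive:
        print(archive.namelist())      # e.g. ['index.html', ...]
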
diff --git a/Lib/packaging/command/wininst-10.0-amd64.exe b/Lib/packaging/command/wininst-10.0-amd64.exe
new file mode 100644
index 0000000..11f98cd
--- /dev/null
+++ b/Lib/packaging/command/wininst-10.0-amd64.exe
Binary files differ
diff --git a/Lib/packaging/command/wininst-10.0.exe b/Lib/packaging/command/wininst-10.0.exe
new file mode 100644
index 0000000..8ac6e19
--- /dev/null
+++ b/Lib/packaging/command/wininst-10.0.exe
Binary files differ
diff --git a/Lib/packaging/command/wininst-6.0.exe b/Lib/packaging/command/wininst-6.0.exe
new file mode 100644
index 0000000..f57c855
--- /dev/null
+++ b/Lib/packaging/command/wininst-6.0.exe
Binary files differ
diff --git a/Lib/packaging/command/wininst-7.1.exe b/Lib/packaging/command/wininst-7.1.exe
new file mode 100644
index 0000000..1433bc1
--- /dev/null
+++ b/Lib/packaging/command/wininst-7.1.exe
Binary files differ
diff --git a/Lib/packaging/command/wininst-8.0.exe b/Lib/packaging/command/wininst-8.0.exe
new file mode 100644
index 0000000..7403bfa
--- /dev/null
+++ b/Lib/packaging/command/wininst-8.0.exe
Binary files differ
diff --git a/Lib/packaging/command/wininst-9.0-amd64.exe b/Lib/packaging/command/wininst-9.0-amd64.exe
new file mode 100644
index 0000000..11d8011
--- /dev/null
+++ b/Lib/packaging/command/wininst-9.0-amd64.exe
Binary files differ
diff --git a/Lib/packaging/command/wininst-9.0.exe b/Lib/packaging/command/wininst-9.0.exe
new file mode 100644
index 0000000..dadb31d
--- /dev/null
+++ b/Lib/packaging/command/wininst-9.0.exe
Binary files differ
diff --git a/Lib/packaging/compat.py b/Lib/packaging/compat.py
new file mode 100644
index 0000000..bfce92d
--- /dev/null
+++ b/Lib/packaging/compat.py
@@ -0,0 +1,50 @@
+"""Support for build-time 2to3 conversion."""
+
+from packaging import logger
+
+
+# XXX Having two classes with the same name is not a good thing.
+# XXX 2to3-related code should move from util to this module
+
+try:
+ from packaging.util import Mixin2to3 as _Mixin2to3
+ _CONVERT = True
+ _KLASS = _Mixin2to3
+except ImportError:
+ _CONVERT = False
+ _KLASS = object
+
+__all__ = ['Mixin2to3']
+
+
+class Mixin2to3(_KLASS):
+    """The base class which can be used for refactoring.
+
+    When 2to3 support is available (i.e. packaging.util.Mixin2to3 could
+    be imported), the _run_2to3 method performs the conversion;
+    otherwise a _run_2to3 stub is provided that does nothing.
+    """
+ if _CONVERT:
+
+ def _run_2to3(self, files=[], doctests=[], fixers=[]):
+            """Take a list of files and doctests, and convert them.
+
+            - First, the files which contain the code (`files`) are
+              converted.
+            - Second, the doctests in `files` are converted.
+            - Third, the doctests in `doctests` are converted.
+            """
+ if fixers:
+ self.fixer_names = fixers
+
+ if files:
+ logger.info('converting Python code and doctests')
+ _KLASS.run_2to3(self, files)
+ _KLASS.run_2to3(self, files, doctests_only=True)
+
+ if doctests:
+ logger.info('converting doctests in text files')
+ _KLASS.run_2to3(self, doctests, doctests_only=True)
+ else:
+        # If 2to3 support is unavailable, there is nothing to do.
+
+ def _run_2to3(self, files=[], doctests=[], fixers=[]):
+ pass
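
A minimal consumer of the mixin, assuming the patch is applied: a
command class can call _run_2to3 unconditionally and rely on the stub
when 2to3 support is missing (the class and file names are
illustrative):

    from packaging.compat import Mixin2to3

    class build_converted(Mixin2to3):
        def run(self):
            # no-op if packaging.util.Mixin2to3 could not be imported
            self._run_2to3(files=['example.py'])
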
diff --git a/Lib/packaging/compiler/__init__.py b/Lib/packaging/compiler/__init__.py
new file mode 100644
index 0000000..d8e02ce
--- /dev/null
+++ b/Lib/packaging/compiler/__init__.py
@@ -0,0 +1,274 @@
+"""Compiler abstraction model used by packaging.
+
+An abstract base class is defined in the ccompiler submodule, and
+concrete implementations suitable for various platforms are defined in
+the other submodules. The extension module is also placed in this
+package.
+
+In general, code should not instantiate compiler classes directly but
+use the new_compiler and customize_compiler functions provided in this
+module.
+
+The compiler system has a registration API: get_default_compiler,
+set_compiler, show_compilers.
+"""
+
+import os
+import sys
+import re
+import sysconfig
+
+from packaging.util import resolve_name
+from packaging.errors import PackagingPlatformError
+from packaging import logger
+
+def customize_compiler(compiler):
+ """Do any platform-specific customization of a CCompiler instance.
+
+ Mainly needed on Unix, so we can plug in the information that
+ varies across Unices and is stored in Python's Makefile.
+ """
+ if compiler.name == "unix":
+ cc, cxx, opt, cflags, ccshared, ldshared, so_ext, ar, ar_flags = (
+ sysconfig.get_config_vars('CC', 'CXX', 'OPT', 'CFLAGS',
+ 'CCSHARED', 'LDSHARED', 'SO', 'AR',
+ 'ARFLAGS'))
+
+ if 'CC' in os.environ:
+ cc = os.environ['CC']
+ if 'CXX' in os.environ:
+ cxx = os.environ['CXX']
+ if 'LDSHARED' in os.environ:
+ ldshared = os.environ['LDSHARED']
+ if 'CPP' in os.environ:
+ cpp = os.environ['CPP']
+ else:
+ cpp = cc + " -E" # not always
+ if 'LDFLAGS' in os.environ:
+ ldshared = ldshared + ' ' + os.environ['LDFLAGS']
+ if 'CFLAGS' in os.environ:
+ cflags = opt + ' ' + os.environ['CFLAGS']
+ ldshared = ldshared + ' ' + os.environ['CFLAGS']
+ if 'CPPFLAGS' in os.environ:
+ cpp = cpp + ' ' + os.environ['CPPFLAGS']
+ cflags = cflags + ' ' + os.environ['CPPFLAGS']
+ ldshared = ldshared + ' ' + os.environ['CPPFLAGS']
+ if 'AR' in os.environ:
+ ar = os.environ['AR']
+ if 'ARFLAGS' in os.environ:
+ archiver = ar + ' ' + os.environ['ARFLAGS']
+ else:
+ if ar_flags is not None:
+ archiver = ar + ' ' + ar_flags
+ else:
+ # see if its the proper default value
+ # mmm I don't want to backport the makefile
+ archiver = ar + ' rc'
+
+ cc_cmd = cc + ' ' + cflags
+ compiler.set_executables(
+ preprocessor=cpp,
+ compiler=cc_cmd,
+ compiler_so=cc_cmd + ' ' + ccshared,
+ compiler_cxx=cxx,
+ linker_so=ldshared,
+ linker_exe=cc,
+ archiver=archiver)
+
+ compiler.shared_lib_extension = so_ext
+
+
+# Map a sys.platform/os.name ('posix', 'nt') to the default compiler
+# type for that platform. Keys are interpreted as re match
+# patterns. Order is important; platform mappings are preferred over
+# OS names.
+_default_compilers = (
+ # Platform string mappings
+
+ # on a cygwin built python we can use gcc like an ordinary UNIXish
+ # compiler
+ ('cygwin.*', 'unix'),
+
+ # OS name mappings
+ ('posix', 'unix'),
+ ('nt', 'msvc'),
+)
+
+def get_default_compiler(osname=None, platform=None):
+ """ Determine the default compiler to use for the given platform.
+
+ osname should be one of the standard Python OS names (i.e. the
+ ones returned by os.name) and platform the common value
+ returned by sys.platform for the platform in question.
+
+ The default values are os.name and sys.platform in case the
+ parameters are not given.
+
+ """
+ if osname is None:
+ osname = os.name
+ if platform is None:
+ platform = sys.platform
+ for pattern, compiler in _default_compilers:
+ if re.match(pattern, platform) is not None or \
+ re.match(pattern, osname) is not None:
+ return compiler
+ # Defaults to Unix compiler
+ return 'unix'
+
+
+# compiler mapping
+# XXX useful to expose them? (i.e. get_compiler_names)
+_COMPILERS = {
+ 'unix': 'packaging.compiler.unixccompiler.UnixCCompiler',
+ 'msvc': 'packaging.compiler.msvccompiler.MSVCCompiler',
+ 'cygwin': 'packaging.compiler.cygwinccompiler.CygwinCCompiler',
+ 'mingw32': 'packaging.compiler.cygwinccompiler.Mingw32CCompiler',
+ 'bcpp': 'packaging.compiler.bcppcompiler.BCPPCompiler',
+}
+
+def set_compiler(location):
+ """Add or change a compiler"""
+ cls = resolve_name(location)
+ # XXX we want to check the class here
+ _COMPILERS[cls.name] = cls
+
+
+def show_compilers():
+ """Print list of available compilers (used by the "--help-compiler"
+ options to "build", "build_ext", "build_clib").
+ """
+ from packaging.fancy_getopt import FancyGetopt
+ compilers = []
+
+ for name, cls in _COMPILERS.items():
+ if isinstance(cls, str):
+ cls = resolve_name(cls)
+ _COMPILERS[name] = cls
+
+ compilers.append(("compiler=" + name, None, cls.description))
+
+ compilers.sort()
+ pretty_printer = FancyGetopt(compilers)
+ pretty_printer.print_help("List of available compilers:")
+
+
+def new_compiler(plat=None, compiler=None, dry_run=False, force=False):
+ """Generate an instance of some CCompiler subclass for the supplied
+ platform/compiler combination. 'plat' defaults to 'os.name'
+ (eg. 'posix', 'nt'), and 'compiler' defaults to the default compiler
+ for that platform. Currently only 'posix' and 'nt' are supported, and
+ the default compilers are "traditional Unix interface" (UnixCCompiler
+ class) and Visual C++ (MSVCCompiler class). Note that it's perfectly
+ possible to ask for a Unix compiler object under Windows, and a
+ Microsoft compiler object under Unix -- if you supply a value for
+ 'compiler', 'plat' is ignored.
+ """
+ if plat is None:
+ plat = os.name
+
+ try:
+ if compiler is None:
+ compiler = get_default_compiler(plat)
+
+ cls = _COMPILERS[compiler]
+ except KeyError:
+ msg = "don't know how to compile C/C++ code on platform '%s'" % plat
+ if compiler is not None:
+ msg = msg + " with '%s' compiler" % compiler
+ raise PackagingPlatformError(msg)
+
+ if isinstance(cls, str):
+ cls = resolve_name(cls)
+ _COMPILERS[compiler] = cls
+
+ return cls(dry_run, force)
+
+
+def gen_preprocess_options(macros, include_dirs):
+ """Generate C pre-processor options (-D, -U, -I) as used by at least
+ two types of compilers: the typical Unix compiler and Visual C++.
+ 'macros' is the usual thing, a list of 1- or 2-tuples, where (name,)
+ means undefine (-U) macro 'name', and (name,value) means define (-D)
+ macro 'name' to 'value'. 'include_dirs' is just a list of directory
+ names to be added to the header file search path (-I). Returns a list
+ of command-line options suitable for either Unix compilers or Visual
+ C++.
+ """
+ # XXX it would be nice (mainly aesthetic, and so we don't generate
+ # stupid-looking command lines) to go over 'macros' and eliminate
+ # redundant definitions/undefinitions (ie. ensure that only the
+ # latest mention of a particular macro winds up on the command
+ # line). I don't think it's essential, though, since most (all?)
+ # Unix C compilers only pay attention to the latest -D or -U
+ # mention of a macro on their command line. Similar situation for
+ # 'include_dirs'. I'm punting on both for now. Anyways, weeding out
+ # redundancies like this should probably be the province of
+ # CCompiler, since the data structures used are inherited from it
+ # and therefore common to all CCompiler classes.
+
+ pp_opts = []
+ for macro in macros:
+
+        if not (isinstance(macro, tuple) and 1 <= len(macro) <= 2):
+            raise TypeError(
+                "bad macro definition '%s': each element of 'macros' "
+                "list must be a 1- or 2-tuple" % macro)
+
+ if len(macro) == 1: # undefine this macro
+ pp_opts.append("-U%s" % macro[0])
+ elif len(macro) == 2:
+ if macro[1] is None: # define with no explicit value
+ pp_opts.append("-D%s" % macro[0])
+ else:
+ # XXX *don't* need to be clever about quoting the
+ # macro value here, because we're going to avoid the
+ # shell at all costs when we spawn the command!
+ pp_opts.append("-D%s=%s" % macro)
+
+ for dir in include_dirs:
+ pp_opts.append("-I%s" % dir)
+
+ return pp_opts
+
+
+def gen_lib_options(compiler, library_dirs, runtime_library_dirs, libraries):
+ """Generate linker options for searching library directories and
+ linking with specific libraries.
+
+ 'libraries' and 'library_dirs' are, respectively, lists of library names
+    (not filenames!) and search directories. Returns a list of command-line
+    options suitable for use with the given compiler.
+ """
+ lib_opts = []
+
+ for dir in library_dirs:
+ lib_opts.append(compiler.library_dir_option(dir))
+
+ for dir in runtime_library_dirs:
+ opt = compiler.runtime_library_dir_option(dir)
+ if isinstance(opt, list):
+ lib_opts.extend(opt)
+ else:
+ lib_opts.append(opt)
+
+ # XXX it's important that we *not* remove redundant library mentions!
+ # sometimes you really do have to say "-lfoo -lbar -lfoo" in order to
+ # resolve all symbols. I just hope we never have to say "-lfoo obj.o
+ # -lbar" to get things to work -- that's certainly a possibility, but a
+ # pretty nasty way to arrange your C code.
+
+ for lib in libraries:
+ lib_dir, lib_name = os.path.split(lib)
+ if lib_dir != '':
+ lib_file = compiler.find_library_file([lib_dir], lib_name)
+ if lib_file is not None:
+ lib_opts.append(lib_file)
+ else:
+                logger.warning("no library file corresponding to "
+                               "'%s' found (skipping)", lib)
+ else:
+ lib_opts.append(compiler.library_option(lib))
+
+ return lib_opts
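
gen_preprocess_options() can be checked in isolation; with the patch
applied, a macro list mixing plain defines, valued defines and undefines
maps to compiler flags like so:

    from packaging.compiler import gen_preprocess_options

    macros = [('NDEBUG', None), ('VERSION', '"1.0"'), ('OLD_API',)]
    print(gen_preprocess_options(macros, ['include']))
    # -> ['-DNDEBUG', '-DVERSION="1.0"', '-UOLD_API', '-Iinclude']
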
diff --git a/Lib/packaging/compiler/bcppcompiler.py b/Lib/packaging/compiler/bcppcompiler.py
new file mode 100644
index 0000000..06c758c
--- /dev/null
+++ b/Lib/packaging/compiler/bcppcompiler.py
@@ -0,0 +1,355 @@
+"""CCompiler implementation for the Borland C++ compiler."""
+
+# This implementation by Lyle Johnson, based on the original msvccompiler.py
+# module and using the directions originally published by Gordon Williams.
+
+# XXX looks like there's a LOT of overlap between these two classes:
+# someone should sit down and factor out the common code as
+# WindowsCCompiler! --GPW
+
+import os
+
+from packaging.errors import (PackagingExecError, CompileError, LibError,
+ LinkError, UnknownFileError)
+from packaging.compiler.ccompiler import CCompiler
+from packaging.compiler import gen_preprocess_options
+from packaging.file_util import write_file
+from packaging.dep_util import newer
+from packaging import logger
+
+
+class BCPPCompiler(CCompiler):
+ """Concrete class that implements an interface to the Borland C/C++
+ compiler, as defined by the CCompiler abstract class.
+ """
+
+ name = 'bcpp'
+ description = 'Borland C++ Compiler'
+
+ # Just set this so CCompiler's constructor doesn't barf. We currently
+ # don't use the 'set_executables()' bureaucracy provided by CCompiler,
+ # as it really isn't necessary for this sort of single-compiler class.
+ # Would be nice to have a consistent interface with UnixCCompiler,
+ # though, so it's worth thinking about.
+ executables = {}
+
+ # Private class data (need to distinguish C from C++ source for compiler)
+ _c_extensions = ['.c']
+ _cpp_extensions = ['.cc', '.cpp', '.cxx']
+
+ # Needed for the filename generation methods provided by the
+ # base class, CCompiler.
+ src_extensions = _c_extensions + _cpp_extensions
+ obj_extension = '.obj'
+ static_lib_extension = '.lib'
+ shared_lib_extension = '.dll'
+ static_lib_format = shared_lib_format = '%s%s'
+ exe_extension = '.exe'
+
+
+ def __init__(self, dry_run=False, force=False):
+ super(BCPPCompiler, self).__init__(dry_run, force)
+
+ # These executables are assumed to all be in the path.
+ # Borland doesn't seem to use any special registry settings to
+ # indicate their installation locations.
+
+ self.cc = "bcc32.exe"
+ self.linker = "ilink32.exe"
+ self.lib = "tlib.exe"
+
+ self.preprocess_options = None
+ self.compile_options = ['/tWM', '/O2', '/q', '/g0']
+ self.compile_options_debug = ['/tWM', '/Od', '/q', '/g0']
+
+ self.ldflags_shared = ['/Tpd', '/Gn', '/q', '/x']
+ self.ldflags_shared_debug = ['/Tpd', '/Gn', '/q', '/x']
+ self.ldflags_static = []
+ self.ldflags_exe = ['/Gn', '/q', '/x']
+        self.ldflags_exe_debug = ['/Gn', '/q', '/x', '/r']
+
+
+ # -- Worker methods ------------------------------------------------
+
+ def compile(self, sources,
+ output_dir=None, macros=None, include_dirs=None, debug=False,
+ extra_preargs=None, extra_postargs=None, depends=None):
+
+ macros, objects, extra_postargs, pp_opts, build = \
+ self._setup_compile(output_dir, macros, include_dirs, sources,
+ depends, extra_postargs)
+ compile_opts = extra_preargs or []
+ compile_opts.append('-c')
+ if debug:
+ compile_opts.extend(self.compile_options_debug)
+ else:
+ compile_opts.extend(self.compile_options)
+
+ for obj in objects:
+ try:
+ src, ext = build[obj]
+ except KeyError:
+ continue
+ # XXX why do the normpath here?
+ src = os.path.normpath(src)
+ obj = os.path.normpath(obj)
+ # XXX _setup_compile() did a mkpath() too but before the normpath.
+ # Is it possible to skip the normpath?
+ self.mkpath(os.path.dirname(obj))
+
+ if ext == '.res':
+ # This is already a binary file -- skip it.
+ continue # the 'for' loop
+ if ext == '.rc':
+ # This needs to be compiled to a .res file -- do it now.
+ try:
+ self.spawn(["brcc32", "-fo", obj, src])
+ except PackagingExecError as msg:
+ raise CompileError(msg)
+ continue # the 'for' loop
+
+ # The next two are both for the real compiler.
+ if ext in self._c_extensions:
+ input_opt = ""
+ elif ext in self._cpp_extensions:
+ input_opt = "-P"
+ else:
+ # Unknown file type -- no extra options. The compiler
+ # will probably fail, but let it just in case this is a
+ # file the compiler recognizes even if we don't.
+ input_opt = ""
+
+ output_opt = "-o" + obj
+
+ # Compiler command line syntax is: "bcc32 [options] file(s)".
+ # Note that the source file names must appear at the end of
+ # the command line.
+ try:
+ self.spawn([self.cc] + compile_opts + pp_opts +
+ [input_opt, output_opt] +
+ extra_postargs + [src])
+ except PackagingExecError as msg:
+ raise CompileError(msg)
+
+ return objects
+
+
+ def create_static_lib(self, objects, output_libname, output_dir=None,
+ debug=False, target_lang=None):
+ objects, output_dir = self._fix_object_args(objects, output_dir)
+ output_filename = \
+ self.library_filename(output_libname, output_dir=output_dir)
+
+ if self._need_link(objects, output_filename):
+ lib_args = [output_filename, '/u'] + objects
+ if debug:
+ pass # XXX what goes here?
+ try:
+ self.spawn([self.lib] + lib_args)
+ except PackagingExecError as msg:
+ raise LibError(msg)
+ else:
+ logger.debug("skipping %s (up-to-date)", output_filename)
+
+
+ def link(self, target_desc, objects, output_filename, output_dir=None,
+ libraries=None, library_dirs=None, runtime_library_dirs=None,
+ export_symbols=None, debug=False, extra_preargs=None,
+ extra_postargs=None, build_temp=None, target_lang=None):
+
+ # XXX this ignores 'build_temp'! should follow the lead of
+ # msvccompiler.py
+
+ objects, output_dir = self._fix_object_args(objects, output_dir)
+ libraries, library_dirs, runtime_library_dirs = \
+ self._fix_lib_args(libraries, library_dirs, runtime_library_dirs)
+
+ if runtime_library_dirs:
+ logger.warning("don't know what to do with "
+ "'runtime_library_dirs': %r", runtime_library_dirs)
+
+ if output_dir is not None:
+ output_filename = os.path.join(output_dir, output_filename)
+
+ if self._need_link(objects, output_filename):
+
+ # Figure out linker args based on type of target.
+ if target_desc == CCompiler.EXECUTABLE:
+ startup_obj = 'c0w32'
+ if debug:
+ ld_args = self.ldflags_exe_debug[:]
+ else:
+ ld_args = self.ldflags_exe[:]
+ else:
+ startup_obj = 'c0d32'
+ if debug:
+ ld_args = self.ldflags_shared_debug[:]
+ else:
+ ld_args = self.ldflags_shared[:]
+
+
+ # Create a temporary exports file for use by the linker
+ if export_symbols is None:
+ def_file = ''
+ else:
+ head, tail = os.path.split(output_filename)
+ modname, ext = os.path.splitext(tail)
+ temp_dir = os.path.dirname(objects[0]) # preserve tree structure
+ def_file = os.path.join(temp_dir, '%s.def' % modname)
+ contents = ['EXPORTS']
+ for sym in (export_symbols or []):
+ contents.append(' %s=_%s' % (sym, sym))
+ self.execute(write_file, (def_file, contents),
+ "writing %s" % def_file)
+
+ # Borland C++ has problems with '/' in paths
+ objects2 = [os.path.normpath(o) for o in objects]
+ # split objects in .obj and .res files
+ # Borland C++ needs them at different positions in the command line
+ objects = [startup_obj]
+ resources = []
+ for file in objects2:
+ base, ext = os.path.splitext(os.path.normcase(file))
+ if ext == '.res':
+ resources.append(file)
+ else:
+ objects.append(file)
+
+
+            for libdir in library_dirs:
+                ld_args.append("/L%s" % os.path.normpath(libdir))
+ ld_args.append("/L.") # we sometimes use relative paths
+
+ # list of object files
+ ld_args.extend(objects)
+
+ # XXX the command line syntax for Borland C++ is a bit wonky;
+ # certain filenames are jammed together in one big string, but
+ # comma-delimited. This doesn't mesh too well with the
+ # Unix-centric attitude (with a DOS/Windows quoting hack) of
+ # 'spawn()', so constructing the argument list is a bit
+ # awkward. Note that doing the obvious thing and jamming all
+ # the filenames and commas into one argument would be wrong,
+ # because 'spawn()' would quote any filenames with spaces in
+            # them. Arghghh! Apparently it works fine as coded...
+
+ # name of dll/exe file
+            ld_args.extend((',', output_filename))
+ # no map file and start libraries
+ ld_args.append(',,')
+
+ for lib in libraries:
+ # see if we find it and if there is a bcpp specific lib
+ # (xxx_bcpp.lib)
+ libfile = self.find_library_file(library_dirs, lib, debug)
+ if libfile is None:
+ ld_args.append(lib)
+ # probably a BCPP internal library -- don't warn
+ else:
+ # full name which prefers bcpp_xxx.lib over xxx.lib
+ ld_args.append(libfile)
+
+ # some default libraries
+ ld_args.append('import32')
+ ld_args.append('cw32mt')
+
+ # def file for export symbols
+            ld_args.extend((',', def_file))
+ # add resource files
+ ld_args.append(',')
+ ld_args.extend(resources)
+
+
+ if extra_preargs:
+ ld_args[:0] = extra_preargs
+ if extra_postargs:
+ ld_args.extend(extra_postargs)
+
+ self.mkpath(os.path.dirname(output_filename))
+ try:
+ self.spawn([self.linker] + ld_args)
+ except PackagingExecError as msg:
+ raise LinkError(msg)
+
+ else:
+ logger.debug("skipping %s (up-to-date)", output_filename)
+
+ # -- Miscellaneous methods -----------------------------------------
+
+
+ def find_library_file(self, dirs, lib, debug=False):
+ # List of effective library names to try, in order of preference:
+ # xxx_bcpp.lib is better than xxx.lib
+ # and xxx_d.lib is better than xxx.lib if debug is set
+ #
+ # The "_bcpp" suffix is to handle a Python installation for people
+ # with multiple compilers (primarily Packaging hackers, I suspect
+ # ;-). The idea is they'd have one static library for each
+ # compiler they care about, since (almost?) every Windows compiler
+ # seems to have a different format for static libraries.
+ if debug:
+ dlib = (lib + "_d")
+ try_names = (dlib + "_bcpp", lib + "_bcpp", dlib, lib)
+ else:
+ try_names = (lib + "_bcpp", lib)
+
+ for dir in dirs:
+ for name in try_names:
+ libfile = os.path.join(dir, self.library_filename(name))
+ if os.path.exists(libfile):
+ return libfile
+ else:
+ # Oops, didn't find it in *any* of 'dirs'
+ return None
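+
+    # Illustrative lookup order (a sketch; assumes this class's
+    # library_filename() appends '.lib'):
+    #
+    #   cc.find_library_file(['libs'], 'foo', debug=True)
+    #   # tries libs/foo_d_bcpp.lib, libs/foo_bcpp.lib,
+    #   # libs/foo_d.lib, libs/foo.lib and returns the first that exists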
+
+    # override the one from CCompiler to support .rc and .res files
+ def object_filenames(self, source_filenames, strip_dir=False,
+ output_dir=''):
+ if output_dir is None:
+ output_dir = ''
+ obj_names = []
+ for src_name in source_filenames:
+ # use normcase to make sure '.rc' is really '.rc' and not '.RC'
+ base, ext = os.path.splitext(os.path.normcase(src_name))
+ if ext not in (self.src_extensions + ['.rc','.res']):
+ raise UnknownFileError("unknown file type '%s' (from '%s')" % \
+ (ext, src_name))
+ if strip_dir:
+ base = os.path.basename(base)
+ if ext == '.res':
+ # these can go unchanged
+ obj_names.append(os.path.join(output_dir, base + ext))
+ elif ext == '.rc':
+ # these need to be compiled to .res-files
+ obj_names.append(os.path.join(output_dir, base + '.res'))
+ else:
+ obj_names.append(os.path.join(output_dir,
+ base + self.obj_extension))
+ return obj_names
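+
+    # For example (sketch, assuming obj_extension == '.obj'):
+    #   object_filenames(['foo.c', 'ui.rc', 'icons.res'])
+    #   # -> ['foo.obj', 'ui.res', 'icons.res']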
+
+
+ def preprocess(self, source, output_file=None, macros=None,
+ include_dirs=None, extra_preargs=None,
+ extra_postargs=None):
+ _, macros, include_dirs = \
+ self._fix_compile_args(None, macros, include_dirs)
+ pp_opts = gen_preprocess_options(macros, include_dirs)
+ pp_args = ['cpp32.exe'] + pp_opts
+ if output_file is not None:
+ pp_args.append('-o' + output_file)
+ if extra_preargs:
+ pp_args[:0] = extra_preargs
+ if extra_postargs:
+ pp_args.extend(extra_postargs)
+ pp_args.append(source)
+
+ # We need to preprocess: either we're being forced to, or the
+ # source file is newer than the target (or the target doesn't
+ # exist).
+ if self.force or output_file is None or newer(source, output_file):
+ if output_file:
+ self.mkpath(os.path.dirname(output_file))
+ try:
+ self.spawn(pp_args)
+ except PackagingExecError as msg:
+ raise CompileError(msg)
diff --git a/Lib/packaging/compiler/ccompiler.py b/Lib/packaging/compiler/ccompiler.py
new file mode 100644
index 0000000..98c4b68
--- /dev/null
+++ b/Lib/packaging/compiler/ccompiler.py
@@ -0,0 +1,863 @@
+"""Abstract base class for compilers.
+
+This modules contains CCompiler, an abstract base class that defines the
+interface for the compiler abstraction model used by packaging.
+"""
+
+import os
+from shutil import move
+from packaging import logger
+from packaging.util import split_quoted, execute, newer_group, spawn
+from packaging.errors import (CompileError, LinkError, UnknownFileError)
+from packaging.compiler import gen_preprocess_options
+
+
+class CCompiler:
+ """Abstract base class to define the interface that must be implemented
+ by real compiler classes. Also has some utility methods used by
+ several compiler classes.
+
+ The basic idea behind a compiler abstraction class is that each
+ instance can be used for all the compile/link steps in building a
+ single project. Thus, attributes common to all of those compile and
+ link steps -- include directories, macros to define, libraries to link
+ against, etc. -- are attributes of the compiler instance. To allow for
+ variability in how individual files are treated, most of those
+ attributes may be varied on a per-compilation or per-link basis.
+ """
+
+ # 'name' is a class attribute that identifies this class. It
+ # keeps code that wants to know what kind of compiler it's dealing with
+ # from having to import all possible compiler classes just to do an
+ # 'isinstance'.
+ name = None
+ description = None
+
+ # XXX things not handled by this compiler abstraction model:
+ # * client can't provide additional options for a compiler,
+ # e.g. warning, optimization, debugging flags. Perhaps this
+ # should be the domain of concrete compiler abstraction classes
+ # (UnixCCompiler, MSVCCompiler, etc.) -- or perhaps the base
+ # class should have methods for the common ones.
+    # * can't completely override the include or library search
+ # path, ie. no "cc -I -Idir1 -Idir2" or "cc -L -Ldir1 -Ldir2".
+ # I'm not sure how widely supported this is even by Unix
+ # compilers, much less on other platforms. And I'm even less
+ # sure how useful it is; maybe for cross-compiling, but
+ # support for that is a ways off. (And anyways, cross
+ # compilers probably have a dedicated binary with the
+ # right paths compiled in. I hope.)
+ # * can't do really freaky things with the library list/library
+ # dirs, e.g. "-Ldir1 -lfoo -Ldir2 -lfoo" to link against
+ # different versions of libfoo.a in different locations. I
+ # think this is useless without the ability to null out the
+ # library search path anyways.
+
+
+ # Subclasses that rely on the standard filename generation methods
+ # implemented below should override these; see the comment near
+    # those methods ('object_filenames()' et al.) for details:
+ src_extensions = None # list of strings
+ obj_extension = None # string
+ static_lib_extension = None
+ shared_lib_extension = None # string
+ static_lib_format = None # format string
+ shared_lib_format = None # prob. same as static_lib_format
+ exe_extension = None # string
+
+ # Default language settings. language_map is used to detect a source
+ # file or Extension target language, checking source filenames.
+ # language_order is used to detect the language precedence, when deciding
+ # what language to use when mixing source types. For example, if some
+ # extension has two files with ".c" extension, and one with ".cpp", it
+ # is still linked as c++.
+ language_map = {".c": "c",
+ ".cc": "c++",
+ ".cpp": "c++",
+ ".cxx": "c++",
+ ".m": "objc",
+ }
+ language_order = ["c++", "objc", "c"]
+
+ def __init__(self, dry_run=False, force=False):
+ self.dry_run = dry_run
+ self.force = force
+
+ # 'output_dir': a common output directory for object, library,
+ # shared object, and shared library files
+ self.output_dir = None
+
+ # 'macros': a list of macro definitions (or undefinitions). A
+ # macro definition is a 2-tuple (name, value), where the value is
+ # either a string or None (no explicit value). A macro
+ # undefinition is a 1-tuple (name,).
+ self.macros = []
+
+ # 'include_dirs': a list of directories to search for include files
+ self.include_dirs = []
+
+ # 'libraries': a list of libraries to include in any link
+ # (library names, not filenames: eg. "foo" not "libfoo.a")
+ self.libraries = []
+
+ # 'library_dirs': a list of directories to search for libraries
+ self.library_dirs = []
+
+ # 'runtime_library_dirs': a list of directories to search for
+ # shared libraries/objects at runtime
+ self.runtime_library_dirs = []
+
+ # 'objects': a list of object files (or similar, such as explicitly
+ # named library files) to include on any link
+ self.objects = []
+
+ for key, value in self.executables.items():
+ self.set_executable(key, value)
+
+ def set_executables(self, **args):
+ """Define the executables (and options for them) that will be run
+ to perform the various stages of compilation. The exact set of
+ executables that may be specified here depends on the compiler
+ class (via the 'executables' class attribute), but most will have:
+ compiler the C/C++ compiler
+ linker_so linker used to create shared objects and libraries
+ linker_exe linker used to create binary executables
+ archiver static library creator
+
+ On platforms with a command line (Unix, DOS/Windows), each of these
+ is a string that will be split into executable name and (optional)
+ list of arguments. (Splitting the string is done similarly to how
+ Unix shells operate: words are delimited by spaces, but quotes and
+ backslashes can override this. See
+        'packaging.util.split_quoted()'.)
+ """
+
+ # Note that some CCompiler implementation classes will define class
+ # attributes 'cpp', 'cc', etc. with hard-coded executable names;
+ # this is appropriate when a compiler class is for exactly one
+ # compiler/OS combination (eg. MSVCCompiler). Other compiler
+ # classes (UnixCCompiler, in particular) are driven by information
+ # discovered at run-time, since there are many different ways to do
+ # basically the same things with Unix C compilers.
+
+ for key, value in args.items():
+ if key not in self.executables:
+ raise ValueError("unknown executable '%s' for class %s" % \
+ (key, self.__class__.__name__))
+ self.set_executable(key, value)
+
+ def set_executable(self, key, value):
+ if isinstance(value, str):
+ setattr(self, key, split_quoted(value))
+ else:
+ setattr(self, key, value)
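+
+    # A minimal usage sketch (assumes a concrete subclass whose
+    # 'executables' dict has a 'compiler' entry): string values are
+    # split shell-style, so
+    #
+    #   cc.set_executables(compiler='gcc -O2 -Wall')
+    #   # is equivalent to
+    #   cc.set_executable('compiler', ['gcc', '-O2', '-Wall'])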
+
+ def _find_macro(self, name):
+ i = 0
+ for defn in self.macros:
+ if defn[0] == name:
+ return i
+ i = i + 1
+ return None
+
+ def _check_macro_definitions(self, definitions):
+ """Ensures that every element of 'definitions' is a valid macro
+ definition, ie. either (name,value) 2-tuple or a (name,) tuple. Do
+ nothing if all definitions are OK, raise TypeError otherwise.
+ """
+ for defn in definitions:
+ if not (isinstance(defn, tuple) and
+ (len(defn) == 1 or
+ (len(defn) == 2 and
+ (isinstance(defn[1], str) or defn[1] is None))) and
+ isinstance(defn[0], str)):
+ raise TypeError(("invalid macro definition '%s': " % defn) + \
+ "must be tuple (string,), (string, string), or " + \
+ "(string, None)")
+
+
+ # -- Bookkeeping methods -------------------------------------------
+
+ def define_macro(self, name, value=None):
+ """Define a preprocessor macro for all compilations driven by this
+ compiler object. The optional parameter 'value' should be a
+ string; if it is not supplied, then the macro will be defined
+ without an explicit value and the exact outcome depends on the
+ compiler used (XXX true? does ANSI say anything about this?)
+ """
+ # Delete from the list of macro definitions/undefinitions if
+ # already there (so that this one will take precedence).
+ i = self._find_macro(name)
+ if i is not None:
+ del self.macros[i]
+
+ defn = (name, value)
+ self.macros.append(defn)
+
+ def undefine_macro(self, name):
+ """Undefine a preprocessor macro for all compilations driven by
+ this compiler object. If the same macro is defined by
+ 'define_macro()' and undefined by 'undefine_macro()' the last call
+ takes precedence (including multiple redefinitions or
+ undefinitions). If the macro is redefined/undefined on a
+ per-compilation basis (ie. in the call to 'compile()'), then that
+ takes precedence.
+ """
+ # Delete from the list of macro definitions/undefinitions if
+ # already there (so that this one will take precedence).
+ i = self._find_macro(name)
+ if i is not None:
+ del self.macros[i]
+
+ undefn = (name,)
+ self.macros.append(undefn)
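+
+    # Sketch of how the 'macros' list evolves (assuming a concrete
+    # compiler instance 'cc'):
+    #
+    #   cc.define_macro('PYVER', '3')   # appends ('PYVER', '3')
+    #   cc.define_macro('NDEBUG')       # appends ('NDEBUG', None)
+    #   cc.undefine_macro('TRACE')      # appends ('TRACE',)
+    #   # cc.macros == [('PYVER', '3'), ('NDEBUG', None), ('TRACE',)]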
+
+ def add_include_dir(self, dir):
+ """Add 'dir' to the list of directories that will be searched for
+ header files. The compiler is instructed to search directories in
+ the order in which they are supplied by successive calls to
+ 'add_include_dir()'.
+ """
+ self.include_dirs.append(dir)
+
+ def set_include_dirs(self, dirs):
+ """Set the list of directories that will be searched to 'dirs' (a
+ list of strings). Overrides any preceding calls to
+        'add_include_dir()'; subsequent calls to 'add_include_dir()' add
+ to the list passed to 'set_include_dirs()'. This does not affect
+ any list of standard include directories that the compiler may
+ search by default.
+ """
+ self.include_dirs = dirs[:]
+
+ def add_library(self, libname):
+ """Add 'libname' to the list of libraries that will be included in
+ all links driven by this compiler object. Note that 'libname'
+ should *not* be the name of a file containing a library, but the
+ name of the library itself: the actual filename will be inferred by
+ the linker, the compiler, or the compiler class (depending on the
+ platform).
+
+ The linker will be instructed to link against libraries in the
+ order they were supplied to 'add_library()' and/or
+ 'set_libraries()'. It is perfectly valid to duplicate library
+ names; the linker will be instructed to link against libraries as
+ many times as they are mentioned.
+ """
+ self.libraries.append(libname)
+
+ def set_libraries(self, libnames):
+ """Set the list of libraries to be included in all links driven by
+ this compiler object to 'libnames' (a list of strings). This does
+ not affect any standard system libraries that the linker may
+ include by default.
+ """
+ self.libraries = libnames[:]
+
+
+ def add_library_dir(self, dir):
+ """Add 'dir' to the list of directories that will be searched for
+ libraries specified to 'add_library()' and 'set_libraries()'. The
+ linker will be instructed to search for libraries in the order they
+ are supplied to 'add_library_dir()' and/or 'set_library_dirs()'.
+ """
+ self.library_dirs.append(dir)
+
+ def set_library_dirs(self, dirs):
+ """Set the list of library search directories to 'dirs' (a list of
+ strings). This does not affect any standard library search path
+ that the linker may search by default.
+ """
+ self.library_dirs = dirs[:]
+
+ def add_runtime_library_dir(self, dir):
+ """Add 'dir' to the list of directories that will be searched for
+ shared libraries at runtime.
+ """
+ self.runtime_library_dirs.append(dir)
+
+ def set_runtime_library_dirs(self, dirs):
+ """Set the list of directories to search for shared libraries at
+ runtime to 'dirs' (a list of strings). This does not affect any
+ standard search path that the runtime linker may search by
+ default.
+ """
+ self.runtime_library_dirs = dirs[:]
+
+ def add_link_object(self, object):
+ """Add 'object' to the list of object files (or analogues, such as
+ explicitly named library files or the output of "resource
+ compilers") to be included in every link driven by this compiler
+ object.
+ """
+ self.objects.append(object)
+
+ def set_link_objects(self, objects):
+ """Set the list of object files (or analogues) to be included in
+ every link to 'objects'. This does not affect any standard object
+ files that the linker may include by default (such as system
+ libraries).
+ """
+ self.objects = objects[:]
+
+
+ # -- Private utility methods --------------------------------------
+ # (here for the convenience of subclasses)
+
+ # Helper method to prep compiler in subclass compile() methods
+ def _setup_compile(self, outdir, macros, incdirs, sources, depends,
+ extra):
+ """Process arguments and decide which source files to compile."""
+ if outdir is None:
+ outdir = self.output_dir
+ elif not isinstance(outdir, str):
+ raise TypeError("'output_dir' must be a string or None")
+
+ if macros is None:
+ macros = self.macros
+ elif isinstance(macros, list):
+ macros = macros + (self.macros or [])
+ else:
+ raise TypeError("'macros' (if supplied) must be a list of tuples")
+
+ if incdirs is None:
+ incdirs = self.include_dirs
+ elif isinstance(incdirs, (list, tuple)):
+ incdirs = list(incdirs) + (self.include_dirs or [])
+ else:
+ raise TypeError(
+ "'include_dirs' (if supplied) must be a list of strings")
+
+ if extra is None:
+ extra = []
+
+ # Get the list of expected output (object) files
+ objects = self.object_filenames(sources,
+ strip_dir=False,
+ output_dir=outdir)
+ assert len(objects) == len(sources)
+
+ pp_opts = gen_preprocess_options(macros, incdirs)
+
+ build = {}
+ for i in range(len(sources)):
+ src = sources[i]
+ obj = objects[i]
+ ext = os.path.splitext(src)[1]
+ self.mkpath(os.path.dirname(obj))
+ build[obj] = (src, ext)
+
+ return macros, objects, extra, pp_opts, build
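+
+    # Shape of the return value (illustrative, assuming a Unix-style
+    # subclass with obj_extension '.o'):
+    #
+    #   cc._setup_compile('build', None, None, ['src/a.c'], None, None)
+    #   # -> (macros, ['build/src/a.o'], [], pp_opts,
+    #   #     {'build/src/a.o': ('src/a.c', '.c')})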
+
+ def _get_cc_args(self, pp_opts, debug, before):
+ # works for unixccompiler and cygwinccompiler
+ cc_args = pp_opts + ['-c']
+ if debug:
+ cc_args[:0] = ['-g']
+ if before:
+ cc_args[:0] = before
+ return cc_args
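+
+    # For example:
+    #   cc._get_cc_args(['-DNDEBUG', '-Iinc'], debug=True, before=['-W'])
+    #   # -> ['-W', '-g', '-DNDEBUG', '-Iinc', '-c']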
+
+ def _fix_compile_args(self, output_dir, macros, include_dirs):
+ """Typecheck and fix-up some of the arguments to the 'compile()'
+ method, and return fixed-up values. Specifically: if 'output_dir'
+ is None, replaces it with 'self.output_dir'; ensures that 'macros'
+ is a list, and augments it with 'self.macros'; ensures that
+ 'include_dirs' is a list, and augments it with 'self.include_dirs'.
+ Guarantees that the returned values are of the correct type,
+ i.e. for 'output_dir' either string or None, and for 'macros' and
+ 'include_dirs' either list or None.
+ """
+ if output_dir is None:
+ output_dir = self.output_dir
+ elif not isinstance(output_dir, str):
+ raise TypeError("'output_dir' must be a string or None")
+
+ if macros is None:
+ macros = self.macros
+ elif isinstance(macros, list):
+ macros = macros + (self.macros or [])
+ else:
+ raise TypeError("'macros' (if supplied) must be a list of tuples")
+
+ if include_dirs is None:
+ include_dirs = self.include_dirs
+ elif isinstance(include_dirs, (list, tuple)):
+ include_dirs = list(include_dirs) + (self.include_dirs or [])
+ else:
+ raise TypeError(
+ "'include_dirs' (if supplied) must be a list of strings")
+
+ return output_dir, macros, include_dirs
+
+ def _fix_object_args(self, objects, output_dir):
+ """Typecheck and fix up some arguments supplied to various methods.
+ Specifically: ensure that 'objects' is a list; if output_dir is
+ None, replace with self.output_dir. Return fixed versions of
+ 'objects' and 'output_dir'.
+ """
+ if not isinstance(objects, (list, tuple)):
+ raise TypeError("'objects' must be a list or tuple of strings")
+ objects = list(objects)
+
+ if output_dir is None:
+ output_dir = self.output_dir
+ elif not isinstance(output_dir, str):
+ raise TypeError("'output_dir' must be a string or None")
+
+ return objects, output_dir
+
+ def _fix_lib_args(self, libraries, library_dirs, runtime_library_dirs):
+ """Typecheck and fix up some of the arguments supplied to the
+ 'link_*' methods. Specifically: ensure that all arguments are
+ lists, and augment them with their permanent versions
+ (eg. 'self.libraries' augments 'libraries'). Return a tuple with
+ fixed versions of all arguments.
+ """
+ if libraries is None:
+ libraries = self.libraries
+ elif isinstance(libraries, (list, tuple)):
+ libraries = list(libraries) + (self.libraries or [])
+ else:
+ raise TypeError(
+ "'libraries' (if supplied) must be a list of strings")
+
+ if library_dirs is None:
+ library_dirs = self.library_dirs
+ elif isinstance(library_dirs, (list, tuple)):
+ library_dirs = list(library_dirs) + (self.library_dirs or [])
+ else:
+ raise TypeError(
+ "'library_dirs' (if supplied) must be a list of strings")
+
+ if runtime_library_dirs is None:
+ runtime_library_dirs = self.runtime_library_dirs
+ elif isinstance(runtime_library_dirs, (list, tuple)):
+ runtime_library_dirs = (list(runtime_library_dirs) +
+ (self.runtime_library_dirs or []))
+ else:
+ raise TypeError("'runtime_library_dirs' (if supplied) "
+ "must be a list of strings")
+
+ return libraries, library_dirs, runtime_library_dirs
+
+ def _need_link(self, objects, output_file):
+ """Return true if we need to relink the files listed in 'objects'
+ to recreate 'output_file'.
+ """
+ if self.force:
+ return True
+ else:
+ if self.dry_run:
+ newer = newer_group(objects, output_file, missing='newer')
+ else:
+ newer = newer_group(objects, output_file)
+ return newer
+
+ def detect_language(self, sources):
+ """Detect the language of a given file, or list of files. Uses
+        language_map and language_order to do the job.
+ """
+ if not isinstance(sources, list):
+ sources = [sources]
+ lang = None
+ index = len(self.language_order)
+ for source in sources:
+ base, ext = os.path.splitext(source)
+ extlang = self.language_map.get(ext)
+ try:
+ extindex = self.language_order.index(extlang)
+ if extindex < index:
+ lang = extlang
+ index = extindex
+ except ValueError:
+ pass
+ return lang
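+
+    # For example, with the default language_map/language_order:
+    #   cc.detect_language(['a.c', 'b.cpp', 'c.c'])  # -> 'c++'
+    #   cc.detect_language('main.m')                 # -> 'objc'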
+
+ # -- Worker methods ------------------------------------------------
+ # (must be implemented by subclasses)
+
+ def preprocess(self, source, output_file=None, macros=None,
+ include_dirs=None, extra_preargs=None, extra_postargs=None):
+ """Preprocess a single C/C++ source file, named in 'source'.
+ Output will be written to file named 'output_file', or stdout if
+ 'output_file' not supplied. 'macros' is a list of macro
+ definitions as for 'compile()', which will augment the macros set
+ with 'define_macro()' and 'undefine_macro()'. 'include_dirs' is a
+ list of directory names that will be added to the default list.
+
+ Raises PreprocessError on failure.
+ """
+ pass
+
+ def compile(self, sources, output_dir=None, macros=None,
+ include_dirs=None, debug=False, extra_preargs=None,
+ extra_postargs=None, depends=None):
+ """Compile one or more source files.
+
+ 'sources' must be a list of filenames, most likely C/C++
+ files, but in reality anything that can be handled by a
+ particular compiler and compiler class (eg. MSVCCompiler can
+ handle resource files in 'sources'). Return a list of object
+ filenames, one per source filename in 'sources'. Depending on
+ the implementation, not all source files will necessarily be
+ compiled, but all corresponding object filenames will be
+ returned.
+
+ If 'output_dir' is given, object files will be put under it, while
+ retaining their original path component. That is, "foo/bar.c"
+ normally compiles to "foo/bar.o" (for a Unix implementation); if
+ 'output_dir' is "build", then it would compile to
+ "build/foo/bar.o".
+
+ 'macros', if given, must be a list of macro definitions. A macro
+ definition is either a (name, value) 2-tuple or a (name,) 1-tuple.
+ The former defines a macro; if the value is None, the macro is
+ defined without an explicit value. The 1-tuple case undefines a
+        macro. Later definitions/redefinitions/undefinitions take
+ precedence.
+
+ 'include_dirs', if given, must be a list of strings, the
+ directories to add to the default include file search path for this
+ compilation only.
+
+ 'debug' is a boolean; if true, the compiler will be instructed to
+ output debug symbols in (or alongside) the object file(s).
+
+        'extra_preargs' and 'extra_postargs' are implementation-dependent.
+        On platforms that have the notion of a command line (e.g. Unix,
+        DOS/Windows), they are most likely lists of strings: extra
+        command-line arguments to prepend/append to the compiler command
+ line. On other platforms, consult the implementation class
+ documentation. In any event, they are intended as an escape hatch
+ for those occasions when the abstract compiler framework doesn't
+ cut the mustard.
+
+ 'depends', if given, is a list of filenames that all targets
+ depend on. If a source file is older than any file in
+ depends, then the source file will be recompiled. This
+ supports dependency tracking, but only at a coarse
+ granularity.
+
+ Raises CompileError on failure.
+ """
+ # A concrete compiler class can either override this method
+ # entirely or implement _compile().
+
+ macros, objects, extra_postargs, pp_opts, build = \
+ self._setup_compile(output_dir, macros, include_dirs, sources,
+ depends, extra_postargs)
+ cc_args = self._get_cc_args(pp_opts, debug, extra_preargs)
+
+ for obj in objects:
+ try:
+ src, ext = build[obj]
+ except KeyError:
+ continue
+ self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts)
+
+ # Return *all* object filenames, not just the ones we just built.
+ return objects
+
+ def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
+ """Compile 'src' to product 'obj'."""
+
+ # A concrete compiler class that does not override compile()
+ # should implement _compile().
+ pass
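+
+    # A minimal sketch of what a concrete subclass might do here,
+    # modelled on the Unix-style compilers in this package (assumes a
+    # 'compiler_so' executable list and an import of PackagingExecError):
+    #
+    #   def _compile(self, obj, src, ext, cc_args, extra_postargs,
+    #                pp_opts):
+    #       try:
+    #           self.spawn(self.compiler_so + cc_args +
+    #                      [src, '-o', obj] + extra_postargs)
+    #       except PackagingExecError as msg:
+    #           raise CompileError(msg)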
+
+ def create_static_lib(self, objects, output_libname, output_dir=None,
+ debug=False, target_lang=None):
+ """Link a bunch of stuff together to create a static library file.
+ The "bunch of stuff" consists of the list of object files supplied
+ as 'objects', the extra object files supplied to
+ 'add_link_object()' and/or 'set_link_objects()', the libraries
+ supplied to 'add_library()' and/or 'set_libraries()', and the
+ libraries supplied as 'libraries' (if any).
+
+ 'output_libname' should be a library name, not a filename; the
+ filename will be inferred from the library name. 'output_dir' is
+ the directory where the library file will be put.
+
+ 'debug' is a boolean; if true, debugging information will be
+ included in the library (note that on most platforms, it is the
+ compile step where this matters: the 'debug' flag is included here
+ just for consistency).
+
+ 'target_lang' is the target language for which the given objects
+ are being compiled. This allows specific linkage time treatment of
+ certain languages.
+
+ Raises LibError on failure.
+ """
+ pass
+
+ # values for target_desc parameter in link()
+ SHARED_OBJECT = "shared_object"
+ SHARED_LIBRARY = "shared_library"
+ EXECUTABLE = "executable"
+
+ def link(self, target_desc, objects, output_filename, output_dir=None,
+ libraries=None, library_dirs=None, runtime_library_dirs=None,
+ export_symbols=None, debug=False, extra_preargs=None,
+ extra_postargs=None, build_temp=None, target_lang=None):
+ """Link a bunch of stuff together to create an executable or
+ shared library file.
+
+ The "bunch of stuff" consists of the list of object files supplied
+ as 'objects'. 'output_filename' should be a filename. If
+ 'output_dir' is supplied, 'output_filename' is relative to it
+ (i.e. 'output_filename' can provide directory components if
+ needed).
+
+ 'libraries' is a list of libraries to link against. These are
+ library names, not filenames, since they're translated into
+ filenames in a platform-specific way (eg. "foo" becomes "libfoo.a"
+ on Unix and "foo.lib" on DOS/Windows). However, they can include a
+ directory component, which means the linker will look in that
+ specific directory rather than searching all the normal locations.
+
+ 'library_dirs', if supplied, should be a list of directories to
+ search for libraries that were specified as bare library names
+ (ie. no directory component). These are on top of the system
+ default and those supplied to 'add_library_dir()' and/or
+ 'set_library_dirs()'. 'runtime_library_dirs' is a list of
+ directories that will be embedded into the shared library and used
+ to search for other shared libraries that *it* depends on at
+ run-time. (This may only be relevant on Unix.)
+
+ 'export_symbols' is a list of symbols that the shared library will
+ export. (This appears to be relevant only on Windows.)
+
+ 'debug' is as for 'compile()' and 'create_static_lib()', with the
+ slight distinction that it actually matters on most platforms (as
+ opposed to 'create_static_lib()', which includes a 'debug' flag
+ mostly for form's sake).
+
+ 'extra_preargs' and 'extra_postargs' are as for 'compile()' (except
+ of course that they supply command-line arguments for the
+ particular linker being used).
+
+ 'target_lang' is the target language for which the given objects
+ are being compiled. This allows specific linkage time treatment of
+ certain languages.
+
+ Raises LinkError on failure.
+ """
+ raise NotImplementedError
+
+
+ # Old 'link_*()' methods, rewritten to use the new 'link()' method.
+
+ def link_shared_lib(self, objects, output_libname, output_dir=None,
+ libraries=None, library_dirs=None,
+ runtime_library_dirs=None, export_symbols=None,
+ debug=False, extra_preargs=None, extra_postargs=None,
+ build_temp=None, target_lang=None):
+ self.link(CCompiler.SHARED_LIBRARY, objects,
+ self.library_filename(output_libname, lib_type='shared'),
+ output_dir,
+ libraries, library_dirs, runtime_library_dirs,
+ export_symbols, debug,
+ extra_preargs, extra_postargs, build_temp, target_lang)
+
+ def link_shared_object(self, objects, output_filename, output_dir=None,
+ libraries=None, library_dirs=None,
+ runtime_library_dirs=None, export_symbols=None,
+ debug=False, extra_preargs=None, extra_postargs=None,
+ build_temp=None, target_lang=None):
+ self.link(CCompiler.SHARED_OBJECT, objects,
+ output_filename, output_dir,
+ libraries, library_dirs, runtime_library_dirs,
+ export_symbols, debug,
+ extra_preargs, extra_postargs, build_temp, target_lang)
+
+ def link_executable(self, objects, output_progname, output_dir=None,
+ libraries=None, library_dirs=None,
+ runtime_library_dirs=None, debug=False,
+ extra_preargs=None, extra_postargs=None,
+ target_lang=None):
+ self.link(CCompiler.EXECUTABLE, objects,
+ self.executable_filename(output_progname), output_dir,
+ libraries, library_dirs, runtime_library_dirs, None,
+ debug, extra_preargs, extra_postargs, None, target_lang)
+
+
+ # -- Miscellaneous methods -----------------------------------------
+    # These are all used by the 'gen_lib_options()' function; there is
+    # no appropriate default implementation, so subclasses should
+    # implement all of these.
+
+ def library_dir_option(self, dir):
+ """Return the compiler option to add 'dir' to the list of
+ directories searched for libraries.
+ """
+ raise NotImplementedError
+
+ def runtime_library_dir_option(self, dir):
+ """Return the compiler option to add 'dir' to the list of
+ directories searched for runtime libraries.
+ """
+ raise NotImplementedError
+
+ def library_option(self, lib):
+ """Return the compiler option to add 'dir' to the list of libraries
+ linked into the shared library or executable.
+ """
+ raise NotImplementedError
+
+ def has_function(self, funcname, includes=None, include_dirs=None,
+ libraries=None, library_dirs=None):
+ """Return a boolean indicating whether funcname is supported on
+ the current platform. The optional arguments can be used to
+ augment the compilation environment.
+ """
+
+ # this can't be included at module scope because it tries to
+ # import math which might not be available at that point - maybe
+ # the necessary logic should just be inlined?
+ import tempfile
+ if includes is None:
+ includes = []
+ if include_dirs is None:
+ include_dirs = []
+ if libraries is None:
+ libraries = []
+ if library_dirs is None:
+ library_dirs = []
+ fd, fname = tempfile.mkstemp(".c", funcname, text=True)
+ with os.fdopen(fd, "w") as f:
+ for incl in includes:
+ f.write("""#include "%s"\n""" % incl)
+ f.write("""\
+int main (int argc, char **argv) {
+ %s();
+}
+""" % funcname)
+ try:
+ objects = self.compile([fname], include_dirs=include_dirs)
+ except CompileError:
+ return False
+
+ try:
+ self.link_executable(objects, "a.out",
+ libraries=libraries,
+ library_dirs=library_dirs)
+ except (LinkError, TypeError):
+ return False
+ return True
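+
+    # Usage sketch (assumes a configured concrete compiler 'cc'; the
+    # function and header are just examples):
+    #
+    #   if cc.has_function('mkfifo', includes=['sys/stat.h']):
+    #       cc.define_macro('HAVE_MKFIFO', '1')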
+
+ def find_library_file(self, dirs, lib, debug=False):
+ """Search the specified list of directories for a static or shared
+ library file 'lib' and return the full path to that file. If
+ 'debug' is true, look for a debugging version (if that makes sense on
+ the current platform). Return None if 'lib' wasn't found in any of
+ the specified directories.
+ """
+ raise NotImplementedError
+
+ # -- Filename generation methods -----------------------------------
+
+    # The default implementations of the filename generating methods are
+ # prejudiced towards the Unix/DOS/Windows view of the world:
+ # * object files are named by replacing the source file extension
+ # (eg. .c/.cpp -> .o/.obj)
+ # * library files (shared or static) are named by plugging the
+ # library name and extension into a format string, eg.
+ # "lib%s.%s" % (lib_name, ".a") for Unix static libraries
+ # * executables are named by appending an extension (possibly
+ # empty) to the program name: eg. progname + ".exe" for
+ # Windows
+ #
+ # To reduce redundant code, these methods expect to find
+ # several attributes in the current object (presumably defined
+ # as class attributes):
+ # * src_extensions -
+ # list of C/C++ source file extensions, eg. ['.c', '.cpp']
+ # * obj_extension -
+ # object file extension, eg. '.o' or '.obj'
+ # * static_lib_extension -
+ # extension for static library files, eg. '.a' or '.lib'
+ # * shared_lib_extension -
+ # extension for shared library/object files, eg. '.so', '.dll'
+ # * static_lib_format -
+ # format string for generating static library filenames,
+ # eg. 'lib%s.%s' or '%s.%s'
+ # * shared_lib_format
+ # format string for generating shared library filenames
+ # (probably same as static_lib_format, since the extension
+ # is one of the intended parameters to the format string)
+ # * exe_extension -
+ # extension for executable files, eg. '' or '.exe'
+
+ def object_filenames(self, source_filenames, strip_dir=False, output_dir=''):
+ if output_dir is None:
+ output_dir = ''
+ obj_names = []
+ for src_name in source_filenames:
+ base, ext = os.path.splitext(src_name)
+ base = os.path.splitdrive(base)[1] # Chop off the drive
+ base = base[os.path.isabs(base):] # If abs, chop off leading /
+ if ext not in self.src_extensions:
+ raise UnknownFileError("unknown file type '%s' (from '%s')" %
+ (ext, src_name))
+ if strip_dir:
+ base = os.path.basename(base)
+ obj_names.append(os.path.join(output_dir,
+ base + self.obj_extension))
+ return obj_names
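+
+    # For example, on a Unix-style subclass ('.o' objects):
+    #   cc.object_filenames(['foo/bar.c'], output_dir='build')
+    #   # -> ['build/foo/bar.o']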
+
+ def shared_object_filename(self, basename, strip_dir=False, output_dir=''):
+ assert output_dir is not None
+ if strip_dir:
+ basename = os.path.basename(basename)
+ return os.path.join(output_dir, basename + self.shared_lib_extension)
+
+ def executable_filename(self, basename, strip_dir=False, output_dir=''):
+ assert output_dir is not None
+ if strip_dir:
+ basename = os.path.basename(basename)
+ return os.path.join(output_dir, basename + (self.exe_extension or ''))
+
+ def library_filename(self, libname, lib_type='static', # or 'shared'
+ strip_dir=False, output_dir=''):
+ assert output_dir is not None
+ if lib_type not in ("static", "shared", "dylib"):
+ raise ValueError(
+ "'lib_type' must be 'static', 'shared' or 'dylib'")
+ fmt = getattr(self, lib_type + "_lib_format")
+ ext = getattr(self, lib_type + "_lib_extension")
+
+ dir, base = os.path.split(libname)
+ filename = fmt % (base, ext)
+ if strip_dir:
+ dir = ''
+
+ return os.path.join(output_dir, dir, filename)
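+
+    # For example, with Unix-style attributes (static_lib_format
+    # 'lib%s%s', static_lib_extension '.a'):
+    #   cc.library_filename('foo')                # -> 'libfoo.a'
+    #   cc.library_filename('sub/foo', output_dir='build')
+    #   # -> 'build/sub/libfoo.a'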
+
+
+ # -- Utility methods -----------------------------------------------
+
+ def execute(self, func, args, msg=None, level=1):
+ execute(func, args, msg, self.dry_run)
+
+ def spawn(self, cmd):
+ spawn(cmd, dry_run=self.dry_run)
+
+ def move_file(self, src, dst):
+ logger.info("moving %r to %r", src, dst)
+ if self.dry_run:
+ return
+ return move(src, dst)
+
+ def mkpath(self, name, mode=0o777):
+ name = os.path.normpath(name)
+ if os.path.isdir(name) or name == '':
+ return
+ if self.dry_run:
+ head = ''
+ for part in name.split(os.sep):
+ logger.info("created directory %s%s", head, part)
+ head += part + os.sep
+ return
+ os.makedirs(name, mode)
diff --git a/Lib/packaging/compiler/cygwinccompiler.py b/Lib/packaging/compiler/cygwinccompiler.py
new file mode 100644
index 0000000..9552667
--- /dev/null
+++ b/Lib/packaging/compiler/cygwinccompiler.py
@@ -0,0 +1,355 @@
+"""CCompiler implementations for Cygwin and mingw32 versions of GCC.
+
+This module contains the CygwinCCompiler class, a subclass of
+UnixCCompiler that handles the Cygwin port of the GNU C compiler to
+Windows, and the Mingw32CCompiler class which handles the mingw32 port
+of GCC (same as cygwin in no-cygwin mode).
+"""
+
+# problems:
+#
+# * if you use a msvc compiled python version (1.5.2)
+# 1. you have to insert a __GNUC__ section in its config.h
+# 2. you have to generate a import library for its dll
+# - create a def-file for python??.dll
+# - create a import library using
+# dlltool --dllname python15.dll --def python15.def \
+# --output-lib libpython15.a
+#
+# see also http://starship.python.net/crew/kernr/mingw32/Notes.html
+#
+# * We put export_symbols in a def-file, and don't use
+#   --export-all-symbols because it didn't work reliably in some
+#   tested configurations. And because other Windows compilers also
+#   need their symbols specified, this is no serious problem.
+#
+# tested configurations:
+#
+# * cygwin gcc 2.91.57/ld 2.9.4/dllwrap 0.2.4 works
+# (after patching python's config.h and for C++ some other include files)
+# see also http://starship.python.net/crew/kernr/mingw32/Notes.html
+# * mingw32 gcc 2.95.2/ld 2.9.4/dllwrap 0.2.4 works
+# (ld doesn't support -shared, so we use dllwrap)
+# * cygwin gcc 2.95.2/ld 2.10.90/dllwrap 2.10.90 works now
+# - its dllwrap doesn't work, there is a bug in binutils 2.10.90
+# see also http://sources.redhat.com/ml/cygwin/2000-06/msg01274.html
+# - using gcc -mdll instead of dllwrap doesn't work without -static,
+#   because it tries to link against dlls instead of their import
+#   libraries (if it finds the dll first).
+#   By specifying -static we force ld to link against the import
+#   libraries; this is the Windows standard, and the dlls normally do
+#   not contain the necessary symbols.
+# *** only the version of June 2000 shows these problems
+# * cygwin gcc 3.2/ld 2.13.90 works
+# (ld supports -shared)
+# * mingw gcc 3.2/ld 2.13 works
+# (ld supports -shared)
+
+
+import os
+import sys
+
+from packaging import logger
+from packaging.compiler.unixccompiler import UnixCCompiler
+from packaging.util import write_file
+from packaging.errors import PackagingExecError, CompileError, UnknownFileError
+from packaging.util import get_compiler_versions
+import sysconfig
+
+# TODO use platform instead of sys.version
+# (platform does unholy sys.version parsing too, but at least it gives other
+# VMs a chance to override the returned values)
+
+
+def get_msvcr():
+ """Include the appropriate MSVC runtime library if Python was built
+ with MSVC 7.0 or later.
+ """
+ msc_pos = sys.version.find('MSC v.')
+ if msc_pos != -1:
+ msc_ver = sys.version[msc_pos+6:msc_pos+10]
+ if msc_ver == '1300':
+ # MSVC 7.0
+ return ['msvcr70']
+ elif msc_ver == '1310':
+ # MSVC 7.1
+ return ['msvcr71']
+ elif msc_ver == '1400':
+ # VS2005 / MSVC 8.0
+ return ['msvcr80']
+ elif msc_ver == '1500':
+ # VS2008 / MSVC 9.0
+ return ['msvcr90']
+ else:
+ raise ValueError("Unknown MS Compiler version %s " % msc_ver)
+
+
+class CygwinCCompiler(UnixCCompiler):
+ """ Handles the Cygwin port of the GNU C compiler to Windows.
+ """
+ name = 'cygwin'
+ description = 'Cygwin port of GNU C Compiler for Win32'
+ obj_extension = ".o"
+ static_lib_extension = ".a"
+ shared_lib_extension = ".dll"
+ static_lib_format = "lib%s%s"
+ shared_lib_format = "%s%s"
+ exe_extension = ".exe"
+
+ def __init__(self, dry_run=False, force=False):
+ super(CygwinCCompiler, self).__init__(dry_run, force)
+
+ status, details = check_config_h()
+ logger.debug("Python's GCC status: %s (details: %s)", status, details)
+ if status is not CONFIG_H_OK:
+ self.warn(
+ "Python's pyconfig.h doesn't seem to support your compiler. "
+ "Reason: %s. "
+ "Compiling may fail because of undefined preprocessor macros."
+ % details)
+
+ self.gcc_version, self.ld_version, self.dllwrap_version = \
+ get_compiler_versions()
+ logger.debug(self.name + ": gcc %s, ld %s, dllwrap %s\n",
+ self.gcc_version,
+ self.ld_version,
+ self.dllwrap_version)
+
+ # ld_version >= "2.10.90" and < "2.13" should also be able to use
+ # gcc -mdll instead of dllwrap
+        # Older dllwraps had their own version numbers; newer ones use
+        # the same as the rest of binutils (also ld).
+ # dllwrap 2.10.90 is buggy
+ if self.ld_version >= "2.10.90":
+ self.linker_dll = "gcc"
+ else:
+ self.linker_dll = "dllwrap"
+
+ # ld_version >= "2.13" support -shared so use it instead of
+ # -mdll -static
+ if self.ld_version >= "2.13":
+ shared_option = "-shared"
+ else:
+ shared_option = "-mdll -static"
+
+ # Hard-code GCC because that's what this is all about.
+ # XXX optimization, warnings etc. should be customizable.
+ self.set_executables(compiler='gcc -mcygwin -O -Wall',
+ compiler_so='gcc -mcygwin -mdll -O -Wall',
+ compiler_cxx='g++ -mcygwin -O -Wall',
+ linker_exe='gcc -mcygwin',
+ linker_so=('%s -mcygwin %s' %
+ (self.linker_dll, shared_option)))
+
+ # cygwin and mingw32 need different sets of libraries
+ if self.gcc_version == "2.91.57":
+            # cygwin shouldn't need msvcrt, but without it the dlls
+            # will crash (gcc version 2.91.57) -- perhaps something
+            # about initialization
+            self.dll_libraries = ["msvcrt"]
+ self.warn(
+ "Consider upgrading to a newer version of gcc")
+ else:
+ # Include the appropriate MSVC runtime library if Python was built
+ # with MSVC 7.0 or later.
+ self.dll_libraries = get_msvcr()
+
+ def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
+ """Compile the source by spawning GCC and windres if needed."""
+ if ext == '.rc' or ext == '.res':
+ # gcc needs '.res' and '.rc' compiled to object files !!!
+ try:
+ self.spawn(["windres", "-i", src, "-o", obj])
+ except PackagingExecError as msg:
+ raise CompileError(msg)
+ else: # for other files use the C-compiler
+ try:
+ self.spawn(self.compiler_so + cc_args + [src, '-o', obj] +
+ extra_postargs)
+ except PackagingExecError as msg:
+ raise CompileError(msg)
+
+ def link(self, target_desc, objects, output_filename, output_dir=None,
+ libraries=None, library_dirs=None, runtime_library_dirs=None,
+ export_symbols=None, debug=False, extra_preargs=None,
+ extra_postargs=None, build_temp=None, target_lang=None):
+ """Link the objects."""
+ # use separate copies, so we can modify the lists
+ extra_preargs = list(extra_preargs or [])
+ libraries = list(libraries or [])
+ objects = list(objects or [])
+
+ # Additional libraries
+ libraries.extend(self.dll_libraries)
+
+ # handle export symbols by creating a def-file
+ # with executables this only works with gcc/ld as linker
+ if ((export_symbols is not None) and
+ (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")):
+            # (The linker doesn't do anything if the output is up-to-date,
+            # so it would probably be better to check whether we really
+            # need this; but for that we would have to duplicate some
+            # unchanged parts of UnixCCompiler, and that is not what we
+            # want.)
+
+            # We want to put the helper files in the same directory as
+            # the object files; build_temp doesn't help much here.
+            # Where the object files are:
+ temp_dir = os.path.dirname(objects[0])
+ # name of dll to give the helper files the same base name
+ dll_name, dll_extension = os.path.splitext(
+ os.path.basename(output_filename))
+
+ # generate the filenames for these files
+ def_file = os.path.join(temp_dir, dll_name + ".def")
+ lib_file = os.path.join(temp_dir, 'lib' + dll_name + ".a")
+
+ # Generate .def file
+ contents = [
+ "LIBRARY %s" % os.path.basename(output_filename),
+ "EXPORTS"]
+ for sym in export_symbols:
+ contents.append(sym)
+ self.execute(write_file, (def_file, contents),
+ "writing %s" % def_file)
+
+            # next add options for the def-file and for creating import
+            # libraries
+
+ # dllwrap uses different options than gcc/ld
+ if self.linker_dll == "dllwrap":
+ extra_preargs.extend(("--output-lib", lib_file))
+ # for dllwrap we have to use a special option
+ extra_preargs.extend(("--def", def_file))
+ # we use gcc/ld here and can be sure ld is >= 2.9.10
+ else:
+ # doesn't work: bfd_close build\...\libfoo.a: Invalid operation
+ #extra_preargs.extend(("-Wl,--out-implib,%s" % lib_file))
+ # for gcc/ld the def-file is specified as any object files
+ objects.append(def_file)
+
+ #end: if ((export_symbols is not None) and
+ # (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")):
+
+        # Whoever wants symbols and a many times larger output file
+        # should explicitly switch debug mode on; otherwise we let
+        # dllwrap/ld strip the output file.
+ # (On my machine: 10KB < stripped_file < ??100KB
+ # unstripped_file = stripped_file + XXX KB
+ # ( XXX=254 for a typical python extension))
+ if not debug:
+ extra_preargs.append("-s")
+
+ super(CygwinCCompiler, self).link(
+ target_desc, objects, output_filename, output_dir, libraries,
+ library_dirs, runtime_library_dirs,
+ None, # export_symbols, we do this in our def-file
+ debug, extra_preargs, extra_postargs, build_temp, target_lang)
+
+ # -- Miscellaneous methods -----------------------------------------
+
+ def object_filenames(self, source_filenames, strip_dir=False,
+ output_dir=''):
+ """Adds supports for rc and res files."""
+ if output_dir is None:
+ output_dir = ''
+ obj_names = []
+ for src_name in source_filenames:
+ # use normcase to make sure '.rc' is really '.rc' and not '.RC'
+ base, ext = os.path.splitext(os.path.normcase(src_name))
+ if ext not in (self.src_extensions + ['.rc','.res']):
+ raise UnknownFileError("unknown file type '%s' (from '%s')" % (ext, src_name))
+ if strip_dir:
+ base = os.path.basename(base)
+ if ext in ('.res', '.rc'):
+ # these need to be compiled to object files
+ obj_names.append(os.path.join(output_dir,
+ base + ext + self.obj_extension))
+ else:
+ obj_names.append(os.path.join(output_dir,
+ base + self.obj_extension))
+ return obj_names
+
+# the same as cygwin plus some additional parameters
+class Mingw32CCompiler(CygwinCCompiler):
+ """ Handles the Mingw32 port of the GNU C compiler to Windows.
+ """
+ name = 'mingw32'
+ description = 'MinGW32 compiler'
+
+ def __init__(self, dry_run=False, force=False):
+ super(Mingw32CCompiler, self).__init__(dry_run, force)
+
+ # ld_version >= "2.13" support -shared so use it instead of
+ # -mdll -static
+ if self.ld_version >= "2.13":
+ shared_option = "-shared"
+ else:
+ shared_option = "-mdll -static"
+
+ # A real mingw32 doesn't need to specify a different entry point,
+ # but cygwin 2.91.57 in no-cygwin-mode needs it.
+ if self.gcc_version <= "2.91.57":
+ entry_point = '--entry _DllMain@12'
+ else:
+ entry_point = ''
+
+ self.set_executables(compiler='gcc -mno-cygwin -O -Wall',
+ compiler_so='gcc -mno-cygwin -mdll -O -Wall',
+ compiler_cxx='g++ -mno-cygwin -O -Wall',
+ linker_exe='gcc -mno-cygwin',
+ linker_so='%s -mno-cygwin %s %s'
+ % (self.linker_dll, shared_option,
+ entry_point))
+ # Maybe we should also append -mthreads, but then the finished
+ # dlls need another dll (mingwm10.dll see Mingw32 docs)
+ # (-mthreads: Support thread-safe exception handling on `Mingw32')
+
+        # Include the appropriate MSVC runtime library if Python was built
+        # with MSVC 7.0 or later; no other additional libraries are needed.
+        self.dll_libraries = get_msvcr()
+
+# Because these compilers aren't configured in Python's pyconfig.h file by
+# default, we should at least warn the user if they are using an unmodified
+# version.
+
+CONFIG_H_OK = "ok"
+CONFIG_H_NOTOK = "not ok"
+CONFIG_H_UNCERTAIN = "uncertain"
+
+def check_config_h():
+ """Check if the current Python installation appears amenable to building
+ extensions with GCC.
+
+ Returns a tuple (status, details), where 'status' is one of the following
+ constants:
+
+ - CONFIG_H_OK: all is well, go ahead and compile
+ - CONFIG_H_NOTOK: doesn't look good
+ - CONFIG_H_UNCERTAIN: not sure -- unable to read pyconfig.h
+
+ 'details' is a human-readable string explaining the situation.
+
+ Note there are two ways to conclude "OK": either 'sys.version' contains
+ the string "GCC" (implying that this Python was built with GCC), or the
+ installed "pyconfig.h" contains the string "__GNUC__".
+ """
+
+ # XXX since this function also checks sys.version, it's not strictly a
+ # "pyconfig.h" check -- should probably be renamed...
+ # if sys.version contains GCC then python was compiled with GCC, and the
+ # pyconfig.h file should be OK
+ if "GCC" in sys.version:
+ return CONFIG_H_OK, "sys.version mentions 'GCC'"
+
+ # let's see if __GNUC__ is mentioned in python.h
+ fn = sysconfig.get_config_h_filename()
+ try:
+ with open(fn) as config_h:
+ if "__GNUC__" in config_h.read():
+ return CONFIG_H_OK, "'%s' mentions '__GNUC__'" % fn
+ else:
+ return CONFIG_H_NOTOK, "'%s' does not mention '__GNUC__'" % fn
+ except IOError as exc:
+ return (CONFIG_H_UNCERTAIN,
+ "couldn't read '%s': %s" % (fn, exc.strerror))
diff --git a/Lib/packaging/compiler/extension.py b/Lib/packaging/compiler/extension.py
new file mode 100644
index 0000000..66f6e9a
--- /dev/null
+++ b/Lib/packaging/compiler/extension.py
@@ -0,0 +1,121 @@
+"""Class representing C/C++ extension modules."""
+
+from packaging import logger
+
+# This class is really only used by the "build_ext" command, so it might
+# make sense to put it in distutils.command.build_ext. However, that
+# module is already big enough, and I want to make this class a bit more
+# complex to simplify some common cases ("foo" module in "foo.c") and do
+# better error-checking ("foo.c" actually exists).
+#
+# Also, putting this in build_ext.py means every setup script would have to
+# import that large-ish module (indirectly, through distutils.core) in
+# order to do anything.
+
+
+class Extension:
+ """Just a collection of attributes that describes an extension
+ module and everything needed to build it (hopefully in a portable
+ way, but there are hooks that let you be as unportable as you need).
+
+ Instance attributes:
+ name : string
+ the full name of the extension, including any packages -- ie.
+        *not* a filename or pathname, but a Python dotted name
+ sources : [string]
+ list of source filenames, relative to the distribution root
+ (where the setup script lives), in Unix form (slash-separated)
+ for portability. Source files may be C, C++, SWIG (.i),
+ platform-specific resource files, or whatever else is recognized
+ by the "build_ext" command as source for a Python extension.
+ include_dirs : [string]
+ list of directories to search for C/C++ header files (in Unix
+ form for portability)
+ define_macros : [(name : string, value : string|None)]
+ list of macros to define; each macro is defined using a 2-tuple,
+ where 'value' is either the string to define it to or None to
+ define it without a particular value (equivalent of "#define
+ FOO" in source or -DFOO on Unix C compiler command line)
+ undef_macros : [string]
+ list of macros to undefine explicitly
+ library_dirs : [string]
+ list of directories to search for C/C++ libraries at link time
+ libraries : [string]
+ list of library names (not filenames or paths) to link against
+ runtime_library_dirs : [string]
+ list of directories to search for C/C++ libraries at run time
+ (for shared extensions, this is when the extension is loaded)
+ extra_objects : [string]
+ list of extra files to link with (eg. object files not implied
+ by 'sources', static library that must be explicitly specified,
+ binary resource files, etc.)
+ extra_compile_args : [string]
+ any extra platform- and compiler-specific information to use
+ when compiling the source files in 'sources'. For platforms and
+ compilers where "command line" makes sense, this is typically a
+ list of command-line arguments, but for other platforms it could
+ be anything.
+ extra_link_args : [string]
+ any extra platform- and compiler-specific information to use
+ when linking object files together to create the extension (or
+ to create a new static Python interpreter). Similar
+ interpretation as for 'extra_compile_args'.
+ export_symbols : [string]
+ list of symbols to be exported from a shared extension. Not
+ used on all platforms, and not generally necessary for Python
+ extensions, which typically export exactly one symbol: "init" +
+ extension_name.
+ swig_opts : [string]
+ any extra options to pass to SWIG if a source file has the .i
+ extension.
+ depends : [string]
+ list of files that the extension depends on
+ language : string
+ extension language (i.e. "c", "c++", "objc"). Will be detected
+ from the source extensions if not provided.
+ optional : boolean
+ specifies that a build failure in the extension should not abort the
+ build process, but simply not install the failing extension.
+ """
+
+ # **kwargs are allowed so that a warning is emitted instead of an
+ # exception
+ def __init__(self, name, sources, include_dirs=None, define_macros=None,
+ undef_macros=None, library_dirs=None, libraries=None,
+ runtime_library_dirs=None, extra_objects=None,
+ extra_compile_args=None, extra_link_args=None,
+ export_symbols=None, swig_opts=None, depends=None,
+ language=None, optional=None, **kw):
+ if not isinstance(name, str):
+ raise AssertionError("'name' must be a string")
+
+ if not isinstance(sources, list):
+ raise AssertionError("'sources' must be a list of strings")
+
+ for v in sources:
+ if not isinstance(v, str):
+ raise AssertionError("'sources' must be a list of strings")
+
+ self.name = name
+ self.sources = sources
+ self.include_dirs = include_dirs or []
+ self.define_macros = define_macros or []
+ self.undef_macros = undef_macros or []
+ self.library_dirs = library_dirs or []
+ self.libraries = libraries or []
+ self.runtime_library_dirs = runtime_library_dirs or []
+ self.extra_objects = extra_objects or []
+ self.extra_compile_args = extra_compile_args or []
+ self.extra_link_args = extra_link_args or []
+ self.export_symbols = export_symbols or []
+ self.swig_opts = swig_opts or []
+ self.depends = depends or []
+ self.language = language
+ self.optional = optional
+
+ # If there are unknown keyword options, warn about them
+ if len(kw) > 0:
+ options = [repr(option) for option in kw]
+ options = ', '.join(sorted(options))
+ logger.warning(
+ 'unknown arguments given to Extension: %s', options)
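+
+# A typical construction (illustrative names; not part of this module):
+#
+#   ext = Extension('pkg._speedups', ['src/speedups.c'],
+#                   define_macros=[('NDEBUG', None)],
+#                   libraries=['m'])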
diff --git a/Lib/packaging/compiler/msvc9compiler.py b/Lib/packaging/compiler/msvc9compiler.py
new file mode 100644
index 0000000..029aa77
--- /dev/null
+++ b/Lib/packaging/compiler/msvc9compiler.py
@@ -0,0 +1,720 @@
+"""CCompiler implementation for the Microsoft Visual Studio 2008 compiler.
+
+The MSVCCompiler class is compatible with VS 2005 and VS 2008. Legacy
+support for older versions of VS is in the msvccompiler module.
+"""
+
+# Written by Perry Stoll
+# hacked by Robin Becker and Thomas Heller to do a better job of
+# finding DevStudio (through the registry)
+# ported to VS2005 and VS 2008 by Christian Heimes
+import os
+import subprocess
+import sys
+import re
+
+from packaging.errors import (PackagingExecError, PackagingPlatformError,
+ CompileError, LibError, LinkError)
+from packaging.compiler.ccompiler import CCompiler
+from packaging.compiler import gen_lib_options
+from packaging import logger
+from packaging.util import get_platform
+
+import winreg
+
+RegOpenKeyEx = winreg.OpenKeyEx
+RegEnumKey = winreg.EnumKey
+RegEnumValue = winreg.EnumValue
+RegError = winreg.error
+
+HKEYS = (winreg.HKEY_USERS,
+ winreg.HKEY_CURRENT_USER,
+ winreg.HKEY_LOCAL_MACHINE,
+ winreg.HKEY_CLASSES_ROOT)
+
+VS_BASE = r"Software\Microsoft\VisualStudio\%0.1f"
+WINSDK_BASE = r"Software\Microsoft\Microsoft SDKs\Windows"
+NET_BASE = r"Software\Microsoft\.NETFramework"
+
+# A map keyed by get_platform() return values to values accepted by
+# 'vcvarsall.bat'. Note that a cross-compile may combine these (e.g.
+# 'x86_amd64' is the param to cross-compile on x86 targeting amd64.)
+PLAT_TO_VCVARS = {
+ 'win32' : 'x86',
+ 'win-amd64' : 'amd64',
+ 'win-ia64' : 'ia64',
+}
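+
+# Example (illustrative): a native win32 build uses PLAT_TO_VCVARS['win32'],
+# i.e. 'x86', while cross-compiling from win32 to win-amd64 combines the two
+# entries into the vcvarsall.bat argument 'x86_amd64'.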
+
+
+class Reg:
+ """Helper class to read values from the registry
+ """
+
+    @classmethod
+    def get_value(cls, path, key):
+ for base in HKEYS:
+ d = cls.read_values(base, path)
+ if d and key in d:
+ return d[key]
+ raise KeyError(key)
+
+    @classmethod
+    def read_keys(cls, base, key):
+ """Return list of registry keys."""
+ try:
+ handle = RegOpenKeyEx(base, key)
+ except RegError:
+ return None
+ L = []
+ i = 0
+ while True:
+ try:
+ k = RegEnumKey(handle, i)
+ except RegError:
+ break
+ L.append(k)
+ i += 1
+ return L
+
+    @classmethod
+    def read_values(cls, base, key):
+ """Return dict of registry keys and values.
+
+ All names are converted to lowercase.
+ """
+ try:
+ handle = RegOpenKeyEx(base, key)
+ except RegError:
+ return None
+ d = {}
+ i = 0
+ while True:
+ try:
+ name, value, type = RegEnumValue(handle, i)
+ except RegError:
+ break
+ name = name.lower()
+ d[cls.convert_mbcs(name)] = cls.convert_mbcs(value)
+ i += 1
+ return d
+
+    @staticmethod
+    def convert_mbcs(s):
+ dec = getattr(s, "decode", None)
+ if dec is not None:
+ try:
+ s = dec("mbcs")
+ except UnicodeError:
+ pass
+ return s
+
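+# Example (illustrative): Reg.get_value(VS_BASE % 9.0 + r"\Setup\VC",
+# "productdir") walks HKEY_USERS, HKEY_CURRENT_USER, HKEY_LOCAL_MACHINE and
+# HKEY_CLASSES_ROOT in order and returns the first hit, raising KeyError if
+# the value exists under none of them.
+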
+class MacroExpander:
+
+ def __init__(self, version):
+ self.macros = {}
+ self.vsbase = VS_BASE % version
+ self.load_macros(version)
+
+ def set_macro(self, macro, path, key):
+ self.macros["$(%s)" % macro] = Reg.get_value(path, key)
+
+ def load_macros(self, version):
+ self.set_macro("VCInstallDir", self.vsbase + r"\Setup\VC", "productdir")
+ self.set_macro("VSInstallDir", self.vsbase + r"\Setup\VS", "productdir")
+ self.set_macro("FrameworkDir", NET_BASE, "installroot")
+ try:
+ if version >= 8.0:
+ self.set_macro("FrameworkSDKDir", NET_BASE,
+ "sdkinstallrootv2.0")
+ else:
+ raise KeyError("sdkinstallrootv2.0")
+ except KeyError:
+ raise PackagingPlatformError(
+"""Python was built with Visual Studio 2008; extensions must be built with a
+compiler than can generate compatible binaries. Visual Studio 2008 was not
+found on this system. If you have Cygwin installed, you can try compiling
+with MingW32, by passing "-c mingw32" to pysetup.""")
+
+ if version >= 9.0:
+ self.set_macro("FrameworkVersion", self.vsbase, "clr version")
+ self.set_macro("WindowsSdkDir", WINSDK_BASE, "currentinstallfolder")
+ else:
+ p = r"Software\Microsoft\NET Framework Setup\Product"
+ for base in HKEYS:
+ try:
+ h = RegOpenKeyEx(base, p)
+ except RegError:
+ continue
+ key = RegEnumKey(h, 0)
+ d = Reg.get_value(base, r"%s\%s" % (p, key))
+ self.macros["$(FrameworkVersion)"] = d["version"]
+
+ def sub(self, s):
+ for k, v in self.macros.items():
+ s = s.replace(k, v)
+ return s
+
+def get_build_version():
+ """Return the version of MSVC that was used to build Python.
+
+ For Python 2.3 and up, the version number is included in
+ sys.version. For earlier versions, assume the compiler is MSVC 6.
+ """
+ prefix = "MSC v."
+ i = sys.version.find(prefix)
+ if i == -1:
+ return 6
+ i = i + len(prefix)
+ s, rest = sys.version[i:].split(" ", 1)
+ majorVersion = int(s[:-2]) - 6
+ minorVersion = int(s[2:3]) / 10.0
+ # I don't think paths are affected by minor version in version 6
+ if majorVersion == 6:
+ minorVersion = 0
+ if majorVersion >= 6:
+ return majorVersion + minorVersion
+ # else we don't know what version of the compiler this is
+ return None
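+
+# Example (illustrative): a sys.version containing "MSC v.1500 64 bit (AMD64)"
+# gives majorVersion 15 - 6 = 9 and minorVersion 0.0, i.e. 9.0 (VS 2008).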
+
+def normalize_and_reduce_paths(paths):
+ """Return a list of normalized paths with duplicates removed.
+
+ The current order of paths is maintained.
+ """
+ # Paths are normalized so things like: /a and /a/ aren't both preserved.
+ reduced_paths = []
+ for p in paths:
+ np = os.path.normpath(p)
+ # XXX(nnorwitz): O(n**2), if reduced_paths gets long perhaps use a set.
+ if np not in reduced_paths:
+ reduced_paths.append(np)
+ return reduced_paths
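+
+# Example (illustrative): [r'C:\VC\bin', 'C:\\VC\\bin\\', r'C:\SDK'] reduces
+# to [r'C:\VC\bin', r'C:\SDK'], since '/a' and '/a/' normalize identically.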
+
+def removeDuplicates(variable):
+ """Remove duplicate values of an environment variable.
+ """
+ oldList = variable.split(os.pathsep)
+ newList = []
+ for i in oldList:
+ if i not in newList:
+ newList.append(i)
+ newVariable = os.pathsep.join(newList)
+ return newVariable
+
+def find_vcvarsall(version):
+ """Find the vcvarsall.bat file
+
+    First, it tries to find the productdir of VS 2008 in the registry. If
+    that fails, it falls back to the VS90COMNTOOLS env var.
+ """
+ vsbase = VS_BASE % version
+ try:
+ productdir = Reg.get_value(r"%s\Setup\VC" % vsbase,
+ "productdir")
+ except KeyError:
+ logger.debug("Unable to find productdir in registry")
+ productdir = None
+
+ if not productdir or not os.path.isdir(productdir):
+ toolskey = "VS%0.f0COMNTOOLS" % version
+ toolsdir = os.environ.get(toolskey, None)
+
+ if toolsdir and os.path.isdir(toolsdir):
+ productdir = os.path.join(toolsdir, os.pardir, os.pardir, "VC")
+ productdir = os.path.abspath(productdir)
+ if not os.path.isdir(productdir):
+ logger.debug("%s is not a valid directory", productdir)
+ return None
+ else:
+ logger.debug("env var %s is not set or invalid", toolskey)
+ if not productdir:
+ logger.debug("no productdir found")
+ return None
+ vcvarsall = os.path.join(productdir, "vcvarsall.bat")
+ if os.path.isfile(vcvarsall):
+ return vcvarsall
+ logger.debug("unable to find vcvarsall.bat")
+ return None
+
+def query_vcvarsall(version, arch="x86"):
+ """Launch vcvarsall.bat and read the settings from its environment
+ """
+ vcvarsall = find_vcvarsall(version)
+ interesting = set(("include", "lib", "libpath", "path"))
+ result = {}
+
+ if vcvarsall is None:
+ raise PackagingPlatformError("Unable to find vcvarsall.bat")
+ logger.debug("calling 'vcvarsall.bat %s' (version=%s)", arch, version)
+ popen = subprocess.Popen('"%s" %s & set' % (vcvarsall, arch),
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+
+ stdout, stderr = popen.communicate()
+ if popen.wait() != 0:
+ raise PackagingPlatformError(stderr.decode("mbcs"))
+
+ stdout = stdout.decode("mbcs")
+ for line in stdout.split("\n"):
+ line = Reg.convert_mbcs(line)
+ if '=' not in line:
+ continue
+ line = line.strip()
+ key, value = line.split('=', 1)
+ key = key.lower()
+ if key in interesting:
+ if value.endswith(os.pathsep):
+ value = value[:-1]
+ result[key] = removeDuplicates(value)
+
+ if len(result) != len(interesting):
+ raise ValueError(str(list(result)))
+
+ return result
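+
+# Example use (sketch): query_vcvarsall(9.0, 'x86_amd64') runs vcvarsall.bat
+# for an x86 -> amd64 cross build and returns a dict with the 'include',
+# 'lib', 'libpath' and 'path' values from the resulting environment.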
+
+# More globals
+VERSION = get_build_version()
+if VERSION < 8.0:
+ raise PackagingPlatformError("VC %0.1f is not supported by this module" % VERSION)
+# MACROS = MacroExpander(VERSION)
+
+class MSVCCompiler(CCompiler):
+ """Concrete class that implements an interface to Microsoft Visual C++,
+ as defined by the CCompiler abstract class."""
+
+ name = 'msvc'
+ description = 'Microsoft Visual C++'
+
+ # Just set this so CCompiler's constructor doesn't barf. We currently
+ # don't use the 'set_executables()' bureaucracy provided by CCompiler,
+ # as it really isn't necessary for this sort of single-compiler class.
+ # Would be nice to have a consistent interface with UnixCCompiler,
+ # though, so it's worth thinking about.
+ executables = {}
+
+ # Private class data (need to distinguish C from C++ source for compiler)
+ _c_extensions = ['.c']
+ _cpp_extensions = ['.cc', '.cpp', '.cxx']
+ _rc_extensions = ['.rc']
+ _mc_extensions = ['.mc']
+
+ # Needed for the filename generation methods provided by the
+ # base class, CCompiler.
+ src_extensions = (_c_extensions + _cpp_extensions +
+ _rc_extensions + _mc_extensions)
+ res_extension = '.res'
+ obj_extension = '.obj'
+ static_lib_extension = '.lib'
+ shared_lib_extension = '.dll'
+ static_lib_format = shared_lib_format = '%s%s'
+ exe_extension = '.exe'
+
+ def __init__(self, dry_run=False, force=False):
+ super(MSVCCompiler, self).__init__(dry_run, force)
+ self.__version = VERSION
+ self.__root = r"Software\Microsoft\VisualStudio"
+ # self.__macros = MACROS
+ self.__paths = []
+ # target platform (.plat_name is consistent with 'bdist')
+ self.plat_name = None
+ self.__arch = None # deprecated name
+ self.initialized = False
+
+ def initialize(self, plat_name=None):
+        # multi-init means we would need to check that the platform is the
+        # same each time...
+ assert not self.initialized, "don't init multiple times"
+ if plat_name is None:
+ plat_name = get_platform()
+ # sanity check for platforms to prevent obscure errors later.
+ ok_plats = 'win32', 'win-amd64', 'win-ia64'
+ if plat_name not in ok_plats:
+ raise PackagingPlatformError("--plat-name must be one of %s" %
+ (ok_plats,))
+
+ if "DISTUTILS_USE_SDK" in os.environ and "MSSdk" in os.environ and self.find_exe("cl.exe"):
+ # Assume that the SDK set up everything alright; don't try to be
+ # smarter
+ self.cc = "cl.exe"
+ self.linker = "link.exe"
+ self.lib = "lib.exe"
+ self.rc = "rc.exe"
+ self.mc = "mc.exe"
+ else:
+ # On x86, 'vcvars32.bat amd64' creates an env that doesn't work;
+ # to cross compile, you use 'x86_amd64'.
+ # On AMD64, 'vcvars32.bat amd64' is a native build env; to cross
+ # compile use 'x86' (ie, it runs the x86 compiler directly)
+            # No idea how Itanium handles this, if at all.
+ if plat_name == get_platform() or plat_name == 'win32':
+ # native build or cross-compile to win32
+ plat_spec = PLAT_TO_VCVARS[plat_name]
+ else:
+ # cross compile from win32 -> some 64bit
+ plat_spec = PLAT_TO_VCVARS[get_platform()] + '_' + \
+ PLAT_TO_VCVARS[plat_name]
+
+ vc_env = query_vcvarsall(VERSION, plat_spec)
+
+ # take care to only use strings in the environment.
+ self.__paths = vc_env['path'].split(os.pathsep)
+ os.environ['lib'] = vc_env['lib']
+ os.environ['include'] = vc_env['include']
+
+ if len(self.__paths) == 0:
+ raise PackagingPlatformError("Python was built with %s, "
+ "and extensions need to be built with the same "
+ "version of the compiler, but it isn't installed."
+ % self.__product)
+
+ self.cc = self.find_exe("cl.exe")
+ self.linker = self.find_exe("link.exe")
+ self.lib = self.find_exe("lib.exe")
+ self.rc = self.find_exe("rc.exe") # resource compiler
+ self.mc = self.find_exe("mc.exe") # message compiler
+ #self.set_path_env_var('lib')
+ #self.set_path_env_var('include')
+
+ # extend the MSVC path with the current path
+ try:
+ for p in os.environ['path'].split(';'):
+ self.__paths.append(p)
+ except KeyError:
+ pass
+ self.__paths = normalize_and_reduce_paths(self.__paths)
+ os.environ['path'] = ";".join(self.__paths)
+
+ self.preprocess_options = None
+ if self.__arch == "x86":
+ self.compile_options = [ '/nologo', '/Ox', '/MD', '/W3',
+ '/DNDEBUG']
+ self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3',
+ '/Z7', '/D_DEBUG']
+ else:
+ # Win64
+ self.compile_options = [ '/nologo', '/Ox', '/MD', '/W3', '/GS-' ,
+ '/DNDEBUG']
+ self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GS-',
+ '/Z7', '/D_DEBUG']
+
+ self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO']
+ if self.__version >= 7:
+ self.ldflags_shared_debug = [
+ '/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG', '/pdb:None'
+ ]
+ self.ldflags_static = [ '/nologo']
+
+ self.initialized = True
+
+ # -- Worker methods ------------------------------------------------
+
+ def object_filenames(self,
+ source_filenames,
+ strip_dir=False,
+ output_dir=''):
+ # Copied from ccompiler.py, extended to return .res as 'object'-file
+ # for .rc input file
+        if output_dir is None:
+            output_dir = ''
+ obj_names = []
+ for src_name in source_filenames:
+ base, ext = os.path.splitext(src_name)
+ base = os.path.splitdrive(base)[1] # Chop off the drive
+ base = base[os.path.isabs(base):] # If abs, chop off leading /
+ if ext not in self.src_extensions:
+ # Better to raise an exception instead of silently continuing
+ # and later complain about sources and targets having
+ # different lengths
+ raise CompileError("Don't know how to compile %s" % src_name)
+ if strip_dir:
+ base = os.path.basename(base)
+ if ext in self._rc_extensions:
+ obj_names.append(os.path.join(output_dir,
+ base + self.res_extension))
+ elif ext in self._mc_extensions:
+ obj_names.append(os.path.join(output_dir,
+ base + self.res_extension))
+ else:
+ obj_names.append(os.path.join(output_dir,
+ base + self.obj_extension))
+ return obj_names
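+
+    # Example (illustrative): ['foo.c', r'res\app.rc'] with output_dir='build'
+    # maps to [r'build\foo.obj', r'build\res\app.res'].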
+
+
+ def compile(self, sources,
+ output_dir=None, macros=None, include_dirs=None, debug=False,
+ extra_preargs=None, extra_postargs=None, depends=None):
+
+ if not self.initialized:
+ self.initialize()
+ compile_info = self._setup_compile(output_dir, macros, include_dirs,
+ sources, depends, extra_postargs)
+ macros, objects, extra_postargs, pp_opts, build = compile_info
+
+ compile_opts = extra_preargs or []
+ compile_opts.append('/c')
+ if debug:
+ compile_opts.extend(self.compile_options_debug)
+ else:
+ compile_opts.extend(self.compile_options)
+
+ for obj in objects:
+ try:
+ src, ext = build[obj]
+ except KeyError:
+ continue
+ if debug:
+ # pass the full pathname to MSVC in debug mode,
+ # this allows the debugger to find the source file
+ # without asking the user to browse for it
+ src = os.path.abspath(src)
+
+ if ext in self._c_extensions:
+ input_opt = "/Tc" + src
+ elif ext in self._cpp_extensions:
+ input_opt = "/Tp" + src
+ elif ext in self._rc_extensions:
+ # compile .RC to .RES file
+ input_opt = src
+ output_opt = "/fo" + obj
+ try:
+ self.spawn([self.rc] + pp_opts +
+ [output_opt] + [input_opt])
+ except PackagingExecError as msg:
+ raise CompileError(msg)
+ continue
+ elif ext in self._mc_extensions:
+ # Compile .MC to .RC file to .RES file.
+ # * '-h dir' specifies the directory for the
+ # generated include file
+ # * '-r dir' specifies the target directory of the
+ # generated RC file and the binary message resource
+ # it includes
+ #
+ # For now (since there are no options to change this),
+ # we use the source-directory for the include file and
+ # the build directory for the RC file and message
+ # resources. This works at least for win32all.
+ h_dir = os.path.dirname(src)
+ rc_dir = os.path.dirname(obj)
+ try:
+ # first compile .MC to .RC and .H file
+ self.spawn([self.mc] +
+ ['-h', h_dir, '-r', rc_dir] + [src])
+ base, _ = os.path.splitext(os.path.basename(src))
+ rc_file = os.path.join(rc_dir, base + '.rc')
+ # then compile .RC to .RES file
+ self.spawn([self.rc] +
+ ["/fo" + obj] + [rc_file])
+
+ except PackagingExecError as msg:
+ raise CompileError(msg)
+ continue
+ else:
+ # how to handle this file?
+ raise CompileError("Don't know how to compile %s to %s"
+ % (src, obj))
+
+ output_opt = "/Fo" + obj
+ try:
+ self.spawn([self.cc] + compile_opts + pp_opts +
+ [input_opt, output_opt] +
+ extra_postargs)
+ except PackagingExecError as msg:
+ raise CompileError(msg)
+
+ return objects
+
+
+ def create_static_lib(self,
+ objects,
+ output_libname,
+ output_dir=None,
+ debug=False,
+ target_lang=None):
+
+ if not self.initialized:
+ self.initialize()
+ objects, output_dir = self._fix_object_args(objects, output_dir)
+ output_filename = self.library_filename(output_libname,
+ output_dir=output_dir)
+
+ if self._need_link(objects, output_filename):
+ lib_args = objects + ['/OUT:' + output_filename]
+ if debug:
+ pass # XXX what goes here?
+ try:
+ self.spawn([self.lib] + lib_args)
+ except PackagingExecError as msg:
+ raise LibError(msg)
+ else:
+ logger.debug("skipping %s (up-to-date)", output_filename)
+
+
+ def link(self, target_desc, objects, output_filename, output_dir=None,
+ libraries=None, library_dirs=None, runtime_library_dirs=None,
+ export_symbols=None, debug=False, extra_preargs=None,
+ extra_postargs=None, build_temp=None, target_lang=None):
+ if not self.initialized:
+ self.initialize()
+ objects, output_dir = self._fix_object_args(objects, output_dir)
+ fixed_args = self._fix_lib_args(libraries, library_dirs,
+ runtime_library_dirs)
+ libraries, library_dirs, runtime_library_dirs = fixed_args
+
+ if runtime_library_dirs:
+ self.warn("don't know what to do with 'runtime_library_dirs': "
+ + str(runtime_library_dirs))
+
+ lib_opts = gen_lib_options(self,
+ library_dirs, runtime_library_dirs,
+ libraries)
+ if output_dir is not None:
+ output_filename = os.path.join(output_dir, output_filename)
+
+ if self._need_link(objects, output_filename):
+ if target_desc == CCompiler.EXECUTABLE:
+ if debug:
+ ldflags = self.ldflags_shared_debug[1:]
+ else:
+ ldflags = self.ldflags_shared[1:]
+ else:
+ if debug:
+ ldflags = self.ldflags_shared_debug
+ else:
+ ldflags = self.ldflags_shared
+
+ export_opts = []
+ for sym in (export_symbols or []):
+ export_opts.append("/EXPORT:" + sym)
+
+ ld_args = (ldflags + lib_opts + export_opts +
+ objects + ['/OUT:' + output_filename])
+
+ # The MSVC linker generates .lib and .exp files, which cannot be
+ # suppressed by any linker switches. The .lib files may even be
+ # needed! Make sure they are generated in the temporary build
+ # directory. Since they have different names for debug and release
+ # builds, they can go into the same directory.
+ build_temp = os.path.dirname(objects[0])
+ if export_symbols is not None:
+ dll_name, dll_ext = os.path.splitext(
+ os.path.basename(output_filename))
+ implib_file = os.path.join(
+ build_temp,
+ self.library_filename(dll_name))
+ ld_args.append('/IMPLIB:' + implib_file)
+
+ # Embedded manifests are recommended - see MSDN article titled
+ # "How to: Embed a Manifest Inside a C/C++ Application"
+ # (currently at http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx)
+ # Ask the linker to generate the manifest in the temp dir, so
+ # we can embed it later.
+ temp_manifest = os.path.join(
+ build_temp,
+ os.path.basename(output_filename) + ".manifest")
+ ld_args.append('/MANIFESTFILE:' + temp_manifest)
+
+ if extra_preargs:
+ ld_args[:0] = extra_preargs
+ if extra_postargs:
+ ld_args.extend(extra_postargs)
+
+ self.mkpath(os.path.dirname(output_filename))
+ try:
+ self.spawn([self.linker] + ld_args)
+ except PackagingExecError as msg:
+ raise LinkError(msg)
+
+ # embed the manifest
+ # XXX - this is somewhat fragile - if mt.exe fails, distutils
+ # will still consider the DLL up-to-date, but it will not have a
+ # manifest. Maybe we should link to a temp file? OTOH, that
+ # implies a build environment error that shouldn't go undetected.
+ if target_desc == CCompiler.EXECUTABLE:
+ mfid = 1
+ else:
+ mfid = 2
+ self._remove_visual_c_ref(temp_manifest)
+ out_arg = '-outputresource:%s;%s' % (output_filename, mfid)
+ try:
+ self.spawn(['mt.exe', '-nologo', '-manifest',
+ temp_manifest, out_arg])
+ except PackagingExecError as msg:
+ raise LinkError(msg)
+ else:
+ logger.debug("skipping %s (up-to-date)", output_filename)
+
+ def _remove_visual_c_ref(self, manifest_file):
+ try:
+ # Remove references to the Visual C runtime, so they will
+ # fall through to the Visual C dependency of Python.exe.
+ # This way, when installed for a restricted user (e.g.
+ # runtimes are not in WinSxS folder, but in Python's own
+ # folder), the runtimes do not need to be in every folder
+ # with .pyd's.
+ with open(manifest_file) as manifest_f:
+ manifest_buf = manifest_f.read()
+ pattern = re.compile(
+ r"""<assemblyIdentity.*?name=("|')Microsoft\."""\
+ r"""VC\d{2}\.CRT("|').*?(/>|</assemblyIdentity>)""",
+ re.DOTALL)
+ manifest_buf = re.sub(pattern, "", manifest_buf)
+ pattern = "<dependentAssembly>\s*</dependentAssembly>"
+ manifest_buf = re.sub(pattern, "", manifest_buf)
+ with open(manifest_file, 'w') as manifest_f:
+ manifest_f.write(manifest_buf)
+ except IOError:
+ pass
+
+ # -- Miscellaneous methods -----------------------------------------
+    # These are all used by the 'gen_lib_options()' function in
+    # ccompiler.py.
+
+ def library_dir_option(self, dir):
+ return "/LIBPATH:" + dir
+
+ def runtime_library_dir_option(self, dir):
+ raise PackagingPlatformError(
+ "don't know how to set runtime library search path for MSVC++")
+
+ def library_option(self, lib):
+ return self.library_filename(lib)
+
+
+ def find_library_file(self, dirs, lib, debug=False):
+ # Prefer a debugging library if found (and requested), but deal
+ # with it if we don't have one.
+ if debug:
+ try_names = [lib + "_d", lib]
+ else:
+ try_names = [lib]
+ for dir in dirs:
+ for name in try_names:
+ libfile = os.path.join(dir, self.library_filename(name))
+ if os.path.exists(libfile):
+ return libfile
+        # Oops, didn't find it in *any* of 'dirs'
+        return None
+
+ # Helper methods for using the MSVC registry settings
+
+ def find_exe(self, exe):
+ """Return path to an MSVC executable program.
+
+ Tries to find the program in several places: first, one of the
+ MSVC program search paths from the registry; next, the directories
+ in the PATH environment variable. If any of those work, return an
+ absolute path that is known to exist. If none of them work, just
+ return the original program name, 'exe'.
+ """
+ for p in self.__paths:
+ fn = os.path.join(os.path.abspath(p), exe)
+ if os.path.isfile(fn):
+ return fn
+
+ # didn't find it; try existing path
+ for p in os.environ['Path'].split(';'):
+ fn = os.path.join(os.path.abspath(p),exe)
+ if os.path.isfile(fn):
+ return fn
+
+ return exe
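
As a rough sketch of how the pieces above fit together on a Windows machine
with VS 2008 installed (the file names are hypothetical, and
CCompiler.SHARED_OBJECT is assumed to mirror the distutils constant of the
same name; initialize() raises PackagingPlatformError when vcvarsall.bat
cannot be found):

    from packaging.compiler.msvc9compiler import MSVCCompiler

    cc = MSVCCompiler()
    cc.initialize('win32')   # runs vcvarsall.bat, locates cl.exe and link.exe
    objs = cc.compile(['spam.c'], output_dir='build')
    cc.link(cc.SHARED_OBJECT, objs, 'spam.pyd', output_dir='build')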
diff --git a/Lib/packaging/compiler/msvccompiler.py b/Lib/packaging/compiler/msvccompiler.py
new file mode 100644
index 0000000..39a10b2
--- /dev/null
+++ b/Lib/packaging/compiler/msvccompiler.py
@@ -0,0 +1,635 @@
+"""CCompiler implementation for old Microsoft Visual Studio compilers.
+
+For a compiler compatible with VS 2005 and 2008, use msvc9compiler.
+"""
+
+# Written by Perry Stoll
+# hacked by Robin Becker and Thomas Heller to do a better job of
+# finding DevStudio (through the registry)
+
+
+import sys
+import os
+
+from packaging.errors import (PackagingExecError, PackagingPlatformError,
+ CompileError, LibError, LinkError)
+from packaging.compiler.ccompiler import CCompiler
+from packaging.compiler import gen_lib_options
+from packaging import logger
+
+_can_read_reg = False
+try:
+ import winreg
+
+ _can_read_reg = True
+ hkey_mod = winreg
+
+ RegOpenKeyEx = winreg.OpenKeyEx
+ RegEnumKey = winreg.EnumKey
+ RegEnumValue = winreg.EnumValue
+ RegError = winreg.error
+
+except ImportError:
+ try:
+ import win32api
+ import win32con
+ _can_read_reg = True
+ hkey_mod = win32con
+
+ RegOpenKeyEx = win32api.RegOpenKeyEx
+ RegEnumKey = win32api.RegEnumKey
+ RegEnumValue = win32api.RegEnumValue
+ RegError = win32api.error
+
+ except ImportError:
+ logger.warning(
+ "can't read registry to find the necessary compiler setting;\n"
+ "make sure that Python modules _winreg, win32api or win32con "
+ "are installed.")
+
+if _can_read_reg:
+ HKEYS = (hkey_mod.HKEY_USERS,
+ hkey_mod.HKEY_CURRENT_USER,
+ hkey_mod.HKEY_LOCAL_MACHINE,
+ hkey_mod.HKEY_CLASSES_ROOT)
+
+
+def read_keys(base, key):
+ """Return list of registry keys."""
+
+ try:
+ handle = RegOpenKeyEx(base, key)
+ except RegError:
+ return None
+ L = []
+ i = 0
+ while True:
+ try:
+ k = RegEnumKey(handle, i)
+ except RegError:
+ break
+ L.append(k)
+ i = i + 1
+ return L
+
+
+def read_values(base, key):
+ """Return dict of registry keys and values.
+
+ All names are converted to lowercase.
+ """
+ try:
+ handle = RegOpenKeyEx(base, key)
+ except RegError:
+ return None
+ d = {}
+ i = 0
+ while True:
+ try:
+ name, value, type = RegEnumValue(handle, i)
+ except RegError:
+ break
+ name = name.lower()
+ d[convert_mbcs(name)] = convert_mbcs(value)
+ i = i + 1
+ return d
+
+
+def convert_mbcs(s):
+ enc = getattr(s, "encode", None)
+ if enc is not None:
+ try:
+ s = enc("mbcs")
+ except UnicodeError:
+ pass
+ return s
+
+
+class MacroExpander:
+
+ def __init__(self, version):
+ self.macros = {}
+ self.load_macros(version)
+
+ def set_macro(self, macro, path, key):
+ for base in HKEYS:
+ d = read_values(base, path)
+ if d:
+ self.macros["$(%s)" % macro] = d[key]
+ break
+
+ def load_macros(self, version):
+ vsbase = r"Software\Microsoft\VisualStudio\%0.1f" % version
+ self.set_macro("VCInstallDir", vsbase + r"\Setup\VC", "productdir")
+ self.set_macro("VSInstallDir", vsbase + r"\Setup\VS", "productdir")
+ net = r"Software\Microsoft\.NETFramework"
+ self.set_macro("FrameworkDir", net, "installroot")
+ try:
+ if version > 7.0:
+ self.set_macro("FrameworkSDKDir", net, "sdkinstallrootv1.1")
+ else:
+ self.set_macro("FrameworkSDKDir", net, "sdkinstallroot")
+ except KeyError:
+ raise PackagingPlatformError(
+"""Python was built with Visual Studio 2003; extensions must be built with
+a compiler than can generate compatible binaries. Visual Studio 2003 was
+not found on this system. If you have Cygwin installed, you can try
+compiling with MingW32, by passing "-c mingw32" to pysetup.""")
+
+ p = r"Software\Microsoft\NET Framework Setup\Product"
+ for base in HKEYS:
+ try:
+ h = RegOpenKeyEx(base, p)
+ except RegError:
+ continue
+ key = RegEnumKey(h, 0)
+ d = read_values(base, r"%s\%s" % (p, key))
+ self.macros["$(FrameworkVersion)"] = d["version"]
+
+ def sub(self, s):
+ for k, v in self.macros.items():
+ s = s.replace(k, v)
+ return s
+
+
+def get_build_version():
+ """Return the version of MSVC that was used to build Python.
+
+ For Python 2.3 and up, the version number is included in
+ sys.version. For earlier versions, assume the compiler is MSVC 6.
+ """
+
+ prefix = "MSC v."
+ i = sys.version.find(prefix)
+ if i == -1:
+ return 6
+ i = i + len(prefix)
+ s, rest = sys.version[i:].split(" ", 1)
+ majorVersion = int(s[:-2]) - 6
+ minorVersion = int(s[2:3]) / 10.0
+ # I don't think paths are affected by minor version in version 6
+ if majorVersion == 6:
+ minorVersion = 0
+ if majorVersion >= 6:
+ return majorVersion + minorVersion
+ # else we don't know what version of the compiler this is
+ return None
+
+
+def get_build_architecture():
+ """Return the processor architecture.
+
+ Possible results are "Intel", "Itanium", or "AMD64".
+ """
+
+ prefix = " bit ("
+ i = sys.version.find(prefix)
+ if i == -1:
+ return "Intel"
+ j = sys.version.find(")", i)
+ return sys.version[i+len(prefix):j]
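+
+# Example (illustrative): a sys.version ending in "[MSC v.1500 64 bit (AMD64)]"
+# yields "AMD64"; a version string without " bit (" (pre-2.3 builds) falls
+# back to "Intel".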
+
+
+def normalize_and_reduce_paths(paths):
+ """Return a list of normalized paths with duplicates removed.
+
+ The current order of paths is maintained.
+ """
+ # Paths are normalized so things like: /a and /a/ aren't both preserved.
+ reduced_paths = []
+ for p in paths:
+ np = os.path.normpath(p)
+ # XXX(nnorwitz): O(n**2), if reduced_paths gets long perhaps use a set.
+ if np not in reduced_paths:
+ reduced_paths.append(np)
+ return reduced_paths
+
+
+class MSVCCompiler(CCompiler):
+ """Concrete class that implements an interface to Microsoft Visual C++,
+ as defined by the CCompiler abstract class."""
+
+ name = 'msvc'
+ description = "Microsoft Visual C++"
+
+ # Just set this so CCompiler's constructor doesn't barf. We currently
+ # don't use the 'set_executables()' bureaucracy provided by CCompiler,
+ # as it really isn't necessary for this sort of single-compiler class.
+ # Would be nice to have a consistent interface with UnixCCompiler,
+ # though, so it's worth thinking about.
+ executables = {}
+
+ # Private class data (need to distinguish C from C++ source for compiler)
+ _c_extensions = ['.c']
+ _cpp_extensions = ['.cc', '.cpp', '.cxx']
+ _rc_extensions = ['.rc']
+ _mc_extensions = ['.mc']
+
+ # Needed for the filename generation methods provided by the
+ # base class, CCompiler.
+ src_extensions = (_c_extensions + _cpp_extensions +
+ _rc_extensions + _mc_extensions)
+ res_extension = '.res'
+ obj_extension = '.obj'
+ static_lib_extension = '.lib'
+ shared_lib_extension = '.dll'
+ static_lib_format = shared_lib_format = '%s%s'
+ exe_extension = '.exe'
+
+ def __init__(self, dry_run=False, force=False):
+ super(MSVCCompiler, self).__init__(dry_run, force)
+ self.__version = get_build_version()
+ self.__arch = get_build_architecture()
+ if self.__arch == "Intel":
+ # x86
+ if self.__version >= 7:
+ self.__root = r"Software\Microsoft\VisualStudio"
+ self.__macros = MacroExpander(self.__version)
+ else:
+ self.__root = r"Software\Microsoft\Devstudio"
+ self.__product = "Visual Studio version %s" % self.__version
+ else:
+ # Win64. Assume this was built with the platform SDK
+ self.__product = "Microsoft SDK compiler %s" % (self.__version + 6)
+
+ self.initialized = False
+
+ def initialize(self):
+ self.__paths = []
+ if ("DISTUTILS_USE_SDK" in os.environ and "MSSdk" in os.environ and
+ self.find_exe("cl.exe")):
+ # Assume that the SDK set up everything alright; don't try to be
+ # smarter
+ self.cc = "cl.exe"
+ self.linker = "link.exe"
+ self.lib = "lib.exe"
+ self.rc = "rc.exe"
+ self.mc = "mc.exe"
+ else:
+ self.__paths = self.get_msvc_paths("path")
+
+ if len(self.__paths) == 0:
+ raise PackagingPlatformError("Python was built with %s "
+ "and extensions need to be built with the same "
+ "version of the compiler, but it isn't installed." %
+ self.__product)
+
+ self.cc = self.find_exe("cl.exe")
+ self.linker = self.find_exe("link.exe")
+ self.lib = self.find_exe("lib.exe")
+ self.rc = self.find_exe("rc.exe") # resource compiler
+ self.mc = self.find_exe("mc.exe") # message compiler
+ self.set_path_env_var('lib')
+ self.set_path_env_var('include')
+
+ # extend the MSVC path with the current path
+ try:
+ for p in os.environ['path'].split(';'):
+ self.__paths.append(p)
+ except KeyError:
+ pass
+ self.__paths = normalize_and_reduce_paths(self.__paths)
+ os.environ['path'] = ';'.join(self.__paths)
+
+ self.preprocess_options = None
+ if self.__arch == "Intel":
+ self.compile_options = ['/nologo', '/Ox', '/MD', '/W3', '/GX',
+ '/DNDEBUG']
+ self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GX',
+ '/Z7', '/D_DEBUG']
+ else:
+ # Win64
+ self.compile_options = ['/nologo', '/Ox', '/MD', '/W3', '/GS-',
+ '/DNDEBUG']
+ self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GS-',
+ '/Z7', '/D_DEBUG']
+
+ self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO']
+ if self.__version >= 7:
+ self.ldflags_shared_debug = [
+ '/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG'
+ ]
+ else:
+ self.ldflags_shared_debug = [
+ '/DLL', '/nologo', '/INCREMENTAL:no', '/pdb:None', '/DEBUG'
+ ]
+ self.ldflags_static = [ '/nologo']
+
+ self.initialized = True
+
+ # -- Worker methods ------------------------------------------------
+
+ def object_filenames(self, source_filenames, strip_dir=False, output_dir=''):
+ # Copied from ccompiler.py, extended to return .res as 'object'-file
+ # for .rc input file
+ if output_dir is None:
+ output_dir = ''
+ obj_names = []
+ for src_name in source_filenames:
+ base, ext = os.path.splitext(src_name)
+ base = os.path.splitdrive(base)[1] # Chop off the drive
+ base = base[os.path.isabs(base):] # If abs, chop off leading /
+ if ext not in self.src_extensions:
+ # Better to raise an exception instead of silently continuing
+ # and later complain about sources and targets having
+ # different lengths
+ raise CompileError("Don't know how to compile %s" % src_name)
+ if strip_dir:
+ base = os.path.basename(base)
+ if ext in self._rc_extensions:
+ obj_names.append(os.path.join(output_dir,
+ base + self.res_extension))
+ elif ext in self._mc_extensions:
+ obj_names.append(os.path.join(output_dir,
+ base + self.res_extension))
+ else:
+ obj_names.append(os.path.join(output_dir,
+ base + self.obj_extension))
+ return obj_names
+
+ def compile(self, sources,
+ output_dir=None, macros=None, include_dirs=None, debug=False,
+ extra_preargs=None, extra_postargs=None, depends=None):
+
+ if not self.initialized:
+ self.initialize()
+ macros, objects, extra_postargs, pp_opts, build = \
+ self._setup_compile(output_dir, macros, include_dirs, sources,
+ depends, extra_postargs)
+
+ compile_opts = extra_preargs or []
+ compile_opts.append('/c')
+ if debug:
+ compile_opts.extend(self.compile_options_debug)
+ else:
+ compile_opts.extend(self.compile_options)
+
+ for obj in objects:
+ try:
+ src, ext = build[obj]
+ except KeyError:
+ continue
+ if debug:
+ # pass the full pathname to MSVC in debug mode,
+ # this allows the debugger to find the source file
+ # without asking the user to browse for it
+ src = os.path.abspath(src)
+
+ if ext in self._c_extensions:
+ input_opt = "/Tc" + src
+ elif ext in self._cpp_extensions:
+ input_opt = "/Tp" + src
+ elif ext in self._rc_extensions:
+ # compile .RC to .RES file
+ input_opt = src
+ output_opt = "/fo" + obj
+ try:
+ self.spawn([self.rc] + pp_opts +
+ [output_opt] + [input_opt])
+ except PackagingExecError as msg:
+ raise CompileError(msg)
+ continue
+ elif ext in self._mc_extensions:
+
+ # Compile .MC to .RC file to .RES file.
+ # * '-h dir' specifies the directory for the
+ # generated include file
+ # * '-r dir' specifies the target directory of the
+ # generated RC file and the binary message resource
+ # it includes
+ #
+ # For now (since there are no options to change this),
+ # we use the source-directory for the include file and
+ # the build directory for the RC file and message
+ # resources. This works at least for win32all.
+
+ h_dir = os.path.dirname(src)
+ rc_dir = os.path.dirname(obj)
+ try:
+ # first compile .MC to .RC and .H file
+ self.spawn([self.mc] +
+ ['-h', h_dir, '-r', rc_dir] + [src])
+ base, _ = os.path.splitext(os.path.basename(src))
+ rc_file = os.path.join(rc_dir, base + '.rc')
+ # then compile .RC to .RES file
+ self.spawn([self.rc] +
+ ["/fo" + obj] + [rc_file])
+
+ except PackagingExecError as msg:
+ raise CompileError(msg)
+ continue
+ else:
+ # how to handle this file?
+ raise CompileError(
+ "Don't know how to compile %s to %s" %
+ (src, obj))
+
+ output_opt = "/Fo" + obj
+ try:
+ self.spawn([self.cc] + compile_opts + pp_opts +
+ [input_opt, output_opt] +
+ extra_postargs)
+ except PackagingExecError as msg:
+ raise CompileError(msg)
+
+ return objects
+
+ def create_static_lib(self, objects, output_libname, output_dir=None,
+ debug=False, target_lang=None):
+ if not self.initialized:
+ self.initialize()
+ objects, output_dir = self._fix_object_args(objects, output_dir)
+ output_filename = \
+ self.library_filename(output_libname, output_dir=output_dir)
+
+ if self._need_link(objects, output_filename):
+ lib_args = objects + ['/OUT:' + output_filename]
+ if debug:
+ pass # XXX what goes here?
+ try:
+ self.spawn([self.lib] + lib_args)
+ except PackagingExecError as msg:
+ raise LibError(msg)
+
+ else:
+ logger.debug("skipping %s (up-to-date)", output_filename)
+
+ def link(self, target_desc, objects, output_filename, output_dir=None,
+ libraries=None, library_dirs=None, runtime_library_dirs=None,
+ export_symbols=None, debug=False, extra_preargs=None,
+ extra_postargs=None, build_temp=None, target_lang=None):
+
+ if not self.initialized:
+ self.initialize()
+ objects, output_dir = self._fix_object_args(objects, output_dir)
+ libraries, library_dirs, runtime_library_dirs = \
+ self._fix_lib_args(libraries, library_dirs, runtime_library_dirs)
+
+ if runtime_library_dirs:
+ self.warn("don't know what to do with 'runtime_library_dirs': %s"
+ % (runtime_library_dirs,))
+
+ lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs,
+ libraries)
+ if output_dir is not None:
+ output_filename = os.path.join(output_dir, output_filename)
+
+ if self._need_link(objects, output_filename):
+
+ if target_desc == CCompiler.EXECUTABLE:
+ if debug:
+ ldflags = self.ldflags_shared_debug[1:]
+ else:
+ ldflags = self.ldflags_shared[1:]
+ else:
+ if debug:
+ ldflags = self.ldflags_shared_debug
+ else:
+ ldflags = self.ldflags_shared
+
+ export_opts = []
+ for sym in (export_symbols or []):
+ export_opts.append("/EXPORT:" + sym)
+
+ ld_args = (ldflags + lib_opts + export_opts +
+ objects + ['/OUT:' + output_filename])
+
+ # The MSVC linker generates .lib and .exp files, which cannot be
+ # suppressed by any linker switches. The .lib files may even be
+ # needed! Make sure they are generated in the temporary build
+ # directory. Since they have different names for debug and release
+ # builds, they can go into the same directory.
+ if export_symbols is not None:
+ dll_name, dll_ext = os.path.splitext(
+ os.path.basename(output_filename))
+ implib_file = os.path.join(
+ os.path.dirname(objects[0]),
+ self.library_filename(dll_name))
+ ld_args.append('/IMPLIB:' + implib_file)
+
+ if extra_preargs:
+ ld_args[:0] = extra_preargs
+ if extra_postargs:
+ ld_args.extend(extra_postargs)
+
+ self.mkpath(os.path.dirname(output_filename))
+ try:
+ self.spawn([self.linker] + ld_args)
+ except PackagingExecError as msg:
+ raise LinkError(msg)
+
+ else:
+ logger.debug("skipping %s (up-to-date)", output_filename)
+
+ # -- Miscellaneous methods -----------------------------------------
+    # These are all used by the 'gen_lib_options()' function in
+    # ccompiler.py.
+
+ def library_dir_option(self, dir):
+ return "/LIBPATH:" + dir
+
+ def runtime_library_dir_option(self, dir):
+ raise PackagingPlatformError("don't know how to set runtime library search path for MSVC++")
+
+ def library_option(self, lib):
+ return self.library_filename(lib)
+
+ def find_library_file(self, dirs, lib, debug=False):
+ # Prefer a debugging library if found (and requested), but deal
+ # with it if we don't have one.
+ if debug:
+ try_names = [lib + "_d", lib]
+ else:
+ try_names = [lib]
+ for dir in dirs:
+ for name in try_names:
+ libfile = os.path.join(dir, self.library_filename(name))
+ if os.path.exists(libfile):
+ return libfile
+        # Oops, didn't find it in *any* of 'dirs'
+        return None
+
+ # Helper methods for using the MSVC registry settings
+
+ def find_exe(self, exe):
+ """Return path to an MSVC executable program.
+
+ Tries to find the program in several places: first, one of the
+ MSVC program search paths from the registry; next, the directories
+ in the PATH environment variable. If any of those work, return an
+ absolute path that is known to exist. If none of them work, just
+ return the original program name, 'exe'.
+ """
+
+ for p in self.__paths:
+ fn = os.path.join(os.path.abspath(p), exe)
+ if os.path.isfile(fn):
+ return fn
+
+ # didn't find it; try existing path
+ for p in os.environ['Path'].split(';'):
+ fn = os.path.join(os.path.abspath(p), exe)
+ if os.path.isfile(fn):
+ return fn
+
+ return exe
+
+ def get_msvc_paths(self, path, platform='x86'):
+ """Get a list of devstudio directories (include, lib or path).
+
+ Return a list of strings. The list will be empty if unable to
+ access the registry or appropriate registry keys not found.
+ """
+
+ if not _can_read_reg:
+ return []
+
+ path = path + " dirs"
+ if self.__version >= 7:
+ key = (r"%s\%0.1f\VC\VC_OBJECTS_PLATFORM_INFO\Win32\Directories"
+ % (self.__root, self.__version))
+ else:
+ key = (r"%s\6.0\Build System\Components\Platforms"
+ r"\Win32 (%s)\Directories" % (self.__root, platform))
+
+ for base in HKEYS:
+ d = read_values(base, key)
+ if d:
+ if self.__version >= 7:
+ return self.__macros.sub(d[path]).split(";")
+ else:
+ return d[path].split(";")
+ # MSVC 6 seems to create the registry entries we need only when
+ # the GUI is run.
+ if self.__version == 6:
+ for base in HKEYS:
+ if read_values(base, r"%s\6.0" % self.__root) is not None:
+ self.warn("It seems you have Visual Studio 6 installed, "
+ "but the expected registry settings are not present.\n"
+ "You must at least run the Visual Studio GUI once "
+ "so that these entries are created.")
+ break
+ return []
+
+ def set_path_env_var(self, name):
+ """Set environment variable 'name' to an MSVC path type value.
+
+ This is equivalent to a SET command prior to execution of spawned
+ commands.
+ """
+
+ if name == "lib":
+ p = self.get_msvc_paths("library")
+ else:
+ p = self.get_msvc_paths(name)
+ if p:
+ os.environ[name] = ';'.join(p)
+
+
+if get_build_version() >= 8.0:
+ logger.debug("importing new compiler from distutils.msvc9compiler")
+ OldMSVCCompiler = MSVCCompiler
+ from packaging.compiler.msvc9compiler import MSVCCompiler
+ # get_build_architecture not really relevant now we support cross-compile
+ from packaging.compiler.msvc9compiler import MacroExpander
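
So on a Python built with VS 2005/2008, importing this legacy module
transparently hands back the newer implementation:

    from packaging.compiler.msvccompiler import MSVCCompiler
    # MSVCCompiler is now msvc9compiler.MSVCCompiler; the class defined in
    # this file remains reachable as OldMSVCCompiler.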
diff --git a/Lib/packaging/compiler/unixccompiler.py b/Lib/packaging/compiler/unixccompiler.py
new file mode 100644
index 0000000..3458faa
--- /dev/null
+++ b/Lib/packaging/compiler/unixccompiler.py
@@ -0,0 +1,339 @@
+"""CCompiler implementation for Unix compilers.
+
+This module contains the UnixCCompiler class, a subclass of CCompiler
+that handles the "typical" Unix-style command-line C compiler:
+ * macros defined with -Dname[=value]
+ * macros undefined with -Uname
+ * include search directories specified with -Idir
+  * libraries specified with -llib
+ * library search directories specified with -Ldir
+ * compile handled by 'cc' (or similar) executable with -c option:
+ compiles .c to .o
+ * link static library handled by 'ar' command (possibly with 'ranlib')
+ * link shared library handled by 'cc -shared'
+"""
+
+import os
+import sys
+
+from packaging.util import newer
+from packaging.compiler.ccompiler import CCompiler
+from packaging.compiler import gen_preprocess_options, gen_lib_options
+from packaging.errors import (PackagingExecError, CompileError,
+ LibError, LinkError)
+from packaging import logger
+import sysconfig
+
+
+# XXX Things not currently handled:
+# * optimization/debug/warning flags; we just use whatever's in Python's
+# Makefile and live with it. Is this adequate? If not, we might
+# have to have a bunch of subclasses GNUCCompiler, SGICCompiler,
+# SunCCompiler, and I suspect down that road lies madness.
+# * even if we don't know a warning flag from an optimization flag,
+# we need some way for outsiders to feed preprocessor/compiler/linker
+# flags in to us -- eg. a sysadmin might want to mandate certain flags
+# via a site config file, or a user might want to set something for
+# compiling this module distribution only via the pysetup command
+# line, whatever. As long as these options come from something on the
+# current system, they can be as system-dependent as they like, and we
+# should just happily stuff them into the preprocessor/compiler/linker
+# options and carry on.
+
+def _darwin_compiler_fixup(compiler_so, cc_args):
+ """
+    This function will strip '-isysroot PATH' and '-arch ARCH' from the
+    compile flags if the user has specified one of them in extra_compile_flags.
+
+ This is needed because '-arch ARCH' adds another architecture to the
+ build, without a way to remove an architecture. Furthermore GCC will
+ barf if multiple '-isysroot' arguments are present.
+ """
+ stripArch = stripSysroot = False
+
+ compiler_so = list(compiler_so)
+ kernel_version = os.uname()[2] # 8.4.3
+ major_version = int(kernel_version.split('.')[0])
+
+ if major_version < 8:
+ # OSX before 10.4.0, these don't support -arch and -isysroot at
+ # all.
+ stripArch = stripSysroot = True
+ else:
+ stripArch = '-arch' in cc_args
+ stripSysroot = '-isysroot' in cc_args
+
+ if stripArch or 'ARCHFLAGS' in os.environ:
+ while True:
+ try:
+ index = compiler_so.index('-arch')
+ # Strip this argument and the next one:
+ del compiler_so[index:index+2]
+ except ValueError:
+ break
+
+ if 'ARCHFLAGS' in os.environ and not stripArch:
+ # User specified different -arch flags in the environ,
+ # see also the sysconfig
+ compiler_so = compiler_so + os.environ['ARCHFLAGS'].split()
+
+ if stripSysroot:
+ try:
+ index = compiler_so.index('-isysroot')
+ # Strip this argument and the next one:
+ del compiler_so[index:index+2]
+ except ValueError:
+ pass
+
+ # Check if the SDK that is used during compilation actually exists,
+ # the universal build requires the usage of a universal SDK and not all
+ # users have that installed by default.
+ sysroot = None
+ if '-isysroot' in cc_args:
+ idx = cc_args.index('-isysroot')
+ sysroot = cc_args[idx+1]
+ elif '-isysroot' in compiler_so:
+ idx = compiler_so.index('-isysroot')
+ sysroot = compiler_so[idx+1]
+
+ if sysroot and not os.path.isdir(sysroot):
+ logger.warning(
+ "compiling with an SDK that doesn't seem to exist: %r;\n"
+ "please check your Xcode installation", sysroot)
+
+ return compiler_so
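+
+# Example (illustrative): if cc_args contains '-arch x86_64', any existing
+# '-arch XXX' pairs are stripped from compiler_so so the caller's flags win;
+# a '-isysroot' pointing at a missing SDK only triggers a warning.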
+
+class UnixCCompiler(CCompiler):
+
+ name = 'unix'
+ description = 'Standard UNIX-style compiler'
+
+ # These are used by CCompiler in two places: the constructor sets
+ # instance attributes 'preprocessor', 'compiler', etc. from them, and
+ # 'set_executable()' allows any of these to be set. The defaults here
+ # are pretty generic; they will probably have to be set by an outsider
+ # (eg. using information discovered by the sysconfig about building
+ # Python extensions).
+ executables = {'preprocessor' : None,
+ 'compiler' : ["cc"],
+ 'compiler_so' : ["cc"],
+ 'compiler_cxx' : ["cc"],
+ 'linker_so' : ["cc", "-shared"],
+ 'linker_exe' : ["cc"],
+ 'archiver' : ["ar", "-cr"],
+ 'ranlib' : None,
+ }
+
+ if sys.platform[:6] == "darwin":
+ executables['ranlib'] = ["ranlib"]
+
+ # Needed for the filename generation methods provided by the base
+ # class, CCompiler. XXX whoever instantiates/uses a particular
+ # UnixCCompiler instance should set 'shared_lib_ext' -- we set a
+ # reasonable common default here, but it's not necessarily used on all
+ # Unices!
+
+ src_extensions = [".c",".C",".cc",".cxx",".cpp",".m"]
+ obj_extension = ".o"
+ static_lib_extension = ".a"
+ shared_lib_extension = ".so"
+ dylib_lib_extension = ".dylib"
+ static_lib_format = shared_lib_format = dylib_lib_format = "lib%s%s"
+ if sys.platform == "cygwin":
+ exe_extension = ".exe"
+
+ def preprocess(self, source,
+ output_file=None, macros=None, include_dirs=None,
+ extra_preargs=None, extra_postargs=None):
+ ignore, macros, include_dirs = \
+ self._fix_compile_args(None, macros, include_dirs)
+ pp_opts = gen_preprocess_options(macros, include_dirs)
+ pp_args = self.preprocessor + pp_opts
+ if output_file:
+ pp_args.extend(('-o', output_file))
+ if extra_preargs:
+ pp_args[:0] = extra_preargs
+ if extra_postargs:
+ pp_args.extend(extra_postargs)
+ pp_args.append(source)
+
+ # We need to preprocess: either we're being forced to, or we're
+ # generating output to stdout, or there's a target output file and
+ # the source file is newer than the target (or the target doesn't
+ # exist).
+ if self.force or output_file is None or newer(source, output_file):
+ if output_file:
+ self.mkpath(os.path.dirname(output_file))
+ try:
+ self.spawn(pp_args)
+ except PackagingExecError as msg:
+ raise CompileError(msg)
+
+ def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
+ compiler_so = self.compiler_so
+ if sys.platform == 'darwin':
+ compiler_so = _darwin_compiler_fixup(compiler_so, cc_args + extra_postargs)
+ try:
+ self.spawn(compiler_so + cc_args + [src, '-o', obj] +
+ extra_postargs)
+ except PackagingExecError as msg:
+ raise CompileError(msg)
+
+ def create_static_lib(self, objects, output_libname,
+ output_dir=None, debug=False, target_lang=None):
+ objects, output_dir = self._fix_object_args(objects, output_dir)
+
+ output_filename = \
+ self.library_filename(output_libname, output_dir=output_dir)
+
+ if self._need_link(objects, output_filename):
+ self.mkpath(os.path.dirname(output_filename))
+ self.spawn(self.archiver +
+ [output_filename] +
+ objects + self.objects)
+
+            # Not many Unices require ranlib anymore -- SunOS 4.x is, I
+            # think, the only major Unix that does. Maybe we need some
+ # platform intelligence here to skip ranlib if it's not
+ # needed -- or maybe Python's configure script took care of
+ # it for us, hence the check for leading colon.
+ if self.ranlib:
+ try:
+ self.spawn(self.ranlib + [output_filename])
+ except PackagingExecError as msg:
+ raise LibError(msg)
+ else:
+ logger.debug("skipping %s (up-to-date)", output_filename)
+
+ def link(self, target_desc, objects,
+ output_filename, output_dir=None, libraries=None,
+ library_dirs=None, runtime_library_dirs=None,
+ export_symbols=None, debug=False, extra_preargs=None,
+ extra_postargs=None, build_temp=None, target_lang=None):
+ objects, output_dir = self._fix_object_args(objects, output_dir)
+ libraries, library_dirs, runtime_library_dirs = \
+ self._fix_lib_args(libraries, library_dirs, runtime_library_dirs)
+
+ lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs,
+ libraries)
+        if not isinstance(output_dir, (str, type(None))):
+ raise TypeError("'output_dir' must be a string or None")
+ if output_dir is not None:
+ output_filename = os.path.join(output_dir, output_filename)
+
+ if self._need_link(objects, output_filename):
+ ld_args = (objects + self.objects +
+ lib_opts + ['-o', output_filename])
+ if debug:
+ ld_args[:0] = ['-g']
+ if extra_preargs:
+ ld_args[:0] = extra_preargs
+ if extra_postargs:
+ ld_args.extend(extra_postargs)
+ self.mkpath(os.path.dirname(output_filename))
+ try:
+ if target_desc == CCompiler.EXECUTABLE:
+ linker = self.linker_exe[:]
+ else:
+ linker = self.linker_so[:]
+ if target_lang == "c++" and self.compiler_cxx:
+ # skip over environment variable settings if /usr/bin/env
+ # is used to set up the linker's environment.
+ # This is needed on OSX. Note: this assumes that the
+ # normal and C++ compiler have the same environment
+ # settings.
+ i = 0
+ if os.path.basename(linker[0]) == "env":
+ i = 1
+ while '=' in linker[i]:
+ i = i + 1
+
+ linker[i] = self.compiler_cxx[i]
+
+ if sys.platform == 'darwin':
+ linker = _darwin_compiler_fixup(linker, ld_args)
+
+ self.spawn(linker + ld_args)
+ except PackagingExecError as msg:
+ raise LinkError(msg)
+ else:
+ logger.debug("skipping %s (up-to-date)", output_filename)
+
+ # -- Miscellaneous methods -----------------------------------------
+    # These are all used by the 'gen_lib_options()' function in
+    # ccompiler.py.
+
+ def library_dir_option(self, dir):
+ return "-L" + dir
+
+ def _is_gcc(self, compiler_name):
+ return "gcc" in compiler_name or "g++" in compiler_name
+
+ def runtime_library_dir_option(self, dir):
+ # XXX Hackish, at the very least. See Python bug #445902:
+ # http://sourceforge.net/tracker/index.php
+ # ?func=detail&aid=445902&group_id=5470&atid=105470
+ # Linkers on different platforms need different options to
+ # specify that directories need to be added to the list of
+ # directories searched for dependencies when a dynamic library
+ # is sought. GCC on GNU systems (Linux, FreeBSD, ...) has to
+ # be told to pass the -R option through to the linker, whereas
+ # other compilers and gcc on other systems just know this.
+ # Other compilers may need something slightly different. At
+ # this time, there's no way to determine this information from
+ # the configuration data stored in the Python installation, so
+ # we use this hack.
+
+ compiler = os.path.basename(sysconfig.get_config_var("CC"))
+ if sys.platform[:6] == "darwin":
+ # MacOSX's linker doesn't understand the -R flag at all
+ return "-L" + dir
+ elif sys.platform[:5] == "hp-ux":
+ if self._is_gcc(compiler):
+ return ["-Wl,+s", "-L" + dir]
+ return ["+s", "-L" + dir]
+ elif sys.platform[:7] == "irix646" or sys.platform[:6] == "osf1V5":
+ return ["-rpath", dir]
+ elif self._is_gcc(compiler):
+ # gcc on non-GNU systems does not need -Wl, but can
+ # use it anyway. Since distutils has always passed in
+ # -Wl whenever gcc was used in the past it is probably
+ # safest to keep doing so.
+ if sysconfig.get_config_var("GNULD") == "yes":
+ # GNU ld needs an extra option to get a RUNPATH
+ # instead of just an RPATH.
+ return "-Wl,--enable-new-dtags,-R" + dir
+ else:
+ return "-Wl,-R" + dir
+ elif sys.platform[:3] == "aix":
+ return "-blibpath:" + dir
+ else:
+ # No idea how --enable-new-dtags would be passed on to
+ # ld if this system was using GNU ld. Don't know if a
+ # system like this even exists.
+ return "-R" + dir
+
+ def library_option(self, lib):
+ return "-l" + lib
+
+ def find_library_file(self, dirs, lib, debug=False):
+ shared_f = self.library_filename(lib, lib_type='shared')
+ dylib_f = self.library_filename(lib, lib_type='dylib')
+ static_f = self.library_filename(lib, lib_type='static')
+
+ for dir in dirs:
+ shared = os.path.join(dir, shared_f)
+ dylib = os.path.join(dir, dylib_f)
+ static = os.path.join(dir, static_f)
+ # We're second-guessing the linker here, with not much hard
+ # data to go on: GCC seems to prefer the shared library, so I'm
+ # assuming that *all* Unix C compilers do. And of course I'm
+ # ignoring even GCC's "-static" option. So sue me.
+ if os.path.exists(dylib):
+ return dylib
+ elif os.path.exists(shared):
+ return shared
+ elif os.path.exists(static):
+ return static
+
+ # Oops, didn't find it in *any* of 'dirs'
+ return None
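
A short usage sketch for the Unix compiler (file names are hypothetical; a
real build would first seed the generic 'cc' defaults from sysconfig via
set_executables(), and SHARED_OBJECT is assumed to mirror the distutils
CCompiler constant):

    from packaging.compiler.unixccompiler import UnixCCompiler

    cc = UnixCCompiler()
    objs = cc.compile(['spam.c'], output_dir='build',
                      macros=[('NDEBUG', '1')], include_dirs=['include'])
    cc.link(cc.SHARED_OBJECT, objs, 'spam.so', output_dir='build',
            libraries=['m'])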
diff --git a/Lib/packaging/config.py b/Lib/packaging/config.py
new file mode 100644
index 0000000..ab026a8
--- /dev/null
+++ b/Lib/packaging/config.py
@@ -0,0 +1,391 @@
+"""Utilities to find and read config files used by packaging."""
+
+import os
+import sys
+import logging
+
+from shlex import split
+from configparser import RawConfigParser
+from packaging import logger
+from packaging.errors import PackagingOptionError
+from packaging.compiler.extension import Extension
+from packaging.util import (check_environ, iglob, resolve_name, strtobool,
+ split_multiline)
+from packaging.compiler import set_compiler
+from packaging.command import set_command
+from packaging.markers import interpret
+
+
+def _check_name(name, packages):
+ if '.' not in name:
+ return
+ parts = name.split('.')
+ parent = '.'.join(parts[:-1])
+ if parent not in packages:
+ # we could log a warning instead of raising, but what's the use
+ # of letting people build modules they can't import?
+ raise PackagingOptionError(
+ 'parent package for extension %r not found' % name)
+
+
+def _pop_values(values_dct, key):
+ """Remove values from the dictionary and convert them as a list"""
+ vals_str = values_dct.pop(key, '')
+ if not vals_str:
+ return
+ fields = []
+ # the line separator is \n for setup.cfg files
+ for field in vals_str.split('\n'):
+ tmp_vals = field.split('--')
+ if len(tmp_vals) == 2 and not interpret(tmp_vals[1]):
+ continue
+ fields.append(tmp_vals[0])
+    # split shell-style values, e.g. `gcc -print-file-name=libgcc.a`
+ vals = split(' '.join(fields))
+ if vals:
+ return vals
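+
+# Example (illustrative): the setup.cfg value
+#     -DFOO
+#     -DWIN32 -- sys.platform == 'win32'
+# keeps '-DFOO' everywhere but '-DWIN32' only where the environment marker
+# after '--' evaluates to true.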
+
+
+def _rel_path(base, path):
+ # normalizes and returns a lstripped-/-separated path
+ base = base.replace(os.path.sep, '/')
+ path = path.replace(os.path.sep, '/')
+ assert path.startswith(base)
+ return path[len(base):].lstrip('/')
+
+
+def get_resources_dests(resources_root, rules):
+ """Find destinations for resources files"""
+ destinations = {}
+ for base, suffix, dest in rules:
+ prefix = os.path.join(resources_root, base)
+ for abs_base in iglob(prefix):
+ abs_glob = os.path.join(abs_base, suffix)
+ for abs_path in iglob(abs_glob):
+ resource_file = _rel_path(resources_root, abs_path)
+ if dest is None: # remove the entry if it was here
+ destinations.pop(resource_file, None)
+ else:
+ rel_path = _rel_path(abs_base, abs_path)
+ rel_dest = dest.replace(os.path.sep, '/').rstrip('/')
+ destinations[resource_file] = rel_dest + '/' + rel_path
+ return destinations
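+
+# Example (illustrative): the rule ('doc', '*.txt', '{doc}') maps a file
+# doc/readme.txt under resources_root to destination '{doc}/readme.txt';
+# a rule whose dest is None removes already-matched files from the mapping.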
+
+
+class Config:
+ """Class used to work with configuration files"""
+ def __init__(self, dist):
+ self.dist = dist
+ self.setup_hooks = []
+
+ def run_hooks(self, config):
+ """Run setup hooks in the order defined in the spec."""
+ for hook in self.setup_hooks:
+ hook(config)
+
+ def find_config_files(self):
+ """Find as many configuration files as should be processed for this
+ platform, and return a list of filenames in the order in which they
+ should be parsed. The filenames returned are guaranteed to exist
+ (modulo nasty race conditions).
+
+ There are three possible config files: packaging.cfg in the
+ Packaging installation directory (ie. where the top-level
+ Packaging __inst__.py file lives), a file in the user's home
+ directory named .pydistutils.cfg on Unix and pydistutils.cfg
+ on Windows/Mac; and setup.cfg in the current directory.
+
+ The file in the user's home directory can be disabled with the
+ --no-user-cfg option.
+ """
+ files = []
+ check_environ()
+
+ # Where to look for the system-wide Packaging config file
+ sys_dir = os.path.dirname(sys.modules['packaging'].__file__)
+
+ # Look for the system config file
+ sys_file = os.path.join(sys_dir, "packaging.cfg")
+ if os.path.isfile(sys_file):
+ files.append(sys_file)
+
+ # What to call the per-user config file
+ if os.name == 'posix':
+ user_filename = ".pydistutils.cfg"
+ else:
+ user_filename = "pydistutils.cfg"
+
+ # And look for the user config file
+ if self.dist.want_user_cfg:
+ user_file = os.path.join(os.path.expanduser('~'), user_filename)
+ if os.path.isfile(user_file):
+ files.append(user_file)
+
+ # All platforms support local setup.cfg
+ local_file = "setup.cfg"
+ if os.path.isfile(local_file):
+ files.append(local_file)
+
+ if logger.isEnabledFor(logging.DEBUG):
+ logger.debug("using config files: %s", ', '.join(files))
+ return files
+
+ def _convert_metadata(self, name, value):
+ # converts a value found in setup.cfg into a valid metadata value
+ # XXX
+ return value
+
+ def _read_setup_cfg(self, parser, cfg_filename):
+ cfg_directory = os.path.dirname(os.path.abspath(cfg_filename))
+ content = {}
+ for section in parser.sections():
+ content[section] = dict(parser.items(section))
+
+ # global setup hooks are called first
+ if 'global' in content:
+ if 'setup_hooks' in content['global']:
+ setup_hooks = split_multiline(content['global']['setup_hooks'])
+
+ # add project directory to sys.path, to allow hooks to be
+ # distributed with the project
+ sys.path.insert(0, cfg_directory)
+ try:
+ for line in setup_hooks:
+ try:
+ hook = resolve_name(line)
+ except ImportError as e:
+ logger.warning('cannot find setup hook: %s',
+ e.args[0])
+ else:
+ self.setup_hooks.append(hook)
+ self.run_hooks(content)
+ finally:
+ sys.path.pop(0)
+
+ metadata = self.dist.metadata
+
+ # setting the metadata values
+ if 'metadata' in content:
+ for key, value in content['metadata'].items():
+ key = key.replace('_', '-')
+ if metadata.is_multi_field(key):
+ value = split_multiline(value)
+
+ if key == 'project-url':
+ value = [(label.strip(), url.strip())
+ for label, url in
+ [v.split(',') for v in value]]
+
+ if key == 'description-file':
+ if 'description' in content['metadata']:
+ msg = ("description and description-file' are "
+ "mutually exclusive")
+ raise PackagingOptionError(msg)
+
+ filenames = value.split()
+
+ # concatenate all files
+ value = []
+ for filename in filenames:
+ # will raise if file not found
+ with open(filename) as description_file:
+ value.append(description_file.read().strip())
+ # add filename as a required file
+ if filename not in metadata.requires_files:
+ metadata.requires_files.append(filename)
+ value = '\n'.join(value).strip()
+ key = 'description'
+
+ if metadata.is_metadata_field(key):
+ metadata[key] = self._convert_metadata(key, value)
+
+ if 'files' in content:
+ files = content['files']
+ self.dist.package_dir = files.pop('packages_root', None)
+
+ files = dict((key, split_multiline(value)) for key, value in
+ files.items())
+
+ self.dist.packages = []
+
+ packages = files.get('packages', [])
+ if isinstance(packages, str):
+ packages = [packages]
+
+ for package in packages:
+ if ':' in package:
+ dir_, package = package.split(':')
+ self.dist.package_dir[package] = dir_
+ self.dist.packages.append(package)
+
+ self.dist.py_modules = files.get('modules', [])
+ if isinstance(self.dist.py_modules, str):
+ self.dist.py_modules = [self.dist.py_modules]
+ self.dist.scripts = files.get('scripts', [])
+ if isinstance(self.dist.scripts, str):
+ self.dist.scripts = [self.dist.scripts]
+
+ self.dist.package_data = {}
+ # bookkeeping for the loop below
+ firstline = True
+ prev = None
+
+ for line in files.get('package_data', []):
+ if '=' in line:
+ # package name -- file globs or specs
+ key, value = line.split('=')
+ prev = self.dist.package_data[key.strip()] = value.split()
+ elif firstline:
+ # invalid continuation on the first line
+ raise PackagingOptionError(
+ 'malformed first package_data line: %r ("=" is missing)' %
+ line)
+ else:
+ # continuation, add to last seen package name
+ prev.extend(line.split())
+
+ firstline = False
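+
+ # Illustrative setup.cfg input for the package_data block above
+ # (package name and globs are invented):
+ # package_data =
+ # mypkg = data/*.dat
+ # templates/*.html
+ # A continuation line (no '=') extends the globs of the last package seen.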
+
+ self.dist.data_files = []
+ for data in files.get('data_files', []):
+ data = data.split('=')
+ if len(data) != 2:
+ continue
+ key, value = data
+ values = [v.strip() for v in value.split(',')]
+ self.dist.data_files.append((key, values))
+
+ # manifest template
+ self.dist.extra_files = files.get('extra_files', [])
+
+ resources = []
+ for rule in files.get('resources', []):
+ glob, destination = rule.split('=', 1)
+ rich_glob = glob.strip().split(' ', 1)
+ if len(rich_glob) == 2:
+ prefix, suffix = rich_glob
+ else:
+ assert len(rich_glob) == 1
+ prefix = ''
+ suffix = glob
+ if destination == '<exclude>':
+ destination = None
+ resources.append(
+ (prefix.strip(), suffix.strip(), destination.strip()))
+ self.dist.data_files = get_resources_dests(
+ cfg_directory, resources)
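+
+ # Illustrative resources rules as parsed above (paths invented):
+ # resources =
+ # doc/* = {doc}
+ # doc/api/* = <exclude>
+ # A '<exclude>' destination removes previously matched files.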
+
+ ext_modules = self.dist.ext_modules
+ for section_key in content:
+ labels = section_key.split(':')
+ if len(labels) == 2 and labels[0] == 'extension':
+ values_dct = content[section_key]
+ if 'name' in values_dct:
+ raise PackagingOptionError(
+ 'extension name should be given as [extension: name], '
+ 'not as key')
+ name = labels[1].strip()
+ _check_name(name, self.dist.packages)
+ ext_modules.append(Extension(
+ name,
+ _pop_values(values_dct, 'sources'),
+ _pop_values(values_dct, 'include_dirs'),
+ _pop_values(values_dct, 'define_macros'),
+ _pop_values(values_dct, 'undef_macros'),
+ _pop_values(values_dct, 'library_dirs'),
+ _pop_values(values_dct, 'libraries'),
+ _pop_values(values_dct, 'runtime_library_dirs'),
+ _pop_values(values_dct, 'extra_objects'),
+ _pop_values(values_dct, 'extra_compile_args'),
+ _pop_values(values_dct, 'extra_link_args'),
+ _pop_values(values_dct, 'export_symbols'),
+ _pop_values(values_dct, 'swig_opts'),
+ _pop_values(values_dct, 'depends'),
+ values_dct.pop('language', None),
+ values_dct.pop('optional', None),
+ **values_dct))
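+
+ # Illustrative extension section handled by the loop above (the module
+ # name, source file and marker are invented):
+ # [extension: _speedups]
+ # sources = src/_speedups.c
+ # libraries = m -- sys.platform != 'win32'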
+
+ def parse_config_files(self, filenames=None):
+ if filenames is None:
+ filenames = self.find_config_files()
+
+ logger.debug("Distribution.parse_config_files():")
+
+ parser = RawConfigParser()
+
+ for filename in filenames:
+ logger.debug(" reading %s", filename)
+ parser.read(filename, encoding='utf-8')
+
+ if os.path.split(filename)[-1] == 'setup.cfg':
+ self._read_setup_cfg(parser, filename)
+
+ for section in parser.sections():
+ if section == 'global':
+ if parser.has_option('global', 'compilers'):
+ self._load_compilers(parser.get('global', 'compilers'))
+
+ if parser.has_option('global', 'commands'):
+ self._load_commands(parser.get('global', 'commands'))
+
+ options = parser.options(section)
+ opt_dict = self.dist.get_option_dict(section)
+
+ for opt in options:
+ if opt == '__name__':
+ continue
+ val = parser.get(section, opt)
+ opt = opt.replace('-', '_')
+
+ if opt == 'sub_commands':
+ val = split_multiline(val)
+ if isinstance(val, str):
+ val = [val]
+
+ # Hooks use a suffix system to prevent being overridden
+ # by a config file processed later (i.e. a hook set in
+ # the user config file cannot be replaced by a hook
+ # set in a project config file, unless they have the
+ # same suffix).
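+ # Illustrative cfg usage (assumed, not part of this patch):
+ # [build_ext]
+ # pre-hook.mingw = myhooks.setup_mingw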
+ if (opt.startswith("pre_hook.") or
+ opt.startswith("post_hook.")):
+ hook_type, alias = opt.split(".")
+ hook_dict = opt_dict.setdefault(
+ hook_type, (filename, {}))[1]
+ hook_dict[alias] = val
+ else:
+ opt_dict[opt] = filename, val
+
+ # Make the RawConfigParser forget everything (so we retain
+ # the original filenames that options come from)
+ parser.__init__()
+
+ # If there was a "global" section in the config file, use it
+ # to set Distribution options.
+ if 'global' in self.dist.command_options:
+ for opt, (src, val) in self.dist.command_options['global'].items():
+ alias = self.dist.negative_opt.get(opt)
+ try:
+ if alias:
+ setattr(self.dist, alias, not strtobool(val))
+ elif opt == 'dry_run': # FIXME ugh!
+ setattr(self.dist, opt, strtobool(val))
+ else:
+ setattr(self.dist, opt, val)
+ except ValueError as msg:
+ raise PackagingOptionError(msg)
+
+ def _load_compilers(self, compilers):
+ compilers = split_multiline(compilers)
+ if isinstance(compilers, str):
+ compilers = [compilers]
+ for compiler in compilers:
+ set_compiler(compiler.strip())
+
+ def _load_commands(self, commands):
+ commands = split_multiline(commands)
+ if isinstance(commands, str):
+ commands = [commands]
+ for command in commands:
+ set_command(command.strip())
diff --git a/Lib/packaging/create.py b/Lib/packaging/create.py
new file mode 100644
index 0000000..3d45ca9
--- /dev/null
+++ b/Lib/packaging/create.py
@@ -0,0 +1,682 @@
+"""Interactive helper used to create a setup.cfg file.
+
+This script will generate a packaging configuration file by looking at
+the current directory and asking the user questions. It is intended to
+be called as *pysetup create*.
+"""
+
+# Original code by Sean Reifschneider <jafo@tummy.com>
+
+# Original TODO list:
+# Look for a license file and automatically add the category.
+# When a .c file is found during the walk, can we add it as an extension?
+# Ask if there is a maintainer different from the author
+# Ask for the platform (can we detect this via "import win32" or something?)
+# Ask for the dependencies.
+# Ask for the Requires-Dist
+# Ask for the Provides-Dist
+# Ask for a description
+# Detect scripts (not sure how. #! outside of package?)
+
+import os
+import re
+import imp
+import sys
+import glob
+import shutil
+import sysconfig
+from hashlib import md5
+from textwrap import dedent
+from tokenize import detect_encoding
+from configparser import RawConfigParser
+
+from packaging import logger
+# imported with an underscore because it should eventually be replaced
+# by a dict or some other structure
+from packaging._trove import all_classifiers as _CLASSIFIERS_LIST
+from packaging.version import is_valid_version
+
+_FILENAME = 'setup.cfg'
+_DEFAULT_CFG = '.pypkgcreate' # FIXME use a section in user .pydistutils.cfg
+
+_helptext = {
+ 'name': '''
+The name of the project to be packaged, usually a single word composed
+of lower-case characters such as "zope.interface", "sqlalchemy" or
+"CherryPy".
+''',
+ 'version': '''
+Version number of the software, typically 2 or 3 numbers separated by
+dots such as "1.0", "0.6b3", or "3.2.1". "0.1.0" is recommended for
+initial development.
+''',
+ 'summary': '''
+A one-line summary of what this project is or does, typically a sentence
+80 characters or less in length.
+''',
+ 'author': '''
+The full name of the author (typically you).
+''',
+ 'author_email': '''
+Email address of the project author.
+''',
+ 'do_classifier': '''
+Trove classifiers are optional identifiers that allow you to specify the
+intended audience by saying things like "Beta software with a text UI
+for Linux under the PSF license". However, this can be a somewhat
+involved process.
+''',
+ 'packages': '''
+Python packages included in the project.
+''',
+ 'modules': '''
+Pure Python modules included in the project.
+''',
+ 'extra_files': '''
+You can provide extra files/dirs contained in your project.
+It has to follow the template syntax. XXX add help here.
+''',
+
+ 'home_page': '''
+The home page for the project, typically a public Web page.
+''',
+ 'trove_license': '''
+Optionally you can specify a license. Type a string that identifies a
+common license, and then you can select a list of license specifiers.
+''',
+ 'trove_generic': '''
+Optionally, you can set other trove identifiers for things such as the
+human language, programming language, user interface, etc.
+''',
+ 'setup.py found': '''
+The setup.py script will be executed to retrieve the metadata.
+An interactive helper will be run if you answer "n",
+''',
+}
+
+PROJECT_MATURITY = ['Development Status :: 1 - Planning',
+ 'Development Status :: 2 - Pre-Alpha',
+ 'Development Status :: 3 - Alpha',
+ 'Development Status :: 4 - Beta',
+ 'Development Status :: 5 - Production/Stable',
+ 'Development Status :: 6 - Mature',
+ 'Development Status :: 7 - Inactive']
+
+# XXX everything needs docstrings and tests (both low-level tests of various
+# methods and functional tests of running the script)
+
+
+def load_setup():
+ """run the setup script (i.e the setup.py file)
+
+ This function load the setup file in all cases (even if it have already
+ been loaded before, because we are monkey patching its setup function with
+ a particular one"""
+ with open("setup.py", "rb") as f:
+ encoding, lines = detect_encoding(f.readline)
+ with open("setup.py", encoding=encoding) as f:
+ imp.load_module("setup", f, "setup.py", (".py", "r", imp.PY_SOURCE))
+
+
+def ask_yn(question, default=None, helptext=None):
+ question += ' (y/n)'
+ while True:
+ answer = ask(question, default, helptext, required=True)
+ if answer and answer[0].lower() in ('y', 'n'):
+ return answer[0].lower()
+
+ logger.error('You must select "Y" or "N".')
+
+
+# XXX use util.ask
+# FIXME: if prompt ends with '?', don't add ':'
+
+
+def ask(question, default=None, helptext=None, required=True,
+ lengthy=False, multiline=False):
+ prompt = '%s: ' % (question,)
+ if default:
+ prompt = '%s [%s]: ' % (question, default)
+ if default and len(question) + len(default) > 70:
+ prompt = '%s\n [%s]: ' % (question, default)
+ if lengthy or multiline:
+ prompt += '\n > '
+
+ if not helptext:
+ helptext = 'No additional help available.'
+
+ helptext = helptext.strip("\n")
+
+ while True:
+ line = input(prompt).strip()
+ if line == '?':
+ print('=' * 70)
+ print(helptext)
+ print('=' * 70)
+ continue
+ if default and not line:
+ return default
+ if not line and required:
+ print('*' * 70)
+ print('This value cannot be empty.')
+ print('===========================')
+ if helptext:
+ print(helptext)
+ print('*' * 70)
+ continue
+ return line
+
+
+def convert_yn_to_bool(yn, yes=True, no=False):
+ """Convert a y/yes or n/no to a boolean value."""
+ if yn.lower().startswith('y'):
+ return yes
+ else:
+ return no
+
+
+def _build_classifiers_dict(classifiers):
+ d = {}
+ for key in classifiers:
+ subdict = d
+ for subkey in key.split(' :: '):
+ if subkey not in subdict:
+ subdict[subkey] = {}
+ subdict = subdict[subkey]
+ return d
+
+CLASSIFIERS = _build_classifiers_dict(_CLASSIFIERS_LIST)
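+# For instance, 'License :: OSI Approved :: MIT License' becomes the
+# nested mapping {'License': {'OSI Approved': {'MIT License': {}}}}.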
+
+
+def _build_licences(classifiers):
+ res = []
+ for index, item in enumerate(classifiers):
+ if not item.startswith('License :: '):
+ continue
+ res.append((index, item.split(' :: ')[-1].lower()))
+ return res
+
+LICENCES = _build_licences(_CLASSIFIERS_LIST)
+
+
+class MainProgram:
+ """Make a project setup configuration file (setup.cfg)."""
+
+ def __init__(self):
+ self.configparser = None
+ self.classifiers = set()
+ self.data = {'name': '',
+ 'version': '1.0.0',
+ 'classifier': self.classifiers,
+ 'packages': [],
+ 'modules': [],
+ 'platform': [],
+ 'resources': [],
+ 'extra_files': [],
+ 'scripts': [],
+ }
+ self._load_defaults()
+
+ def __call__(self):
+ setupcfg_defined = False
+ if self.has_setup_py() and self._prompt_user_for_conversion():
+ setupcfg_defined = self.convert_py_to_cfg()
+ if not setupcfg_defined:
+ self.define_cfg_values()
+ self._write_cfg()
+
+ def has_setup_py(self):
+ """Test for the existence of a setup.py file."""
+ return os.path.exists('setup.py')
+
+ def define_cfg_values(self):
+ self.inspect()
+ self.query_user()
+
+ def _lookup_option(self, key):
+ if not self.configparser.has_option('DEFAULT', key):
+ return None
+ return self.configparser.get('DEFAULT', key)
+
+ def _load_defaults(self):
+ # Load default values from a user configuration file
+ self.configparser = RawConfigParser()
+ # TODO replace with section in distutils config file
+ default_cfg = os.path.expanduser(os.path.join('~', _DEFAULT_CFG))
+ self.configparser.read(default_cfg)
+ self.data['author'] = self._lookup_option('author')
+ self.data['author_email'] = self._lookup_option('author_email')
+
+ def _prompt_user_for_conversion(self):
+ # Prompt the user about whether they would like to use the setup.py
+ # conversion utility to generate a setup.cfg or generate the setup.cfg
+ # from scratch
+ answer = ask_yn(('A legacy setup.py has been found.\n'
+ 'Would you like to convert it to a setup.cfg?'),
+ default="y",
+ helptext=_helptext['setup.py found'])
+ return convert_yn_to_bool(answer)
+
+ def _dotted_packages(self, data):
+ packages = sorted(data)
+ modified_pkgs = []
+ for pkg in packages:
+ pkg = pkg.lstrip('./')
+ pkg = pkg.replace('/', '.')
+ modified_pkgs.append(pkg)
+ return modified_pkgs
+
+ def _write_cfg(self):
+ if os.path.exists(_FILENAME):
+ if os.path.exists('%s.old' % _FILENAME):
+ message = ("ERROR: %(name)s.old backup exists, please check "
+ "that current %(name)s is correct and remove "
+ "%(name)s.old" % {'name': _FILENAME})
+ logger.error(message)
+ return
+ shutil.move(_FILENAME, '%s.old' % _FILENAME)
+
+ with open(_FILENAME, 'w', encoding='utf-8') as fp:
+ fp.write('[metadata]\n')
+ # TODO use metadata module instead of hard-coding field-specific
+ # behavior here
+
+ # simple string entries
+ for name in ('name', 'version', 'summary', 'download_url'):
+ fp.write('%s = %s\n' % (name, self.data.get(name, 'UNKNOWN')))
+
+ # optional string entries
+ if 'keywords' in self.data and self.data['keywords']:
+ # XXX should use comma to separate, not space
+ fp.write('keywords = %s\n' % ' '.join(self.data['keywords']))
+ for name in ('home_page', 'author', 'author_email',
+ 'maintainer', 'maintainer_email', 'description-file'):
+ if name in self.data and self.data[name]:
+ fp.write('%s = %s\n' % (name, self.data[name]))
+ if 'description' in self.data:
+ fp.write(
+ 'description = %s\n'
+ % '\n |'.join(self.data['description'].split('\n')))
+
+ # multiple use string entries
+ for name in ('platform', 'supported-platform', 'classifier',
+ 'requires-dist', 'provides-dist', 'obsoletes-dist',
+ 'requires-external'):
+ if not(name in self.data and self.data[name]):
+ continue
+ fp.write('%s = ' % name)
+ fp.write(''.join(' %s\n' % val
+ for val in self.data[name]).lstrip())
+
+ fp.write('\n[files]\n')
+
+ for name in ('packages', 'modules', 'scripts', 'extra_files'):
+ if not(name in self.data and self.data[name]):
+ continue
+ fp.write('%s = %s\n'
+ % (name, '\n '.join(self.data[name]).strip()))
+
+ if self.data.get('package_data'):
+ fp.write('package_data =\n')
+ for pkg, spec in sorted(self.data['package_data'].items()):
+ # put one spec per line, indented under the package name
+ indent = ' ' * (len(pkg) + 7)
+ spec = ('\n' + indent).join(spec)
+ fp.write(' %s = %s\n' % (pkg, spec))
+ fp.write('\n')
+
+ if self.data.get('resources'):
+ fp.write('resources =\n')
+ for src, dest in self.data['resources']:
+ fp.write(' %s = %s\n' % (src, dest))
+ fp.write('\n')
+
+ os.chmod(_FILENAME, 0o644)
+ logger.info('Wrote "%s".' % _FILENAME)
+
+ def convert_py_to_cfg(self):
+ """Generate a setup.cfg from an existing setup.py.
+
+ It only exports the distutils metadata (setuptools specific metadata
+ is not currently supported).
+ """
+ data = self.data
+
+ def setup_mock(**attrs):
+ """Mock the setup(**attrs) in order to retrieve metadata."""
+
+ # TODO use config and metadata instead of Distribution
+ from distutils.dist import Distribution
+ dist = Distribution(attrs)
+ dist.parse_config_files()
+
+ # 1. retrieve metadata fields that are quite similar in
+ # PEP 314 and PEP 345
+ labels = (('name',) * 2,
+ ('version',) * 2,
+ ('author',) * 2,
+ ('author_email',) * 2,
+ ('maintainer',) * 2,
+ ('maintainer_email',) * 2,
+ ('description', 'summary'),
+ ('long_description', 'description'),
+ ('url', 'home_page'),
+ ('platforms', 'platform'),
+ ('provides', 'provides-dist'),
+ ('obsoletes', 'obsoletes-dist'),
+ ('requires', 'requires-dist'))
+
+ get = lambda lab: getattr(dist.metadata, lab.replace('-', '_'))
+ data.update((new, get(old)) for old, new in labels if get(old))
+
+ # 2. retrieve data that requires special processing
+ data['classifier'].update(dist.get_classifiers() or [])
+ data['scripts'].extend(dist.scripts or [])
+ data['packages'].extend(dist.packages or [])
+ data['modules'].extend(dist.py_modules or [])
+ # 2.1 data_files -> resources
+ if dist.data_files:
+ if (len(dist.data_files) < 2 or
+ isinstance(dist.data_files[1], str)):
+ dist.data_files = [('', dist.data_files)]
+ # add tokens in the destination paths
+ vars = {'distribution.name': data['name']}
+ path_tokens = sysconfig.get_paths(vars=vars).items()
+ # sort tokens to use the longest one first
+ path_tokens = sorted(path_tokens, key=lambda x: len(x[1]))
+ for dest, srcs in (dist.data_files or []):
+ dest = os.path.join(sys.prefix, dest)
+ dest = dest.replace(os.path.sep, '/')
+ for tok, path in path_tokens:
+ path = path.replace(os.path.sep, '/')
+ if not dest.startswith(path):
+ continue
+
+ dest = ('{%s}' % tok) + dest[len(path):]
+ files = [('/ '.join(src.rsplit('/', 1)), dest)
+ for src in srcs]
+ data['resources'].extend(files)
+
+ # 2.2 package_data
+ data['package_data'] = dist.package_data.copy()
+
+ # Use a README file if its content is the description
+ if "description" in data:
+ ref = md5(re.sub(r'\s', '',
+ self.data['description']).lower().encode())
+ ref = ref.digest()
+ for readme in glob.glob('README*'):
+ with open(readme, encoding='utf-8') as fp:
+ contents = fp.read()
+ contents = re.sub(r'\s', '', contents.lower()).encode()
+ val = md5(contents).digest()
+ if val == ref:
+ del data['description']
+ data['description-file'] = readme
+ break
+
+ # apply monkey patch to distutils (v1) and setuptools (if needed)
+ # (abort the feature if distutils v1 has been killed)
+ try:
+ from distutils import core
+ core.setup # make sure it's not d2 masquerading as d1
+ except (ImportError, AttributeError):
+ return
+ saved_setups = [(core, core.setup)]
+ core.setup = setup_mock
+ try:
+ import setuptools
+ except ImportError:
+ pass
+ else:
+ saved_setups.append((setuptools, setuptools.setup))
+ setuptools.setup = setup_mock
+ # get metadata by executing the setup.py with the patched setup(...)
+ success = False # for python < 2.4
+ try:
+ load_setup()
+ success = True
+ finally: # revert monkey patches
+ for patched_module, original_setup in saved_setups:
+ patched_module.setup = original_setup
+ if not self.data:
+ raise ValueError('Unable to load metadata from setup.py')
+ return success
+
+ def inspect(self):
+ """Inspect the current working diretory for a name and version.
+
+ This information is harvested in where the directory is named
+ like [name]-[version].
+ """
+ dir_name = os.path.basename(os.getcwd())
+ self.data['name'] = dir_name
+ match = re.match(r'(.*)-(\d.+)', dir_name)
+ if match:
+ self.data['name'] = match.group(1)
+ self.data['version'] = match.group(2)
+ # TODO needs testing!
+ if not is_valid_version(self.data['version']):
+ msg = "Invalid version discovered: %s" % self.data['version']
+ raise ValueError(msg)
+
+ def query_user(self):
+ self.data['name'] = ask('Project name', self.data['name'],
+ _helptext['name'])
+
+ self.data['version'] = ask('Current version number',
+ self.data.get('version'), _helptext['version'])
+ self.data['summary'] = ask('Project description summary',
+ self.data.get('summary'), _helptext['summary'],
+ lengthy=True)
+ self.data['author'] = ask('Author name',
+ self.data.get('author'), _helptext['author'])
+ self.data['author_email'] = ask('Author email address',
+ self.data.get('author_email'), _helptext['author_email'])
+ self.data['home_page'] = ask('Project home page',
+ self.data.get('home_page'), _helptext['home_page'],
+ required=False)
+
+ if ask_yn('Do you want me to automatically build the file list '
+ 'with everything I can find in the current directory? '
+ 'If you say no, you will have to define them manually.') == 'y':
+ self._find_files()
+ else:
+ while ask_yn('Do you want to add a single module?'
+ ' (you will be able to add full packages next)',
+ helptext=_helptext['modules']) == 'y':
+ self._set_multi('Module name', 'modules')
+
+ while ask_yn('Do you want to add a package?',
+ helptext=_helptext['packages']) == 'y':
+ self._set_multi('Package name', 'packages')
+
+ while ask_yn('Do you want to add an extra file?',
+ helptext=_helptext['extra_files']) == 'y':
+ self._set_multi('Extra file/dir name', 'extra_files')
+
+ if ask_yn('Do you want to set Trove classifiers?',
+ helptext=_helptext['do_classifier']) == 'y':
+ self.set_classifier()
+
+ def _find_files(self):
+ # we are looking for python modules and packages;
+ # everything else is added as a regular file
+ pkgs = self.data['packages']
+ modules = self.data['modules']
+ extra_files = self.data['extra_files']
+
+ def is_package(path):
+ return os.path.exists(os.path.join(path, '__init__.py'))
+
+ curdir = os.getcwd()
+ scanned = []
+ _pref = ['lib', 'include', 'dist', 'build', '.', '~']
+ _suf = ['.pyc']
+
+ def to_skip(path):
+ path = relative(path)
+
+ for pref in _pref:
+ if path.startswith(pref):
+ return True
+
+ for suf in _suf:
+ if path.endswith(suf):
+ return True
+
+ return False
+
+ def relative(path):
+ return path[len(curdir) + 1:]
+
+ def dotted(path):
+ res = relative(path).replace(os.path.sep, '.')
+ if res.endswith('.py'):
+ res = res[:-len('.py')]
+ return res
+
+ # first pass: packages
+ for root, dirs, files in os.walk(curdir):
+ if to_skip(root):
+ continue
+ for dir_ in sorted(dirs):
+ if to_skip(dir_):
+ continue
+ fullpath = os.path.join(root, dir_)
+ dotted_name = dotted(fullpath)
+ if is_package(fullpath) and dotted_name not in pkgs:
+ pkgs.append(dotted_name)
+ scanned.append(fullpath)
+
+ # modules and extra files
+ for root, dirs, files in os.walk(curdir):
+ if to_skip(root):
+ continue
+
+ if any(root.startswith(path) for path in scanned):
+ continue
+
+ for file in sorted(files):
+ fullpath = os.path.join(root, file)
+ if to_skip(fullpath):
+ continue
+ # single module?
+ if os.path.splitext(file)[-1] == '.py':
+ modules.append(dotted(fullpath))
+ else:
+ extra_files.append(relative(fullpath))
+
+ def _set_multi(self, question, name):
+ existing_values = self.data[name]
+ value = ask(question, helptext=_helptext[name]).strip()
+ if value not in existing_values:
+ existing_values.append(value)
+
+ def set_classifier(self):
+ self.set_maturity_status(self.classifiers)
+ self.set_license(self.classifiers)
+ self.set_other_classifier(self.classifiers)
+
+ def set_other_classifier(self, classifiers):
+ if ask_yn('Do you want to set other trove identifiers?', 'n',
+ _helptext['trove_generic']) != 'y':
+ return
+ self.walk_classifiers(classifiers, [CLASSIFIERS], '')
+
+ def walk_classifiers(self, classifiers, trovepath, desc):
+ trove = trovepath[-1]
+
+ if not trove:
+ return
+
+ for key in sorted(trove):
+ if len(trove[key]) == 0:
+ if ask_yn('Add "%s"' % desc[4:] + ' :: ' + key, 'n') == 'y':
+ classifiers.add(desc[4:] + ' :: ' + key)
+ continue
+
+ if ask_yn('Do you want to set items under\n "%s" (%d sub-items)?'
+ % (key, len(trove[key])), 'n',
+ _helptext['trove_generic']) == 'y':
+ self.walk_classifiers(classifiers, trovepath + [trove[key]],
+ desc + ' :: ' + key)
+
+ def set_license(self, classifiers):
+ while True:
+ license = ask('What license do you use?',
+ helptext=_helptext['trove_license'], required=False)
+ if not license:
+ return
+
+ license_words = license.lower().split(' ')
+ found_list = []
+
+ for index, licence in LICENCES:
+ for word in license_words:
+ if word in licence:
+ found_list.append(index)
+ break
+
+ if len(found_list) == 0:
+ logger.error('Could not find a matching license for "%s"' %
+ license)
+ continue
+
+ question = 'Matching licenses:\n\n'
+
+ for index, list_index in enumerate(found_list):
+ question += ' %s) %s\n' % (index + 1,
+ _CLASSIFIERS_LIST[list_index])
+
+ question += ('\nType the number of the license you wish to use or '
+ '? to try again:')
+ choice = ask(question, required=False)
+
+ if choice == '?':
+ continue
+ if choice == '':
+ return
+
+ try:
+ index = found_list[int(choice) - 1]
+ except (ValueError, IndexError):
+ logger.error(
+ "Invalid selection, type a number from the list above.")
+ continue
+
+ classifiers.add(_CLASSIFIERS_LIST[index])
+
+ def set_maturity_status(self, classifiers):
+ maturity_name = lambda mat: mat.split('- ')[-1]
+ maturity_question = '''\
+ Please select the project status:
+
+ %s
+
+ Status''' % '\n'.join('%s - %s' % (i, maturity_name(n))
+ for i, n in enumerate(PROJECT_MATURITY))
+ while True:
+ choice = ask(dedent(maturity_question), required=False)
+
+ if choice:
+ try:
+ choice = int(choice) - 1
+ key = PROJECT_MATURITY[choice]
+ classifiers.add(key)
+ return
+ except (IndexError, ValueError):
+ logger.error(
+ "Invalid selection, type a single digit number.")
+
+
+def main():
+ """Main entry point."""
+ program = MainProgram()
+ # # uncomment when implemented
+ # if not program.load_existing_setup_script():
+ # program.inspect_directory()
+ # program.query_user()
+ # program.update_config_file()
+ # program.write_setup_script()
+ # packaging.util.cfg_to_args()
+ program()
diff --git a/Lib/packaging/database.py b/Lib/packaging/database.py
new file mode 100644
index 0000000..e028dc5
--- /dev/null
+++ b/Lib/packaging/database.py
@@ -0,0 +1,651 @@
+"""PEP 376 implementation."""
+
+import os
+import re
+import csv
+import sys
+import zipimport
+from io import StringIO
+from hashlib import md5
+
+from packaging import logger
+from packaging.errors import PackagingError
+from packaging.version import suggest_normalized_version, VersionPredicate
+from packaging.metadata import Metadata
+
+
+__all__ = [
+ 'Distribution', 'EggInfoDistribution', 'distinfo_dirname',
+ 'get_distributions', 'get_distribution', 'get_file_users',
+ 'provides_distribution', 'obsoletes_distribution',
+ 'enable_cache', 'disable_cache', 'clear_cache',
+ # XXX these functions' names look like get_file_users but are not related
+ 'get_file_path', 'get_file']
+
+
+# TODO update docs
+
+DIST_FILES = ('INSTALLER', 'METADATA', 'RECORD', 'REQUESTED', 'RESOURCES')
+
+# Cache
+_cache_name = {} # maps names to Distribution instances
+_cache_name_egg = {} # maps names to EggInfoDistribution instances
+_cache_path = {} # maps paths to Distribution instances
+_cache_path_egg = {} # maps paths to EggInfoDistribution instances
+_cache_generated = False # indicates if .dist-info distributions are cached
+_cache_generated_egg = False # indicates if .dist-info and .egg are cached
+_cache_enabled = True
+
+
+def enable_cache():
+ """
+ Enables the internal cache.
+
+ Note that this function will not clear the cache in any case, for that
+ functionality see :func:`clear_cache`.
+ """
+ global _cache_enabled
+
+ _cache_enabled = True
+
+
+def disable_cache():
+ """
+ Disables the internal cache.
+
+ Note that this function will not clear the cache in any case, for that
+ functionality see :func:`clear_cache`.
+ """
+ global _cache_enabled
+
+ _cache_enabled = False
+
+
+def clear_cache():
+ """ Clears the internal cache. """
+ global _cache_generated, _cache_generated_egg
+
+ _cache_name.clear()
+ _cache_name_egg.clear()
+ _cache_path.clear()
+ _cache_path_egg.clear()
+ _cache_generated = False
+ _cache_generated_egg = False
+
+
+def _yield_distributions(include_dist, include_egg, paths):
+ """
+ Yield .dist-info and .egg(-info) distributions, based on the arguments
+
+ :parameter include_dist: yield .dist-info distributions
+ :parameter include_egg: yield .egg(-info) distributions
+ """
+ for path in paths:
+ realpath = os.path.realpath(path)
+ if not os.path.isdir(realpath):
+ continue
+ for dir in os.listdir(realpath):
+ dist_path = os.path.join(realpath, dir)
+ if include_dist and dir.endswith('.dist-info'):
+ yield Distribution(dist_path)
+ elif include_egg and (dir.endswith('.egg-info') or
+ dir.endswith('.egg')):
+ yield EggInfoDistribution(dist_path)
+
+
+def _generate_cache(use_egg_info, paths):
+ global _cache_generated, _cache_generated_egg
+
+ if _cache_generated_egg or (_cache_generated and not use_egg_info):
+ return
+ else:
+ gen_dist = not _cache_generated
+ gen_egg = use_egg_info
+
+ for dist in _yield_distributions(gen_dist, gen_egg, paths):
+ if isinstance(dist, Distribution):
+ _cache_path[dist.path] = dist
+ if dist.name not in _cache_name:
+ _cache_name[dist.name] = []
+ _cache_name[dist.name].append(dist)
+ else:
+ _cache_path_egg[dist.path] = dist
+ if dist.name not in _cache_name_egg:
+ _cache_name_egg[dist.name] = []
+ _cache_name_egg[dist.name].append(dist)
+
+ if gen_dist:
+ _cache_generated = True
+ if gen_egg:
+ _cache_generated_egg = True
+
+
+class Distribution:
+ """Created with the *path* of the ``.dist-info`` directory provided to the
+ constructor. It reads the metadata contained in ``METADATA`` when it is
+ instantiated."""
+
+ name = ''
+ """The name of the distribution."""
+
+ version = ''
+ """The version of the distribution."""
+
+ metadata = None
+ """A :class:`packaging.metadata.Metadata` instance loaded with
+ the distribution's ``METADATA`` file."""
+
+ requested = False
+ """A boolean that indicates whether the ``REQUESTED`` metadata file is
+ present (in other words, whether the package was installed by user
+ request or it was installed as a dependency)."""
+
+ def __init__(self, path):
+ if _cache_enabled and path in _cache_path:
+ self.metadata = _cache_path[path].metadata
+ else:
+ metadata_path = os.path.join(path, 'METADATA')
+ self.metadata = Metadata(path=metadata_path)
+
+ self.name = self.metadata['Name']
+ self.version = self.metadata['Version']
+ self.path = path
+
+ if _cache_enabled and path not in _cache_path:
+ _cache_path[path] = self
+
+ def __repr__(self):
+ return '<Distribution %r %s at %r>' % (
+ self.name, self.version, self.path)
+
+ def _get_records(self, local=False):
+ results = []
+ with self.get_distinfo_file('RECORD') as record:
+ record_reader = csv.reader(record, delimiter=',',
+ lineterminator='\n')
+ for row in record_reader:
+ missing = [None for i in range(len(row), 3)]
+ path, checksum, size = row + missing
+ if local:
+ path = path.replace('/', os.sep)
+ path = os.path.join(sys.prefix, path)
+ results.append((path, checksum, size))
+ return results
+
+ def get_resource_path(self, relative_path):
+ with self.get_distinfo_file('RESOURCES') as resources_file:
+ resources_reader = csv.reader(resources_file, delimiter=',',
+ lineterminator='\n')
+ for relative, destination in resources_reader:
+ if relative == relative_path:
+ return destination
+ raise KeyError(
+ 'no resource file with relative path %r is installed' %
+ relative_path)
+
+ def list_installed_files(self, local=False):
+ """
+ Iterates over the ``RECORD`` entries and returns a tuple
+ ``(path, md5, size)`` for each line. If *local* is ``True``,
+ the returned path is transformed into a local absolute path.
+ Otherwise the raw value from RECORD is returned.
+
+ A local absolute path is an absolute path in which occurrences of
+ ``'/'`` have been replaced by the system separator given by ``os.sep``.
+
+ :parameter local: flag to say if the path should be returned as a local
+ absolute path
+
+ :type local: boolean
+ :returns: iterator of (path, md5, size)
+ """
+ for result in self._get_records(local):
+ yield result
+
+ def uses(self, path):
+ """
+ Returns ``True`` if path is listed in ``RECORD``. *path* can be a local
+ absolute path or a relative ``'/'``-separated path.
+
+ :rtype: boolean
+ """
+ for p, checksum, size in self._get_records():
+ local_absolute = os.path.join(sys.prefix, p)
+ if path == p or path == local_absolute:
+ return True
+ return False
+
+ def get_distinfo_file(self, path, binary=False):
+ """
+ Returns a file located under the ``.dist-info`` directory. Returns a
+ ``file`` instance for the file pointed by *path*.
+
+ :parameter path: a ``'/'``-separated path relative to the
+ ``.dist-info`` directory or an absolute path;
+ If *path* is an absolute path and doesn't start
+ with the ``.dist-info`` directory path,
+ a :class:`PackagingError` is raised
+ :type path: string
+ :parameter binary: If *binary* is ``True``, opens the file in read-only
+ binary mode (``rb``), otherwise opens it in
+ read-only mode (``r``).
+ :rtype: file object
+ """
+ open_flags = 'r'
+ if binary:
+ open_flags += 'b'
+
+ # Check if it is an absolute path # XXX use relpath, add tests
+ if path.find(os.sep) >= 0:
+ # it's an absolute path?
+ distinfo_dirname, path = path.split(os.sep)[-2:]
+ if distinfo_dirname != self.path.split(os.sep)[-1]:
+ raise PackagingError(
+ 'dist-info file %r does not belong to the %r %s '
+ 'distribution' % (path, self.name, self.version))
+
+ # The file must be relative
+ if path not in DIST_FILES:
+ raise PackagingError('invalid path for a dist-info file: %r' %
+ path)
+
+ path = os.path.join(self.path, path)
+ return open(path, open_flags)
+
+ def list_distinfo_files(self, local=False):
+ """
+ Iterates over the ``RECORD`` entries and returns paths for each line if
+ the path is pointing to a file located in the ``.dist-info`` directory
+ or one of its subdirectories.
+
+ :parameter local: If *local* is ``True``, each returned path is
+ transformed into a local absolute path. Otherwise the
+ raw value from ``RECORD`` is returned.
+ :type local: boolean
+ :returns: iterator of paths
+ """
+ for path, checksum, size in self._get_records(local):
+ # XXX add separator or use real relpath algo
+ if path.startswith(self.path):
+ yield path
+
+ def __eq__(self, other):
+ return isinstance(other, Distribution) and self.path == other.path
+
+ # See http://docs.python.org/reference/datamodel#object.__hash__
+ __hash__ = object.__hash__
+
+
+class EggInfoDistribution:
+ """Created with the *path* of the ``.egg-info`` directory or file provided
+ to the constructor. It reads the metadata contained in the file itself, or
+ if the given path happens to be a directory, the metadata is read from the
+ file ``PKG-INFO`` under that directory."""
+
+ name = ''
+ """The name of the distribution."""
+
+ version = ''
+ """The version of the distribution."""
+
+ metadata = None
+ """A :class:`packaging.metadata.Metadata` instance loaded with
+ the distribution's ``METADATA`` file."""
+
+ _REQUIREMENT = re.compile(
+ r'(?P<name>[-A-Za-z0-9_.]+)\s*'
+ r'(?P<first>(?:<|<=|!=|==|>=|>)[-A-Za-z0-9_.]+)?\s*'
+ r'(?P<rest>(?:\s*,\s*(?:<|<=|!=|==|>=|>)[-A-Za-z0-9_.]+)*)\s*'
+ r'(?P<extras>\[.*\])?')
+
+ def __init__(self, path):
+ self.path = path
+ if _cache_enabled and path in _cache_path_egg:
+ self.metadata = _cache_path_egg[path].metadata
+ self.name = self.metadata['Name']
+ self.version = self.metadata['Version']
+ return
+
+ # reused from Distribute's pkg_resources
+ def yield_lines(strs):
+ """Yield non-empty/non-comment lines of a ``basestring``
+ or sequence"""
+ if isinstance(strs, str):
+ for s in strs.splitlines():
+ s = s.strip()
+ # skip blank lines/comments
+ if s and not s.startswith('#'):
+ yield s
+ else:
+ for ss in strs:
+ for s in yield_lines(ss):
+ yield s
+
+ requires = None
+
+ if path.endswith('.egg'):
+ if os.path.isdir(path):
+ meta_path = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
+ self.metadata = Metadata(path=meta_path)
+ try:
+ req_path = os.path.join(path, 'EGG-INFO', 'requires.txt')
+ with open(req_path, 'r') as fp:
+ requires = fp.read()
+ except IOError:
+ requires = None
+ else:
+ # FIXME handle the case where zipfile is not available
+ zipf = zipimport.zipimporter(path)
+ fileobj = StringIO(
+ zipf.get_data('EGG-INFO/PKG-INFO').decode('utf8'))
+ self.metadata = Metadata(fileobj=fileobj)
+ try:
+ requires = zipf.get_data('EGG-INFO/requires.txt')
+ except IOError:
+ requires = None
+ self.name = self.metadata['Name']
+ self.version = self.metadata['Version']
+
+ elif path.endswith('.egg-info'):
+ if os.path.isdir(path):
+ path = os.path.join(path, 'PKG-INFO')
+ try:
+ with open(os.path.join(path, 'requires.txt'), 'r') as fp:
+ requires = fp.read()
+ except IOError:
+ requires = None
+ self.metadata = Metadata(path=path)
+ self.name = self.metadata['Name']
+ self.version = self.metadata['Version']
+
+ else:
+ raise ValueError('path must end with .egg-info or .egg, got %r' %
+ path)
+
+ if requires is not None:
+ if self.metadata['Metadata-Version'] == '1.1':
+ # we can't have 1.1 metadata *and* Setuptools requires
+ for field in ('Obsoletes', 'Requires', 'Provides'):
+ del self.metadata[field]
+
+ reqs = []
+
+ if requires is not None:
+ for line in yield_lines(requires):
+ if line.startswith('['):
+ logger.warning(
+ 'extensions in requires.txt are not supported '
+ '(used by %r %s)', self.name, self.version)
+ break
+ else:
+ match = self._REQUIREMENT.match(line.strip())
+ if not match:
+ # this happens when we encounter extras; since they
+ # are written at the end of the file we just exit
+ break
+ else:
+ if match.group('extras'):
+ logger.warning('extra requirements are not supported '
+ '(used by %r %s)', self.name, self.version)
+ name = match.group('name')
+ version = None
+ if match.group('first'):
+ version = match.group('first')
+ if match.group('rest'):
+ version += match.group('rest')
+ version = version.replace(' ', '') # trim spaces
+ if version is None:
+ reqs.append(name)
+ else:
+ reqs.append('%s (%s)' % (name, version))
+
+ if len(reqs) > 0:
+ self.metadata['Requires-Dist'] += reqs
+
+ if _cache_enabled:
+ _cache_path_egg[self.path] = self
+
+ def __repr__(self):
+ return '<EggInfoDistribution %r %s at %r>' % (
+ self.name, self.version, self.path)
+
+ def list_installed_files(self, local=False):
+
+ def _md5(path):
+ with open(path, 'rb') as f:
+ content = f.read()
+ return md5(content).hexdigest()
+
+ def _size(path):
+ return os.stat(path).st_size
+
+ path = self.path
+ if local:
+ path = path.replace('/', os.sep)
+
+ # XXX What about scripts and data files ?
+ if os.path.isfile(path):
+ return [(path, _md5(path), _size(path))]
+ else:
+ files = []
+ for root, dir, files_ in os.walk(path):
+ for item in files_:
+ item = os.path.join(root, item)
+ files.append((item, _md5(item), _size(item)))
+ return files
+
+
+ def uses(self, path):
+ return False
+
+ def __eq__(self, other):
+ return (isinstance(other, EggInfoDistribution) and
+ self.path == other.path)
+
+ # See http://docs.python.org/reference/datamodel#object.__hash__
+ __hash__ = object.__hash__
+
+
+def distinfo_dirname(name, version):
+ """
+ The *name* and *version* parameters are converted into their
+ filename-escaped form, i.e. any ``'-'`` characters are replaced
+ with ``'_'`` other than the one in ``'dist-info'`` and the one
+ separating the name from the version number.
+
+ :parameter name: is converted to a standard distribution name by replacing
+ any runs of non-alphanumeric characters with a single
+ ``'-'``.
+ :type name: string
+ :parameter version: is converted to a standard version string. Spaces
+ become dots, and all other non-alphanumeric characters
+ (except dots) become dashes, with runs of multiple
+ dashes condensed to a single dash.
+ :type version: string
+ :returns: directory name
+ :rtype: string"""
+ file_extension = '.dist-info'
+ name = name.replace('-', '_')
+ normalized_version = suggest_normalized_version(version)
+ # Because this is a lookup procedure, something will be returned even if
+ # it is a version that cannot be normalized
+ if normalized_version is None:
+ # Unable to achieve normality?
+ normalized_version = version
+ return '-'.join([name, normalized_version]) + file_extension
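+
+# For example, distinfo_dirname('python-ldap', '2.5') (a hypothetical
+# project) returns 'python_ldap-2.5.dist-info'.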
+
+
+def get_distributions(use_egg_info=False, paths=None):
+ """
+ Provides an iterator that looks for ``.dist-info`` directories in
+ ``sys.path`` and returns :class:`Distribution` instances for each one of
+ them. If the parameter *use_egg_info* is ``True``, then the ``.egg-info``
+ files and directories are iterated as well.
+
+ :rtype: iterator of :class:`Distribution` and :class:`EggInfoDistribution`
+ instances
+ """
+ if paths is None:
+ paths = sys.path
+
+ if not _cache_enabled:
+ for dist in _yield_distributions(True, use_egg_info, paths):
+ yield dist
+ else:
+ _generate_cache(use_egg_info, paths)
+
+ for dist in _cache_path.values():
+ yield dist
+
+ if use_egg_info:
+ for dist in _cache_path_egg.values():
+ yield dist
+
+
+def get_distribution(name, use_egg_info=False, paths=None):
+ """
+ Scans all elements in ``sys.path`` and looks for all directories
+ ending with ``.dist-info``. Returns a :class:`Distribution`
+ corresponding to the ``.dist-info`` directory that contains the
+ ``METADATA`` that matches *name* for the *name* metadata field.
+ If no distribution exists with the given *name* and the parameter
+ *use_egg_info* is set to ``True``, then all files and directories ending
+ with ``.egg-info`` are scanned. An :class:`EggInfoDistribution` instance is
+ returned if one is found that has metadata that matches *name* for the
+ *name* metadata field.
+
+ This function only returns the first result found, as no more than one
+ value is expected. If the directory is not found, ``None`` is returned.
+
+ :rtype: :class:`Distribution` or :class:`EggInfoDistribution` or None
+ """
+ if paths is None:
+ paths = sys.path
+
+ if not _cache_enabled:
+ for dist in _yield_distributions(True, use_egg_info, paths):
+ if dist.name == name:
+ return dist
+ else:
+ _generate_cache(use_egg_info, paths)
+
+ if name in _cache_name:
+ return _cache_name[name][0]
+ elif use_egg_info and name in _cache_name_egg:
+ return _cache_name_egg[name][0]
+ else:
+ return None
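+
+# Typical use of get_distribution (illustrative):
+# dist = get_distribution('packaging')
+# if dist is not None:
+# print(dist.name, dist.version)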
+
+
+def obsoletes_distribution(name, version=None, use_egg_info=False):
+ """
+ Iterates over all distributions to find which distributions obsolete
+ *name*.
+
+ If a *version* is provided, it will be used to filter the results.
+ If the argument *use_egg_info* is set to ``True``, then ``.egg-info``
+ distributions will be considered as well.
+
+ :type name: string
+ :type version: string
+ :parameter name: the name of the distribution
+ """
+ for dist in get_distributions(use_egg_info):
+ obsoleted = (dist.metadata['Obsoletes-Dist'] +
+ dist.metadata['Obsoletes'])
+ for obs in obsoleted:
+ o_components = obs.split(' ', 1)
+ if len(o_components) == 1 or version is None:
+ if name == o_components[0]:
+ yield dist
+ break
+ else:
+ try:
+ predicate = VersionPredicate(obs)
+ except ValueError:
+ raise PackagingError(
+ 'distribution %r has ill-formed obsoletes field: '
+ '%r' % (dist.name, obs))
+ if name == o_components[0] and predicate.match(version):
+ yield dist
+ break
+
+
+def provides_distribution(name, version=None, use_egg_info=False):
+ """
+ Iterates over all distributions to find which distributions provide *name*.
+ If a *version* is provided, it will be used to filter the results. Scans
+ all elements in ``sys.path`` and looks for all directories ending with
+ ``.dist-info``. Returns a :class:`Distribution` corresponding to the
+ ``.dist-info`` directory that contains a ``METADATA`` that matches *name*
+ for the name metadata. If the argument *use_egg_info* is set to ``True``,
+ then all files and directories ending with ``.egg-info`` are considered
+ as well and returns an :class:`EggInfoDistribution` instance.
+
+ Unlike :func:`get_distribution`, this function can yield several
+ results, one for each distribution that provides a matching name.
+
+ :parameter version: a version specifier that indicates the version
+ required, conforming to the format in ``PEP-345``
+
+ :type name: string
+ :type version: string
+ """
+ predicate = None
+ if version is not None:
+ try:
+ predicate = VersionPredicate(name + ' (' + version + ')')
+ except ValueError:
+ raise PackagingError('invalid name or version: %r, %r' %
+ (name, version))
+
+ for dist in get_distributions(use_egg_info):
+ provided = dist.metadata['Provides-Dist'] + dist.metadata['Provides']
+
+ for p in provided:
+ p_components = p.rsplit(' ', 1)
+ if len(p_components) == 1 or predicate is None:
+ if name == p_components[0]:
+ yield dist
+ break
+ else:
+ p_name, p_ver = p_components
+ if len(p_ver) < 2 or p_ver[0] != '(' or p_ver[-1] != ')':
+ raise PackagingError(
+ 'distribution %r has invalid Provides field: %r' %
+ (dist.name, p))
+ p_ver = p_ver[1:-1] # trim off the parentheses
+ if p_name == name and predicate.match(p_ver):
+ yield dist
+ break
+
+
+def get_file_users(path):
+ """
+ Iterates over all distributions to find out which distributions use
+ *path*.
+
+ :parameter path: can be a local absolute path or a relative
+ ``'/'``-separated path.
+ :type path: string
+ :rtype: iterator of :class:`Distribution` instances
+ """
+ for dist in get_distributions():
+ if dist.uses(path):
+ yield dist
+
+
+def get_file_path(distribution_name, relative_path):
+ """Return the path to a resource file."""
+ dist = get_distribution(distribution_name)
+ if dist is not None:
+ return dist.get_resource_path(relative_path)
+ raise LookupError('no distribution named %r found' % distribution_name)
+
+
+def get_file(distribution_name, relative_path, *args, **kwargs):
+ """Open and return a resource file."""
+ return open(get_file_path(distribution_name, relative_path),
+ *args, **kwargs)
diff --git a/Lib/packaging/depgraph.py b/Lib/packaging/depgraph.py
new file mode 100644
index 0000000..d633b63
--- /dev/null
+++ b/Lib/packaging/depgraph.py
@@ -0,0 +1,270 @@
+"""Class and functions dealing with dependencies between distributions.
+
+This module provides a DependencyGraph class to represent the
+dependencies between distributions. Auxiliary functions can generate a
+graph, find reverse dependencies, and print a graph in DOT format.
+"""
+
+import sys
+
+from io import StringIO
+from packaging.errors import PackagingError
+from packaging.version import VersionPredicate, IrrationalVersionError
+
+__all__ = ['DependencyGraph', 'generate_graph', 'dependent_dists',
+ 'graph_to_dot']
+
+
+class DependencyGraph:
+ """
+ Represents a dependency graph between distributions.
+
+ The dependency relationships are stored in an ``adjacency_list`` that maps
+ distributions to a list of ``(other, label)`` tuples where ``other``
+ is a distribution and the edge is labeled with ``label`` (i.e. the version
+ specifier, if such was provided). Also, for more efficient traversal, for
+ every distribution ``x``, a list of predecessors is kept in
+ ``reverse_list[x]``. An edge from distribution ``a`` to
+ distribution ``b`` means that ``a`` depends on ``b``. If any missing
+ dependencies are found, they are stored in ``missing``, which is a
+ dictionary that maps distributions to a list of requirements that were not
+ provided by any other distributions.
+ """
+
+ def __init__(self):
+ self.adjacency_list = {}
+ self.reverse_list = {}
+ self.missing = {}
+
+ def add_distribution(self, distribution):
+ """Add the *distribution* to the graph.
+
+ :type distribution: :class:`packaging.database.Distribution` or
+ :class:`packaging.database.EggInfoDistribution`
+ """
+ self.adjacency_list[distribution] = []
+ self.reverse_list[distribution] = []
+ self.missing[distribution] = []
+
+ def add_edge(self, x, y, label=None):
+ """Add an edge from distribution *x* to distribution *y* with the given
+ *label*.
+
+ :type x: :class:`packaging.database.Distribution` or
+ :class:`packaging.database.EggInfoDistribution`
+ :type y: :class:`packaging.database.Distribution` or
+ :class:`packaging.database.EggInfoDistribution`
+ :type label: ``str`` or ``None``
+ """
+ self.adjacency_list[x].append((y, label))
+ # multiple edges are allowed, so be careful
+ if x not in self.reverse_list[y]:
+ self.reverse_list[y].append(x)
+
+ def add_missing(self, distribution, requirement):
+ """
+ Add a missing *requirement* for the given *distribution*.
+
+ :type distribution: :class:`packaging.database.Distribution` or
+ :class:`packaging.database.EggInfoDistribution`
+ :type requirement: ``str``
+ """
+ self.missing[distribution].append(requirement)
+
+ def _repr_dist(self, dist):
+ return '%r %s' % (dist.name, dist.version)
+
+ def repr_node(self, dist, level=1):
+ """Prints only a subgraph"""
+ output = []
+ output.append(self._repr_dist(dist))
+ for other, label in self.adjacency_list[dist]:
+ dist = self._repr_dist(other)
+ if label is not None:
+ dist = '%s [%s]' % (dist, label)
+ output.append(' ' * level + str(dist))
+ suboutput = self.repr_node(other, level + 1)
+ subs = suboutput.split('\n')
+ output.extend(subs[1:])
+ return '\n'.join(output)
+
+ def __repr__(self):
+ """Representation of the graph"""
+ output = []
+ for dist, adjs in self.adjacency_list.items():
+ output.append(self.repr_node(dist))
+ return '\n'.join(output)
+
+
+def graph_to_dot(graph, f, skip_disconnected=True):
+ """Writes a DOT output for the graph to the provided file *f*.
+
+ If *skip_disconnected* is set to ``True``, then all distributions
+ that are not dependent on any other distribution are skipped.
+
+ :type f: file-like object (must support ``write``)
+ :type skip_disconnected: ``bool``
+ """
+ disconnected = []
+
+ f.write("digraph dependencies {\n")
+ for dist, adjs in graph.adjacency_list.items():
+ if len(adjs) == 0 and not skip_disconnected:
+ disconnected.append(dist)
+ for other, label in adjs:
+ if label is not None:
+ f.write('"%s" -> "%s" [label="%s"]\n' %
+ (dist.name, other.name, label))
+ else:
+ f.write('"%s" -> "%s"\n' % (dist.name, other.name))
+ if not skip_disconnected and len(disconnected) > 0:
+ f.write('subgraph disconnected {\n')
+ f.write('label = "Disconnected"\n')
+ f.write('bgcolor = red\n')
+
+ for dist in disconnected:
+ f.write('"%s"' % dist.name)
+ f.write('\n')
+ f.write('}\n')
+ f.write('}\n')
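+
+# The DOT output contains edge lines such as:
+# "foo" -> "bar" [label="bar (>= 1.0)"]
+# (the distribution names and label here are illustrative).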
+
+
+def generate_graph(dists):
+ """Generates a dependency graph from the given distributions.
+
+ :parameter dists: a list of distributions
+ :type dists: list of :class:`packaging.database.Distribution` and
+ :class:`packaging.database.EggInfoDistribution` instances
+ :rtype: a :class:`DependencyGraph` instance
+ """
+ graph = DependencyGraph()
+ provided = {} # maps names to lists of (version, dist) tuples
+
+ # first, build the graph and find out the provides
+ for dist in dists:
+ graph.add_distribution(dist)
+ provides = (dist.metadata['Provides-Dist'] +
+ dist.metadata['Provides'] +
+ ['%s (%s)' % (dist.name, dist.version)])
+
+ for p in provides:
+ comps = p.strip().rsplit(" ", 1)
+ name = comps[0]
+ version = None
+ if len(comps) == 2:
+ version = comps[1]
+ if len(version) < 3 or version[0] != '(' or version[-1] != ')':
+ raise PackagingError('distribution %r has ill-formed '
+ 'provides field: %r' % (dist.name, p))
+ version = version[1:-1] # trim off the parentheses
+ if name not in provided:
+ provided[name] = []
+ provided[name].append((version, dist))
+
+ # now make the edges
+ for dist in dists:
+ requires = dist.metadata['Requires-Dist'] + dist.metadata['Requires']
+ for req in requires:
+ try:
+ predicate = VersionPredicate(req)
+ except IrrationalVersionError:
+ # XXX compat-mode when the version cannot be parsed
+ name = req.split()[0]
+ predicate = VersionPredicate(name)
+
+ name = predicate.name
+
+ if name not in provided:
+ graph.add_missing(dist, req)
+ else:
+ matched = False
+ for version, provider in provided[name]:
+ try:
+ match = predicate.match(version)
+ except IrrationalVersionError:
+ # XXX small compat-mode
+ if len(version.split(' ')) == 1:
+ match = True
+ else:
+ match = False
+
+ if match:
+ graph.add_edge(dist, provider, req)
+ matched = True
+ break
+ if not matched:
+ graph.add_missing(dist, req)
+ return graph
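+
+# Typical use (illustrative; main() below does the same):
+# graph = generate_graph(list(get_distributions()))
+# for dist, reqs in graph.missing.items():
+# print(dist.name, 'is missing', reqs)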
+
+
+def dependent_dists(dists, dist):
+ """Recursively generate a list of distributions from *dists* that are
+ dependent on *dist*.
+
+ :param dists: a list of distributions
+ :param dist: a distribution, member of *dists* for which we are interested
+ """
+ if dist not in dists:
+ raise ValueError('given distribution %r is not a member of the list' %
+ dist.name)
+ graph = generate_graph(dists)
+
+ dep = [dist] # dependent distributions
+ fringe = graph.reverse_list[dist] # list of nodes we should inspect
+
+ while fringe:
+ node = fringe.pop()
+ dep.append(node)
+ for prev in graph.reverse_list[node]:
+ if prev not in dep:
+ fringe.append(prev)
+
+ dep.pop(0) # remove dist from dep, was there to prevent infinite loops
+ return dep
+
+
+def main():
+ # XXX move to run._graph
+ from packaging.database import get_distributions
+ tempout = StringIO()
+ try:
+ old = sys.stderr
+ sys.stderr = tempout
+ try:
+ dists = list(get_distributions(use_egg_info=True))
+ graph = generate_graph(dists)
+ finally:
+ sys.stderr = old
+ except Exception as e:
+ tempout.seek(0)
+ tempout = tempout.read()
+ print('Could not generate the graph')
+ print(tempout)
+ print(e)
+ sys.exit(1)
+
+ for dist, reqs in graph.missing.items():
+ if len(reqs) > 0:
+ print("Warning: Missing dependencies for %r:" % dist.name,
+ ", ".join(reqs))
+ # XXX replace with argparse
+ if len(sys.argv) == 1:
+ print('Dependency graph:')
+ print(' ', repr(graph).replace('\n', '\n '))
+ sys.exit(0)
+ elif len(sys.argv) > 1 and sys.argv[1] in ('-d', '--dot'):
+ if len(sys.argv) > 2:
+ filename = sys.argv[2]
+ else:
+ filename = 'depgraph.dot'
+
+ with open(filename, 'w') as f:
+ graph_to_dot(graph, f, True)
+ tempout.seek(0)
+ tempout = tempout.read()
+ print(tempout)
+ print('Dot file written at %r' % filename)
+ sys.exit(0)
+ else:
+ print('Supported option: -d [filename]')
+ sys.exit(1)
diff --git a/Lib/packaging/dist.py b/Lib/packaging/dist.py
new file mode 100644
index 0000000..607767e
--- /dev/null
+++ b/Lib/packaging/dist.py
@@ -0,0 +1,769 @@
+"""Class representing the project being built/installed/etc."""
+
+import os
+import re
+
+from packaging import logger
+from packaging.util import strtobool, resolve_name
+from packaging.config import Config
+from packaging.errors import (PackagingOptionError, PackagingArgError,
+ PackagingModuleError, PackagingClassError)
+from packaging.command import get_command_class, STANDARD_COMMANDS
+from packaging.command.cmd import Command
+from packaging.metadata import Metadata
+from packaging.fancy_getopt import FancyGetopt
+
+# Regex to define acceptable Packaging command names. This is not *quite*
+# the same as a Python name -- leading underscores are not allowed. The fact
+# that they're very similar is no coincidence: the default naming scheme is
+# to look for a Python module named after the command.
+command_re = re.compile(r'^[a-zA-Z]([a-zA-Z0-9_]*)$')
+
+USAGE = """\
+usage: %(script)s [global_opts] cmd1 [cmd1_opts] [cmd2 [cmd2_opts] ...]
+ or: %(script)s --help [cmd1 cmd2 ...]
+ or: %(script)s --help-commands
+ or: %(script)s cmd --help
+"""
+
+
+def gen_usage(script_name):
+ script = os.path.basename(script_name)
+ return USAGE % {'script': script}
+
+
+class Distribution:
+ """Class used to represent a project and work with it.
+
+    Most of the work hidden behind 'pysetup run' is really done within a
+ Distribution instance, which farms the work out to the commands
+ specified on the command line.
+ """
+
+ # 'global_options' describes the command-line options that may be
+ # supplied to the setup script prior to any actual commands.
+ # Eg. "pysetup run -n" or "pysetup run --dry-run" both take advantage of
+ # these global options. This list should be kept to a bare minimum,
+ # since every global option is also valid as a command option -- and we
+ # don't want to pollute the commands with too many options that they
+ # have minimal control over.
+ global_options = [
+ ('dry-run', 'n', "don't actually do anything"),
+ ('help', 'h', "show detailed help message"),
+ ('no-user-cfg', None, 'ignore pydistutils.cfg in your home directory'),
+ ]
+
+ # 'common_usage' is a short (2-3 line) string describing the common
+ # usage of the setup script.
+ common_usage = """\
+Common commands: (see '--help-commands' for more)
+
+ pysetup run build will build the project underneath 'build/'
+ pysetup run install will install the project
+"""
+
+ # options that are not propagated to the commands
+ display_options = [
+ ('help-commands', None,
+ "list all available commands"),
+ ('use-2to3', None,
+ "use 2to3 to make source python 3.x compatible"),
+ ('convert-2to3-doctests', None,
+ "use 2to3 to convert doctests in separate text files"),
+ ]
+ display_option_names = [x[0].replace('-', '_') for x in display_options]
+
+ # negative options are options that exclude other options
+ negative_opt = {}
+
+ # -- Creation/initialization methods -------------------------------
+ def __init__(self, attrs=None):
+ """Construct a new Distribution instance: initialize all the
+ attributes of a Distribution, and then use 'attrs' (a dictionary
+ mapping attribute names to values) to assign some of those
+ attributes their "real" values. (Any attributes not mentioned in
+ 'attrs' will be assigned to some null value: 0, None, an empty list
+ or dictionary, etc.) Most importantly, initialize the
+ 'command_obj' attribute to the empty dictionary; this will be
+ filled in with real command objects by 'parse_command_line()'.
+ """
+
+ # Default values for our command-line options
+ self.dry_run = False
+ self.help = False
+ for attr in self.display_option_names:
+ setattr(self, attr, False)
+
+ # Store the configuration
+ self.config = Config(self)
+
+ # Store the distribution metadata (name, version, author, and so
+ # forth) in a separate object -- we're getting to have enough
+ # information here (and enough command-line options) that it's
+ # worth it.
+ self.metadata = Metadata()
+
+ # 'cmdclass' maps command names to class objects, so we
+ # can 1) quickly figure out which class to instantiate when
+ # we need to create a new command object, and 2) have a way
+ # for the setup script to override command classes
+ self.cmdclass = {}
+
+ # 'script_name' and 'script_args' are usually set to sys.argv[0]
+ # and sys.argv[1:], but they can be overridden when the caller is
+ # not necessarily a setup script run from the command line.
+ self.script_name = None
+ self.script_args = None
+
+ # 'command_options' is where we store command options between
+ # parsing them (from config files, the command line, etc.) and when
+ # they are actually needed -- ie. when the command in question is
+ # instantiated. It is a dictionary of dictionaries of 2-tuples:
+ # command_options = { command_name : { option : (source, value) } }
+ self.command_options = {}
+
+ # 'dist_files' is the list of (command, pyversion, file) that
+ # have been created by any dist commands run so far. This is
+ # filled regardless of whether the run is dry or not. pyversion
+ # gives sysconfig.get_python_version() if the dist file is
+ # specific to a Python version, 'any' if it is good for all
+ # Python versions on the target platform, and '' for a source
+ # file. pyversion should not be used to specify minimum or
+ # maximum required Python versions; use the metainfo for that
+ # instead.
+ self.dist_files = []
+
+ # These options are really the business of various commands, rather
+ # than of the Distribution itself. We provide aliases for them in
+ # Distribution as a convenience to the developer.
+ self.packages = []
+ self.package_data = {}
+ self.package_dir = None
+ self.py_modules = []
+ self.libraries = []
+ self.headers = []
+ self.ext_modules = []
+ self.ext_package = None
+ self.include_dirs = []
+ self.extra_path = None
+ self.scripts = []
+ self.data_files = {}
+ self.password = ''
+ self.use_2to3 = False
+ self.convert_2to3_doctests = []
+ self.extra_files = []
+
+ # And now initialize bookkeeping stuff that can't be supplied by
+ # the caller at all. 'command_obj' maps command names to
+ # Command instances -- that's how we enforce that every command
+ # class is a singleton.
+ self.command_obj = {}
+
+ # 'have_run' maps command names to boolean values; it keeps track
+ # of whether we have actually run a particular command, to make it
+ # cheap to "run" a command whenever we think we might need to -- if
+ # it's already been done, no need for expensive filesystem
+ # operations, we just check the 'have_run' dictionary and carry on.
+ # It's only safe to query 'have_run' for a command class that has
+ # been instantiated -- a false value will be inserted when the
+ # command object is created, and replaced with a true value when
+ # the command is successfully run. Thus it's probably best to use
+ # '.get()' rather than a straight lookup.
+ self.have_run = {}
+
+ # Now we'll use the attrs dictionary (ultimately, keyword args from
+ # the setup script) to possibly override any or all of these
+ # distribution options.
+
+ if attrs is not None:
+ # Pull out the set of command options and work on them
+ # specifically. Note that this order guarantees that aliased
+ # command options will override any supplied redundantly
+ # through the general options dictionary.
+ options = attrs.get('options')
+ if options is not None:
+ del attrs['options']
+ for command, cmd_options in options.items():
+ opt_dict = self.get_option_dict(command)
+ for opt, val in cmd_options.items():
+ opt_dict[opt] = ("setup script", val)
+
+ # Now work on the rest of the attributes. Any attribute that's
+ # not already defined is invalid!
+ for key, val in attrs.items():
+ if self.metadata.is_metadata_field(key):
+ self.metadata[key] = val
+ elif hasattr(self, key):
+ setattr(self, key, val)
+ else:
+ logger.warning(
+ 'unknown argument given to Distribution: %r', key)
+
+ # no-user-cfg is handled before other command line args
+ # because other args override the config files, and this
+ # one is needed before we can load the config files.
+ # If attrs['script_args'] wasn't passed, assume false.
+ #
+        # This also makes sure we just look at the global options
+ self.want_user_cfg = True
+
+ if self.script_args is not None:
+ for arg in self.script_args:
+ if not arg.startswith('-'):
+ break
+ if arg == '--no-user-cfg':
+ self.want_user_cfg = False
+ break
+
+ self.finalize_options()
+
+ def get_option_dict(self, command):
+ """Get the option dictionary for a given command. If that
+ command's option dictionary hasn't been created yet, then create it
+ and return the new dictionary; otherwise, return the existing
+ option dictionary.
+ """
+ d = self.command_options.get(command)
+ if d is None:
+ d = self.command_options[command] = {}
+ return d
+
+ def get_fullname(self, filesafe=False):
+ return self.metadata.get_fullname(filesafe)
+
+ def dump_option_dicts(self, header=None, commands=None, indent=""):
+ from pprint import pformat
+
+ if commands is None: # dump all command option dicts
+ commands = sorted(self.command_options)
+
+ if header is not None:
+ logger.info(indent + header)
+ indent = indent + " "
+
+ if not commands:
+ logger.info(indent + "no commands known yet")
+ return
+
+ for cmd_name in commands:
+ opt_dict = self.command_options.get(cmd_name)
+ if opt_dict is None:
+ logger.info(indent + "no option dict for %r command",
+ cmd_name)
+ else:
+ logger.info(indent + "option dict for %r command:", cmd_name)
+ out = pformat(opt_dict)
+ for line in out.split('\n'):
+ logger.info(indent + " " + line)
+
+ # -- Config file finding/parsing methods ---------------------------
+ # XXX to be removed
+ def parse_config_files(self, filenames=None):
+ return self.config.parse_config_files(filenames)
+
+ def find_config_files(self):
+ return self.config.find_config_files()
+
+ # -- Command-line parsing methods ----------------------------------
+
+ def parse_command_line(self):
+ """Parse the setup script's command line, taken from the
+ 'script_args' instance attribute (which defaults to 'sys.argv[1:]'
+ -- see 'setup()' in run.py). This list is first processed for
+ "global options" -- options that set attributes of the Distribution
+ instance. Then, it is alternately scanned for Packaging commands
+ and options for that command. Each new command terminates the
+ options for the previous command. The allowed options for a
+ command are determined by the 'user_options' attribute of the
+ command class -- thus, we have to be able to load command classes
+ in order to parse the command line. Any error in that 'options'
+ attribute raises PackagingGetoptError; any error on the
+ command line raises PackagingArgError. If no Packaging commands
+ were found on the command line, raises PackagingArgError. Return
+ true if command line was successfully parsed and we should carry
+ on with executing commands; false if no errors but we shouldn't
+ execute commands (currently, this only happens if user asks for
+ help).
+ """
+ #
+ # We now have enough information to show the Macintosh dialog
+ # that allows the user to interactively specify the "command line".
+ #
+ toplevel_options = self._get_toplevel_options()
+
+ # We have to parse the command line a bit at a time -- global
+ # options, then the first command, then its options, and so on --
+ # because each command will be handled by a different class, and
+ # the options that are valid for a particular class aren't known
+ # until we have loaded the command class, which doesn't happen
+ # until we know what the command is.
+
+ self.commands = []
+ parser = FancyGetopt(toplevel_options + self.display_options)
+ parser.set_negative_aliases(self.negative_opt)
+ args = parser.getopt(args=self.script_args, object=self)
+ option_order = parser.get_option_order()
+
+ # for display options we return immediately
+ if self.handle_display_options(option_order):
+ return
+
+ while args:
+ args = self._parse_command_opts(parser, args)
+ if args is None: # user asked for help (and got it)
+ return
+
+ # Handle the cases of --help as a "global" option, ie.
+ # "pysetup run --help" and "pysetup run --help command ...". For the
+ # former, we show global options (--dry-run, etc.)
+ # and display-only options (--name, --version, etc.); for the
+ # latter, we omit the display-only options and show help for
+ # each command listed on the command line.
+ if self.help:
+ self._show_help(parser,
+ display_options=len(self.commands) == 0,
+ commands=self.commands)
+ return
+
+ return True
+
+ def _get_toplevel_options(self):
+ """Return the non-display options recognized at the top level.
+
+ This includes options that are recognized *only* at the top
+ level as well as options recognized for commands.
+ """
+ return self.global_options
+
+ def _parse_command_opts(self, parser, args):
+ """Parse the command-line options for a single command.
+ 'parser' must be a FancyGetopt instance; 'args' must be the list
+ of arguments, starting with the current command (whose options
+ we are about to parse). Returns a new version of 'args' with
+ the next command at the front of the list; will be the empty
+ list if there are no more commands on the command line. Returns
+ None if the user asked for help on this command.
+ """
+ # Pull the current command from the head of the command line
+ command = args[0]
+ if not command_re.match(command):
+ raise SystemExit("invalid command name %r" % command)
+ self.commands.append(command)
+
+ # Dig up the command class that implements this command, so we
+ # 1) know that it's a valid command, and 2) know which options
+ # it takes.
+ try:
+ cmd_class = get_command_class(command)
+ except PackagingModuleError as msg:
+ raise PackagingArgError(msg)
+
+ # XXX We want to push this in packaging.command
+ #
+ # Require that the command class be derived from Command -- want
+ # to be sure that the basic "command" interface is implemented.
+ for meth in ('initialize_options', 'finalize_options', 'run'):
+ if hasattr(cmd_class, meth):
+ continue
+ raise PackagingClassError(
+ 'command %r must implement %r' % (cmd_class, meth))
+
+ # Also make sure that the command object provides a list of its
+ # known options.
+ if not (hasattr(cmd_class, 'user_options') and
+ isinstance(cmd_class.user_options, list)):
+ raise PackagingClassError(
+ "command class %s must provide "
+ "'user_options' attribute (a list of tuples)" % cmd_class)
+
+ # If the command class has a list of negative alias options,
+ # merge it in with the global negative aliases.
+ negative_opt = self.negative_opt
+ if hasattr(cmd_class, 'negative_opt'):
+ negative_opt = negative_opt.copy()
+ negative_opt.update(cmd_class.negative_opt)
+
+ # Check for help_options in command class. They have a different
+ # format (tuple of four) so we need to preprocess them here.
+ if (hasattr(cmd_class, 'help_options') and
+ isinstance(cmd_class.help_options, list)):
+ help_options = cmd_class.help_options[:]
+ else:
+ help_options = []
+
+ # All commands support the global options too, just by adding
+ # in 'global_options'.
+ parser.set_option_table(self.global_options +
+ cmd_class.user_options +
+ help_options)
+ parser.set_negative_aliases(negative_opt)
+ args, opts = parser.getopt(args[1:])
+ if hasattr(opts, 'help') and opts.help:
+ self._show_help(parser, display_options=False,
+ commands=[cmd_class])
+ return
+
+ if (hasattr(cmd_class, 'help_options') and
+ isinstance(cmd_class.help_options, list)):
+ help_option_found = False
+ for help_option, short, desc, func in cmd_class.help_options:
+ if hasattr(opts, help_option.replace('-', '_')):
+ help_option_found = True
+ if callable(func):
+ func()
+ else:
+ raise PackagingClassError(
+ "invalid help function %r for help option %r: "
+ "must be a callable object (function, etc.)"
+ % (func, help_option))
+
+ if help_option_found:
+ return
+
+ # Put the options from the command line into their official
+ # holding pen, the 'command_options' dictionary.
+ opt_dict = self.get_option_dict(command)
+ for name, value in vars(opts).items():
+ opt_dict[name] = ("command line", value)
+
+ return args
+
+ def finalize_options(self):
+ """Set final values for all the options on the Distribution
+ instance, analogous to the .finalize_options() method of Command
+ objects.
+ """
+ if getattr(self, 'convert_2to3_doctests', None):
+ self.convert_2to3_doctests = [os.path.join(p)
+ for p in self.convert_2to3_doctests]
+ else:
+ self.convert_2to3_doctests = []
+
+ def _show_help(self, parser, global_options=True, display_options=True,
+ commands=[]):
+ """Show help for the setup script command line in the form of
+ several lists of command-line options. 'parser' should be a
+ FancyGetopt instance; do not expect it to be returned in the
+ same state, as its option table will be reset to make it
+ generate the correct help text.
+
+ If 'global_options' is true, lists the global options:
+ --dry-run, etc. If 'display_options' is true, lists
+ the "display-only" options: --help-commands. Finally,
+ lists per-command help for every command name or command class
+ in 'commands'.
+ """
+ if global_options:
+ if display_options:
+ options = self._get_toplevel_options()
+ else:
+ options = self.global_options
+ parser.set_option_table(options)
+ parser.print_help(self.common_usage + "\nGlobal options:")
+ print()
+
+ if display_options:
+ parser.set_option_table(self.display_options)
+ parser.print_help(
+ "Information display options (just display " +
+ "information, ignore any commands)")
+ print()
+
+ for command in self.commands:
+ if isinstance(command, type) and issubclass(command, Command):
+ cls = command
+ else:
+ cls = get_command_class(command)
+ if (hasattr(cls, 'help_options') and
+ isinstance(cls.help_options, list)):
+ parser.set_option_table(cls.user_options + cls.help_options)
+ else:
+ parser.set_option_table(cls.user_options)
+ parser.print_help("Options for %r command:" % cls.__name__)
+ print()
+
+ print(gen_usage(self.script_name))
+
+ def handle_display_options(self, option_order):
+ """If there were any non-global "display-only" options
+ (--help-commands) on the command line, display the requested info and
+ return true; else return false.
+ """
+ # User just wants a list of commands -- we'll print it out and stop
+ # processing now (ie. if they ran "setup --help-commands foo bar",
+ # we ignore "foo bar").
+ if self.help_commands:
+ self.print_commands()
+ print()
+ print(gen_usage(self.script_name))
+ return True
+
+ # If user supplied any of the "display metadata" options, then
+ # display that metadata in the order in which the user supplied the
+ # metadata options.
+ any_display_options = False
+ is_display_option = set()
+ for option in self.display_options:
+ is_display_option.add(option[0])
+
+ for opt, val in option_order:
+ if val and opt in is_display_option:
+ opt = opt.replace('-', '_')
+ value = self.metadata[opt]
+ if opt in ('keywords', 'platform'):
+ print(','.join(value))
+ elif opt in ('classifier', 'provides', 'requires',
+ 'obsoletes'):
+ print('\n'.join(value))
+ else:
+ print(value)
+ any_display_options = True
+
+ return any_display_options
+
+ def print_command_list(self, commands, header, max_length):
+ """Print a subset of the list of all commands -- used by
+ 'print_commands()'.
+ """
+ print(header + ":")
+
+ for cmd in commands:
+ cls = self.cmdclass.get(cmd) or get_command_class(cmd)
+ description = getattr(cls, 'description',
+ '(no description available)')
+
+ print(" %-*s %s" % (max_length, cmd, description))
+
+ def _get_command_groups(self):
+ """Helper function to retrieve all the command class names divided
+ into standard commands (listed in
+ packaging.command.STANDARD_COMMANDS) and extra commands (given in
+ self.cmdclass and not standard commands).
+ """
+ extra_commands = [cmd for cmd in self.cmdclass
+ if cmd not in STANDARD_COMMANDS]
+ return STANDARD_COMMANDS, extra_commands
+
+ def print_commands(self):
+ """Print out a help message listing all available commands with a
+ description of each. The list is divided into standard commands
+ (listed in packaging.command.STANDARD_COMMANDS) and extra commands
+ (given in self.cmdclass and not standard commands). The
+ descriptions come from the command class attribute
+ 'description'.
+ """
+ std_commands, extra_commands = self._get_command_groups()
+ max_length = 0
+ for cmd in (std_commands + extra_commands):
+ if len(cmd) > max_length:
+ max_length = len(cmd)
+
+ self.print_command_list(std_commands,
+ "Standard commands",
+ max_length)
+ if extra_commands:
+ print()
+ self.print_command_list(extra_commands,
+ "Extra commands",
+ max_length)
+
+ # -- Command class/object methods ----------------------------------
+
+ def get_command_obj(self, command, create=True):
+ """Return the command object for 'command'. Normally this object
+ is cached on a previous call to 'get_command_obj()'; if no command
+ object for 'command' is in the cache, then we either create and
+ return it (if 'create' is true) or return None.
+ """
+ cmd_obj = self.command_obj.get(command)
+ if not cmd_obj and create:
+ logger.debug("Distribution.get_command_obj(): "
+ "creating %r command object", command)
+
+ cls = get_command_class(command)
+ cmd_obj = self.command_obj[command] = cls(self)
+ self.have_run[command] = 0
+
+ # Set any options that were supplied in config files or on the
+ # command line. (XXX support for error reporting is suboptimal
+ # here: errors aren't reported until finalize_options is called,
+ # which means we won't report the source of the error.)
+ options = self.command_options.get(command)
+ if options:
+ self._set_command_options(cmd_obj, options)
+
+ return cmd_obj
+
+ def _set_command_options(self, command_obj, option_dict=None):
+ """Set the options for 'command_obj' from 'option_dict'. Basically
+ this means copying elements of a dictionary ('option_dict') to
+ attributes of an instance ('command').
+
+ 'command_obj' must be a Command instance. If 'option_dict' is not
+ supplied, uses the standard option dictionary for this command
+ (from 'self.command_options').
+ """
+ command_name = command_obj.get_command_name()
+ if option_dict is None:
+ option_dict = self.get_option_dict(command_name)
+
+ logger.debug(" setting options for %r command:", command_name)
+
+ for option, (source, value) in option_dict.items():
+ logger.debug(" %s = %s (from %s)", option, value, source)
+ try:
+ bool_opts = [x.replace('-', '_')
+ for x in command_obj.boolean_options]
+ except AttributeError:
+ bool_opts = []
+ try:
+ neg_opt = command_obj.negative_opt
+ except AttributeError:
+ neg_opt = {}
+
+ try:
+ is_string = isinstance(value, str)
+ if option in neg_opt and is_string:
+ setattr(command_obj, neg_opt[option], not strtobool(value))
+ elif option in bool_opts and is_string:
+ setattr(command_obj, option, strtobool(value))
+ elif hasattr(command_obj, option):
+ setattr(command_obj, option, value)
+ else:
+ raise PackagingOptionError(
+ "error in %s: command %r has no such option %r" %
+ (source, command_name, option))
+ except ValueError as msg:
+ raise PackagingOptionError(msg)
+
+ def reinitialize_command(self, command, reinit_subcommands=False):
+ """Reinitializes a command to the state it was in when first
+ returned by 'get_command_obj()': i.e., initialized but not yet
+ finalized. This provides the opportunity to sneak option
+ values in programmatically, overriding or supplementing
+ user-supplied values from the config files and command line.
+ You'll have to re-finalize the command object (by calling
+ 'finalize_options()' or 'ensure_finalized()') before using it for
+ real.
+
+ 'command' should be a command name (string) or command object. If
+ 'reinit_subcommands' is true, also reinitializes the command's
+ sub-commands, as declared by the 'sub_commands' class attribute (if
+ it has one). See the "install_dist" command for an example. Only
+ reinitializes the sub-commands that actually matter, i.e. those
+        whose test predicate returns true.
+
+ Returns the reinitialized command object. It will be the same
+ object as the one stored in the self.command_obj attribute.
+ """
+ if not isinstance(command, Command):
+ command_name = command
+ command = self.get_command_obj(command_name)
+ else:
+ command_name = command.get_command_name()
+
+ if not command.finalized:
+ return command
+
+ command.initialize_options()
+ self.have_run[command_name] = 0
+ command.finalized = False
+ self._set_command_options(command)
+
+ if reinit_subcommands:
+ for sub in command.get_sub_commands():
+ self.reinitialize_command(sub, reinit_subcommands)
+
+ return command
+
+ # -- Methods that operate on the Distribution ----------------------
+
+ def run_commands(self):
+ """Run each command that was seen on the setup script command line.
+ Uses the list of commands found and cache of command objects
+ created by 'get_command_obj()'.
+ """
+ for cmd in self.commands:
+ self.run_command(cmd)
+
+ # -- Methods that operate on its Commands --------------------------
+
+ def run_command(self, command, options=None):
+ """Do whatever it takes to run a command (including nothing at all,
+ if the command has already been run). Specifically: if we have
+ already created and run the command named by 'command', return
+ silently without doing anything. If the command named by 'command'
+ doesn't even have a command object yet, create one. Then invoke
+ 'run()' on that command object (or an existing one).
+ """
+ # Already been here, done that? then return silently.
+ if self.have_run.get(command):
+ return
+
+ if options is not None:
+ self.command_options[command] = options
+
+ cmd_obj = self.get_command_obj(command)
+ cmd_obj.ensure_finalized()
+ self.run_command_hooks(cmd_obj, 'pre_hook')
+ logger.info("running %s", command)
+ cmd_obj.run()
+ self.run_command_hooks(cmd_obj, 'post_hook')
+ self.have_run[command] = 1
+
+ def run_command_hooks(self, cmd_obj, hook_kind):
+ """Run hooks registered for that command and phase.
+
+ *cmd_obj* is a finalized command object; *hook_kind* is either
+ 'pre_hook' or 'post_hook'.
+ """
+ if hook_kind not in ('pre_hook', 'post_hook'):
+ raise ValueError('invalid hook kind: %r' % hook_kind)
+
+ hooks = getattr(cmd_obj, hook_kind, None)
+
+ if hooks is None:
+ return
+
+ for hook in hooks.values():
+ if isinstance(hook, str):
+ try:
+ hook_obj = resolve_name(hook)
+ except ImportError as e:
+ raise PackagingModuleError(e)
+ else:
+ hook_obj = hook
+
+ if not callable(hook_obj):
+ raise PackagingOptionError('hook %r is not callable' % hook)
+
+ logger.info('running %s %s for command %s',
+ hook_kind, hook, cmd_obj.get_command_name())
+ hook_obj(cmd_obj)
+
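Hooks, as run by the method above, are plain callables that receive the
finalized command object. A minimal sketch, assuming `dist` is a Distribution
instance; attaching the hook by direct assignment here is purely illustrative
(hooks normally arrive via command configuration):

    def announce(cmd_obj):
        # receives the finalized command object
        print('about to run', cmd_obj.get_command_name())

    cmd = dist.get_command_obj('build')
    cmd.pre_hook = {'announce': announce}   # dict of named callables
    dist.run_command('build')               # runs announce() first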
+ # -- Distribution query methods ------------------------------------
+ def has_pure_modules(self):
+ return len(self.packages or self.py_modules or []) > 0
+
+ def has_ext_modules(self):
+ return self.ext_modules and len(self.ext_modules) > 0
+
+ def has_c_libraries(self):
+ return self.libraries and len(self.libraries) > 0
+
+ def has_modules(self):
+ return self.has_pure_modules() or self.has_ext_modules()
+
+ def has_headers(self):
+ return self.headers and len(self.headers) > 0
+
+ def has_scripts(self):
+ return self.scripts and len(self.scripts) > 0
+
+ def has_data_files(self):
+ return self.data_files and len(self.data_files) > 0
+
+ def is_pure(self):
+ return (self.has_pure_modules() and
+ not self.has_ext_modules() and
+ not self.has_c_libraries())
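A sketch of how the Distribution class above is typically driven; the project
name, version and the 'force' build option are invented example values, and
actually running the build command of course requires a real project layout:

    from packaging.dist import Distribution

    dist = Distribution(attrs={
        'name': 'example',      # routed to dist.metadata
        'version': '0.1',
        'options': {'build': {'force': '1'}},
    })

    # options from 'attrs' are recorded as (source, value) pairs
    print(dist.get_option_dict('build'))
    # -> {'force': ('setup script', '1')}

    dist.parse_config_files()   # XXX slated for removal, see above
    dist.run_command('build')   # finalizes, runs hooks, then run()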
diff --git a/Lib/packaging/errors.py b/Lib/packaging/errors.py
new file mode 100644
index 0000000..8878129
--- /dev/null
+++ b/Lib/packaging/errors.py
@@ -0,0 +1,138 @@
+"""Exceptions used throughout the package.
+
+Submodules of packaging may raise exceptions defined in this module as
+well as standard exceptions; in particular, SystemExit is usually raised
+for errors that are obviously the end-user's fault (e.g. bad
+command-line arguments).
+"""
+
+
+class PackagingError(Exception):
+ """The root of all Packaging evil."""
+
+
+class PackagingModuleError(PackagingError):
+ """Unable to load an expected module, or to find an expected class
+ within some module (in particular, command modules and classes)."""
+
+
+class PackagingClassError(PackagingError):
+ """Some command class (or possibly distribution class, if anyone
+ feels a need to subclass Distribution) is found not to be holding
+    up its end of the bargain, ie. implementing some part of the
+    "command" interface."""
+
+
+class PackagingGetoptError(PackagingError):
+ """The option table provided to 'fancy_getopt()' is bogus."""
+
+
+class PackagingArgError(PackagingError):
+ """Raised by fancy_getopt in response to getopt.error -- ie. an
+ error in the command line usage."""
+
+
+class PackagingFileError(PackagingError):
+ """Any problems in the filesystem: expected file not found, etc.
+ Typically this is for problems that we detect before IOError or
+ OSError could be raised."""
+
+
+class PackagingOptionError(PackagingError):
+ """Syntactic/semantic errors in command options, such as use of
+ mutually conflicting options, or inconsistent options,
+ badly-spelled values, etc. No distinction is made between option
+ values originating in the setup script, the command line, config
+ files, or what-have-you -- but if we *know* something originated in
+ the setup script, we'll raise PackagingSetupError instead."""
+
+
+class PackagingSetupError(PackagingError):
+ """For errors that can be definitely blamed on the setup script,
+ such as invalid keyword arguments to 'setup()'."""
+
+
+class PackagingPlatformError(PackagingError):
+ """We don't know how to do something on the current platform (but
+ we do know how to do it on some platform) -- eg. trying to compile
+ C files on a platform not supported by a CCompiler subclass."""
+
+
+class PackagingExecError(PackagingError):
+ """Any problems executing an external program (such as the C
+ compiler, when compiling C files)."""
+
+
+class PackagingInternalError(PackagingError):
+ """Internal inconsistencies or impossibilities (obviously, this
+ should never be seen if the code is working!)."""
+
+
+class PackagingTemplateError(PackagingError):
+ """Syntax error in a file list template."""
+
+
+class PackagingPyPIError(PackagingError):
+    """Any problem occurring while using the indexes."""
+
+
+# Exception classes used by the CCompiler implementation classes
+class CCompilerError(Exception):
+ """Some compile/link operation failed."""
+
+
+class PreprocessError(CCompilerError):
+ """Failure to preprocess one or more C/C++ files."""
+
+
+class CompileError(CCompilerError):
+ """Failure to compile one or more C/C++ source files."""
+
+
+class LibError(CCompilerError):
+ """Failure to create a static library from one or more C/C++ object
+ files."""
+
+
+class LinkError(CCompilerError):
+ """Failure to link one or more C/C++ object files into an executable
+ or shared library file."""
+
+
+class UnknownFileError(CCompilerError):
+ """Attempt to process an unknown file type."""
+
+
+class MetadataMissingError(PackagingError):
+    """A required metadata field is missing."""
+
+
+class MetadataConflictError(PackagingError):
+    """Attempt to read or write conflicting metadata fields."""
+
+
+class MetadataUnrecognizedVersionError(PackagingError):
+ """Unknown metadata version number."""
+
+
+class IrrationalVersionError(Exception):
+ """This is an irrational version."""
+ pass
+
+
+class HugeMajorVersionNumError(IrrationalVersionError):
+ """An irrational version because the major version number is huge
+ (often because a year or date was used).
+
+ See `error_on_huge_major_num` option in `NormalizedVersion` for details.
+ This guard can be disabled by setting that option False.
+ """
+ pass
+
+
+class InstallationException(Exception):
+ """Base exception for installation scripts"""
+
+
+class InstallationConflict(InstallationException):
+ """Raised when a conflict is detected"""
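Since every user-facing error above derives from PackagingError, callers can
catch the whole family with one clause; note that CCompilerError and
IrrationalVersionError deliberately do not inherit from it and need their own
handlers. A sketch (the step() callable is hypothetical):

    from packaging.errors import (PackagingError, PackagingFileError,
                                  CCompilerError)

    def run_step(step):
        try:
            step()
        except PackagingFileError as exc:
            print('filesystem problem:', exc)
        except PackagingError as exc:
            print('packaging error:', exc)   # any other subclass above
        except CCompilerError as exc:
            print('compiler failure:', exc)  # separate hierarchy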
diff --git a/Lib/packaging/fancy_getopt.py b/Lib/packaging/fancy_getopt.py
new file mode 100644
index 0000000..61dd5fc
--- /dev/null
+++ b/Lib/packaging/fancy_getopt.py
@@ -0,0 +1,388 @@
+"""Command line parsing machinery.
+
+The FancyGetopt class is a wrapper around the getopt module that
+provides the following additional features:
+ * short and long options are tied together
+ * options have help strings, so fancy_getopt could potentially
+ create a complete usage summary
+ * options set attributes of a passed-in object.
+
+It is used under the hood by the command classes. Do not use directly.
+"""
+
+import getopt
+import re
+import sys
+import textwrap
+
+from packaging.errors import PackagingGetoptError, PackagingArgError
+
+# Much like command_re in packaging.core, this is close to but not quite
+# the same as a Python NAME -- except, in the spirit of most GNU
+# utilities, we use '-' in place of '_'. (The spirit of LISP lives on!)
+# The similarities to NAME are again not a coincidence...
+longopt_pat = r'[a-zA-Z](?:[a-zA-Z0-9-]*)'
+longopt_re = re.compile(r'^%s$' % longopt_pat)
+
+# For recognizing "negative alias" options, eg. "quiet=!verbose"
+neg_alias_re = re.compile("^(%s)=!(%s)$" % (longopt_pat, longopt_pat))
+
+
+class FancyGetopt:
+ """Wrapper around the standard 'getopt()' module that provides some
+ handy extra functionality:
+ * short and long options are tied together
+ * options have help strings, and help text can be assembled
+ from them
+ * options set attributes of a passed-in object
+ * boolean options can have "negative aliases" -- eg. if
+ --quiet is the "negative alias" of --verbose, then "--quiet"
+ on the command line sets 'verbose' to false
+ """
+
+ def __init__(self, option_table=None):
+
+ # The option table is (currently) a list of tuples. The
+        # tuples may have three or four values:
+ # (long_option, short_option, help_string [, repeatable])
+ # if an option takes an argument, its long_option should have '='
+ # appended; short_option should just be a single character, no ':'
+ # in any case. If a long_option doesn't have a corresponding
+ # short_option, short_option should be None. All option tuples
+ # must have long options.
+ self.option_table = option_table
+
+ # 'option_index' maps long option names to entries in the option
+ # table (ie. those 3-tuples).
+ self.option_index = {}
+ if self.option_table:
+ self._build_index()
+
+ # 'alias' records (duh) alias options; {'foo': 'bar'} means
+ # --foo is an alias for --bar
+ self.alias = {}
+
+ # 'negative_alias' keeps track of options that are the boolean
+ # opposite of some other option
+ self.negative_alias = {}
+
+ # These keep track of the information in the option table. We
+ # don't actually populate these structures until we're ready to
+ # parse the command line, since the 'option_table' passed in here
+ # isn't necessarily the final word.
+ self.short_opts = []
+ self.long_opts = []
+ self.short2long = {}
+ self.attr_name = {}
+ self.takes_arg = {}
+
+ # And 'option_order' is filled up in 'getopt()'; it records the
+ # original order of options (and their values) on the command line,
+ # but expands short options, converts aliases, etc.
+ self.option_order = []
+
+ def _build_index(self):
+ self.option_index.clear()
+ for option in self.option_table:
+ self.option_index[option[0]] = option
+
+ def set_option_table(self, option_table):
+ self.option_table = option_table
+ self._build_index()
+
+ def add_option(self, long_option, short_option=None, help_string=None):
+ if long_option in self.option_index:
+ raise PackagingGetoptError(
+ "option conflict: already an option '%s'" % long_option)
+ else:
+ option = (long_option, short_option, help_string)
+ self.option_table.append(option)
+ self.option_index[long_option] = option
+
+ def has_option(self, long_option):
+ """Return true if the option table for this parser has an
+ option with long name 'long_option'."""
+ return long_option in self.option_index
+
+ def _check_alias_dict(self, aliases, what):
+ assert isinstance(aliases, dict)
+ for alias, opt in aliases.items():
+ if alias not in self.option_index:
+ raise PackagingGetoptError(
+ ("invalid %s '%s': "
+ "option '%s' not defined") % (what, alias, alias))
+ if opt not in self.option_index:
+ raise PackagingGetoptError(
+ ("invalid %s '%s': "
+ "aliased option '%s' not defined") % (what, alias, opt))
+
+ def set_aliases(self, alias):
+ """Set the aliases for this option parser."""
+ self._check_alias_dict(alias, "alias")
+ self.alias = alias
+
+ def set_negative_aliases(self, negative_alias):
+ """Set the negative aliases for this option parser.
+ 'negative_alias' should be a dictionary mapping option names to
+ option names, both the key and value must already be defined
+ in the option table."""
+ self._check_alias_dict(negative_alias, "negative alias")
+ self.negative_alias = negative_alias
+
+ def _grok_option_table(self):
+ """Populate the various data structures that keep tabs on the
+ option table. Called by 'getopt()' before it can do anything
+ worthwhile.
+ """
+ self.long_opts = []
+ self.short_opts = []
+ self.short2long.clear()
+ self.repeat = {}
+
+ for option in self.option_table:
+ if len(option) == 3:
+ longopt, short, help = option
+ repeat = 0
+ elif len(option) == 4:
+ longopt, short, help, repeat = option
+ else:
+ # the option table is part of the code, so simply
+ # assert that it is correct
+ raise ValueError("invalid option tuple: %r" % option)
+
+ # Type- and value-check the option names
+ if not isinstance(longopt, str) or len(longopt) < 2:
+ raise PackagingGetoptError(
+ ("invalid long option '%s': "
+ "must be a string of length >= 2") % longopt)
+
+ if (not ((short is None) or
+ (isinstance(short, str) and len(short) == 1))):
+ raise PackagingGetoptError(
+ ("invalid short option '%s': "
+ "must be a single character or None") % short)
+
+ self.repeat[longopt] = repeat
+ self.long_opts.append(longopt)
+
+ if longopt[-1] == '=': # option takes an argument?
+ if short:
+ short = short + ':'
+ longopt = longopt[0:-1]
+ self.takes_arg[longopt] = 1
+ else:
+
+                # Is this option a "negative alias" for some other option (eg.
+ # "quiet" == "!verbose")?
+ alias_to = self.negative_alias.get(longopt)
+ if alias_to is not None:
+ if self.takes_arg[alias_to]:
+                        raise PackagingGetoptError(
+                            ("invalid negative alias '%s': "
+                             "aliased option '%s' takes a value")
+                            % (longopt, alias_to))
+
+ self.long_opts[-1] = longopt # XXX redundant?!
+ self.takes_arg[longopt] = 0
+
+ else:
+ self.takes_arg[longopt] = 0
+
+ # If this is an alias option, make sure its "takes arg" flag is
+ # the same as the option it's aliased to.
+ alias_to = self.alias.get(longopt)
+ if alias_to is not None:
+ if self.takes_arg[longopt] != self.takes_arg[alias_to]:
+                    raise PackagingGetoptError(
+                        ("invalid alias '%s': inconsistent with "
+                         "aliased option '%s' (one of them takes a value, "
+                         "the other doesn't)") % (longopt, alias_to))
+
+ # Now enforce some bondage on the long option name, so we can
+ # later translate it to an attribute name on some object. Have
+ # to do this a bit late to make sure we've removed any trailing
+ # '='.
+ if not longopt_re.match(longopt):
+                raise PackagingGetoptError(
+                    ("invalid long option name '%s' " +
+                     "(must be letters, numbers, hyphens only)") % longopt)
+
+ self.attr_name[longopt] = longopt.replace('-', '_')
+ if short:
+ self.short_opts.append(short)
+ self.short2long[short[0]] = longopt
+
+ def getopt(self, args=None, object=None):
+ """Parse command-line options in args. Store as attributes on object.
+
+ If 'args' is None or not supplied, uses 'sys.argv[1:]'. If
+ 'object' is None or not supplied, creates a new OptionDummy
+ object, stores option values there, and returns a tuple (args,
+ object). If 'object' is supplied, it is modified in place and
+ 'getopt()' just returns 'args'; in both cases, the returned
+ 'args' is a modified copy of the passed-in 'args' list, which
+ is left untouched.
+ """
+ if args is None:
+ args = sys.argv[1:]
+ if object is None:
+ object = OptionDummy()
+ created_object = 1
+ else:
+ created_object = 0
+
+ self._grok_option_table()
+
+ short_opts = ' '.join(self.short_opts)
+
+ try:
+ opts, args = getopt.getopt(args, short_opts, self.long_opts)
+ except getopt.error as msg:
+ raise PackagingArgError(msg)
+
+ for opt, val in opts:
+ if len(opt) == 2 and opt[0] == '-': # it's a short option
+ opt = self.short2long[opt[1]]
+ else:
+ assert len(opt) > 2 and opt[:2] == '--'
+ opt = opt[2:]
+
+ alias = self.alias.get(opt)
+ if alias:
+ opt = alias
+
+ if not self.takes_arg[opt]: # boolean option?
+ assert val == '', "boolean option can't have value"
+ alias = self.negative_alias.get(opt)
+ if alias:
+ opt = alias
+ val = 0
+ else:
+ val = 1
+
+ attr = self.attr_name[opt]
+ # The only repeating option at the moment is 'verbose'.
+ # It has a negative option -q quiet, which should set verbose = 0.
+ if val and self.repeat.get(attr) is not None:
+ val = getattr(object, attr, 0) + 1
+ setattr(object, attr, val)
+ self.option_order.append((opt, val))
+
+ # for opts
+ if created_object:
+ return args, object
+ else:
+ return args
+
+ def get_option_order(self):
+ """Returns the list of (option, value) tuples processed by the
+ previous run of 'getopt()'. Raises RuntimeError if
+ 'getopt()' hasn't been called yet.
+ """
+        if self.option_order is None:
+            raise RuntimeError("'getopt()' hasn't been called yet")
+        return self.option_order
+
+ def generate_help(self, header=None):
+ """Generate help text (a list of strings, one per suggested line of
+ output) from the option table for this FancyGetopt object.
+ """
+ # Blithely assume the option table is good: probably wouldn't call
+ # 'generate_help()' unless you've already called 'getopt()'.
+
+ # First pass: determine maximum length of long option names
+ max_opt = 0
+ for option in self.option_table:
+ longopt = option[0]
+ short = option[1]
+ l = len(longopt)
+ if longopt[-1] == '=':
+ l = l - 1
+ if short is not None:
+ l = l + 5 # " (-x)" where short == 'x'
+ if l > max_opt:
+ max_opt = l
+
+ opt_width = max_opt + 2 + 2 + 2 # room for indent + dashes + gutter
+
+ # Typical help block looks like this:
+ # --foo controls foonabulation
+ # Help block for longest option looks like this:
+ # --flimflam set the flim-flam level
+ # and with wrapped text:
+ # --flimflam set the flim-flam level (must be between
+ # 0 and 100, except on Tuesdays)
+ # Options with short names will have the short name shown (but
+ # it doesn't contribute to max_opt):
+ # --foo (-f) controls foonabulation
+ # If adding the short option would make the left column too wide,
+ # we push the explanation off to the next line
+ # --flimflam (-l)
+ # set the flim-flam level
+ # Important parameters:
+ # - 2 spaces before option block start lines
+ # - 2 dashes for each long option name
+ # - min. 2 spaces between option and explanation (gutter)
+ # - 5 characters (incl. space) for short option name
+
+ # Now generate lines of help text. (If 80 columns were good enough
+ # for Jesus, then 78 columns are good enough for me!)
+ line_width = 78
+ text_width = line_width - opt_width
+ big_indent = ' ' * opt_width
+ if header:
+ lines = [header]
+ else:
+ lines = ['Option summary:']
+
+ for option in self.option_table:
+ longopt, short, help = option[:3]
+ text = textwrap.wrap(help, text_width)
+
+ # Case 1: no short option at all (makes life easy)
+ if short is None:
+ if text:
+ lines.append(" --%-*s %s" % (max_opt, longopt, text[0]))
+ else:
+ lines.append(" --%-*s " % (max_opt, longopt))
+
+ # Case 2: we have a short option, so we have to include it
+ # just after the long option
+ else:
+ opt_names = "%s (-%s)" % (longopt, short)
+ if text:
+ lines.append(" --%-*s %s" %
+ (max_opt, opt_names, text[0]))
+ else:
+                    lines.append(" --%-*s" % (max_opt, opt_names))
+
+ for l in text[1:]:
+ lines.append(big_indent + l)
+
+ return lines
+
+ def print_help(self, header=None, file=None):
+ if file is None:
+ file = sys.stdout
+ for line in self.generate_help(header):
+ file.write(line + "\n")
+
+
+def fancy_getopt(options, negative_opt, object, args):
+ parser = FancyGetopt(options)
+ parser.set_negative_aliases(negative_opt)
+ return parser.getopt(args, object)
+
+
+class OptionDummy:
+ """Dummy class just used as a place to hold command-line option
+ values as instance attributes."""
+
+ def __init__(self, options=[]):
+ """Create a new OptionDummy instance. The attributes listed in
+ 'options' will be initialized to None."""
+ for opt in options:
+ setattr(self, opt, None)
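A short sketch of the parser in action; the option table entries are invented
for the example ('verbose' uses the four-value repeatable form, 'record='
takes an argument, and 'quiet' is declared as a negative alias of 'verbose'):

    from packaging.fancy_getopt import FancyGetopt

    options = [
        ('verbose', 'v', 'run verbosely', True),   # repeatable
        ('quiet', 'q', 'run quietly'),
        ('record=', None, 'write a record file'),  # '=' means takes a value
    ]
    parser = FancyGetopt(options)
    parser.set_negative_aliases({'quiet': 'verbose'})

    args, opts = parser.getopt(['-v', '--record=files.txt', 'build'])
    print(args)          # ['build'] -- leftover positional arguments
    print(opts.verbose)  # 1
    print(opts.record)   # 'files.txt'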
diff --git a/Lib/packaging/install.py b/Lib/packaging/install.py
new file mode 100644
index 0000000..776ba40
--- /dev/null
+++ b/Lib/packaging/install.py
@@ -0,0 +1,529 @@
+"""Building blocks for installers.
+
+When used as a script, this module installs a release thanks to info
+obtained from an index (e.g. PyPI), with dependencies.
+
+This is a higher-level module built on packaging.database and
+packaging.pypi.
+"""
+import os
+import sys
+import stat
+import errno
+import shutil
+import logging
+import tempfile
+from sysconfig import get_config_var, get_path, is_python_build
+
+from packaging import logger
+from packaging.dist import Distribution
+from packaging.util import (_is_archive_file, ask, get_install_method,
+ egginfo_to_distinfo)
+from packaging.pypi import wrapper
+from packaging.version import get_version_predicate
+from packaging.database import get_distributions, get_distribution
+from packaging.depgraph import generate_graph
+
+from packaging.errors import (PackagingError, InstallationException,
+ InstallationConflict, CCompilerError)
+from packaging.pypi.errors import ProjectNotFound, ReleaseNotFound
+from packaging import database
+
+
+__all__ = ['install_dists', 'install_from_infos', 'get_infos', 'remove',
+ 'install', 'install_local_project']
+
+
+def _move_files(files, destination):
+    """Move the given files to the destination directory, keeping the same
+    structure.
+
+    Yield (old, new) tuples giving the old and new locations of each file.
+
+    :param files: a list of files to move.
+    :param destination: the directory to put the files in.
+ """
+
+ for old in files:
+ filename = os.path.split(old)[-1]
+ new = os.path.join(destination, filename)
+ # try to make the paths.
+ try:
+ os.makedirs(os.path.dirname(new))
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
+ os.rename(old, new)
+ yield old, new
+
+
+def _run_distutils_install(path):
+ # backward compat: using setuptools or plain-distutils
+ cmd = '%s setup.py install --record=%s'
+ record_file = os.path.join(path, 'RECORD')
+ os.system(cmd % (sys.executable, record_file))
+ if not os.path.exists(record_file):
+ raise ValueError('failed to install')
+ else:
+ egginfo_to_distinfo(record_file, remove_egginfo=True)
+
+
+def _run_setuptools_install(path):
+ cmd = '%s setup.py install --record=%s --single-version-externally-managed'
+ record_file = os.path.join(path, 'RECORD')
+
+ os.system(cmd % (sys.executable, record_file))
+ if not os.path.exists(record_file):
+ raise ValueError('failed to install')
+ else:
+ egginfo_to_distinfo(record_file, remove_egginfo=True)
+
+
+def _run_packaging_install(path):
+ # XXX check for a valid setup.cfg?
+ dist = Distribution()
+ dist.parse_config_files()
+ try:
+ dist.run_command('install_dist')
+ name = dist.metadata['Name']
+ return database.get_distribution(name) is not None
+ except (IOError, os.error, PackagingError, CCompilerError) as msg:
+ raise ValueError("Failed to install, " + str(msg))
+
+
+def _install_dist(dist, path):
+ """Install a distribution into a path.
+
+ This:
+
+    * unpacks the distribution
+    * copies the files into "path"
+    * determines if the distribution is packaging or distutils1.
+ """
+ where = dist.unpack()
+
+ if where is None:
+ raise ValueError('Cannot locate the unpacked archive')
+
+ return _run_install_from_archive(where)
+
+
+def install_local_project(path):
+ """Install a distribution from a source directory.
+
+    If the source directory contains a setup.py, install using distutils1.
+    If a setup.cfg is found, install using the install_dist command.
+
+    Returns True on success, False on failure.
+ """
+ path = os.path.abspath(path)
+ if os.path.isdir(path):
+ logger.info('Installing from source directory: %r', path)
+ return _run_install_from_dir(path)
+ elif _is_archive_file(path):
+ logger.info('Installing from archive: %r', path)
+ _unpacked_dir = tempfile.mkdtemp()
+ try:
+ shutil.unpack_archive(path, _unpacked_dir)
+ return _run_install_from_archive(_unpacked_dir)
+ finally:
+ shutil.rmtree(_unpacked_dir)
+ else:
+ logger.warning('No project to install.')
+ return False
+
+
+def _run_install_from_archive(source_dir):
+ # XXX need a better way
+ for item in os.listdir(source_dir):
+ fullpath = os.path.join(source_dir, item)
+ if os.path.isdir(fullpath):
+ source_dir = fullpath
+ break
+ return _run_install_from_dir(source_dir)
+
+
+install_methods = {
+ 'packaging': _run_packaging_install,
+ 'setuptools': _run_setuptools_install,
+ 'distutils': _run_distutils_install}
+
+
+def _run_install_from_dir(source_dir):
+ old_dir = os.getcwd()
+ os.chdir(source_dir)
+ install_method = get_install_method(source_dir)
+    func = install_methods[install_method]
+    try:
+ try:
+ func(source_dir)
+ return True
+ except ValueError as err:
+ # failed to install
+ logger.info(str(err))
+ return False
+ finally:
+ os.chdir(old_dir)
+
+
+def install_dists(dists, path, paths=None):
+ """Install all distributions provided in dists, with the given prefix.
+
+    If an error occurs while installing one of the distributions, uninstall all
+    the distributions installed so far (in the context of this function).
+
+ Return a list of installed dists.
+
+ :param dists: distributions to install
+ :param path: base path to install distribution in
+ :param paths: list of paths (defaults to sys.path) to look for info
+ """
+
+ installed_dists = []
+ for dist in dists:
+ logger.info('Installing %r %s...', dist.name, dist.version)
+ try:
+ _install_dist(dist, path)
+ installed_dists.append(dist)
+ except Exception as e:
+ logger.info('Failed: %s', e)
+
+ # reverting
+ for installed_dist in installed_dists:
+ logger.info('Reverting %r', installed_dist)
+ remove(installed_dist.name, paths)
+ raise e
+ return installed_dists
+
+
+def install_from_infos(install_path=None, install=[], remove=[], conflicts=[],
+ paths=None):
+ """Install and remove the given distributions.
+
+    The function signature is made to be compatible with that of get_infos.
+    The aim of this function is to provide a way to install/remove what's
+    asked, and to roll back if needed.
+
+    So it's not possible to end up in an inconsistent state: everything is
+    either installed or uninstalled, never half-installed.
+
+    The process follows these steps:
+
+    1. Move all distributions that will be removed to a temporary location
+    2. Install all the distributions that will be installed in a temp. loc.
+    3. If the installation fails, roll back (eg. move back) those
+    distributions, or remove what has been installed.
+    4. Else, move the distributions to the right locations, and remove for
+    real the distributions that need to be removed.
+
+ :param install_path: the installation path where we want to install the
+ distributions.
+ :param install: list of distributions that will be installed; install_path
+ must be provided if this list is not empty.
+ :param remove: list of distributions that will be removed.
+    :param conflicts: list of conflicting distributions, eg. that will be in
+                      conflict once the install and remove operations are
+                      processed.
+ :param paths: list of paths (defaults to sys.path) to look for info
+ """
+ # first of all, if we have conflicts, stop here.
+ if conflicts:
+ raise InstallationConflict(conflicts)
+
+ if install and not install_path:
+ raise ValueError("Distributions are to be installed but `install_path`"
+ " is not provided.")
+
+ # before removing the files, we will start by moving them away
+ # then, if any error occurs, we could replace them in the good place.
+ temp_files = {} # contains lists of {dist: (old, new)} paths
+ temp_dir = None
+ if remove:
+ temp_dir = tempfile.mkdtemp()
+ for dist in remove:
+ files = dist.list_installed_files()
+ temp_files[dist] = _move_files(files, temp_dir)
+ try:
+ if install:
+ install_dists(install, install_path, paths)
+ except:
+ # if an error occurs, put back the files in the right place.
+ for files in temp_files.values():
+ for old, new in files:
+ shutil.move(new, old)
+ if temp_dir:
+ shutil.rmtree(temp_dir)
+ # now re-raising
+ raise
+
+ # we can remove them for good
+ for files in temp_files.values():
+ for old, new in files:
+ os.remove(new)
+ if temp_dir:
+ shutil.rmtree(temp_dir)
+
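The signature compatibility with get_infos (defined below) means its result
maps almost directly onto these parameters; only the 'conflict' key is
spelled without the trailing 's', which is why install() at the end of this
module passes the values positionally. A sketch with an invented requirement
string and target path:

    infos = get_infos('FooBar (<=1.2)')        # hypothetical requirement
    if not infos['conflict']:
        install_from_infos('/tmp/target',      # example install_path
                           install=infos['install'],
                           remove=infos['remove'],
                           conflicts=infos['conflict'])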
+
+def _get_setuptools_deps(release):
+ # NotImplementedError
+ pass
+
+
+def get_infos(requirements, index=None, installed=None, prefer_final=True):
+    """Return information about what's going to be installed and upgraded.
+
+ :param requirements: is a *string* containing the requirements for this
+ project (for instance "FooBar 1.1" or "BarBaz (<1.2)")
+    :param index: If an index is specified, use it; otherwise, use
+                  :class:`packaging.pypi.wrapper.ClientWrapper` to get
+                  project metadata.
+ :param installed: a list of already installed distributions.
+ :param prefer_final: when picking up the releases, prefer a "final" one
+ over a beta/alpha/etc one.
+
+ The results are returned in a dict, containing all the operations
+ needed to install the given requirements::
+
+        >>> get_infos("FooBar (<=1.2)")
+ {'install': [<FooBar 1.1>], 'remove': [], 'conflict': []}
+
+ Conflict contains all the conflicting distributions, if there is a
+ conflict.
+ """
+ # this function does several things:
+ # 1. get a release specified by the requirements
+ # 2. gather its metadata, using setuptools compatibility if needed
+ # 3. compare this tree with what is currently installed on the system,
+ # return the requirements of what is missing
+ # 4. do that recursively and merge back the results
+ # 5. return a dict containing information about what is needed to install
+ # or remove
+
+ if not installed:
+ logger.debug('Reading installed distributions')
+ installed = list(get_distributions(use_egg_info=True))
+
+ infos = {'install': [], 'remove': [], 'conflict': []}
+    # Is a compatible version of the project already installed?
+ predicate = get_version_predicate(requirements)
+ found = False
+
+ # check that the project isn't already installed
+ for installed_project in installed:
+        # is it a compatible project?
+ if predicate.name.lower() != installed_project.name.lower():
+ continue
+ found = True
+ logger.info('Found %r %s', installed_project.name,
+ installed_project.version)
+
+ # if we already have something installed, check it matches the
+ # requirements
+ if predicate.match(installed_project.version):
+ return infos
+ break
+
+ if not found:
+ logger.debug('Project not installed')
+
+ if not index:
+ index = wrapper.ClientWrapper()
+
+ if not installed:
+ installed = get_distributions(use_egg_info=True)
+
+ # Get all the releases that match the requirements
+ try:
+ release = index.get_release(requirements)
+ except (ReleaseNotFound, ProjectNotFound):
+ raise InstallationException('Release not found: %r' % requirements)
+
+ if release is None:
+ logger.info('Could not find a matching project')
+ return infos
+
+ metadata = release.fetch_metadata()
+
+ # we need to build setuptools deps if any
+ if 'requires_dist' not in metadata:
+ metadata['requires_dist'] = _get_setuptools_deps(release)
+
+ # build the dependency graph with local and required dependencies
+ dists = list(installed)
+ dists.append(release)
+ depgraph = generate_graph(dists)
+
+ # Get what the missing deps are
+ dists = depgraph.missing[release]
+ if dists:
+ logger.info("Missing dependencies found, retrieving metadata")
+ # we have missing deps
+ for dist in dists:
+ _update_infos(infos, get_infos(dist, index, installed))
+
+ # Fill in the infos
+ existing = [d for d in installed if d.name == release.name]
+ if existing:
+ infos['remove'].append(existing[0])
+ infos['conflict'].extend(depgraph.reverse_list[existing[0]])
+ infos['install'].append(release)
+ return infos
+
+
+def _update_infos(infos, new_infos):
+    """Extend the lists contained in the `infos` dict with those contained
+    in the `new_infos` one.
+ """
+ for key, value in infos.items():
+ if key in new_infos:
+ infos[key].extend(new_infos[key])
+
+
+def remove(project_name, paths=None, auto_confirm=True):
+ """Removes a single project from the installation.
+
+ Returns True on success
+ """
+ dist = get_distribution(project_name, use_egg_info=True, paths=paths)
+ if dist is None:
+ raise PackagingError('Distribution %r not found' % project_name)
+ files = dist.list_installed_files(local=True)
+ rmdirs = []
+ rmfiles = []
+ tmp = tempfile.mkdtemp(prefix=project_name + '-uninstall')
+
+ def _move_file(source, target):
+ try:
+ os.rename(source, target)
+ except OSError as err:
+ return err
+ return None
+
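+    # First pass (a dry run): move each installed file into a temporary
+    # directory and immediately move it back. If any rename fails, nothing
+    # has been deleted yet and the removal can be aborted safely.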
+ success = True
+ error = None
+ try:
+ for file_, md5, size in files:
+ if os.path.isfile(file_):
+ dirname, filename = os.path.split(file_)
+ tmpfile = os.path.join(tmp, filename)
+ try:
+ error = _move_file(file_, tmpfile)
+ if error is not None:
+ success = False
+ break
+ finally:
+ if not os.path.isfile(file_):
+ os.rename(tmpfile, file_)
+ if file_ not in rmfiles:
+ rmfiles.append(file_)
+ if dirname not in rmdirs:
+ rmdirs.append(dirname)
+ finally:
+ shutil.rmtree(tmp)
+
+ if not success:
+ logger.info('%r cannot be removed.', project_name)
+ logger.info('Error: %s', error)
+ return False
+
+ logger.info('Removing %r: ', project_name)
+
+ for file_ in rmfiles:
+ logger.info(' %s', file_)
+
+ # Taken from the pip project
+ if auto_confirm:
+ response = 'y'
+ else:
+ response = ask('Proceed (y/n)? ', ('y', 'n'))
+
+ if response == 'y':
+ file_count = 0
+ for file_ in rmfiles:
+ os.remove(file_)
+ file_count += 1
+
+        dir_count = 0
+        for dirname in rmdirs:
+            if not os.path.exists(dirname):
+                # the directory may have been removed already
+                continue
+
+            files_count = 0
+            for root, dirs, files in os.walk(dirname):
+                files_count += len(files)
+
+            if files_count > 0:
+                # XXX should emit a warning: the directory is not empty
+                continue
+
+            # the tree contains only empty directories: remove it if writable
+            if os.stat(dirname).st_mode & stat.S_IWUSR:
+                # XXX Add a callable in shutil.rmtree to count
+                # the number of deleted elements
+                shutil.rmtree(dirname)
+                dir_count += 1
+
+ # removing the top path
+ # XXX count it ?
+ if os.path.exists(dist.path):
+ shutil.rmtree(dist.path)
+
+ logger.info('Success: removed %d files and %d dirs',
+ file_count, dir_count)
+
+ return True
+
+
+def install(project):
+ """Installs a project.
+
+ Returns True on success, False on failure
+ """
+ if is_python_build():
+ # Python would try to install into the site-packages directory under
+ # $PREFIX, but when running from an uninstalled code checkout we don't
+ # want to create directories under the installation root
+ message = ('installing third-party projects from an uninstalled '
+ 'Python is not supported')
+ logger.error(message)
+ return False
+
+ logger.info('Checking the installation location...')
+ purelib_path = get_path('purelib')
+
+ # trying to write a file there
+ try:
+ with tempfile.NamedTemporaryFile(suffix=project,
+ dir=purelib_path) as testfile:
+ testfile.write(b'test')
+ except OSError:
+ # FIXME this should check the errno, or be removed altogether (race
+ # condition: the directory permissions could be changed between here
+ # and the actual install)
+        logger.info('Unable to write in %r. Do you have the permissions?',
+                    purelib_path)
+ return False
+
+ logger.info('Getting information about %r...', project)
+ try:
+ info = get_infos(project)
+ except InstallationException:
+        logger.info('Could not find %r', project)
+ return False
+
+ if info['install'] == []:
+ logger.info('Nothing to install')
+ return False
+
+ install_path = get_config_var('base')
+ try:
+ install_from_infos(install_path,
+ info['install'], info['remove'], info['conflict'])
+
+ except InstallationConflict as e:
+ if logger.isEnabledFor(logging.INFO):
+ projects = ('%r %s' % (p.name, p.version) for p in e.args[0])
+ logger.info('%r conflicts with %s', project, ','.join(projects))
+
+ return True
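+
+
+# Illustrative use of the two high-level entry points above, assuming this
+# module is importable as packaging.install and that 'FooBar' is a
+# (hypothetical) project published on the configured index:
+#
+#   >>> from packaging.install import install, remove
+#   >>> install('FooBar')     # resolve deps, then call install_from_infos
+#   True
+#   >>> remove('FooBar')      # the two-pass removal implemented above
+#   True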
diff --git a/Lib/packaging/manifest.py b/Lib/packaging/manifest.py
new file mode 100644
index 0000000..40e7330
--- /dev/null
+++ b/Lib/packaging/manifest.py
@@ -0,0 +1,381 @@
+"""Class representing the list of files in a distribution.
+
+The Manifest class can be used to:
+
+ - read or write a MANIFEST file
+ - read a template file and find out the file list
+"""
+# XXX todo: document + add tests
+import re
+import os
+import fnmatch
+
+from packaging import logger
+from packaging.util import write_file, convert_path
+from packaging.errors import (PackagingTemplateError,
+ PackagingInternalError)
+
+__all__ = ['Manifest']
+
+# a backslash followed by optional spaces/tabs + EOL (a continuation line)
+_COLLAPSE_PATTERN = re.compile(r'\\[ \t]*\n', re.M)
+_COMMENTED_LINE = re.compile('#.*?(?=\n)|\n(?=$)', re.M | re.S)
+
+
+class Manifest(object):
+    """A list of files built by exploring the filesystem and filtered by
+    applying various patterns to what we find there.
+ """
+
+ def __init__(self):
+ self.allfiles = None
+ self.files = []
+
+ #
+ # Public API
+ #
+
+ def findall(self, dir=os.curdir):
+ self.allfiles = _findall(dir)
+
+ def append(self, item):
+ self.files.append(item)
+
+ def extend(self, items):
+ self.files.extend(items)
+
+ def sort(self):
+ # Not a strict lexical sort!
+ self.files = [os.path.join(*path_tuple) for path_tuple in
+ sorted(os.path.split(path) for path in self.files)]
+
+ def clear(self):
+ """Clear all collected files."""
+ self.files = []
+ if self.allfiles is not None:
+ self.allfiles = []
+
+ def remove_duplicates(self):
+ # Assumes list has been sorted!
+ for i in range(len(self.files) - 1, 0, -1):
+ if self.files[i] == self.files[i - 1]:
+ del self.files[i]
+
+ def read_template(self, path_or_file):
+        """Read and parse a manifest template file.
+        'path_or_file' can be a path or a file-like object.
+
+ Updates the list accordingly.
+ """
+ if isinstance(path_or_file, str):
+ f = open(path_or_file)
+ else:
+ f = path_or_file
+
+ try:
+ content = f.read()
+ # first, let's unwrap collapsed lines
+ content = _COLLAPSE_PATTERN.sub('', content)
+ # next, let's remove commented lines and empty lines
+ content = _COMMENTED_LINE.sub('', content)
+
+ # now we have our cleaned up lines
+ lines = [line.strip() for line in content.split('\n')]
+ finally:
+ f.close()
+
+ for line in lines:
+ if line == '':
+ continue
+ try:
+ self._process_template_line(line)
+ except PackagingTemplateError as msg:
+ logger.warning("%s, %s", path_or_file, msg)
+
+ def write(self, path):
+        """Write the file list in 'self.files' (presumably as filled in
+        by 'read_template()') to the manifest file named by 'path'.
+        """
+ if os.path.isfile(path):
+ with open(path) as fp:
+ first_line = fp.readline()
+
+ if first_line != '# file GENERATED by packaging, do NOT edit\n':
+ logger.info("not writing to manually maintained "
+ "manifest file %r", path)
+ return
+
+ self.sort()
+ self.remove_duplicates()
+ content = self.files[:]
+ content.insert(0, '# file GENERATED by packaging, do NOT edit')
+ logger.info("writing manifest file %r", path)
+ write_file(path, content)
+
+ def read(self, path):
+        """Read the manifest file named by 'path' and use it to fill
+        in 'self.files', the list of files to include in the source
+        distribution.
+        """
+        logger.info("reading manifest file %r", path)
+        with open(path) as manifest:
+            for line in manifest:
+                self.append(line.strip())
+
+ def exclude_pattern(self, pattern, anchor=True, prefix=None,
+ is_regex=False):
+ """Remove strings (presumably filenames) from 'files' that match
+ 'pattern'.
+
+ Other parameters are the same as for 'include_pattern()', above.
+ The list 'self.files' is modified in place. Return True if files are
+ found.
+ """
+ files_found = False
+ pattern_re = _translate_pattern(pattern, anchor, prefix, is_regex)
+ for i in range(len(self.files) - 1, -1, -1):
+ if pattern_re.search(self.files[i]):
+ del self.files[i]
+ files_found = True
+
+ return files_found
+
+ #
+ # Private API
+ #
+
+ def _parse_template_line(self, line):
+ words = line.split()
+ if len(words) == 1 and words[0] not in (
+ 'include', 'exclude', 'global-include', 'global-exclude',
+ 'recursive-include', 'recursive-exclude', 'graft', 'prune'):
+ # no action given, let's use the default 'include'
+ words.insert(0, 'include')
+
+ action = words[0]
+ patterns = dir = dir_pattern = None
+
+ if action in ('include', 'exclude',
+ 'global-include', 'global-exclude'):
+ if len(words) < 2:
+ raise PackagingTemplateError(
+ "%r expects <pattern1> <pattern2> ..." % action)
+
+ patterns = [convert_path(word) for word in words[1:]]
+
+ elif action in ('recursive-include', 'recursive-exclude'):
+ if len(words) < 3:
+ raise PackagingTemplateError(
+ "%r expects <dir> <pattern1> <pattern2> ..." % action)
+
+ dir = convert_path(words[1])
+ patterns = [convert_path(word) for word in words[2:]]
+
+ elif action in ('graft', 'prune'):
+ if len(words) != 2:
+ raise PackagingTemplateError(
+ "%r expects a single <dir_pattern>" % action)
+
+ dir_pattern = convert_path(words[1])
+
+ else:
+ raise PackagingTemplateError("unknown action %r" % action)
+
+ return action, patterns, dir, dir_pattern
+
+ def _process_template_line(self, line):
+ # Parse the line: split it up, make sure the right number of words
+ # is there, and return the relevant words. 'action' is always
+ # defined: it's the first word of the line. Which of the other
+ # three are defined depends on the action; it'll be either
+ # patterns, (dir and patterns), or (dir_pattern).
+ action, patterns, dir, dir_pattern = self._parse_template_line(line)
+
+ # OK, now we know that the action is valid and we have the
+ # right number of words on the line for that action -- so we
+ # can proceed with minimal error-checking.
+ if action == 'include':
+ for pattern in patterns:
+ if not self._include_pattern(pattern, anchor=True):
+ logger.warning("no files found matching %r", pattern)
+
+ elif action == 'exclude':
+ for pattern in patterns:
+ if not self.exclude_pattern(pattern, anchor=True):
+ logger.warning("no previously-included files "
+ "found matching %r", pattern)
+
+ elif action == 'global-include':
+ for pattern in patterns:
+ if not self._include_pattern(pattern, anchor=False):
+ logger.warning("no files found matching %r "
+ "anywhere in distribution", pattern)
+
+ elif action == 'global-exclude':
+ for pattern in patterns:
+ if not self.exclude_pattern(pattern, anchor=False):
+ logger.warning("no previously-included files "
+ "matching %r found anywhere in "
+ "distribution", pattern)
+
+ elif action == 'recursive-include':
+ for pattern in patterns:
+ if not self._include_pattern(pattern, prefix=dir):
+ logger.warning("no files found matching %r "
+ "under directory %r", pattern, dir)
+
+ elif action == 'recursive-exclude':
+ for pattern in patterns:
+ if not self.exclude_pattern(pattern, prefix=dir):
+ logger.warning("no previously-included files "
+ "matching %r found under directory %r",
+ pattern, dir)
+
+ elif action == 'graft':
+ if not self._include_pattern(None, prefix=dir_pattern):
+ logger.warning("no directories found matching %r",
+ dir_pattern)
+
+ elif action == 'prune':
+ if not self.exclude_pattern(None, prefix=dir_pattern):
+ logger.warning("no previously-included directories found "
+ "matching %r", dir_pattern)
+ else:
+ raise PackagingInternalError(
+ "this cannot happen: invalid action %r" % action)
+
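+    # A template handled by read_template() consists of lines like the
+    # following (illustrative):
+    #
+    #   include README.txt setup.cfg
+    #   recursive-include docs *.rst *.txt
+    #   graft examples
+    #   prune examples/tmp
+    #
+    # A single word that is not an action keyword is treated as
+    # 'include <word>' (see _parse_template_line above).
+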
+ def _include_pattern(self, pattern, anchor=True, prefix=None,
+ is_regex=False):
+ """Select strings (presumably filenames) from 'self.files' that
+ match 'pattern', a Unix-style wildcard (glob) pattern.
+
+ Patterns are not quite the same as implemented by the 'fnmatch'
+ module: '*' and '?' match non-special characters, where "special"
+ is platform-dependent: slash on Unix; colon, slash, and backslash on
+ DOS/Windows; and colon on Mac OS.
+
+ If 'anchor' is true (the default), then the pattern match is more
+ stringent: "*.py" will match "foo.py" but not "foo/bar.py". If
+ 'anchor' is false, both of these will match.
+
+ If 'prefix' is supplied, then only filenames starting with 'prefix'
+ (itself a pattern) and ending with 'pattern', with anything in between
+ them, will match. 'anchor' is ignored in this case.
+
+ If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and
+ 'pattern' is assumed to be either a string containing a regex or a
+ regex object -- no translation is done, the regex is just compiled
+ and used as-is.
+
+ Selected strings will be added to self.files.
+
+ Return True if files are found.
+ """
+ # XXX docstring lying about what the special chars are?
+ files_found = False
+ pattern_re = _translate_pattern(pattern, anchor, prefix, is_regex)
+
+ # delayed loading of allfiles list
+ if self.allfiles is None:
+ self.findall()
+
+ for name in self.allfiles:
+ if pattern_re.search(name):
+ self.files.append(name)
+ files_found = True
+
+ return files_found
+
+
+#
+# Utility functions
+#
+def _findall(dir=os.curdir):
+ """Find all files under 'dir' and return the list of full filenames
+ (relative to 'dir').
+ """
+ from stat import S_ISREG, S_ISDIR, S_ISLNK
+
+    results = []
+ stack = [dir]
+ pop = stack.pop
+ push = stack.append
+
+ while stack:
+ dir = pop()
+ names = os.listdir(dir)
+
+ for name in names:
+ if dir != os.curdir: # avoid the dreaded "./" syndrome
+ fullname = os.path.join(dir, name)
+ else:
+ fullname = name
+
+ # Avoid excess stat calls -- just one will do, thank you!
+ stat = os.stat(fullname)
+ mode = stat.st_mode
+ if S_ISREG(mode):
+                results.append(fullname)
+ elif S_ISDIR(mode) and not S_ISLNK(mode):
+ push(fullname)
+
+    return results
+
+
+def _glob_to_re(pattern):
+ """Translate a shell-like glob pattern to a regular expression.
+
+ Return a string containing the regex. Differs from
+ 'fnmatch.translate()' in that '*' does not match "special characters"
+ (which are platform-specific).
+ """
+ pattern_re = fnmatch.translate(pattern)
+
+ # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which
+ # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix,
+ # and by extension they shouldn't match such "special characters" under
+ # any OS. So change all non-escaped dots in the RE to match any
+ # character except the special characters (currently: just os.sep).
+ sep = os.sep
+ if os.sep == '\\':
+ # we're using a regex to manipulate a regex, so we need
+ # to escape the backslash twice
+ sep = r'\\\\'
+ escaped = r'\1[^%s]' % sep
+ pattern_re = re.sub(r'((?<!\\)(\\\\)*)\.', escaped, pattern_re)
+ return pattern_re
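+
+# Illustrative behaviour (the exact regex text varies across Python
+# versions, since the output of fnmatch.translate() is post-processed):
+# _glob_to_re('*.py') gives a pattern that matches 'foo.py' but not
+# 'foo/bar.py' on Unix, because every unescaped '.' in the translated
+# pattern is replaced with '[^/]'.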
+
+
+def _translate_pattern(pattern, anchor=True, prefix=None, is_regex=False):
+ """Translate a shell-like wildcard pattern to a compiled regular
+ expression.
+
+ Return the compiled regex. If 'is_regex' true,
+ then 'pattern' is directly compiled to a regex (if it's a string)
+ or just returned as-is (assumes it's a regex object).
+ """
+ if is_regex:
+ if isinstance(pattern, str):
+ return re.compile(pattern)
+ else:
+ return pattern
+
+ if pattern:
+ pattern_re = _glob_to_re(pattern)
+ else:
+ pattern_re = ''
+
+ if prefix is not None:
+ # ditch end of pattern character
+ empty_pattern = _glob_to_re('')
+ prefix_re = _glob_to_re(prefix)[:-len(empty_pattern)]
+ sep = os.sep
+ if os.sep == '\\':
+ sep = r'\\'
+ pattern_re = "^" + sep.join((prefix_re, ".*" + pattern_re))
+ else: # no prefix -- respect anchor flag
+ if anchor:
+ pattern_re = "^" + pattern_re
+
+ return re.compile(pattern_re)
diff --git a/Lib/packaging/markers.py b/Lib/packaging/markers.py
new file mode 100644
index 0000000..63fdc19
--- /dev/null
+++ b/Lib/packaging/markers.py
@@ -0,0 +1,189 @@
+"""Parser for the environment markers micro-language defined in PEP 345."""
+
+import os
+import sys
+import platform
+from io import BytesIO
+from tokenize import tokenize, NAME, OP, STRING, ENDMARKER, ENCODING
+
+__all__ = ['interpret']
+
+
+# allowed operators
+_OPERATORS = {'==': lambda x, y: x == y,
+ '!=': lambda x, y: x != y,
+ '>': lambda x, y: x > y,
+ '>=': lambda x, y: x >= y,
+ '<': lambda x, y: x < y,
+ '<=': lambda x, y: x <= y,
+ 'in': lambda x, y: x in y,
+ 'not in': lambda x, y: x not in y}
+
+
+def _operate(operation, x, y):
+ return _OPERATORS[operation](x, y)
+
+
+# restricted set of variables
+_VARS = {'sys.platform': sys.platform,
+ 'python_version': '%s.%s' % sys.version_info[:2],
+ # FIXME parsing sys.platform is not reliable, but there is no other
+ # way to get e.g. 2.7.2+, and the PEP is defined with sys.version
+ 'python_full_version': sys.version.split(' ', 1)[0],
+ 'os.name': os.name,
+ 'platform.version': platform.version(),
+ 'platform.machine': platform.machine(),
+ 'platform.python_implementation': platform.python_implementation(),
+ }
+
+
+class _Operation:
+
+ def __init__(self, execution_context=None):
+ self.left = None
+ self.op = None
+ self.right = None
+ if execution_context is None:
+ execution_context = {}
+ self.execution_context = execution_context
+
+ def _get_var(self, name):
+ if name in self.execution_context:
+ return self.execution_context[name]
+ return _VARS[name]
+
+ def __repr__(self):
+ return '%s %s %s' % (self.left, self.op, self.right)
+
+ def _is_string(self, value):
+ if value is None or len(value) < 2:
+ return False
+ for delimiter in '"\'':
+ if value[0] == value[-1] == delimiter:
+ return True
+ return False
+
+ def _is_name(self, value):
+ return value in _VARS
+
+ def _convert(self, value):
+ if value in _VARS:
+ return self._get_var(value)
+ return value.strip('"\'')
+
+ def _check_name(self, value):
+ if value not in _VARS:
+ raise NameError(value)
+
+ def _nonsense_op(self):
+        msg = 'This operation is not supported: "%s"' % self
+ raise SyntaxError(msg)
+
+ def __call__(self):
+ # make sure we do something useful
+ if self._is_string(self.left):
+ if self._is_string(self.right):
+ self._nonsense_op()
+ self._check_name(self.right)
+ else:
+ if not self._is_string(self.right):
+ self._nonsense_op()
+ self._check_name(self.left)
+
+ if self.op not in _OPERATORS:
+ raise TypeError('Operator not supported "%s"' % self.op)
+
+ left = self._convert(self.left)
+ right = self._convert(self.right)
+ return _operate(self.op, left, right)
+
+
+class _OR:
+ def __init__(self, left, right=None):
+ self.left = left
+ self.right = right
+
+ def filled(self):
+ return self.right is not None
+
+ def __repr__(self):
+ return 'OR(%r, %r)' % (self.left, self.right)
+
+ def __call__(self):
+ return self.left() or self.right()
+
+
+class _AND:
+ def __init__(self, left, right=None):
+ self.left = left
+ self.right = right
+
+ def filled(self):
+ return self.right is not None
+
+ def __repr__(self):
+ return 'AND(%r, %r)' % (self.left, self.right)
+
+ def __call__(self):
+ return self.left() and self.right()
+
+
+def interpret(marker, execution_context=None):
+ """Interpret a marker and return a result depending on environment."""
+ marker = marker.strip().encode()
+ ops = []
+ op_starting = True
+ for token in tokenize(BytesIO(marker).readline):
+ # Unpack token
+        toktype, tokval, start, end, line = token
+ if toktype not in (NAME, OP, STRING, ENDMARKER, ENCODING):
+ raise SyntaxError('Type not supported "%s"' % tokval)
+
+ if op_starting:
+ op = _Operation(execution_context)
+ if len(ops) > 0:
+ last = ops[-1]
+ if isinstance(last, (_OR, _AND)) and not last.filled():
+ last.right = op
+ else:
+ ops.append(op)
+ else:
+ ops.append(op)
+ op_starting = False
+ else:
+ op = ops[-1]
+
+ if (toktype == ENDMARKER or
+ (toktype == NAME and tokval in ('and', 'or'))):
+ if toktype == NAME and tokval == 'and':
+ ops.append(_AND(ops.pop()))
+ elif toktype == NAME and tokval == 'or':
+ ops.append(_OR(ops.pop()))
+ op_starting = True
+ continue
+
+ if isinstance(op, (_OR, _AND)) and op.right is not None:
+ op = op.right
+
+ if ((toktype in (NAME, STRING) and tokval not in ('in', 'not'))
+ or (toktype == OP and tokval == '.')):
+ if op.op is None:
+ if op.left is None:
+ op.left = tokval
+ else:
+ op.left += tokval
+ else:
+ if op.right is None:
+ op.right = tokval
+ else:
+ op.right += tokval
+ elif toktype == OP or tokval in ('in', 'not'):
+ if tokval == 'in' and op.op == 'not':
+ op.op = 'not in'
+ else:
+ op.op = tokval
+
+ for op in ops:
+ if not op():
+ return False
+ return True
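+
+
+# Illustrative use of interpret() (the results depend on the interpreter
+# and platform running this code):
+#
+#   >>> interpret("os.name == 'posix'")
+#   True
+#   >>> interpret("python_version >= '2.5' and sys.platform != 'win32'")
+#   True
+#
+# The execution_context mapping can override the default variables:
+#
+#   >>> interpret("python_version == '2.5'", {'python_version': '2.5'})
+#   True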
diff --git a/Lib/packaging/metadata.py b/Lib/packaging/metadata.py
new file mode 100644
index 0000000..2993ebb
--- /dev/null
+++ b/Lib/packaging/metadata.py
@@ -0,0 +1,570 @@
+"""Implementation of the Metadata for Python packages PEPs.
+
+Supports all metadata formats (1.0, 1.1, 1.2).
+"""
+
+import re
+import logging
+
+from io import StringIO
+from email import message_from_file
+from packaging import logger
+from packaging.markers import interpret
+from packaging.version import (is_valid_predicate, is_valid_version,
+ is_valid_versions)
+from packaging.errors import (MetadataMissingError,
+ MetadataConflictError,
+ MetadataUnrecognizedVersionError)
+
+try:
+ # docutils is installed
+ from docutils.utils import Reporter
+ from docutils.parsers.rst import Parser
+ from docutils import frontend
+ from docutils import nodes
+
+ class SilentReporter(Reporter):
+
+ def __init__(self, source, report_level, halt_level, stream=None,
+ debug=0, encoding='ascii', error_handler='replace'):
+ self.messages = []
+ super(SilentReporter, self).__init__(
+ source, report_level, halt_level, stream,
+ debug, encoding, error_handler)
+
+ def system_message(self, level, message, *children, **kwargs):
+ self.messages.append((level, message, children, kwargs))
+
+ _HAS_DOCUTILS = True
+except ImportError:
+ # docutils is not installed
+ _HAS_DOCUTILS = False
+
+# public API of this module
+__all__ = ['Metadata', 'PKG_INFO_ENCODING', 'PKG_INFO_PREFERRED_VERSION']
+
+# Encoding used for the PKG-INFO files
+PKG_INFO_ENCODING = 'utf-8'
+
+# preferred version. Hopefully will be changed
+# to 1.2 once PEP 345 is supported everywhere
+PKG_INFO_PREFERRED_VERSION = '1.0'
+
+_LINE_PREFIX = re.compile('\n \|')
+_241_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
+ 'Summary', 'Description',
+ 'Keywords', 'Home-page', 'Author', 'Author-email',
+ 'License')
+
+_314_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
+ 'Supported-Platform', 'Summary', 'Description',
+ 'Keywords', 'Home-page', 'Author', 'Author-email',
+ 'License', 'Classifier', 'Download-URL', 'Obsoletes',
+ 'Provides', 'Requires')
+
+_314_MARKERS = ('Obsoletes', 'Provides', 'Requires', 'Classifier',
+ 'Download-URL')
+
+_345_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
+ 'Supported-Platform', 'Summary', 'Description',
+ 'Keywords', 'Home-page', 'Author', 'Author-email',
+ 'Maintainer', 'Maintainer-email', 'License',
+ 'Classifier', 'Download-URL', 'Obsoletes-Dist',
+ 'Project-URL', 'Provides-Dist', 'Requires-Dist',
+ 'Requires-Python', 'Requires-External')
+
+_345_MARKERS = ('Provides-Dist', 'Requires-Dist', 'Requires-Python',
+ 'Obsoletes-Dist', 'Requires-External', 'Maintainer',
+ 'Maintainer-email', 'Project-URL')
+
+_ALL_FIELDS = set()
+_ALL_FIELDS.update(_241_FIELDS)
+_ALL_FIELDS.update(_314_FIELDS)
+_ALL_FIELDS.update(_345_FIELDS)
+
+
+def _version2fieldlist(version):
+ if version == '1.0':
+ return _241_FIELDS
+ elif version == '1.1':
+ return _314_FIELDS
+ elif version == '1.2':
+ return _345_FIELDS
+ raise MetadataUnrecognizedVersionError(version)
+
+
+def _best_version(fields):
+ """Detect the best version depending on the fields used."""
+ def _has_marker(keys, markers):
+ for marker in markers:
+ if marker in keys:
+ return True
+ return False
+
+ keys = list(fields)
+ possible_versions = ['1.0', '1.1', '1.2']
+
+ # first let's try to see if a field is not part of one of the version
+ for key in keys:
+ if key not in _241_FIELDS and '1.0' in possible_versions:
+ possible_versions.remove('1.0')
+ if key not in _314_FIELDS and '1.1' in possible_versions:
+ possible_versions.remove('1.1')
+ if key not in _345_FIELDS and '1.2' in possible_versions:
+ possible_versions.remove('1.2')
+
+ # possible_version contains qualified versions
+ if len(possible_versions) == 1:
+ return possible_versions[0] # found !
+ elif len(possible_versions) == 0:
+ raise MetadataConflictError('Unknown metadata set')
+
+ # let's see if one unique marker is found
+ is_1_1 = '1.1' in possible_versions and _has_marker(keys, _314_MARKERS)
+ is_1_2 = '1.2' in possible_versions and _has_marker(keys, _345_MARKERS)
+ if is_1_1 and is_1_2:
+ raise MetadataConflictError('You used incompatible 1.1 and 1.2 fields')
+
+ # we have the choice, either 1.0, or 1.2
+ # - 1.0 has a broken Summary field but works with all tools
+ # - 1.1 is to avoid
+ # - 1.2 fixes Summary but is not widespread yet
+ if not is_1_1 and not is_1_2:
+ # we couldn't find any specific marker
+ if PKG_INFO_PREFERRED_VERSION in possible_versions:
+ return PKG_INFO_PREFERRED_VERSION
+ if is_1_1:
+ return '1.1'
+
+ # default marker when 1.0 is disqualified
+ return '1.2'
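+
+# For instance (illustrative): a field set containing 'Requires-Dist' can
+# only be metadata 1.2, so _best_version returns '1.2'; a set with only
+# 'Name', 'Version' and 'Summary' fits every version, and the preferred
+# version ('1.0' above) is returned.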
+
+
+_ATTR2FIELD = {
+ 'metadata_version': 'Metadata-Version',
+ 'name': 'Name',
+ 'version': 'Version',
+ 'platform': 'Platform',
+ 'supported_platform': 'Supported-Platform',
+ 'summary': 'Summary',
+ 'description': 'Description',
+ 'keywords': 'Keywords',
+ 'home_page': 'Home-page',
+ 'author': 'Author',
+ 'author_email': 'Author-email',
+ 'maintainer': 'Maintainer',
+ 'maintainer_email': 'Maintainer-email',
+ 'license': 'License',
+ 'classifier': 'Classifier',
+ 'download_url': 'Download-URL',
+ 'obsoletes_dist': 'Obsoletes-Dist',
+ 'provides_dist': 'Provides-Dist',
+ 'requires_dist': 'Requires-Dist',
+ 'requires_python': 'Requires-Python',
+ 'requires_external': 'Requires-External',
+ 'requires': 'Requires',
+ 'provides': 'Provides',
+ 'obsoletes': 'Obsoletes',
+ 'project_url': 'Project-URL',
+}
+
+_PREDICATE_FIELDS = ('Requires-Dist', 'Obsoletes-Dist', 'Provides-Dist')
+_VERSIONS_FIELDS = ('Requires-Python',)
+_VERSION_FIELDS = ('Version',)
+_LISTFIELDS = ('Platform', 'Classifier', 'Obsoletes',
+ 'Requires', 'Provides', 'Obsoletes-Dist',
+ 'Provides-Dist', 'Requires-Dist', 'Requires-External',
+ 'Project-URL', 'Supported-Platform')
+_LISTTUPLEFIELDS = ('Project-URL',)
+
+_ELEMENTSFIELD = ('Keywords',)
+
+_UNICODEFIELDS = ('Author', 'Maintainer', 'Summary', 'Description')
+
+_MISSING = object()
+
+_FILESAFE = re.compile('[^A-Za-z0-9.]+')
+
+
+class Metadata:
+ """The metadata of a release.
+
+ Supports versions 1.0, 1.1 and 1.2 (auto-detected). You can
+ instantiate the class with one of these arguments (or none):
+ - *path*, the path to a METADATA file
+    - *fileobj*, a file-like object with METADATA as content
+    - *mapping*, a dict-like object
+ """
+ # TODO document that execution_context and platform_dependent are used
+ # to filter on query, not when setting a key
+ # also document the mapping API and UNKNOWN default key
+
+ def __init__(self, path=None, platform_dependent=False,
+ execution_context=None, fileobj=None, mapping=None):
+ self._fields = {}
+ self.requires_files = []
+ self.docutils_support = _HAS_DOCUTILS
+ self.platform_dependent = platform_dependent
+ self.execution_context = execution_context
+ if [path, fileobj, mapping].count(None) < 2:
+ raise TypeError('path, fileobj and mapping are exclusive')
+ if path is not None:
+ self.read(path)
+ elif fileobj is not None:
+ self.read_file(fileobj)
+ elif mapping is not None:
+ self.update(mapping)
+
+ def _set_best_version(self):
+ self._fields['Metadata-Version'] = _best_version(self._fields)
+
+ def _write_field(self, file, name, value):
+ file.write('%s: %s\n' % (name, value))
+
+ def __getitem__(self, name):
+ return self.get(name)
+
+ def __setitem__(self, name, value):
+ return self.set(name, value)
+
+ def __delitem__(self, name):
+ field_name = self._convert_name(name)
+ try:
+ del self._fields[field_name]
+ except KeyError:
+ raise KeyError(name)
+ self._set_best_version()
+
+ def __contains__(self, name):
+ return (name in self._fields or
+ self._convert_name(name) in self._fields)
+
+ def _convert_name(self, name):
+ if name in _ALL_FIELDS:
+ return name
+ name = name.replace('-', '_').lower()
+ return _ATTR2FIELD.get(name, name)
+
+ def _default_value(self, name):
+ if name in _LISTFIELDS or name in _ELEMENTSFIELD:
+ return []
+ return 'UNKNOWN'
+
+ def _check_rst_data(self, data):
+ """Return warnings when the provided data has syntax errors."""
+ source_path = StringIO()
+ parser = Parser()
+ settings = frontend.OptionParser().get_default_values()
+ settings.tab_width = 4
+ settings.pep_references = None
+ settings.rfc_references = None
+ reporter = SilentReporter(source_path,
+ settings.report_level,
+ settings.halt_level,
+ stream=settings.warning_stream,
+ debug=settings.debug,
+ encoding=settings.error_encoding,
+ error_handler=settings.error_encoding_error_handler)
+
+ document = nodes.document(settings, reporter, source=source_path)
+ document.note_source(source_path, -1)
+ try:
+ parser.parse(data, document)
+ except AttributeError:
+ reporter.messages.append((-1, 'Could not finish the parsing.',
+ '', {}))
+
+ return reporter.messages
+
+ def _platform(self, value):
+ if not self.platform_dependent or ';' not in value:
+ return True, value
+ value, marker = value.split(';')
+ return interpret(marker, self.execution_context), value
+
+ def _remove_line_prefix(self, value):
+ return _LINE_PREFIX.sub('\n', value)
+
+ #
+ # Public API
+ #
+ def get_fullname(self, filesafe=False):
+ """Return the distribution name with version.
+
+ If filesafe is true, return a filename-escaped form."""
+ name, version = self['Name'], self['Version']
+ if filesafe:
+ # For both name and version any runs of non-alphanumeric or '.'
+ # characters are replaced with a single '-'. Additionally any
+ # spaces in the version string become '.'
+ name = _FILESAFE.sub('-', name)
+ version = _FILESAFE.sub('-', version.replace(' ', '.'))
+ return '%s-%s' % (name, version)
+
+ def is_metadata_field(self, name):
+ """return True if name is a valid metadata key"""
+ name = self._convert_name(name)
+ return name in _ALL_FIELDS
+
+ def is_multi_field(self, name):
+ name = self._convert_name(name)
+ return name in _LISTFIELDS
+
+ def read(self, filepath):
+ """Read the metadata values from a file path."""
+ with open(filepath, 'r', encoding='utf-8') as fp:
+ self.read_file(fp)
+
+ def read_file(self, fileob):
+ """Read the metadata values from a file object."""
+ msg = message_from_file(fileob)
+ self._fields['Metadata-Version'] = msg['metadata-version']
+
+ for field in _version2fieldlist(self['Metadata-Version']):
+ if field in _LISTFIELDS:
+ # we can have multiple lines
+ values = msg.get_all(field)
+ if field in _LISTTUPLEFIELDS and values is not None:
+ values = [tuple(value.split(',')) for value in values]
+ self.set(field, values)
+ else:
+ # single line
+ value = msg[field]
+ if value is not None and value != 'UNKNOWN':
+ self.set(field, value)
+
+ def write(self, filepath):
+ """Write the metadata fields to filepath."""
+ with open(filepath, 'w', encoding='utf-8') as fp:
+ self.write_file(fp)
+
+ def write_file(self, fileobject):
+ """Write the PKG-INFO format data to a file object."""
+ self._set_best_version()
+ for field in _version2fieldlist(self['Metadata-Version']):
+ values = self.get(field)
+ if field in _ELEMENTSFIELD:
+ self._write_field(fileobject, field, ','.join(values))
+ continue
+ if field not in _LISTFIELDS:
+ if field == 'Description':
+ values = values.replace('\n', '\n |')
+ values = [values]
+
+ if field in _LISTTUPLEFIELDS:
+ values = [','.join(value) for value in values]
+
+ for value in values:
+ self._write_field(fileobject, field, value)
+
+ def update(self, other=None, **kwargs):
+ """Set metadata values from the given iterable `other` and kwargs.
+
+ Behavior is like `dict.update`: If `other` has a ``keys`` method,
+ they are looped over and ``self[key]`` is assigned ``other[key]``.
+ Else, ``other`` is an iterable of ``(key, value)`` iterables.
+
+ Keys that don't match a metadata field or that have an empty value are
+ dropped.
+ """
+        # XXX the code should just use self.set, which does the same checks and
+ # conversions already, but that would break packaging.pypi: it uses the
+ # update method, which does not call _set_best_version (which set
+ # does), and thus allows having a Metadata object (as long as you don't
+ # modify or write it) with extra fields from PyPI that are not fields
+ # defined in Metadata PEPs. to solve it, the best_version system
+ # should be reworked so that it's called only for writing, or in a new
+        # strict mode, or with a new, more lax Metadata subclass in packaging.pypi
+ def _set(key, value):
+ if key in _ATTR2FIELD and value:
+ self.set(self._convert_name(key), value)
+
+ if not other:
+ # other is None or empty container
+ pass
+ elif hasattr(other, 'keys'):
+ for k in other.keys():
+ _set(k, other[k])
+ else:
+ for k, v in other:
+ _set(k, v)
+
+ if kwargs:
+ for k, v in kwargs.items():
+ _set(k, v)
+
+ def set(self, name, value):
+        """Validate, then set a metadata field."""
+ name = self._convert_name(name)
+
+ if ((name in _ELEMENTSFIELD or name == 'Platform') and
+ not isinstance(value, (list, tuple))):
+ if isinstance(value, str):
+ value = [v.strip() for v in value.split(',')]
+ else:
+ value = []
+ elif (name in _LISTFIELDS and
+ not isinstance(value, (list, tuple))):
+ if isinstance(value, str):
+ value = [value]
+ else:
+ value = []
+
+ if logger.isEnabledFor(logging.WARNING):
+ project_name = self['Name']
+
+ if name in _PREDICATE_FIELDS and value is not None:
+ for v in value:
+ # check that the values are valid predicates
+ if not is_valid_predicate(v.split(';')[0]):
+ logger.warning(
+ '%r: %r is not a valid predicate (field %r)',
+ project_name, v, name)
+ # FIXME this rejects UNKNOWN, is that right?
+ elif name in _VERSIONS_FIELDS and value is not None:
+ if not is_valid_versions(value):
+ logger.warning('%r: %r is not a valid version (field %r)',
+ project_name, value, name)
+ elif name in _VERSION_FIELDS and value is not None:
+ if not is_valid_version(value):
+ logger.warning('%r: %r is not a valid version (field %r)',
+ project_name, value, name)
+
+ if name in _UNICODEFIELDS:
+ if name == 'Description':
+ value = self._remove_line_prefix(value)
+
+ self._fields[name] = value
+ self._set_best_version()
+
+ def get(self, name, default=_MISSING):
+ """Get a metadata field."""
+ name = self._convert_name(name)
+ if name not in self._fields:
+ if default is _MISSING:
+ default = self._default_value(name)
+ return default
+ if name in _UNICODEFIELDS:
+ value = self._fields[name]
+ return value
+ elif name in _LISTFIELDS:
+ value = self._fields[name]
+ if value is None:
+ return []
+ res = []
+ for val in value:
+ valid, val = self._platform(val)
+ if not valid:
+ continue
+ if name not in _LISTTUPLEFIELDS:
+ res.append(val)
+ else:
+ # That's for Project-URL
+ res.append((val[0], val[1]))
+ return res
+
+ elif name in _ELEMENTSFIELD:
+ valid, value = self._platform(self._fields[name])
+ if not valid:
+ return []
+ if isinstance(value, str):
+ return value.split(',')
+ valid, value = self._platform(self._fields[name])
+ if not valid:
+ return None
+ return value
+
+ def check(self, strict=False, restructuredtext=False):
+        """Check if the metadata is compliant. If strict is True, raise if
+        Name or Version are missing"""
+ # XXX should check the versions (if the file was loaded)
+ missing, warnings = [], []
+
+ for attr in ('Name', 'Version'): # required by PEP 345
+ if attr not in self:
+ missing.append(attr)
+
+ if strict and missing != []:
+ msg = 'missing required metadata: %s' % ', '.join(missing)
+ raise MetadataMissingError(msg)
+
+ for attr in ('Home-page', 'Author'):
+ if attr not in self:
+ missing.append(attr)
+
+ if _HAS_DOCUTILS and restructuredtext:
+ warnings.extend(self._check_rst_data(self['Description']))
+
+ # checking metadata 1.2 (XXX needs to check 1.1, 1.0)
+ if self['Metadata-Version'] != '1.2':
+ return missing, warnings
+
+ def is_valid_predicates(value):
+ for v in value:
+ if not is_valid_predicate(v.split(';')[0]):
+ return False
+ return True
+
+ for fields, controller in ((_PREDICATE_FIELDS, is_valid_predicates),
+ (_VERSIONS_FIELDS, is_valid_versions),
+ (_VERSION_FIELDS, is_valid_version)):
+ for field in fields:
+ value = self.get(field, None)
+ if value is not None and not controller(value):
+ warnings.append('Wrong value for %r: %s' % (field, value))
+
+ return missing, warnings
+
+ def todict(self):
+ """Return fields as a dict.
+
+ Field names will be converted to use the underscore-lowercase style
+ instead of hyphen-mixed case (i.e. home_page instead of Home-page).
+ """
+ data = {
+ 'metadata_version': self['Metadata-Version'],
+ 'name': self['Name'],
+ 'version': self['Version'],
+ 'summary': self['Summary'],
+ 'home_page': self['Home-page'],
+ 'author': self['Author'],
+ 'author_email': self['Author-email'],
+ 'license': self['License'],
+ 'description': self['Description'],
+ 'keywords': self['Keywords'],
+ 'platform': self['Platform'],
+ 'classifier': self['Classifier'],
+ 'download_url': self['Download-URL'],
+ }
+
+ if self['Metadata-Version'] == '1.2':
+ data['requires_dist'] = self['Requires-Dist']
+ data['requires_python'] = self['Requires-Python']
+ data['requires_external'] = self['Requires-External']
+ data['provides_dist'] = self['Provides-Dist']
+ data['obsoletes_dist'] = self['Obsoletes-Dist']
+ data['project_url'] = [','.join(url) for url in
+ self['Project-URL']]
+
+ elif self['Metadata-Version'] == '1.1':
+ data['provides'] = self['Provides']
+ data['requires'] = self['Requires']
+ data['obsoletes'] = self['Obsoletes']
+
+ return data
+
+ # Mapping API
+ # XXX these methods should return views or sets in 3.x
+
+ def keys(self):
+ return list(_version2fieldlist(self['Metadata-Version']))
+
+ def __iter__(self):
+ for key in self.keys():
+ yield key
+
+ def values(self):
+ return [self[key] for key in self.keys()]
+
+ def items(self):
+ return [(key, self[key]) for key in self.keys()]
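+
+
+# Minimal illustration of the mapping API described above:
+#
+#   >>> m = Metadata(mapping={'name': 'FooBar', 'version': '1.1'})
+#   >>> m['Name'], m['Version']
+#   ('FooBar', '1.1')
+#   >>> m['Metadata-Version']   # best version, auto-detected
+#   '1.0'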
diff --git a/Lib/packaging/pypi/__init__.py b/Lib/packaging/pypi/__init__.py
new file mode 100644
index 0000000..5660c50
--- /dev/null
+++ b/Lib/packaging/pypi/__init__.py
@@ -0,0 +1,9 @@
+"""Low-level and high-level APIs to interact with project indexes."""
+
+__all__ = ['simple',
+ 'xmlrpc',
+ 'dist',
+ 'errors',
+ 'mirrors']
+
+from packaging.pypi.dist import ReleaseInfo, ReleasesList, DistInfo
diff --git a/Lib/packaging/pypi/base.py b/Lib/packaging/pypi/base.py
new file mode 100644
index 0000000..305fca9
--- /dev/null
+++ b/Lib/packaging/pypi/base.py
@@ -0,0 +1,48 @@
+"""Base class for index crawlers."""
+
+from packaging.pypi.dist import ReleasesList
+
+
+class BaseClient:
+ """Base class containing common methods for the index crawlers/clients"""
+
+ def __init__(self, prefer_final, prefer_source):
+ self._prefer_final = prefer_final
+ self._prefer_source = prefer_source
+ self._index = self
+
+ def _get_prefer_final(self, prefer_final=None):
+ """Return the prefer_final internal parameter or the specified one if
+ provided"""
+        if prefer_final is not None:
+ return prefer_final
+ else:
+ return self._prefer_final
+
+ def _get_prefer_source(self, prefer_source=None):
+ """Return the prefer_source internal parameter or the specified one if
+ provided"""
+        if prefer_source is not None:
+ return prefer_source
+ else:
+ return self._prefer_source
+
+ def _get_project(self, project_name):
+        """Return a project instance, creating it if necessary"""
+ return self._projects.setdefault(project_name.lower(),
+ ReleasesList(project_name, index=self._index))
+
+ def download_distribution(self, requirements, temp_path=None,
+ prefer_source=None, prefer_final=None):
+ """Download a distribution from the last release according to the
+ requirements.
+
+ If temp_path is provided, download to this path, otherwise, create a
+ temporary location for the download and return it.
+ """
+ prefer_final = self._get_prefer_final(prefer_final)
+ prefer_source = self._get_prefer_source(prefer_source)
+ release = self.get_release(requirements, prefer_final)
+ if release:
+ dist = release.get_distribution(prefer_source=prefer_source)
+ return dist.download(temp_path)
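+
+
+# Concrete clients (e.g. the Crawler defined in packaging.pypi.simple)
+# inherit from BaseClient; a typical call is sketched below, with an
+# illustrative return value:
+#
+#   >>> client = Crawler()
+#   >>> client.download_distribution('FooBar (>=1.0)')
+#   '/tmp/tmpXXXXXX/FooBar-1.1.tar.gz'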
diff --git a/Lib/packaging/pypi/dist.py b/Lib/packaging/pypi/dist.py
new file mode 100644
index 0000000..541465e
--- /dev/null
+++ b/Lib/packaging/pypi/dist.py
@@ -0,0 +1,544 @@
+"""Classes representing releases and distributions retrieved from indexes.
+
+A project (= unique name) can have several releases (= versions) and
+each release can have several distributions (= sdist and bdists).
+
+Release objects contain metadata-related information (see PEP 376);
+distribution objects contain download-related information.
+"""
+
+import re
+import hashlib
+import tempfile
+import urllib.request
+import urllib.parse
+import urllib.error
+import urllib.parse
+from shutil import unpack_archive
+
+from packaging.errors import IrrationalVersionError
+from packaging.version import (suggest_normalized_version, NormalizedVersion,
+ get_version_predicate)
+from packaging.metadata import Metadata
+from packaging.pypi.errors import (HashDoesNotMatch, UnsupportedHashName,
+ CantParseArchiveName)
+
+
+__all__ = ['ReleaseInfo', 'DistInfo', 'ReleasesList', 'get_infos_from_url']
+
+EXTENSIONS = ".tar.gz .tar.bz2 .tar .zip .tgz .egg".split()
+MD5_HASH = re.compile(r'^.*#md5=([a-f0-9]+)$')
+DIST_TYPES = ['bdist', 'sdist']
+
+
+class IndexReference:
+ """Mixin used to store the index reference"""
+ def set_index(self, index=None):
+ self._index = index
+
+
+class ReleaseInfo(IndexReference):
+ """Represent a release of a project (a project with a specific version).
+    The release contains the metadata related to this specific
+    version, and is also a container for distribution-related information.
+
+ See the DistInfo class for more information about distributions.
+ """
+
+ def __init__(self, name, version, metadata=None, hidden=False,
+ index=None, **kwargs):
+ """
+ :param name: the name of the distribution
+ :param version: the version of the distribution
+ :param metadata: the metadata fields of the release.
+ :type metadata: dict
+ :param kwargs: optional arguments for a new distribution.
+ """
+ self.set_index(index)
+ self.name = name
+ self._version = None
+ self.version = version
+ if metadata:
+ self.metadata = Metadata(mapping=metadata)
+ else:
+ self.metadata = None
+ self.dists = {}
+ self.hidden = hidden
+
+ if 'dist_type' in kwargs:
+ dist_type = kwargs.pop('dist_type')
+ self.add_distribution(dist_type, **kwargs)
+
+ def set_version(self, version):
+ try:
+ self._version = NormalizedVersion(version)
+ except IrrationalVersionError:
+ suggestion = suggest_normalized_version(version)
+ if suggestion:
+ self.version = suggestion
+ else:
+ raise IrrationalVersionError(version)
+
+ def get_version(self):
+ return self._version
+
+ version = property(get_version, set_version)
+
+ def fetch_metadata(self):
+ """If the metadata is not set, use the indexes to get it"""
+ if not self.metadata:
+ self._index.get_metadata(self.name, str(self.version))
+ return self.metadata
+
+ @property
+ def is_final(self):
+ """proxy to version.is_final"""
+ return self.version.is_final
+
+ def fetch_distributions(self):
+ if self.dists is None:
+ self._index.get_distributions(self.name, str(self.version))
+ if self.dists is None:
+ self.dists = {}
+ return self.dists
+
+ def add_distribution(self, dist_type='sdist', python_version=None,
+ **params):
+        """Add distribution information to this release.
+        If distribution information is already set for this distribution
+        type, add the given url paths to the distribution. This can be
+        useful when some of them fail to download.
+
+ :param dist_type: the distribution type (eg. "sdist", "bdist", etc.)
+ :param params: the fields to be passed to the distribution object
+ (see the :class:DistInfo constructor).
+ """
+ if dist_type not in DIST_TYPES:
+ raise ValueError(dist_type)
+ if dist_type in self.dists:
+ self.dists[dist_type].add_url(**params)
+ else:
+ self.dists[dist_type] = DistInfo(self, dist_type,
+ index=self._index, **params)
+ if python_version:
+ self.dists[dist_type].python_version = python_version
+
+ def get_distribution(self, dist_type=None, prefer_source=True):
+ """Return a distribution.
+
+        If dist_type is set, look up that distribution type directly,
+        acting as an alias of __getitem__.
+
+        If prefer_source is True, search for a source distribution first,
+        then fall back to any existing distribution.
+ """
+ if len(self.dists) == 0:
+ raise LookupError
+ if dist_type:
+ return self[dist_type]
+ if prefer_source:
+ if "sdist" in self.dists:
+ dist = self["sdist"]
+ else:
+                dist = next(iter(self.dists.values()))
+ return dist
+
+ def unpack(self, path=None, prefer_source=True):
+ """Unpack the distribution to the given path.
+
+        If no destination is given, creates a temporary location.
+
+ Returns the location of the extracted files (root).
+ """
+ return self.get_distribution(prefer_source=prefer_source)\
+ .unpack(path=path)
+
+ def download(self, temp_path=None, prefer_source=True):
+ """Download the distribution, using the requirements.
+
+        If more than one distribution matches the requirements, use the
+        latest version.
+        Download the distribution and put it in temp_path. If no temp_path
+        is given, create and return one.
+
+ Returns the complete absolute path to the downloaded archive.
+ """
+ return self.get_distribution(prefer_source=prefer_source)\
+ .download(path=temp_path)
+
+ def set_metadata(self, metadata):
+ if not self.metadata:
+ self.metadata = Metadata()
+ self.metadata.update(metadata)
+
+ def __getitem__(self, item):
+ """distributions are available using release["sdist"]"""
+ return self.dists[item]
+
+ def _check_is_comparable(self, other):
+ if not isinstance(other, ReleaseInfo):
+ raise TypeError("cannot compare %s and %s"
+ % (type(self).__name__, type(other).__name__))
+ elif self.name != other.name:
+ raise TypeError("cannot compare %s and %s"
+ % (self.name, other.name))
+
+ def __repr__(self):
+ return "<%s %s>" % (self.name, self.version)
+
+ def __eq__(self, other):
+ self._check_is_comparable(other)
+ return self.version == other.version
+
+ def __lt__(self, other):
+ self._check_is_comparable(other)
+ return self.version < other.version
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def __gt__(self, other):
+ return not (self.__lt__(other) or self.__eq__(other))
+
+ def __le__(self, other):
+ return self.__eq__(other) or self.__lt__(other)
+
+ def __ge__(self, other):
+ return self.__eq__(other) or self.__gt__(other)
+
+ # See http://docs.python.org/reference/datamodel#object.__hash__
+ __hash__ = object.__hash__
+
+
+class DistInfo(IndexReference):
+ """Represents a distribution retrieved from an index (sdist, bdist, ...)
+ """
+
+ def __init__(self, release, dist_type=None, url=None, hashname=None,
+ hashval=None, is_external=True, python_version=None,
+ index=None):
+ """Create a new instance of DistInfo.
+
+ :param release: a DistInfo class is relative to a release.
+ :param dist_type: the type of the dist (eg. source, bin-*, etc.)
+ :param url: URL where we found this distribution
+ :param hashname: the name of the hash we want to use. Refer to the
+ hashlib.new documentation for more information.
+ :param hashval: the hash value.
+ :param is_external: we need to know if the provided url comes from
+ an index browsing, or from an external resource.
+
+ """
+ self.set_index(index)
+ self.release = release
+ self.dist_type = dist_type
+ self.python_version = python_version
+ self._unpacked_dir = None
+ # set the downloaded path to None by default. The goal here
+ # is to not download distributions multiple times
+ self.downloaded_location = None
+        # We store urls in a dict, because we need a bit more info
+        # than the bare URL; it is used later to find the right url to
+        # use. There are two _url* attributes: 'urls' holds a list of
+        # dicts describing the different urls, and '_url' caches the
+        # chosen url so that the selection process is not run multiple
+        # times.
+ self.urls = []
+ self._url = None
+ self.add_url(url, hashname, hashval, is_external)
+
+ def add_url(self, url=None, hashname=None, hashval=None, is_external=True):
+ """Add a new url to the list of urls"""
+ if hashname is not None:
+ try:
+ hashlib.new(hashname)
+ except ValueError:
+ raise UnsupportedHashName(hashname)
+ if url not in [u['url'] for u in self.urls]:
+ self.urls.append({
+ 'url': url,
+ 'hashname': hashname,
+ 'hashval': hashval,
+ 'is_external': is_external,
+ })
+ # reset the url selection process
+ self._url = None
+
+ @property
+ def url(self):
+        """Pick the right url from the list of urls in self.urls"""
+ # We return internal urls over externals.
+ # If there is more than one internal or external, return the first
+ # one.
+ if self._url is None:
+ if len(self.urls) > 1:
+                internals_urls = [u for u in self.urls
+                                  if not u['is_external']]
+ if len(internals_urls) >= 1:
+ self._url = internals_urls[0]
+ if self._url is None:
+ self._url = self.urls[0]
+ return self._url
+
+ @property
+ def is_source(self):
+        """Return True if the distribution is a source distribution"""
+ return self.dist_type == 'sdist'
+
+ def download(self, path=None):
+ """Download the distribution to a path, and return it.
+
+        If a path is given, use it; otherwise, generate a new temporary
+        directory. Return the download location.
+ """
+ if path is None:
+ path = tempfile.mkdtemp()
+
+        # if we have not downloaded it yet, do so
+ if self.downloaded_location is None:
+ url = self.url['url']
+ archive_name = urllib.parse.urlparse(url)[2].split('/')[-1]
+ filename, headers = urllib.request.urlretrieve(url,
+ path + "/" + archive_name)
+ self.downloaded_location = filename
+ self._check_md5(filename)
+ return self.downloaded_location
+
+ def unpack(self, path=None):
+ """Unpack the distribution to the given path.
+
+        If no destination is given, creates a temporary location.
+
+ Returns the location of the extracted files (root).
+ """
+ if not self._unpacked_dir:
+ if path is None:
+ path = tempfile.mkdtemp()
+
+ filename = self.download(path)
+ unpack_archive(filename, path)
+ self._unpacked_dir = path
+
+ return path
+
+ def _check_md5(self, filename):
+ """Check that the md5 checksum of the given file matches the one in
+ url param"""
+ hashname = self.url['hashname']
+ expected_hashval = self.url['hashval']
+ if None not in (expected_hashval, hashname):
+ with open(filename, 'rb') as f:
+ hashval = hashlib.new(hashname)
+ hashval.update(f.read())
+
+ if hashval.hexdigest() != expected_hashval:
+ raise HashDoesNotMatch("got %s instead of %s"
+ % (hashval.hexdigest(), expected_hashval))
+
+ def __repr__(self):
+ if self.release is None:
+ return "<? ? %s>" % self.dist_type
+
+ return "<%s %s %s>" % (
+ self.release.name, self.release.version, self.dist_type or "")
+
+
+class ReleasesList(IndexReference):
+ """A container of Release.
+
+ Provides useful methods and facilities to sort and filter releases.
+ """
+ def __init__(self, name, releases=None, contains_hidden=False, index=None):
+ self.set_index(index)
+ self.releases = []
+ self.name = name
+ self.contains_hidden = contains_hidden
+ if releases:
+ self.add_releases(releases)
+
+ def fetch_releases(self):
+ self._index.get_releases(self.name)
+ return self.releases
+
+ def filter(self, predicate):
+ """Filter and return a subset of releases matching the given predicate.
+ """
+ return ReleasesList(self.name, [release for release in self.releases
+ if predicate.match(release.version)],
+ index=self._index)
+
+ def get_last(self, requirements, prefer_final=None):
+        """Return the "last" release that satisfies the given predicates.
+
+        "last" is defined by the version numbers of the releases; you can
+        also set the prefer_final parameter to True or False to change
+        the ordering of the results
+ """
+ predicate = get_version_predicate(requirements)
+ releases = self.filter(predicate)
+ if len(releases) == 0:
+ return None
+ releases.sort_releases(prefer_final, reverse=True)
+ return releases[0]
+
+ def add_releases(self, releases):
+ """Add releases in the release list.
+
+        :param releases: a list of ReleaseInfo objects.
+ """
+ for r in releases:
+ self.add_release(release=r)
+
+ def add_release(self, version=None, dist_type='sdist', release=None,
+ **dist_args):
+ """Add a release to the list.
+
+        The release can be passed in the `release` parameter, in which
+        case it will be crawled to extract the useful information if
+        necessary; alternatively, the release information can be passed
+        directly via the `version` and `dist_type` arguments.
+
+        Other keyword arguments can be provided, and will be forwarded
+        to the distribution creation (eg. the arguments of the DistInfo
+        constructor).
+ """
+ if release:
+ if release.name.lower() != self.name.lower():
+ raise ValueError("%s is not the same project as %s" %
+ (release.name, self.name))
+ version = str(release.version)
+
+ if version not in self.get_versions():
+ # append only if not already exists
+ self.releases.append(release)
+ for dist in release.dists.values():
+ for url in dist.urls:
+ self.add_release(version, dist.dist_type, **url)
+ else:
+ matches = [r for r in self.releases
+ if str(r.version) == version and r.name == self.name]
+ if not matches:
+ release = ReleaseInfo(self.name, version, index=self._index)
+ self.releases.append(release)
+ else:
+ release = matches[0]
+
+ release.add_distribution(dist_type=dist_type, **dist_args)
+
+ def sort_releases(self, prefer_final=False, reverse=True, *args, **kwargs):
+ """Sort the results with the given properties.
+
+        The `prefer_final` argument can be used to specify whether final
+        distributions (eg. not dev, beta or alpha) should be preferred.
+
+ Results can be inverted by using `reverse`.
+
+ Any other parameter provided will be forwarded to the sorted call. You
+ cannot redefine the key argument of "sorted" here, as it is used
+ internally to sort the releases.
+ """
+
+ sort_by = []
+ if prefer_final:
+ sort_by.append("is_final")
+ sort_by.append("version")
+
+ self.releases.sort(
+ key=lambda i: tuple(getattr(i, arg) for arg in sort_by),
+ reverse=reverse, *args, **kwargs)
+
+ def get_release(self, version):
+ """Return a release from its version."""
+ matches = [r for r in self.releases if str(r.version) == version]
+ if len(matches) != 1:
+ raise KeyError(version)
+ return matches[0]
+
+ def get_versions(self):
+ """Return a list of releases versions contained"""
+ return [str(r.version) for r in self.releases]
+
+ def __getitem__(self, key):
+ return self.releases[key]
+
+ def __len__(self):
+ return len(self.releases)
+
+ def __repr__(self):
+ string = 'Project "%s"' % self.name
+ if self.get_versions():
+ string += ' versions: %s' % ', '.join(self.get_versions())
+ return '<%s>' % string
+
+
+def get_infos_from_url(url, probable_dist_name=None, is_external=True):
+    """Get useful information from a URL.
+
+ Return a dict of (name, version, url, hashtype, hash, is_external)
+
+ :param url: complete url of the distribution
+ :param probable_dist_name: A probable name of the project.
+    :param is_external: Tell if the url comes from an index or from
+ an external URL.
+ """
+    # if the url contains an md5 hash, extract it
+ md5_hash = None
+ match = MD5_HASH.match(url)
+ if match is not None:
+ md5_hash = match.group(1)
+ # remove the hash
+ url = url.replace("#md5=%s" % md5_hash, "")
+
+ # parse the archive name to find dist name and version
+ archive_name = urllib.parse.urlparse(url)[2].split('/')[-1]
+ extension_matched = False
+ # remove the extension from the name
+ for ext in EXTENSIONS:
+ if archive_name.endswith(ext):
+ archive_name = archive_name[:-len(ext)]
+ extension_matched = True
+
+ name, version = split_archive_name(archive_name)
+    if extension_matched:
+ return {'name': name,
+ 'version': version,
+ 'url': url,
+ 'hashname': "md5",
+ 'hashval': md5_hash,
+ 'is_external': is_external,
+ 'dist_type': 'sdist'}
+
+
+def split_archive_name(archive_name, probable_name=None):
+ """Split an archive name into two parts: name and version.
+
+ Return the tuple (name, version)
+ """
+    # Try to determine which part is the name and which is the version
+    # using the "-" separator. Take the larger part to be the version
+    # number, then reduce it if that does not work.
+    def eager_split(arg, maxsplit=2):
+        # split using the "-" separator
+        splits = arg.rsplit("-", maxsplit)
+        name = splits[0]
+        version = "-".join(splits[1:])
+        if version.startswith("-"):
+            version = version[1:]
+        if suggest_normalized_version(version) is None and maxsplit >= 0:
+            # we didn't get a good version number: recurse!
+            return eager_split(arg, maxsplit - 1)
+        else:
+            return name, version
+ if probable_name is not None:
+ probable_name = probable_name.lower()
+ name = None
+ if probable_name is not None and probable_name in archive_name:
+ # we get the name from probable_name, if given.
+ name = probable_name
+        # (str.lstrip would strip characters, not a prefix)
+        version = archive_name[len(name):].lstrip('-')
+ else:
+ name, version = eager_split(archive_name)
+
+ version = suggest_normalized_version(version)
+ if version is not None and name != "":
+ return name.lower(), version
+ else:
+ raise CantParseArchiveName(archive_name)
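+
+
+# Illustrative results of the helpers above:
+#
+#   >>> split_archive_name('FooBar-1.2')
+#   ('foobar', '1.2')
+#   >>> get_infos_from_url('http://example.org/FooBar-1.2.tar.gz')['version']
+#   '1.2'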
diff --git a/Lib/packaging/pypi/errors.py b/Lib/packaging/pypi/errors.py
new file mode 100644
index 0000000..2191ac1
--- /dev/null
+++ b/Lib/packaging/pypi/errors.py
@@ -0,0 +1,39 @@
+"""Exceptions raised by packaging.pypi code."""
+
+from packaging.errors import PackagingPyPIError
+
+
+class ProjectNotFound(PackagingPyPIError):
+ """Project has not been found"""
+
+
+class DistributionNotFound(PackagingPyPIError):
+ """The release has not been found"""
+
+
+class ReleaseNotFound(PackagingPyPIError):
+ """The release has not been found"""
+
+
+class CantParseArchiveName(PackagingPyPIError):
+ """An archive name can't be parsed to find distribution name and version"""
+
+
+class DownloadError(PackagingPyPIError):
+ """An error has occurs while downloading"""
+
+
+class HashDoesNotMatch(DownloadError):
+ """Compared hashes does not match"""
+
+
+class UnsupportedHashName(PackagingPyPIError):
+ """A unsupported hashname has been used"""
+
+
+class UnableToDownload(PackagingPyPIError):
+ """All mirrors have been tried, without success"""
+
+
+class InvalidSearchField(PackagingPyPIError):
+ """An invalid search field has been used"""
diff --git a/Lib/packaging/pypi/mirrors.py b/Lib/packaging/pypi/mirrors.py
new file mode 100644
index 0000000..a646acff
--- /dev/null
+++ b/Lib/packaging/pypi/mirrors.py
@@ -0,0 +1,52 @@
+"""Utilities related to the mirror infrastructure defined in PEP 381."""
+
+from string import ascii_lowercase
+import socket
+
+DEFAULT_MIRROR_URL = "last.pypi.python.org"
+
+
+def get_mirrors(hostname=None):
+ """Return the list of mirrors from the last record found on the DNS
+ entry::
+
+ >>> from packaging.pypi.mirrors import get_mirrors
+ >>> get_mirrors()
+ ['a.pypi.python.org', 'b.pypi.python.org', 'c.pypi.python.org',
+ 'd.pypi.python.org']
+
+ """
+ if hostname is None:
+ hostname = DEFAULT_MIRROR_URL
+
+ # return the last mirror registered on PyPI.
+ try:
+ hostname = socket.gethostbyname_ex(hostname)[0]
+ except socket.gaierror:
+ return []
+ end_letter = hostname.split(".", 1)
+
+ # determine the list from the last one.
+ return ["%s.%s" % (s, end_letter[1]) for s in string_range(end_letter[0])]
+
+
+def string_range(last):
+ """Compute the range of string between "a" and last.
+
+ This works for simple "a to z" lists, but also for "a to zz" lists.
+ """
+ for k in range(len(last)):
+ for x in product(ascii_lowercase, repeat=(k + 1)):
+ result = ''.join(x)
+ yield result
+ if result == last:
+ return
+
+
+def product(*args, **kwds):
+    # minimal local stand-in for itertools.product
+ pools = [tuple(arg) for arg in args] * kwds.get('repeat', 1)
+ result = [[]]
+ for pool in pools:
+ result = [x + [y] for x in result for y in pool]
+ for prod in result:
+ yield tuple(prod)
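
A short sketch of the mirror discovery above; string_range enumerates the
alphabetical mirror prefixes and product is a minimal local stand-in for
itertools.product. The values shown are illustrative and get_mirrors needs
working DNS::

    from packaging.pypi.mirrors import get_mirrors, string_range

    print(list(string_range("c")))
    # -> ['a', 'b', 'c']
    print(list(string_range("ab")))
    # -> ['a', 'b', ..., 'z', 'aa', 'ab']

    # resolve last.pypi.python.org to find the last mirror, then expand
    # the alphabetical range up to it (returns [] on DNS failure)
    mirrors = get_mirrors()
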
diff --git a/Lib/packaging/pypi/simple.py b/Lib/packaging/pypi/simple.py
new file mode 100644
index 0000000..e26d55d
--- /dev/null
+++ b/Lib/packaging/pypi/simple.py
@@ -0,0 +1,462 @@
+"""Spider using the screen-scraping "simple" PyPI API.
+
+This module contains the class Crawler, a simple spider that
+can be used to find and retrieve distributions from a project index
+(like the Python Package Index), using its so-called simple API (see
+reference implementation available at http://pypi.python.org/simple/).
+"""
+
+import http.client
+import re
+import socket
+import sys
+import urllib.request
+import urllib.parse
+import urllib.error
+import os
+import base64
+
+from fnmatch import translate
+from functools import wraps
+from packaging import logger
+from packaging.metadata import Metadata
+from packaging.version import get_version_predicate
+from packaging import __version__ as packaging_version
+from packaging.pypi.base import BaseClient
+from packaging.pypi.dist import (ReleasesList, EXTENSIONS,
+ get_infos_from_url, MD5_HASH)
+from packaging.pypi.errors import (PackagingPyPIError, DownloadError,
+ UnableToDownload, CantParseArchiveName,
+ ReleaseNotFound, ProjectNotFound)
+from packaging.pypi.mirrors import get_mirrors
+
+__all__ = ['Crawler', 'DEFAULT_SIMPLE_INDEX_URL']
+
+# -- Constants -----------------------------------------------
+DEFAULT_SIMPLE_INDEX_URL = "http://a.pypi.python.org/simple/"
+DEFAULT_HOSTS = ("*",)
+SOCKET_TIMEOUT = 15
+USER_AGENT = "Python-urllib/%s.%s packaging/%s" % (
+ sys.version_info[0], sys.version_info[1], packaging_version)
+
+# -- Regexps -------------------------------------------------
+EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.]+)$')
+HREF = re.compile("""href\\s*=\\s*['"]?([^'"> ]+)""", re.I)
+URL_SCHEME = re.compile('([-+.a-z0-9]{2,}):', re.I).match
+
+# This pattern matches a character entity reference (a decimal numeric
+# references, a hexadecimal numeric reference, or a named reference).
+ENTITY_SUB = re.compile(r'&(#(\d+|x[\da-fA-F]+)|[\w.:-]+);?').sub
+REL = re.compile("""<([^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*)>""", re.I)
+
+
+def socket_timeout(timeout=SOCKET_TIMEOUT):
+ """Decorator to add a socket timeout when requesting pages on PyPI.
+ """
+ def wrapper(func):
+ @wraps(func)
+ def wrapped(self, *args, **kwargs):
+ old_timeout = socket.getdefaulttimeout()
+ if hasattr(self, "_timeout"):
+ timeout = self._timeout
+ socket.setdefaulttimeout(timeout)
+ try:
+ return func(self, *args, **kwargs)
+ finally:
+ socket.setdefaulttimeout(old_timeout)
+ return wrapped
+ return wrapper
+
+
+def with_mirror_support():
+ """Decorator that makes the mirroring support easier"""
+ def wrapper(func):
+ @wraps(func)
+ def wrapped(self, *args, **kwargs):
+ try:
+ return func(self, *args, **kwargs)
+ except DownloadError:
+ # if an error occurs, try with the next index_url
+ if self._mirrors_tries >= self._mirrors_max_tries:
+ try:
+ self._switch_to_next_mirror()
+ except KeyError:
+ raise UnableToDownload("Tried all mirrors")
+ else:
+ self._mirrors_tries += 1
+ self._projects.clear()
+ return wrapped(self, *args, **kwargs)
+ return wrapped
+ return wrapper
+
+
+class Crawler(BaseClient):
+ """Provides useful tools to request the Python Package Index simple API.
+
+ You can specify both mirrors and mirrors_url, but mirrors_url will only be
+ used if mirrors is set to None.
+
+ :param index_url: the url of the simple index to search on.
+ :param prefer_final: if the version is not mentioned, and the last
+ version is not a "final" one (alpha, beta, etc.),
+ pick up the last final version.
+ :param prefer_source: if the distribution type is not mentioned, pick up
+ the source one if available.
+ :param follow_externals: tell if following external links is needed or
+ not. Default is False.
+ :param hosts: a list of hosts allowed to be processed while using
+ follow_externals=True. Default behavior is to follow all
+ hosts.
+ :param mirrors_url: the url to look on for DNS records giving mirror
+ addresses.
+ :param mirrors: a list of mirrors (see PEP 381).
+    :param timeout: time in seconds before considering that a url has
+                    timed out.
+    :param mirrors_max_tries: number of times to try requesting information
+                              on mirrors before switching.
+ """
+
+ def __init__(self, index_url=DEFAULT_SIMPLE_INDEX_URL, prefer_final=False,
+ prefer_source=True, hosts=DEFAULT_HOSTS,
+ follow_externals=False, mirrors_url=None, mirrors=None,
+ timeout=SOCKET_TIMEOUT, mirrors_max_tries=0):
+ super(Crawler, self).__init__(prefer_final, prefer_source)
+ self.follow_externals = follow_externals
+
+ # mirroring attributes.
+ parsed = urllib.parse.urlparse(index_url)
+ self.scheme = parsed[0]
+ if self.scheme == 'file':
+ ender = os.path.sep
+ else:
+ ender = '/'
+ if not index_url.endswith(ender):
+ index_url += ender
+ # if no mirrors are defined, use the method described in PEP 381.
+ if mirrors is None:
+ mirrors = get_mirrors(mirrors_url)
+ self._mirrors = set(mirrors)
+ self._mirrors_used = set()
+ self.index_url = index_url
+ self._mirrors_max_tries = mirrors_max_tries
+ self._mirrors_tries = 0
+ self._timeout = timeout
+
+ # create a regexp to match all given hosts
+ self._allowed_hosts = re.compile('|'.join(map(translate, hosts))).match
+
+        # we keep an index of pages we have processed, in order to avoid
+        # scanning them multiple times (e.g. if multiple pages point to
+        # one)
+ self._processed_urls = []
+ self._projects = {}
+
+ @with_mirror_support()
+ def search_projects(self, name=None, **kwargs):
+ """Search the index for projects containing the given name.
+
+ Return a list of names.
+ """
+        if '*' in name:
+            # the result of str.replace must be assigned; strings are
+            # immutable
+            name = name.replace('*', '.*')
+ else:
+ name = "%s%s%s" % ('*.?', name, '*.?')
+ name = name.replace('*', '[^<]*') # avoid matching end tag
+ pattern = ('<a[^>]*>(%s)</a>' % name).encode('utf-8')
+ projectname = re.compile(pattern, re.I)
+ matching_projects = []
+
+ with self._open_url(self.index_url) as index:
+ index_content = index.read()
+
+ for match in projectname.finditer(index_content):
+ project_name = match.group(1).decode('utf-8')
+ matching_projects.append(self._get_project(project_name))
+ return matching_projects
+
+ def get_releases(self, requirements, prefer_final=None,
+ force_update=False):
+ """Search for releases and return a ReleasesList object containing
+ the results.
+ """
+ predicate = get_version_predicate(requirements)
+ if predicate.name.lower() in self._projects and not force_update:
+ return self._projects.get(predicate.name.lower())
+ prefer_final = self._get_prefer_final(prefer_final)
+ logger.debug('Reading info on PyPI about %s', predicate.name)
+ self._process_index_page(predicate.name)
+
+ if predicate.name.lower() not in self._projects:
+ raise ProjectNotFound
+
+ releases = self._projects.get(predicate.name.lower())
+ releases.sort_releases(prefer_final=prefer_final)
+ return releases
+
+ def get_release(self, requirements, prefer_final=None):
+ """Return only one release that fulfill the given requirements"""
+ predicate = get_version_predicate(requirements)
+ release = self.get_releases(predicate, prefer_final)\
+ .get_last(predicate)
+ if not release:
+            raise ReleaseNotFound("No release matches the given criteria")
+ return release
+
+ def get_distributions(self, project_name, version):
+ """Return the distributions found on the index for the specific given
+ release"""
+ # as the default behavior of get_release is to return a release
+ # containing the distributions, just alias it.
+ return self.get_release("%s (%s)" % (project_name, version))
+
+ def get_metadata(self, project_name, version):
+ """Return the metadatas from the simple index.
+
+ Currently, download one archive, extract it and use the PKG-INFO file.
+ """
+ release = self.get_distributions(project_name, version)
+ if not release.metadata:
+ location = release.get_distribution().unpack()
+ pkg_info = os.path.join(location, 'PKG-INFO')
+ release.metadata = Metadata(pkg_info)
+ return release
+
+ def _switch_to_next_mirror(self):
+ """Switch to the next mirror (eg. point self.index_url to the next
+ mirror url.
+
+ Raise a KeyError if all mirrors have been tried.
+ """
+ self._mirrors_used.add(self.index_url)
+ index_url = self._mirrors.pop()
+ # XXX use urllib.parse for a real check of missing scheme part
+ if not index_url.startswith(("http://", "https://", "file://")):
+ index_url = "http://%s" % index_url
+
+ if not index_url.endswith("/simple"):
+ index_url = "%s/simple/" % index_url
+
+ self.index_url = index_url
+
+ def _is_browsable(self, url):
+ """Tell if the given URL can be browsed or not.
+
+ It uses the follow_externals and the hosts list to tell if the given
+ url is browsable or not.
+ """
+        # if index_url is contained in the given URL, we are browsing the
+        # index, and it's always "browsable";
+        # local files are always considered browsable resources
+ if self.index_url in url or urllib.parse.urlparse(url)[0] == "file":
+ return True
+ elif self.follow_externals:
+ if self._allowed_hosts(urllib.parse.urlparse(url)[1]): # 1 is netloc
+ return True
+ else:
+ return False
+ return False
+
+ def _is_distribution(self, link):
+ """Tell if the given URL matches to a distribution name or not.
+ """
+ #XXX find a better way to check that links are distributions
+ # Using a regexp ?
+ for ext in EXTENSIONS:
+ if ext in link:
+ return True
+ return False
+
+    def _register_release(self, release=None, release_info=None):
+        """Register a new release.
+
+        Either a release object or a release_info dict can be provided; the
+        preferred (i.e. quicker) way is the dict.
+
+        Return the list of existing releases for the given project.
+        """
+        if release_info is None:
+            # don't use a mutable default argument: pop below would mutate
+            # a dict shared between calls
+            release_info = {}
+        # Check if the project already has a list of releases (referring to
+        # the project name). If not, create a new release list.
+        # Then, add the release to the list.
+        if release:
+            name = release.name
+        else:
+            name = release_info['name']
+ if name.lower() not in self._projects:
+ self._projects[name.lower()] = ReleasesList(name, index=self._index)
+
+ if release:
+ self._projects[name.lower()].add_release(release=release)
+ else:
+ name = release_info.pop('name')
+ version = release_info.pop('version')
+ dist_type = release_info.pop('dist_type')
+ self._projects[name.lower()].add_release(version, dist_type,
+ **release_info)
+ return self._projects[name.lower()]
+
+ def _process_url(self, url, project_name=None, follow_links=True):
+ """Process an url and search for distributions packages.
+
+ For each URL found, if it's a download, creates a PyPIdistribution
+ object. If it's a homepage and we can follow links, process it too.
+
+ :param url: the url to process
+ :param project_name: the project name we are searching for.
+ :param follow_links: Do not want to follow links more than from one
+ level. This parameter tells if we want to follow
+ the links we find (eg. run recursively this
+ method on it)
+ """
+ with self._open_url(url) as f:
+ base_url = f.url
+ if url not in self._processed_urls:
+ self._processed_urls.append(url)
+ link_matcher = self._get_link_matcher(url)
+ for link, is_download in link_matcher(f.read().decode(), base_url):
+ if link not in self._processed_urls:
+ if self._is_distribution(link) or is_download:
+ self._processed_urls.append(link)
+ # it's a distribution, so create a dist object
+ try:
+ infos = get_infos_from_url(link, project_name,
+ is_external=self.index_url not in url)
+ except CantParseArchiveName as e:
+ logger.warning(
+ "version has not been parsed: %s", e)
+ else:
+ self._register_release(release_info=infos)
+ else:
+ if self._is_browsable(link) and follow_links:
+ self._process_url(link, project_name,
+ follow_links=False)
+
+ def _get_link_matcher(self, url):
+ """Returns the right link matcher function of the given url
+ """
+ if self.index_url in url:
+ return self._simple_link_matcher
+ else:
+ return self._default_link_matcher
+
+ def _get_full_url(self, url, base_url):
+ return urllib.parse.urljoin(base_url, self._htmldecode(url))
+
+ def _simple_link_matcher(self, content, base_url):
+ """Yield all links with a rel="download" or rel="homepage".
+
+ This matches the simple index requirements for matching links.
+        If follow_externals is set to False, don't yield the external
+        urls.
+
+ :param content: the content of the page we want to parse
+ :param base_url: the url of this page.
+ """
+ for match in HREF.finditer(content):
+ url = self._get_full_url(match.group(1), base_url)
+ if MD5_HASH.match(url):
+ yield (url, True)
+
+ for match in REL.finditer(content):
+ # search for rel links.
+ tag, rel = match.groups()
+ rels = [s.strip() for s in rel.lower().split(',')]
+ if 'homepage' in rels or 'download' in rels:
+ for match in HREF.finditer(tag):
+ url = self._get_full_url(match.group(1), base_url)
+ if 'download' in rels or self._is_browsable(url):
+ # yield a list of (url, is_download)
+ yield (url, 'download' in rels)
+
+ def _default_link_matcher(self, content, base_url):
+ """Yield all links found on the page.
+ """
+ for match in HREF.finditer(content):
+ url = self._get_full_url(match.group(1), base_url)
+ if self._is_browsable(url):
+ yield (url, False)
+
+ @with_mirror_support()
+ def _process_index_page(self, name):
+ """Find and process a PyPI page for the given project name.
+
+        :param name: the name of the project whose page to find
+ """
+ # Browse and index the content of the given PyPI page.
+ if self.scheme == 'file':
+ ender = os.path.sep
+ else:
+ ender = '/'
+ url = self.index_url + name + ender
+ self._process_url(url, name)
+
+ @socket_timeout()
+ def _open_url(self, url):
+ """Open a urllib2 request, handling HTTP authentication, and local
+ files support.
+
+ """
+ scheme, netloc, path, params, query, frag = urllib.parse.urlparse(url)
+
+ # authentication stuff
+ if scheme in ('http', 'https'):
+ auth, host = urllib.parse.splituser(netloc)
+ else:
+ auth = None
+
+ # add index.html automatically for filesystem paths
+ if scheme == 'file':
+ if url.endswith(os.path.sep):
+ url += "index.html"
+
+        # add authorization headers if auth is provided
+        if auth:
+            # str.encode('base64') does not exist in Python 3; build the
+            # header value with the base64 module instead
+            auth = "Basic " + base64.standard_b64encode(
+                urllib.parse.unquote(auth).encode()).decode()
+ new_url = urllib.parse.urlunparse((
+ scheme, host, path, params, query, frag))
+ request = urllib.request.Request(new_url)
+ request.add_header("Authorization", auth)
+ else:
+ request = urllib.request.Request(url)
+ request.add_header('User-Agent', USER_AGENT)
+ try:
+ fp = urllib.request.urlopen(request)
+ except (ValueError, http.client.InvalidURL) as v:
+ msg = ' '.join([str(arg) for arg in v.args])
+ raise PackagingPyPIError('%s %s' % (url, msg))
+ except urllib.error.HTTPError as v:
+ return v
+ except urllib.error.URLError as v:
+ raise DownloadError("Download error for %s: %s" % (url, v.reason))
+ except http.client.BadStatusLine as v:
+ raise DownloadError('%s returned a bad status line. '
+ 'The server might be down, %s' % (url, v.line))
+ except http.client.HTTPException as v:
+ raise DownloadError("Download error for %s: %s" % (url, v))
+ except socket.timeout:
+ raise DownloadError("The server timeouted")
+
+ if auth:
+ # Put authentication info back into request URL if same host,
+ # so that links found on the page will work
+ s2, h2, path2, param2, query2, frag2 = \
+ urllib.parse.urlparse(fp.url)
+ if s2 == scheme and h2 == host:
+ fp.url = urllib.parse.urlunparse(
+ (s2, netloc, path2, param2, query2, frag2))
+ return fp
+
+ def _decode_entity(self, match):
+ what = match.group(1)
+ if what.startswith('#x'):
+ what = int(what[2:], 16)
+ elif what.startswith('#'):
+ what = int(what[1:])
+ else:
+ from html.entities import name2codepoint
+ what = name2codepoint.get(what, match.group(0))
+ return chr(what)
+
+ def _htmldecode(self, text):
+ """Decode HTML entities in the given text."""
+ return ENTITY_SUB(self._decode_entity, text)
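
A minimal sketch of typical Crawler usage, assuming the index is reachable;
the project name, version predicate and version are illustrative::

    from packaging.pypi.simple import Crawler

    crawler = Crawler(prefer_final=True, follow_externals=False)
    # all matching releases, sorted according to prefer_final
    releases = crawler.get_releases("FooBar (<= 1.3)")
    # the single best match for the predicate
    release = crawler.get_release("FooBar (<= 1.3)")
    # a release object with its distributions filled in
    dists = crawler.get_distributions("FooBar", "1.2")
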
diff --git a/Lib/packaging/pypi/wrapper.py b/Lib/packaging/pypi/wrapper.py
new file mode 100644
index 0000000..945d08a
--- /dev/null
+++ b/Lib/packaging/pypi/wrapper.py
@@ -0,0 +1,99 @@
+"""Convenient client for all PyPI APIs.
+
+This module provides a ClientWrapper class which will use the "simple"
+or XML-RPC API to request information or files from an index.
+"""
+
+from packaging.pypi import simple, xmlrpc
+
+_WRAPPER_MAPPINGS = {'get_release': 'simple',
+ 'get_releases': 'simple',
+ 'search_projects': 'simple',
+ 'get_metadata': 'xmlrpc',
+ 'get_distributions': 'simple'}
+
+_WRAPPER_INDEXES = {'xmlrpc': xmlrpc.Client,
+ 'simple': simple.Crawler}
+
+
+def switch_index_if_fails(func, wrapper):
+ """Decorator that switch of index (for instance from xmlrpc to simple)
+ if the first mirror return an empty list or raises an exception.
+ """
+ def decorator(*args, **kwargs):
+ retry = True
+ exception = None
+ methods = [func]
+ for f in wrapper._indexes.values():
+ if f != func.__self__ and hasattr(f, func.__name__):
+ methods.append(getattr(f, func.__name__))
+ for method in methods:
+ try:
+ response = method(*args, **kwargs)
+ retry = False
+ except Exception as e:
+ exception = e
+ if not retry:
+ break
+ if retry and exception:
+ raise exception
+ else:
+ return response
+ return decorator
+
+
+class ClientWrapper:
+ """Wrapper around simple and xmlrpc clients,
+
+ Choose the best implementation to use depending the needs, using the given
+ mappings.
+ If one of the indexes returns an error, tries to use others indexes.
+
+ :param index: tell which index to rely on by default.
+ :param index_classes: a dict of name:class to use as indexes.
+ :param indexes: a dict of name:index already instantiated
+ :param mappings: the mappings to use for this wrapper
+ """
+
+    def __init__(self, default_index='simple', index_classes=_WRAPPER_INDEXES,
+                 indexes=None, mappings=_WRAPPER_MAPPINGS):
+        self._projects = {}
+        self._mappings = mappings
+        # don't use a mutable default argument: setdefault below would
+        # mutate a dict shared between instances
+        self._indexes = indexes if indexes is not None else {}
+        self._default_index = default_index
+
+ # instantiate the classes and set their _project attribute to the one
+ # of the wrapper.
+ for name, cls in index_classes.items():
+ obj = self._indexes.setdefault(name, cls())
+ obj._projects = self._projects
+ obj._index = self
+
+ def __getattr__(self, method_name):
+ """When asking for methods of the wrapper, return the implementation of
+ the wrapped classes, depending the mapping.
+
+ Decorate the methods to switch of implementation if an error occurs
+ """
+ real_method = None
+ if method_name in _WRAPPER_MAPPINGS:
+ obj = self._indexes[_WRAPPER_MAPPINGS[method_name]]
+ real_method = getattr(obj, method_name)
+ else:
+ # the method is not defined in the mappings, so we try first to get
+ # it via the default index, and rely on others if needed.
+ try:
+ real_method = getattr(self._indexes[self._default_index],
+ method_name)
+ except AttributeError:
+ other_indexes = [i for i in self._indexes
+ if i != self._default_index]
+ for index in other_indexes:
+ real_method = getattr(self._indexes[index], method_name,
+ None)
+ if real_method:
+ break
+ if real_method:
+ return switch_index_if_fails(real_method, self)
+ else:
+            raise AttributeError("No index has attribute '%s'" % method_name)
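
A minimal sketch of the wrapper in use: per _WRAPPER_MAPPINGS above,
get_releases is served by the simple crawler and get_metadata by the XML-RPC
client, falling back to the other index on failure. The project name and
version are illustrative::

    from packaging.pypi.wrapper import ClientWrapper

    client = ClientWrapper()
    releases = client.get_releases("FooBar")         # simple.Crawler
    metadata = client.get_metadata("FooBar", "1.2")  # xmlrpc.Client
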
diff --git a/Lib/packaging/pypi/xmlrpc.py b/Lib/packaging/pypi/xmlrpc.py
new file mode 100644
index 0000000..befdf6d
--- /dev/null
+++ b/Lib/packaging/pypi/xmlrpc.py
@@ -0,0 +1,200 @@
+"""Spider using the XML-RPC PyPI API.
+
+This module contains the class Client, a spider that can be used to find
+and retrieve distributions from a project index (like the Python Package
+Index), using its XML-RPC API (see documentation of the reference
+implementation at http://wiki.python.org/moin/PyPiXmlRpc).
+"""
+
+import xmlrpc.client
+
+from packaging import logger
+from packaging.errors import IrrationalVersionError
+from packaging.version import get_version_predicate
+from packaging.pypi.base import BaseClient
+from packaging.pypi.errors import (ProjectNotFound, InvalidSearchField,
+ ReleaseNotFound)
+from packaging.pypi.dist import ReleaseInfo
+
+__all__ = ['Client', 'DEFAULT_XMLRPC_INDEX_URL']
+
+DEFAULT_XMLRPC_INDEX_URL = 'http://python.org/pypi'
+
+_SEARCH_FIELDS = ['name', 'version', 'author', 'author_email', 'maintainer',
+ 'maintainer_email', 'home_page', 'license', 'summary',
+ 'description', 'keywords', 'platform', 'download_url']
+
+
+class Client(BaseClient):
+ """Client to query indexes using XML-RPC method calls.
+
+ If no server_url is specified, use the default PyPI XML-RPC URL,
+ defined in the DEFAULT_XMLRPC_INDEX_URL constant::
+
+ >>> client = Client()
+ >>> client.server_url == DEFAULT_XMLRPC_INDEX_URL
+ True
+
+ >>> client = Client("http://someurl/")
+ >>> client.server_url
+ 'http://someurl/'
+ """
+
+ def __init__(self, server_url=DEFAULT_XMLRPC_INDEX_URL, prefer_final=False,
+ prefer_source=True):
+ super(Client, self).__init__(prefer_final, prefer_source)
+ self.server_url = server_url
+ self._projects = {}
+
+ def get_release(self, requirements, prefer_final=False):
+ """Return a release with all complete metadata and distribution
+ related informations.
+ """
+ prefer_final = self._get_prefer_final(prefer_final)
+ predicate = get_version_predicate(requirements)
+ releases = self.get_releases(predicate.name)
+ release = releases.get_last(predicate, prefer_final)
+ self.get_metadata(release.name, str(release.version))
+ self.get_distributions(release.name, str(release.version))
+ return release
+
+ def get_releases(self, requirements, prefer_final=None, show_hidden=True,
+ force_update=False):
+ """Return the list of existing releases for a specific project.
+
+ Cache the results from one call to another.
+
+ If show_hidden is True, return the hidden releases too.
+        If force_update is True, reprocess the index to update the
+        information (e.g. make a new XML-RPC call).
+ ::
+
+ >>> client = Client()
+ >>> client.get_releases('Foo')
+ ['1.1', '1.2', '1.3']
+
+ If no such project exists, raise a ProjectNotFound exception::
+
+            >>> client.get_releases('UnexistingProject')
+ ProjectNotFound: UnexistingProject
+
+ """
+ def get_versions(project_name, show_hidden):
+ return self.proxy.package_releases(project_name, show_hidden)
+
+ predicate = get_version_predicate(requirements)
+ prefer_final = self._get_prefer_final(prefer_final)
+ project_name = predicate.name
+ if not force_update and (project_name.lower() in self._projects):
+ project = self._projects[project_name.lower()]
+ if not project.contains_hidden and show_hidden:
+                # if hidden releases are requested, and we have an existing
+                # list of releases that does not contain hidden ones
+ all_versions = get_versions(project_name, show_hidden)
+ existing_versions = project.get_versions()
+ hidden_versions = set(all_versions) - set(existing_versions)
+ for version in hidden_versions:
+ project.add_release(release=ReleaseInfo(project_name,
+ version, index=self._index))
+ else:
+ versions = get_versions(project_name, show_hidden)
+ if not versions:
+ raise ProjectNotFound(project_name)
+ project = self._get_project(project_name)
+ project.add_releases([ReleaseInfo(project_name, version,
+ index=self._index)
+ for version in versions])
+ project = project.filter(predicate)
+ if len(project) == 0:
+ raise ReleaseNotFound("%s" % predicate)
+ project.sort_releases(prefer_final)
+ return project
+
+ def get_distributions(self, project_name, version):
+ """Grab informations about distributions from XML-RPC.
+
+ Return a ReleaseInfo object, with distribution-related informations
+ filled in.
+ """
+ url_infos = self.proxy.release_urls(project_name, version)
+ project = self._get_project(project_name)
+ if version not in project.get_versions():
+ project.add_release(release=ReleaseInfo(project_name, version,
+ index=self._index))
+ release = project.get_release(version)
+ for info in url_infos:
+ packagetype = info['packagetype']
+ dist_infos = {'url': info['url'],
+ 'hashval': info['md5_digest'],
+ 'hashname': 'md5',
+ 'is_external': False,
+ 'python_version': info['python_version']}
+ release.add_distribution(packagetype, **dist_infos)
+ return release
+
+ def get_metadata(self, project_name, version):
+ """Retrieve project metadata.
+
+        Return a ReleaseInfo object, with metadata filled in.
+ """
+        # to be case-insensitive, get the information from the XML-RPC API
+ projects = [d['name'] for d in
+ self.proxy.search({'name': project_name})
+                    if d['name'].lower() == project_name.lower()]
+ if len(projects) > 0:
+ project_name = projects[0]
+
+ metadata = self.proxy.release_data(project_name, version)
+ project = self._get_project(project_name)
+ if version not in project.get_versions():
+ project.add_release(release=ReleaseInfo(project_name, version,
+ index=self._index))
+ release = project.get_release(version)
+ release.set_metadata(metadata)
+ return release
+
+ def search_projects(self, name=None, operator="or", **kwargs):
+ """Find using the keys provided in kwargs.
+
+ You can set operator to "and" or "or".
+ """
+ for key in kwargs:
+ if key not in _SEARCH_FIELDS:
+ raise InvalidSearchField(key)
+ if name:
+ kwargs["name"] = name
+ projects = self.proxy.search(kwargs, operator)
+ for p in projects:
+ project = self._get_project(p['name'])
+ try:
+ project.add_release(release=ReleaseInfo(p['name'],
+ p['version'], metadata={'summary': p['summary']},
+ index=self._index))
+ except IrrationalVersionError as e:
+ logger.warning("Irrational version error found: %s", e)
+ return [self._projects[p['name'].lower()] for p in projects]
+
+ def get_all_projects(self):
+ """Return the list of all projects registered in the package index"""
+ projects = self.proxy.list_packages()
+ for name in projects:
+ self.get_releases(name, show_hidden=True)
+
+ return [self._projects[name.lower()] for name in set(projects)]
+
+ @property
+ def proxy(self):
+ """Property used to return the XMLRPC server proxy.
+
+ If no server proxy is defined yet, creates a new one::
+
+ >>> client = Client()
+ >>> client.proxy()
+ <ServerProxy for python.org/pypi>
+
+ """
+ if not hasattr(self, '_server_proxy'):
+ self._server_proxy = xmlrpc.client.ServerProxy(self.server_url)
+
+ return self._server_proxy
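
A minimal sketch of the XML-RPC client above; the calls need network access
to the index and the project data is illustrative::

    from packaging.pypi.xmlrpc import Client

    client = Client()
    projects = client.search_projects(name="FooBar", operator="or")
    releases = client.get_releases("FooBar", show_hidden=True)
    release = client.get_metadata("FooBar", "1.2")
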
diff --git a/Lib/packaging/run.py b/Lib/packaging/run.py
new file mode 100644
index 0000000..c3600a7
--- /dev/null
+++ b/Lib/packaging/run.py
@@ -0,0 +1,663 @@
+"""Main command line parser. Implements the pysetup script."""
+
+import os
+import re
+import sys
+import getopt
+import logging
+
+from packaging import logger
+from packaging.dist import Distribution
+from packaging.util import _is_archive_file, generate_setup_py
+from packaging.command import get_command_class, STANDARD_COMMANDS
+from packaging.install import install, install_local_project, remove
+from packaging.database import get_distribution, get_distributions
+from packaging.depgraph import generate_graph
+from packaging.fancy_getopt import FancyGetopt
+from packaging.errors import (PackagingArgError, PackagingError,
+ PackagingModuleError, PackagingClassError,
+ CCompilerError)
+
+
+command_re = re.compile(r'^[a-zA-Z]([a-zA-Z0-9_]*)$')
+
+common_usage = """\
+Actions:
+%(actions)s
+
+To get more help on an action, use:
+
+ pysetup action --help
+"""
+
+global_options = [
+ # The fourth entry for verbose means that it can be repeated.
+ ('verbose', 'v', "run verbosely (default)", True),
+ ('quiet', 'q', "run quietly (turns verbosity off)"),
+ ('dry-run', 'n', "don't actually do anything"),
+ ('help', 'h', "show detailed help message"),
+ ('no-user-cfg', None, 'ignore pydistutils.cfg in your home directory'),
+    ('version', None, 'display the version'),
+]
+
+negative_opt = {'quiet': 'verbose'}
+
+display_options = [
+ ('help-commands', None, "list all available commands"),
+]
+
+display_option_names = [x[0].replace('-', '_') for x in display_options]
+
+
+def _parse_args(args, options, long_options):
+ """Transform sys.argv input into a dict.
+
+    :param args: the args to parse (i.e. sys.argv)
+ :param options: the list of options to pass to getopt
+ :param long_options: the list of string with the names of the long options
+ to be passed to getopt.
+
+ The function returns a dict with options/long_options as keys and matching
+ values as values.
+ """
+ optlist, args = getopt.gnu_getopt(args, options, long_options)
+ optdict = {}
+ optdict['args'] = args
+    for k, v in optlist:
+        k = k.lstrip('-')
+        # flags get '' as value, valued options get their argument
+        optdict.setdefault(k, []).append(v)
+ return optdict
+
+
+class action_help:
+ """Prints a help message when the standard help flags: -h and --help
+ are used on the commandline.
+ """
+
+ def __init__(self, help_msg):
+ self.help_msg = help_msg
+
+ def __call__(self, f):
+ def wrapper(*args, **kwargs):
+ f_args = args[1]
+ if '--help' in f_args or '-h' in f_args:
+ print(self.help_msg)
+ return
+ return f(*args, **kwargs)
+ return wrapper
+
+
+@action_help("""\
+Usage: pysetup create
+ or: pysetup create --help
+
+Create a new Python project.
+""")
+def _create(dispatcher, args, **kw):
+ from packaging.create import main
+ return main()
+
+
+@action_help("""\
+Usage: pysetup generate-setup
+ or: pysetup generate-setup --help
+
+Generate a setup.py script for backward-compatibility purposes.
+""")
+def _generate(dispatcher, args, **kw):
+ generate_setup_py()
+ logger.info('The setup.py was generated')
+
+
+@action_help("""\
+Usage: pysetup graph dist
+ or: pysetup graph --help
+
+Print dependency graph for the distribution.
+
+positional arguments:
+ dist installed distribution name
+""")
+def _graph(dispatcher, args, **kw):
+ name = args[1]
+ dist = get_distribution(name, use_egg_info=True)
+ if dist is None:
+ logger.warning('Distribution not found.')
+ return 1
+ else:
+ dists = get_distributions(use_egg_info=True)
+ graph = generate_graph(dists)
+ print(graph.repr_node(dist))
+
+
+@action_help("""\
+Usage: pysetup install [dist]
+ or: pysetup install [archive]
+ or: pysetup install [src_dir]
+ or: pysetup install --help
+
+Install a Python distribution from the indexes, source directory, or sdist.
+
+positional arguments:
+ archive path to source distribution (zip, tar.gz)
+ dist distribution name to install from the indexes
+  src_dir  path to source directory
+""")
+def _install(dispatcher, args, **kw):
+ # first check if we are in a source directory
+ if len(args) < 2:
+ # are we inside a project dir?
+ if os.path.isfile('setup.cfg') or os.path.isfile('setup.py'):
+ args.insert(1, os.getcwd())
+ else:
+ logger.warning('No project to install.')
+ return 1
+
+ target = args[1]
+ # installing from a source dir or archive file?
+ if os.path.isdir(target) or _is_archive_file(target):
+ return not install_local_project(target)
+ else:
+ # download from PyPI
+ return not install(target)
+
+
+@action_help("""\
+Usage: pysetup metadata [dist]
+ or: pysetup metadata [dist] [-f field ...]
+ or: pysetup metadata --help
+
+Print metadata for the distribution.
+
+positional arguments:
+ dist installed distribution name
+
+optional arguments:
+ -f metadata field to print; omit to get all fields
+""")
+def _metadata(dispatcher, args, **kw):
+ opts = _parse_args(args[1:], 'f:', [])
+ if opts['args']:
+ name = opts['args'][0]
+ dist = get_distribution(name, use_egg_info=True)
+ if dist is None:
+ logger.warning('%r not installed', name)
+ return 1
+ elif os.path.isfile('setup.cfg'):
+ logger.info('searching local dir for metadata')
+ dist = Distribution() # XXX use config module
+ dist.parse_config_files()
+ else:
+ logger.warning('no argument given and no local setup.cfg found')
+ return 1
+
+ metadata = dist.metadata
+
+ if 'f' in opts:
+ keys = (k for k in opts['f'] if k in metadata)
+ else:
+ keys = metadata.keys()
+
+ for key in keys:
+ if key in metadata:
+ print(metadata._convert_name(key) + ':')
+ value = metadata[key]
+ if isinstance(value, list):
+ for v in value:
+ print(' ', v)
+ else:
+ print(' ', value.replace('\n', '\n '))
+
+
+@action_help("""\
+Usage: pysetup remove dist [-y]
+ or: pysetup remove --help
+
+Uninstall a Python distribution.
+
+positional arguments:
+ dist installed distribution name
+
+optional arguments:
+ -y auto confirm distribution removal
+""")
+def _remove(dispatcher, args, **kw):
+ opts = _parse_args(args[1:], 'y', [])
+ if 'y' in opts:
+ auto_confirm = True
+ else:
+ auto_confirm = False
+
+ retcode = 0
+ for dist in set(opts['args']):
+ try:
+ remove(dist, auto_confirm=auto_confirm)
+ except PackagingError:
+ logger.warning('%r not installed', dist)
+ retcode = 1
+
+ return retcode
+
+
+@action_help("""\
+Usage: pysetup run [global_opts] cmd1 [cmd1_opts] [cmd2 [cmd2_opts] ...]
+ or: pysetup run --help
+ or: pysetup run --list-commands
+ or: pysetup run cmd --help
+""")
+def _run(dispatcher, args, **kw):
+ parser = dispatcher.parser
+ args = args[1:]
+
+ commands = STANDARD_COMMANDS # FIXME display extra commands
+
+ if args == ['--list-commands']:
+ print('List of available commands:')
+ for cmd in commands:
+ cls = dispatcher.cmdclass.get(cmd) or get_command_class(cmd)
+ desc = getattr(cls, 'description', '(no description available)')
+ print(' %s: %s' % (cmd, desc))
+ return
+
+ while args:
+ args = dispatcher._parse_command_opts(parser, args)
+ if args is None:
+ return
+
+ # create the Distribution class
+ # need to feed setup.cfg here !
+ dist = Distribution()
+
+ # Find and parse the config file(s): they will override options from
+ # the setup script, but be overridden by the command line.
+
+ # XXX still need to be extracted from Distribution
+ dist.parse_config_files()
+
+ for cmd in dispatcher.commands:
+ # FIXME need to catch MetadataMissingError here (from the check command
+ # e.g.)--or catch any exception, print an error message and exit with 1
+ dist.run_command(cmd, dispatcher.command_options[cmd])
+
+ return 0
+
+
+@action_help("""\
+Usage: pysetup list [dist ...]
+ or: pysetup list --help
+
+Print name, version and location for the matching installed distributions.
+
+positional arguments:
+ dist installed distribution name; omit to get all distributions
+""")
+def _list(dispatcher, args, **kw):
+ opts = _parse_args(args[1:], '', [])
+ dists = get_distributions(use_egg_info=True)
+ if opts['args']:
+ results = (d for d in dists if d.name.lower() in opts['args'])
+ listall = False
+ else:
+ results = dists
+ listall = True
+
+ number = 0
+ for dist in results:
+ print('%r %s (from %r)' % (dist.name, dist.version, dist.path))
+ number += 1
+
+ if number == 0:
+ if listall:
+ logger.info('Nothing seems to be installed.')
+ else:
+ logger.warning('No matching distribution found.')
+ return 1
+ else:
+ logger.info('Found %d projects installed.', number)
+
+
+@action_help("""\
+Usage: pysetup search [project] [--simple [url]] [--xmlrpc [url] [--fieldname value ...] --operator or|and]
+ or: pysetup search --help
+
+Search the indexes for the matching projects.
+
+positional arguments:
+ project the project pattern to search for
+
+optional arguments:
+  --xmlrpc [url]     whether to use the xmlrpc index or not. If a url is
+                     specified, it will be used rather than the default one.
+
+  --simple [url]     whether to use the simple index or not. If a url is
+                     specified, it will be used rather than the default one.
+
+  --fieldname value  Make a search on this field. Can only be used if
+                     --xmlrpc has been selected or is the default index.
+
+  --operator or|and  Define the operator to use when doing xmlrpc
+                     searches with multiple fieldnames. Can only be used if
+                     --xmlrpc has been selected or is the default index.
+""")
+def _search(dispatcher, args, **kw):
+ """The search action.
+
+    It is able to search a specific index, using the simple or xmlrpc
+    clients (selected with --simple / --xmlrpc, see the usage above).
+ """
+ #opts = _parse_args(args[1:], '', ['simple', 'xmlrpc'])
+ # 1. what kind of index is requested ? (xmlrpc / simple)
+ logger.error('not implemented')
+ return 1
+
+
+actions = [
+ ('run', 'Run one or several commands', _run),
+ ('metadata', 'Display the metadata of a project', _metadata),
+ ('install', 'Install a project', _install),
+ ('remove', 'Remove a project', _remove),
+ ('search', 'Search for a project in the indexes', _search),
+ ('list', 'List installed projects', _list),
+ ('graph', 'Display a graph', _graph),
+ ('create', 'Create a project', _create),
+ ('generate-setup', 'Generate a backward-compatible setup.py', _generate),
+]
+
+
+class Dispatcher:
+ """Reads the command-line options
+ """
+ def __init__(self, args=None):
+ self.verbose = 1
+ self.dry_run = False
+ self.help = False
+ self.cmdclass = {}
+ self.commands = []
+ self.command_options = {}
+
+ for attr in display_option_names:
+ setattr(self, attr, False)
+
+ self.parser = FancyGetopt(global_options + display_options)
+ self.parser.set_negative_aliases(negative_opt)
+ # FIXME this parses everything, including command options (e.g. "run
+ # build -i" errors with "option -i not recognized")
+ args = self.parser.getopt(args=args, object=self)
+
+        # the first remaining argument, if any, is the action name
+ if len(args) == 0:
+ self.action = None
+ else:
+ self.action = args[0]
+
+ allowed = [action[0] for action in actions] + [None]
+ if self.action not in allowed:
+ msg = 'Unrecognized action "%s"' % self.action
+ raise PackagingArgError(msg)
+
+ self._set_logger()
+ self.args = args
+
+ # for display options we return immediately
+ if self.help or self.action is None:
+ self._show_help(self.parser, display_options_=False)
+
+ def _set_logger(self):
+ # setting up the logging level from the command-line options
+ # -q gets warning, error and critical
+ if self.verbose == 0:
+ level = logging.WARNING
+ # default level or -v gets info too
+ # XXX there's a bug somewhere: the help text says that -v is default
+ # (and verbose is set to 1 above), but when the user explicitly gives
+ # -v on the command line, self.verbose is incremented to 2! Here we
+ # compensate for that (I tested manually). On a related note, I think
+ # it's a good thing to use -q/nothing/-v/-vv on the command line
+ # instead of logging constants; it will be easy to add support for
+ # logging configuration in setup.cfg for advanced users. --merwok
+ elif self.verbose in (1, 2):
+ level = logging.INFO
+ else: # -vv and more for debug
+ level = logging.DEBUG
+
+ # setting up the stream handler
+ handler = logging.StreamHandler(sys.stderr)
+ handler.setLevel(level)
+ logger.addHandler(handler)
+ logger.setLevel(level)
+
+ def _parse_command_opts(self, parser, args):
+ # Pull the current command from the head of the command line
+ command = args[0]
+ if not command_re.match(command):
+ raise SystemExit("invalid command name %r" % (command,))
+ self.commands.append(command)
+
+ # Dig up the command class that implements this command, so we
+ # 1) know that it's a valid command, and 2) know which options
+ # it takes.
+ try:
+ cmd_class = get_command_class(command)
+ except PackagingModuleError as msg:
+ raise PackagingArgError(msg)
+
+ # XXX We want to push this in packaging.command
+ #
+ # Require that the command class be derived from Command -- want
+ # to be sure that the basic "command" interface is implemented.
+ for meth in ('initialize_options', 'finalize_options', 'run'):
+ if hasattr(cmd_class, meth):
+ continue
+ raise PackagingClassError(
+ 'command %r must implement %r' % (cmd_class, meth))
+
+ # Also make sure that the command object provides a list of its
+ # known options.
+ if not (hasattr(cmd_class, 'user_options') and
+ isinstance(cmd_class.user_options, list)):
+ raise PackagingClassError(
+ "command class %s must provide "
+ "'user_options' attribute (a list of tuples)" % cmd_class)
+
+ # If the command class has a list of negative alias options,
+ # merge it in with the global negative aliases.
+ _negative_opt = negative_opt.copy()
+
+ if hasattr(cmd_class, 'negative_opt'):
+ _negative_opt.update(cmd_class.negative_opt)
+
+ # Check for help_options in command class. They have a different
+ # format (tuple of four) so we need to preprocess them here.
+ if (hasattr(cmd_class, 'help_options') and
+ isinstance(cmd_class.help_options, list)):
+ help_options = cmd_class.help_options[:]
+ else:
+ help_options = []
+
+ # All commands support the global options too, just by adding
+ # in 'global_options'.
+ parser.set_option_table(global_options +
+ cmd_class.user_options +
+ help_options)
+ parser.set_negative_aliases(_negative_opt)
+ args, opts = parser.getopt(args[1:])
+
+ if hasattr(opts, 'help') and opts.help:
+ self._show_command_help(cmd_class)
+ return
+
+ if (hasattr(cmd_class, 'help_options') and
+ isinstance(cmd_class.help_options, list)):
+ help_option_found = False
+ for help_option, short, desc, func in cmd_class.help_options:
+ if hasattr(opts, help_option.replace('-', '_')):
+ help_option_found = True
+ if callable(func):
+ func()
+ else:
+ raise PackagingClassError(
+ "invalid help function %r for help option %r: "
+ "must be a callable object (function, etc.)"
+ % (func, help_option))
+
+ if help_option_found:
+ return
+
+ # Put the options from the command line into their official
+ # holding pen, the 'command_options' dictionary.
+ opt_dict = self.get_option_dict(command)
+ for name, value in vars(opts).items():
+ opt_dict[name] = ("command line", value)
+
+ return args
+
+ def get_option_dict(self, command):
+ """Get the option dictionary for a given command. If that
+ command's option dictionary hasn't been created yet, then create it
+ and return the new dictionary; otherwise, return the existing
+ option dictionary.
+ """
+ d = self.command_options.get(command)
+ if d is None:
+ d = self.command_options[command] = {}
+ return d
+
+ def show_help(self):
+ self._show_help(self.parser)
+
+ def print_usage(self, parser):
+ parser.set_option_table(global_options)
+
+ actions_ = [' %s: %s' % (name, desc) for name, desc, __ in actions]
+ usage = common_usage % {'actions': '\n'.join(actions_)}
+
+ parser.print_help(usage + "\nGlobal options:")
+
+ def _show_help(self, parser, global_options_=True, display_options_=True,
+ commands=[]):
+ # late import because of mutual dependence between these modules
+ from packaging.command.cmd import Command
+
+ print('Usage: pysetup [options] action [action_options]')
+ print()
+ if global_options_:
+ self.print_usage(self.parser)
+ print()
+
+ if display_options_:
+ parser.set_option_table(display_options)
+ parser.print_help(
+ "Information display options (just display " +
+ "information, ignore any commands)")
+ print()
+
+ for command in commands:
+ if isinstance(command, type) and issubclass(command, Command):
+ cls = command
+ else:
+ cls = get_command_class(command)
+ if (hasattr(cls, 'help_options') and
+ isinstance(cls.help_options, list)):
+ parser.set_option_table(cls.user_options + cls.help_options)
+ else:
+ parser.set_option_table(cls.user_options)
+
+ parser.print_help("Options for %r command:" % cls.__name__)
+ print()
+
+ def _show_command_help(self, command):
+ if isinstance(command, str):
+ command = get_command_class(command)
+
+ desc = getattr(command, 'description', '(no description available)')
+ print('Description:', desc)
+ print()
+
+ if (hasattr(command, 'help_options') and
+ isinstance(command.help_options, list)):
+ self.parser.set_option_table(command.user_options +
+ command.help_options)
+ else:
+ self.parser.set_option_table(command.user_options)
+
+ self.parser.print_help("Options:")
+ print()
+
+ def _get_command_groups(self):
+ """Helper function to retrieve all the command class names divided
+ into standard commands (listed in
+ packaging.command.STANDARD_COMMANDS) and extra commands (given in
+ self.cmdclass and not standard commands).
+ """
+ extra_commands = [cmd for cmd in self.cmdclass
+ if cmd not in STANDARD_COMMANDS]
+ return STANDARD_COMMANDS, extra_commands
+
+ def print_commands(self):
+ """Print out a help message listing all available commands with a
+ description of each. The list is divided into standard commands
+ (listed in packaging.command.STANDARD_COMMANDS) and extra commands
+ (given in self.cmdclass and not standard commands). The
+ descriptions come from the command class attribute
+ 'description'.
+ """
+ std_commands, extra_commands = self._get_command_groups()
+ max_length = max(len(command)
+ for commands in (std_commands, extra_commands)
+ for command in commands)
+
+ self.print_command_list(std_commands, "Standard commands", max_length)
+ if extra_commands:
+ print()
+ self.print_command_list(extra_commands, "Extra commands",
+ max_length)
+
+ def print_command_list(self, commands, header, max_length):
+ """Print a subset of the list of all commands -- used by
+ 'print_commands()'.
+ """
+ print(header + ":")
+
+ for cmd in commands:
+ cls = self.cmdclass.get(cmd) or get_command_class(cmd)
+ description = getattr(cls, 'description',
+ '(no description available)')
+
+ print(" %-*s %s" % (max_length, cmd, description))
+
+ def __call__(self):
+ if self.action is None:
+ return
+
+ for action, desc, func in actions:
+ if action == self.action:
+ return func(self, self.args)
+ return -1
+
+
+def main(args=None):
+ old_level = logger.level
+ old_handlers = list(logger.handlers)
+ try:
+ dispatcher = Dispatcher(args)
+ if dispatcher.action is None:
+ return
+ return dispatcher()
+ except KeyboardInterrupt:
+ logger.info('interrupted')
+ return 1
+ except (IOError, os.error, PackagingError, CCompilerError) as exc:
+ logger.exception(exc)
+ return 1
+ finally:
+ logger.setLevel(old_level)
+ logger.handlers[:] = old_handlers
+
+
+if __name__ == '__main__':
+ sys.exit(main())
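
A minimal sketch of the option parsing and entry point above; the argument
values and the installed project name are illustrative::

    from packaging.run import _parse_args, main

    opts = _parse_args(['-f', 'name', '-f', 'version', 'CLVault'], 'f:', [])
    # -> {'args': ['CLVault'], 'f': ['name', 'version']}

    # equivalent to running "pysetup list CLVault" from the shell
    rc = main(['list', 'CLVault'])
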
diff --git a/Lib/packaging/tests/LONG_DESC.txt b/Lib/packaging/tests/LONG_DESC.txt
new file mode 100644
index 0000000..2b4358a
--- /dev/null
+++ b/Lib/packaging/tests/LONG_DESC.txt
@@ -0,0 +1,44 @@
+CLVault
+=======
+
+CLVault uses Keyring to provide a command-line utility to safely store
+and retrieve passwords.
+
+Install it using pip or the setup.py script::
+
+ $ python setup.py install
+
+ $ pip install clvault
+
+Once it's installed, you will have three scripts installed in your
+Python scripts folder, which you can use to list, store and retrieve passwords::
+
+ $ clvault-set blog
+ Set your password:
+ Set the associated username (can be blank): tarek
+ Set a description (can be blank): My blog password
+ Password set.
+
+ $ clvault-get blog
+ The username is "tarek"
+ The password has been copied in your clipboard
+
+ $ clvault-list
+ Registered services:
+ blog My blog password
+
+
+*clvault-set* takes a service name, then prompts you for a password and some
+optional information about your service. The password is safely stored in
+a keyring while the description is saved in a ``.clvault`` file in your
+home directory. This file is created automatically the first time the command
+is used.
+
+*clvault-get* copies the password for a given service in your clipboard, and
+displays the associated user if any.
+
+*clvault-list* lists all registered services, with their description when
+given.
+
+
+Project page: http://bitbucket.org/tarek/clvault
diff --git a/Lib/packaging/tests/PKG-INFO b/Lib/packaging/tests/PKG-INFO
new file mode 100644
index 0000000..f48546e
--- /dev/null
+++ b/Lib/packaging/tests/PKG-INFO
@@ -0,0 +1,57 @@
+Metadata-Version: 1.2
+Name: CLVault
+Version: 0.5
+Summary: Command-Line utility to store and retrieve passwords
+Home-page: http://bitbucket.org/tarek/clvault
+Author: Tarek Ziade
+Author-email: tarek@ziade.org
+License: PSF
+Keywords: keyring,password,crypt
+Requires-Dist: foo; sys.platform == 'okook'
+Requires-Dist: bar; sys.platform == '%s'
+Platform: UNKNOWN
+Description: CLVault
+ |=======
+ |
+ |CLVault uses Keyring to provide a command-line utility to safely store
+ |and retrieve passwords.
+ |
+ |Install it using pip or the setup.py script::
+ |
+ | $ python setup.py install
+ |
+ | $ pip install clvault
+ |
+ |Once it's installed, you will have three scripts installed in your
+        |Python scripts folder, which you can use to list, store and retrieve passwords::
+ |
+ | $ clvault-set blog
+ | Set your password:
+ | Set the associated username (can be blank): tarek
+ | Set a description (can be blank): My blog password
+ | Password set.
+ |
+ | $ clvault-get blog
+ | The username is "tarek"
+ | The password has been copied in your clipboard
+ |
+ | $ clvault-list
+ | Registered services:
+ | blog My blog password
+ |
+ |
+        |*clvault-set* takes a service name, then prompts you for a password and some
+ |optional information about your service. The password is safely stored in
+ |a keyring while the description is saved in a ``.clvault`` file in your
+ |home directory. This file is created automatically the first time the command
+ |is used.
+ |
+ |*clvault-get* copies the password for a given service in your clipboard, and
+ |displays the associated user if any.
+ |
+ |*clvault-list* lists all registered services, with their description when
+ |given.
+ |
+ |
+ |Project page: http://bitbucket.org/tarek/clvault
+ |
diff --git a/Lib/packaging/tests/SETUPTOOLS-PKG-INFO b/Lib/packaging/tests/SETUPTOOLS-PKG-INFO
new file mode 100644
index 0000000..dff8d00
--- /dev/null
+++ b/Lib/packaging/tests/SETUPTOOLS-PKG-INFO
@@ -0,0 +1,182 @@
+Metadata-Version: 1.0
+Name: setuptools
+Version: 0.6c9
+Summary: Download, build, install, upgrade, and uninstall Python packages -- easily!
+Home-page: http://pypi.python.org/pypi/setuptools
+Author: Phillip J. Eby
+Author-email: distutils-sig@python.org
+License: PSF or ZPL
+Description: ===============================
+ Installing and Using Setuptools
+ ===============================
+
+ .. contents:: **Table of Contents**
+
+
+ -------------------------
+ Installation Instructions
+ -------------------------
+
+ Windows
+ =======
+
+ Install setuptools using the provided ``.exe`` installer. If you've previously
+ installed older versions of setuptools, please delete all ``setuptools*.egg``
+ and ``setuptools.pth`` files from your system's ``site-packages`` directory
+ (and any other ``sys.path`` directories) FIRST.
+
+ If you are upgrading a previous version of setuptools that was installed using
+ an ``.exe`` installer, please be sure to also *uninstall that older version*
+ via your system's "Add/Remove Programs" feature, BEFORE installing the newer
+ version.
+
+ Once installation is complete, you will find an ``easy_install.exe`` program in
+ your Python ``Scripts`` subdirectory. Be sure to add this directory to your
+ ``PATH`` environment variable, if you haven't already done so.
+
+
+ RPM-Based Systems
+ =================
+
+ Install setuptools using the provided source RPM. The included ``.spec`` file
+ assumes you are installing using the default ``python`` executable, and is not
+ specific to a particular Python version. The ``easy_install`` executable will
+ be installed to a system ``bin`` directory such as ``/usr/bin``.
+
+ If you wish to install to a location other than the default Python
+ installation's default ``site-packages`` directory (and ``$prefix/bin`` for
+ scripts), please use the ``.egg``-based installation approach described in the
+ following section.
+
+
+ Cygwin, Mac OS X, Linux, Other
+ ==============================
+
+ 1. Download the appropriate egg for your version of Python (e.g.
+ ``setuptools-0.6c9-py2.4.egg``). Do NOT rename it.
+
+ 2. Run it as if it were a shell script, e.g. ``sh setuptools-0.6c9-py2.4.egg``.
+ Setuptools will install itself using the matching version of Python (e.g.
+ ``python2.4``), and will place the ``easy_install`` executable in the
+ default location for installing Python scripts (as determined by the
+ standard distutils configuration files, or by the Python installation).
+
+ If you want to install setuptools to somewhere other than ``site-packages`` or
+ your default distutils installation locations for libraries and scripts, you
+ may include EasyInstall command-line options such as ``--prefix``,
+ ``--install-dir``, and so on, following the ``.egg`` filename on the same
+ command line. For example::
+
+ sh setuptools-0.6c9-py2.4.egg --prefix=~
+
+ You can use ``--help`` to get a full options list, but we recommend consulting
+ the `EasyInstall manual`_ for detailed instructions, especially `the section
+ on custom installation locations`_.
+
+ .. _EasyInstall manual: http://peak.telecommunity.com/DevCenter/EasyInstall
+ .. _the section on custom installation locations: http://peak.telecommunity.com/DevCenter/EasyInstall#custom-installation-locations
+
+
+ Cygwin Note
+ -----------
+
+ If you are trying to install setuptools for the **Windows** version of Python
+ (as opposed to the Cygwin version that lives in ``/usr/bin``), you must make
+ sure that an appropriate executable (``python2.3``, ``python2.4``, or
+ ``python2.5``) is on your **Cygwin** ``PATH`` when invoking the egg. For
+ example, doing the following at a Cygwin bash prompt will install setuptools
+ for the **Windows** Python found at ``C:\\Python24``::
+
+ ln -s /cygdrive/c/Python24/python.exe python2.4
+ PATH=.:$PATH sh setuptools-0.6c9-py2.4.egg
+ rm python2.4
+
+
+ Downloads
+ =========
+
+ All setuptools downloads can be found at `the project's home page in the Python
+ Package Index`_. Scroll to the very bottom of the page to find the links.
+
+ .. _the project's home page in the Python Package Index: http://pypi.python.org/pypi/setuptools
+
+ In addition to the PyPI downloads, the development version of ``setuptools``
+ is available from the `Python SVN sandbox`_, and in-development versions of the
+ `0.6 branch`_ are available as well.
+
+ .. _0.6 branch: http://svn.python.org/projects/sandbox/branches/setuptools-0.6/#egg=setuptools-dev06
+
+ .. _Python SVN sandbox: http://svn.python.org/projects/sandbox/trunk/setuptools/#egg=setuptools-dev
+
+ --------------------------------
+ Using Setuptools and EasyInstall
+ --------------------------------
+
+ Here are some of the available manuals, tutorials, and other resources for
+ learning about Setuptools, Python Eggs, and EasyInstall:
+
+ * `The EasyInstall user's guide and reference manual`_
+ * `The setuptools Developer's Guide`_
+ * `The pkg_resources API reference`_
+ * `Package Compatibility Notes`_ (user-maintained)
+ * `The Internal Structure of Python Eggs`_
+
+ Questions, comments, and bug reports should be directed to the `distutils-sig
+ mailing list`_. If you have written (or know of) any tutorials, documentation,
+ plug-ins, or other resources for setuptools users, please let us know about
+ them there, so this reference list can be updated. If you have working,
+ *tested* patches to correct problems or add features, you may submit them to
+ the `setuptools bug tracker`_.
+
+ .. _setuptools bug tracker: http://bugs.python.org/setuptools/
+ .. _Package Compatibility Notes: http://peak.telecommunity.com/DevCenter/PackageNotes
+ .. _The Internal Structure of Python Eggs: http://peak.telecommunity.com/DevCenter/EggFormats
+ .. _The setuptools Developer's Guide: http://peak.telecommunity.com/DevCenter/setuptools
+ .. _The pkg_resources API reference: http://peak.telecommunity.com/DevCenter/PkgResources
+ .. _The EasyInstall user's guide and reference manual: http://peak.telecommunity.com/DevCenter/EasyInstall
+ .. _distutils-sig mailing list: http://mail.python.org/pipermail/distutils-sig/
+
+
+ -------
+ Credits
+ -------
+
+ * The original design for the ``.egg`` format and the ``pkg_resources`` API was
+ co-created by Phillip Eby and Bob Ippolito. Bob also implemented the first
+ version of ``pkg_resources``, and supplied the OS X operating system version
+ compatibility algorithm.
+
+ * Ian Bicking implemented many early "creature comfort" features of
+ easy_install, including support for downloading via Sourceforge and
+ Subversion repositories. Ian's comments on the Web-SIG about WSGI
+ application deployment also inspired the concept of "entry points" in eggs,
+ and he has given talks at PyCon and elsewhere to inform and educate the
+ community about eggs and setuptools.
+
+ * Jim Fulton contributed time and effort to build automated tests of various
+ aspects of ``easy_install``, and supplied the doctests for the command-line
+ ``.exe`` wrappers on Windows.
+
+ * Phillip J. Eby is the principal author and maintainer of setuptools, and
+ first proposed the idea of an importable binary distribution format for
+ Python application plug-ins.
+
+ * Significant parts of the implementation of setuptools were funded by the Open
+ Source Applications Foundation, to provide a plug-in infrastructure for the
+ Chandler PIM application. In addition, many OSAF staffers (such as Mike
+ "Code Bear" Taylor) contributed their time and stress as guinea pigs for the
+ use of eggs and setuptools, even before eggs were "cool". (Thanks, guys!)
+
+
+Keywords: CPAN PyPI distutils eggs package management
+Platform: UNKNOWN
+Classifier: Development Status :: 3 - Alpha
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Python Software Foundation License
+Classifier: License :: OSI Approved :: Zope Public License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: System :: Archiving :: Packaging
+Classifier: Topic :: System :: Systems Administration
+Classifier: Topic :: Utilities
diff --git a/Lib/packaging/tests/SETUPTOOLS-PKG-INFO2 b/Lib/packaging/tests/SETUPTOOLS-PKG-INFO2
new file mode 100644
index 0000000..4b3906a
--- /dev/null
+++ b/Lib/packaging/tests/SETUPTOOLS-PKG-INFO2
@@ -0,0 +1,183 @@
+Metadata-Version: 1.1
+Name: setuptools
+Version: 0.6c9
+Summary: Download, build, install, upgrade, and uninstall Python packages -- easily!
+Home-page: http://pypi.python.org/pypi/setuptools
+Author: Phillip J. Eby
+Author-email: distutils-sig@python.org
+License: PSF or ZPL
+Description: ===============================
+ Installing and Using Setuptools
+ ===============================
+
+ .. contents:: **Table of Contents**
+
+
+ -------------------------
+ Installation Instructions
+ -------------------------
+
+ Windows
+ =======
+
+ Install setuptools using the provided ``.exe`` installer. If you've previously
+ installed older versions of setuptools, please delete all ``setuptools*.egg``
+ and ``setuptools.pth`` files from your system's ``site-packages`` directory
+ (and any other ``sys.path`` directories) FIRST.
+
+ If you are upgrading a previous version of setuptools that was installed using
+ an ``.exe`` installer, please be sure to also *uninstall that older version*
+ via your system's "Add/Remove Programs" feature, BEFORE installing the newer
+ version.
+
+ Once installation is complete, you will find an ``easy_install.exe`` program in
+ your Python ``Scripts`` subdirectory. Be sure to add this directory to your
+ ``PATH`` environment variable, if you haven't already done so.
+
+
+ RPM-Based Systems
+ =================
+
+ Install setuptools using the provided source RPM. The included ``.spec`` file
+ assumes you are installing using the default ``python`` executable, and is not
+ specific to a particular Python version. The ``easy_install`` executable will
+ be installed to a system ``bin`` directory such as ``/usr/bin``.
+
+ If you wish to install to a location other than the default Python
+ installation's default ``site-packages`` directory (and ``$prefix/bin`` for
+ scripts), please use the ``.egg``-based installation approach described in the
+ following section.
+
+
+ Cygwin, Mac OS X, Linux, Other
+ ==============================
+
+ 1. Download the appropriate egg for your version of Python (e.g.
+ ``setuptools-0.6c9-py2.4.egg``). Do NOT rename it.
+
+ 2. Run it as if it were a shell script, e.g. ``sh setuptools-0.6c9-py2.4.egg``.
+ Setuptools will install itself using the matching version of Python (e.g.
+ ``python2.4``), and will place the ``easy_install`` executable in the
+ default location for installing Python scripts (as determined by the
+ standard distutils configuration files, or by the Python installation).
+
+ If you want to install setuptools to somewhere other than ``site-packages`` or
+ your default distutils installation locations for libraries and scripts, you
+ may include EasyInstall command-line options such as ``--prefix``,
+ ``--install-dir``, and so on, following the ``.egg`` filename on the same
+ command line. For example::
+
+ sh setuptools-0.6c9-py2.4.egg --prefix=~
+
+ You can use ``--help`` to get a full options list, but we recommend consulting
+ the `EasyInstall manual`_ for detailed instructions, especially `the section
+ on custom installation locations`_.
+
+ .. _EasyInstall manual: http://peak.telecommunity.com/DevCenter/EasyInstall
+ .. _the section on custom installation locations: http://peak.telecommunity.com/DevCenter/EasyInstall#custom-installation-locations
+
+
+ Cygwin Note
+ -----------
+
+ If you are trying to install setuptools for the **Windows** version of Python
+ (as opposed to the Cygwin version that lives in ``/usr/bin``), you must make
+ sure that an appropriate executable (``python2.3``, ``python2.4``, or
+ ``python2.5``) is on your **Cygwin** ``PATH`` when invoking the egg. For
+ example, doing the following at a Cygwin bash prompt will install setuptools
+ for the **Windows** Python found at ``C:\\Python24``::
+
+ ln -s /cygdrive/c/Python24/python.exe python2.4
+ PATH=.:$PATH sh setuptools-0.6c9-py2.4.egg
+ rm python2.4
+
+
+ Downloads
+ =========
+
+ All setuptools downloads can be found at `the project's home page in the Python
+ Package Index`_. Scroll to the very bottom of the page to find the links.
+
+ .. _the project's home page in the Python Package Index: http://pypi.python.org/pypi/setuptools
+
+ In addition to the PyPI downloads, the development version of ``setuptools``
+ is available from the `Python SVN sandbox`_, and in-development versions of the
+ `0.6 branch`_ are available as well.
+
+ .. _0.6 branch: http://svn.python.org/projects/sandbox/branches/setuptools-0.6/#egg=setuptools-dev06
+
+ .. _Python SVN sandbox: http://svn.python.org/projects/sandbox/trunk/setuptools/#egg=setuptools-dev
+
+ --------------------------------
+ Using Setuptools and EasyInstall
+ --------------------------------
+
+ Here are some of the available manuals, tutorials, and other resources for
+ learning about Setuptools, Python Eggs, and EasyInstall:
+
+ * `The EasyInstall user's guide and reference manual`_
+ * `The setuptools Developer's Guide`_
+ * `The pkg_resources API reference`_
+ * `Package Compatibility Notes`_ (user-maintained)
+ * `The Internal Structure of Python Eggs`_
+
+ Questions, comments, and bug reports should be directed to the `distutils-sig
+ mailing list`_. If you have written (or know of) any tutorials, documentation,
+ plug-ins, or other resources for setuptools users, please let us know about
+ them there, so this reference list can be updated. If you have working,
+ *tested* patches to correct problems or add features, you may submit them to
+ the `setuptools bug tracker`_.
+
+ .. _setuptools bug tracker: http://bugs.python.org/setuptools/
+ .. _Package Compatibility Notes: http://peak.telecommunity.com/DevCenter/PackageNotes
+ .. _The Internal Structure of Python Eggs: http://peak.telecommunity.com/DevCenter/EggFormats
+ .. _The setuptools Developer's Guide: http://peak.telecommunity.com/DevCenter/setuptools
+ .. _The pkg_resources API reference: http://peak.telecommunity.com/DevCenter/PkgResources
+ .. _The EasyInstall user's guide and reference manual: http://peak.telecommunity.com/DevCenter/EasyInstall
+ .. _distutils-sig mailing list: http://mail.python.org/pipermail/distutils-sig/
+
+
+ -------
+ Credits
+ -------
+
+ * The original design for the ``.egg`` format and the ``pkg_resources`` API was
+ co-created by Phillip Eby and Bob Ippolito. Bob also implemented the first
+ version of ``pkg_resources``, and supplied the OS X operating system version
+ compatibility algorithm.
+
+ * Ian Bicking implemented many early "creature comfort" features of
+ easy_install, including support for downloading via Sourceforge and
+ Subversion repositories. Ian's comments on the Web-SIG about WSGI
+ application deployment also inspired the concept of "entry points" in eggs,
+ and he has given talks at PyCon and elsewhere to inform and educate the
+ community about eggs and setuptools.
+
+ * Jim Fulton contributed time and effort to build automated tests of various
+ aspects of ``easy_install``, and supplied the doctests for the command-line
+ ``.exe`` wrappers on Windows.
+
+ * Phillip J. Eby is the principal author and maintainer of setuptools, and
+ first proposed the idea of an importable binary distribution format for
+ Python application plug-ins.
+
+ * Significant parts of the implementation of setuptools were funded by the Open
+ Source Applications Foundation, to provide a plug-in infrastructure for the
+ Chandler PIM application. In addition, many OSAF staffers (such as Mike
+ "Code Bear" Taylor) contributed their time and stress as guinea pigs for the
+ use of eggs and setuptools, even before eggs were "cool". (Thanks, guys!)
+
+
+Keywords: CPAN PyPI distutils eggs package management
+Platform: UNKNOWN
+Classifier: Development Status :: 3 - Alpha
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Python Software Foundation License
+Classifier: License :: OSI Approved :: Zope Public License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: System :: Archiving :: Packaging
+Classifier: Topic :: System :: Systems Administration
+Classifier: Topic :: Utilities
+Requires: Foo
diff --git a/Lib/packaging/tests/__init__.py b/Lib/packaging/tests/__init__.py
new file mode 100644
index 0000000..cb82004
--- /dev/null
+++ b/Lib/packaging/tests/__init__.py
@@ -0,0 +1,28 @@
+"""Test suite for packaging.
+
+This test suite consists of a collection of test modules in the
+packaging.tests package. Each test module has a name starting with
+'test' and contains a function test_suite(). The function is expected
+to return an initialized unittest.TestSuite instance.
+
+Utility code is included in packaging.tests.support.
+
+Always import unittest from this module: it will be unittest from the
+standard library for packaging tests and unittest2 for distutils2 tests.
+"""
+
+import os
+import sys
+import unittest
+
+
+def test_suite():
+ suite = unittest.TestSuite()
+ here = os.path.dirname(__file__) or os.curdir
+ for fn in os.listdir(here):
+ if fn.startswith("test") and fn.endswith(".py"):
+ modname = "packaging.tests." + fn[:-3]
+ __import__(modname)
+ module = sys.modules[modname]
+ suite.addTest(module.test_suite())
+ return suite
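For reference, a minimal test module following the convention described above
might look like this (a sketch; the module name and test body are
illustrative):

    # hypothetical file: Lib/packaging/tests/test_example.py
    from packaging.tests import unittest

    class ExampleTestCase(unittest.TestCase):
        def test_truth(self):
            self.assertTrue(True)

    def test_suite():
        # return an initialized unittest.TestSuite, as the convention requires
        return unittest.TestLoader().loadTestsFromTestCase(ExampleTestCase)

    if __name__ == '__main__':
        unittest.main(defaultTest='test_suite')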
diff --git a/Lib/packaging/tests/__main__.py b/Lib/packaging/tests/__main__.py
new file mode 100644
index 0000000..00f323e
--- /dev/null
+++ b/Lib/packaging/tests/__main__.py
@@ -0,0 +1,24 @@
+"""Packaging test suite runner."""
+
+# Ripped from importlib tests, thanks Brett!
+
+import os
+import unittest
+from test.support import run_unittest, reap_children, reap_threads
+
+
+@reap_threads
+def test_main():
+ try:
+ start_dir = os.path.dirname(__file__)
+ top_dir = os.path.dirname(os.path.dirname(start_dir))
+ test_loader = unittest.TestLoader()
+ # XXX find out how to use unittest.main, to get command-line options
+ # (failfast, catch, etc.)
+ run_unittest(test_loader.discover(start_dir, top_level_dir=top_dir))
+ finally:
+ reap_children()
+
+
+if __name__ == '__main__':
+ test_main()
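The same discovery can also be reproduced by hand with the standard loader,
which is handy when running the suite outside of regrtest (a sketch; the
paths are assumptions about the checkout layout):

    import unittest

    loader = unittest.TestLoader()
    suite = loader.discover('Lib/packaging/tests', top_level_dir='Lib')
    unittest.TextTestRunner(verbosity=2).run(suite)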
diff --git a/Lib/email/test/__init__.py b/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/INSTALLER
index e69de29..e69de29 100644
--- a/Lib/email/test/__init__.py
+++ b/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/INSTALLER
diff --git a/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/METADATA b/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/METADATA
new file mode 100644
index 0000000..65e839a
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/METADATA
@@ -0,0 +1,4 @@
+Metadata-version: 1.2
+Name: babar
+Version: 0.1
+Author: FELD Boris
\ No newline at end of file
diff --git a/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/RECORD b/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/RECORD
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/RECORD
diff --git a/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/REQUESTED b/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/REQUESTED
diff --git a/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/RESOURCES b/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/RESOURCES
new file mode 100644
index 0000000..5d0da49
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/RESOURCES
@@ -0,0 +1,2 @@
+babar.png,babar.png
+babar.cfg,babar.cfg
\ No newline at end of file
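The RESOURCES file maps each resource name to its destination path, one
comma-separated pair per line. A reading sketch (the dist-info path is
illustrative):

    import csv

    # hypothetical location of the fixture above
    with open('babar-0.1.dist-info/RESOURCES', newline='') as f:
        resources = dict(csv.reader(f))
    print(resources['babar.png'])   # -> 'babar.png'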
diff --git a/Lib/packaging/tests/fake_dists/babar.cfg b/Lib/packaging/tests/fake_dists/babar.cfg
new file mode 100644
index 0000000..ecd6efe
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/babar.cfg
@@ -0,0 +1 @@
+Config
\ No newline at end of file
diff --git a/Lib/packaging/tests/fake_dists/babar.png b/Lib/packaging/tests/fake_dists/babar.png
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/babar.png
diff --git a/Lib/packaging/tests/fake_dists/bacon-0.1.egg-info/PKG-INFO b/Lib/packaging/tests/fake_dists/bacon-0.1.egg-info/PKG-INFO
new file mode 100644
index 0000000..a176dfd
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/bacon-0.1.egg-info/PKG-INFO
@@ -0,0 +1,6 @@
+Metadata-Version: 1.2
+Name: bacon
+Version: 0.1
+Provides-Dist: truffles (2.0)
+Provides-Dist: bacon (0.1)
+Obsoletes-Dist: truffles (>=0.9,<=1.5)
diff --git a/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/PKG-INFO b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/PKG-INFO
new file mode 100644
index 0000000..a7e118a
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/PKG-INFO
@@ -0,0 +1,18 @@
+Metadata-Version: 1.0
+Name: banana
+Version: 0.4
+Summary: A yellow fruit
+Home-page: http://en.wikipedia.org/wiki/Banana
+Author: Josip Djolonga
+Author-email: foo@nbar.com
+License: BSD
+Description: A fruit
+Keywords: foo bar
+Platform: UNKNOWN
+Classifier: Development Status :: 4 - Beta
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: Science/Research
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Topic :: Scientific/Engineering :: GIS
diff --git a/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/SOURCES.txt b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/SOURCES.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/SOURCES.txt
diff --git a/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/dependency_links.txt b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/dependency_links.txt
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/dependency_links.txt
@@ -0,0 +1 @@
+
diff --git a/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/entry_points.txt b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/entry_points.txt
new file mode 100644
index 0000000..5d3e5f6
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/entry_points.txt
@@ -0,0 +1,3 @@
+
+ # -*- Entry points: -*-
+
\ No newline at end of file
diff --git a/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/not-zip-safe b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/not-zip-safe
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/not-zip-safe
@@ -0,0 +1 @@
+
diff --git a/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/requires.txt b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/requires.txt
new file mode 100644
index 0000000..4354305
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/requires.txt
@@ -0,0 +1,6 @@
+# this should be ignored
+
+strawberry >=0.5
+
+[section ignored]
+foo ==0.5
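This requires.txt fixture exercises the egg-info convention that blank lines
and ``#`` comments are skipped, and that only entries before the first
``[section]`` header are unconditional requirements. A parsing sketch of just
that convention:

    def base_requirements(lines):
        """Return the requirements listed before the first [section]."""
        reqs = []
        for line in lines:
            line = line.strip()
            if not line or line.startswith('#'):
                continue       # blank lines and comments are ignored
            if line.startswith('['):
                break          # entries after a section header are extras
            reqs.append(line)
        return reqs

    with open('EGG-INFO/requires.txt') as f:   # hypothetical path
        print(base_requirements(f))            # -> ['strawberry >=0.5']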
diff --git a/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/top_level.txt b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/top_level.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/top_level.txt
diff --git a/Lib/packaging/tests/fake_dists/cheese-2.0.2.egg-info b/Lib/packaging/tests/fake_dists/cheese-2.0.2.egg-info
new file mode 100644
index 0000000..27cbe30
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/cheese-2.0.2.egg-info
@@ -0,0 +1,5 @@
+Metadata-Version: 1.2
+Name: cheese
+Version: 2.0.2
+Provides-Dist: truffles (1.0.2)
+Obsoletes-Dist: truffles (!=1.2,<=2.0)
diff --git a/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/INSTALLER b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/INSTALLER
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/INSTALLER
diff --git a/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/METADATA b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/METADATA
new file mode 100644
index 0000000..418929e
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/METADATA
@@ -0,0 +1,9 @@
+Metadata-Version: 1.2
+Name: choxie
+Version: 2.0.0.9
+Summary: Chocolate with a kick!
+Requires-Dist: towel-stuff (0.1)
+Requires-Dist: nut
+Provides-Dist: truffles (1.0)
+Obsoletes-Dist: truffles (<=0.8,>=0.5)
+Obsoletes-Dist: truffles (<=0.9,>=0.6)
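Fields such as Requires-Dist, Provides-Dist and Obsoletes-Dist follow the
RFC 822-style layout of PEP 345, so a fixture like this can be read with the
stdlib email parser. A minimal sketch (the path is illustrative):

    from email.parser import Parser

    # hypothetical location of the fixture above
    with open('choxie-2.0.0.9.dist-info/METADATA') as f:
        meta = Parser().parse(f)

    print(meta['Name'], meta['Version'])    # choxie 2.0.0.9
    print(meta.get_all('Requires-Dist'))    # ['towel-stuff (0.1)', 'nut']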
diff --git a/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/RECORD b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/RECORD
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/RECORD
diff --git a/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/REQUESTED b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/REQUESTED
diff --git a/Lib/packaging/tests/fake_dists/choxie-2.0.0.9/choxie/__init__.py b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9/choxie/__init__.py
new file mode 100644
index 0000000..40a96af
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9/choxie/__init__.py
@@ -0,0 +1 @@
+# -*- coding: utf-8 -*-
diff --git a/Lib/packaging/tests/fake_dists/choxie-2.0.0.9/choxie/chocolate.py b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9/choxie/chocolate.py
new file mode 100644
index 0000000..c4027f3
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9/choxie/chocolate.py
@@ -0,0 +1,10 @@
+# -*- coding: utf-8 -*-
+from towel_stuff import Towel
+
+class Chocolate(object):
+ """A piece of chocolate."""
+
+ def wrap_with_towel(self):
+ towel = Towel()
+ towel.wrap(self)
+ return towel
diff --git a/Lib/packaging/tests/fake_dists/choxie-2.0.0.9/truffles.py b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9/truffles.py
new file mode 100644
index 0000000..342b8ea
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9/truffles.py
@@ -0,0 +1,5 @@
+# -*- coding: utf-8 -*-
+from choxie.chocolate import Chocolate
+
+class Truffle(Chocolate):
+ """A truffle."""
diff --git a/Lib/packaging/tests/fake_dists/coconuts-aster-10.3.egg-info/PKG-INFO b/Lib/packaging/tests/fake_dists/coconuts-aster-10.3.egg-info/PKG-INFO
new file mode 100644
index 0000000..499a083
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/coconuts-aster-10.3.egg-info/PKG-INFO
@@ -0,0 +1,5 @@
+Metadata-Version: 1.2
+Name: coconuts-aster
+Version: 10.3
+Provides-Dist: strawberry (0.6)
+Provides-Dist: banana (0.4)
diff --git a/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/INSTALLER b/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/INSTALLER
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/INSTALLER
diff --git a/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/METADATA b/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/METADATA
new file mode 100644
index 0000000..0b99f52
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/METADATA
@@ -0,0 +1,5 @@
+Metadata-Version: 1.2
+Name: grammar
+Version: 1.0a4
+Requires-Dist: truffles (>=1.2)
+Author: Sherlock Holmes
diff --git a/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/RECORD b/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/RECORD
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/RECORD
diff --git a/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/REQUESTED b/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/REQUESTED
diff --git a/Lib/packaging/tests/fake_dists/grammar-1.0a4/grammar/__init__.py b/Lib/packaging/tests/fake_dists/grammar-1.0a4/grammar/__init__.py
new file mode 100644
index 0000000..40a96af
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/grammar-1.0a4/grammar/__init__.py
@@ -0,0 +1 @@
+# -*- coding: utf-8 -*-
diff --git a/Lib/packaging/tests/fake_dists/grammar-1.0a4/grammar/utils.py b/Lib/packaging/tests/fake_dists/grammar-1.0a4/grammar/utils.py
new file mode 100644
index 0000000..66ba796
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/grammar-1.0a4/grammar/utils.py
@@ -0,0 +1,8 @@
+# -*- coding: utf-8 -*-
+from random import randint
+
+def is_valid_grammar(sentence):
+ if randint(0, 10) < 2:
+ return False
+ else:
+ return True
diff --git a/Lib/packaging/tests/fake_dists/nut-funkyversion.egg-info b/Lib/packaging/tests/fake_dists/nut-funkyversion.egg-info
new file mode 100644
index 0000000..0c58ec1
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/nut-funkyversion.egg-info
@@ -0,0 +1,3 @@
+Metadata-Version: 1.2
+Name: nut
+Version: funkyversion
diff --git a/Lib/packaging/tests/fake_dists/strawberry-0.6.egg b/Lib/packaging/tests/fake_dists/strawberry-0.6.egg
new file mode 100644
index 0000000..6d160e8
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/strawberry-0.6.egg
Binary files differ
diff --git a/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/INSTALLER b/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/INSTALLER
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/INSTALLER
diff --git a/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/METADATA b/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/METADATA
new file mode 100644
index 0000000..ca46d0a
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/METADATA
@@ -0,0 +1,7 @@
+Metadata-Version: 1.2
+Name: towel-stuff
+Version: 0.1
+Provides-Dist: truffles (1.1.2)
+Provides-Dist: towel-stuff (0.1)
+Obsoletes-Dist: truffles (!=0.8,<1.0)
+Requires-Dist: bacon (<=0.2)
diff --git a/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/RECORD b/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/RECORD
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/RECORD
diff --git a/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/REQUESTED b/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/REQUESTED
diff --git a/Lib/packaging/tests/fake_dists/towel_stuff-0.1/towel_stuff/__init__.py b/Lib/packaging/tests/fake_dists/towel_stuff-0.1/towel_stuff/__init__.py
new file mode 100644
index 0000000..191f895
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/towel_stuff-0.1/towel_stuff/__init__.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+class Towel(object):
+ """A towel, that one should never be without."""
+
+ def __init__(self, color='tie-dye'):
+ self.color = color
+ self.wrapped_obj = None
+
+ def wrap(self, obj):
+ """Wrap an object up in our towel."""
+ self.wrapped_obj = obj
+
+ def unwrap(self):
+ """Unwrap whatever is in our towel and return whatever it is."""
+ obj = self.wrapped_obj
+ self.wrapped_obj = None
+ return obj
diff --git a/Lib/packaging/tests/fake_dists/truffles-5.0.egg-info b/Lib/packaging/tests/fake_dists/truffles-5.0.egg-info
new file mode 100644
index 0000000..45f0cf8
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/truffles-5.0.egg-info
@@ -0,0 +1,3 @@
+Metadata-Version: 1.2
+Name: truffles
+Version: 5.0
diff --git a/Lib/packaging/tests/fixer/__init__.py b/Lib/packaging/tests/fixer/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fixer/__init__.py
diff --git a/Lib/packaging/tests/fixer/fix_echo.py b/Lib/packaging/tests/fixer/fix_echo.py
new file mode 100644
index 0000000..8daae3e
--- /dev/null
+++ b/Lib/packaging/tests/fixer/fix_echo.py
@@ -0,0 +1,16 @@
+# Example custom fixer, derived from fix_raw_input by Andre Roberge
+
+from lib2to3 import fixer_base
+from lib2to3.fixer_util import Name
+
+
+class FixEcho(fixer_base.BaseFix):
+
+ BM_compatible = True
+ PATTERN = """
+ power< name='echo' trailer< '(' [any] ')' > any* >
+ """
+
+ def transform(self, node, results):
+ name = results['name']
+ name.replace(Name('print', prefix=name.prefix))
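A fixer like this can be exercised directly through lib2to3 by passing its
dotted module name. A usage sketch (assuming the module is importable as
packaging.tests.fixer.fix_echo):

    from lib2to3.refactor import RefactoringTool

    rt = RefactoringTool(['packaging.tests.fixer.fix_echo'])
    tree = rt.refactor_string('echo("hello")\n', '<example>')
    print(tree)   # the echo() call has been rewritten to print("hello")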
diff --git a/Lib/packaging/tests/fixer/fix_echo2.py b/Lib/packaging/tests/fixer/fix_echo2.py
new file mode 100644
index 0000000..1b92891
--- /dev/null
+++ b/Lib/packaging/tests/fixer/fix_echo2.py
@@ -0,0 +1,16 @@
+# Example custom fixer, derived from fix_raw_input by Andre Roberge
+
+from lib2to3 import fixer_base
+from lib2to3.fixer_util import Name
+
+
+class FixEcho2(fixer_base.BaseFix):
+
+ BM_compatible = True
+ PATTERN = """
+ power< name='echo2' trailer< '(' [any] ')' > any* >
+ """
+
+ def transform(self, node, results):
+ name = results['name']
+ name.replace(Name('print', prefix=name.prefix))
diff --git a/Lib/packaging/tests/pypi_server.py b/Lib/packaging/tests/pypi_server.py
new file mode 100644
index 0000000..13c30cf
--- /dev/null
+++ b/Lib/packaging/tests/pypi_server.py
@@ -0,0 +1,449 @@
+"""Mock PyPI Server implementation, to use in tests.
+
+This module also provides a simple test case to extend if you need to use
+the PyPIServer all along your test case. Be sure to read the documentation
+before any use.
+
+XXX TODO:
+
+The mock server can handle simple HTTP requests (to simulate a simple index)
+or XMLRPC requests, over HTTP. The two do not share the same interface, which
+makes them a pain to deal with.
+
+It would be a good idea to rethink the way distributions are handled in the
+mock server. Since it must be able to return malformed HTML pages, we need to
+keep the static behavior.
+
+I think of something like that:
+
+ >>> server = PyPIMockServer()
+ >>> server.startHTTP()
+ >>> server.startXMLRPC()
+
+Then, the server must have only one port to rely on, e.g.
+
+ >>> server.fulladdress()
+ "http://ip:port/"
+
+It could be simple to have a single HTTP server relaying the requests to the
+two implementations (static HTTP and XMLRPC over HTTP).
+"""
+
+import os
+import queue
+import select
+import threading
+from functools import wraps
+from http.server import HTTPServer, SimpleHTTPRequestHandler
+from xmlrpc.server import SimpleXMLRPCServer
+
+from packaging.tests import unittest
+
+
+PYPI_DEFAULT_STATIC_PATH = os.path.join(
+ os.path.dirname(os.path.abspath(__file__)), 'pypiserver')
+
+
+def use_xmlrpc_server(*server_args, **server_kwargs):
+ server_kwargs['serve_xmlrpc'] = True
+ return use_pypi_server(*server_args, **server_kwargs)
+
+
+def use_http_server(*server_args, **server_kwargs):
+ server_kwargs['serve_xmlrpc'] = False
+ return use_pypi_server(*server_args, **server_kwargs)
+
+
+def use_pypi_server(*server_args, **server_kwargs):
+    """Decorator providing a PyPIServer to a test method.
+
+    The server is started just when needed, stopped when the test method
+    finishes, and does not run for the entire duration of the test case.
+    """
+ def wrapper(func):
+ @wraps(func)
+ def wrapped(*args, **kwargs):
+ server = PyPIServer(*server_args, **server_kwargs)
+ server.start()
+ try:
+ func(server=server, *args, **kwargs)
+ finally:
+ server.stop()
+ return wrapped
+ return wrapper
+
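+# Usage sketch for the decorator above (hypothetical test method; the
+# started server is passed to the test as the `server` keyword argument
+# and is stopped when the method returns):
+#
+#     @use_pypi_server(static_uri_paths=["simple"])
+#     def test_simple_index(self, server):
+#         server.default_response_status = 200
+#         ...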
+
+class PyPIServerTestCase(unittest.TestCase):
+
+ def setUp(self):
+ super(PyPIServerTestCase, self).setUp()
+ self.pypi = PyPIServer()
+ self.pypi.start()
+ self.addCleanup(self.pypi.stop)
+
+
+class PyPIServer(threading.Thread):
+    """Mocked PyPI server.
+
+    Provides a mocked version of the PyPI APIs to ease testing.
+    Supports serving static content as well as previously given text.
+ """
+
+ def __init__(self, test_static_path=None,
+ static_filesystem_paths=None,
+ static_uri_paths=["simple", "packages"], serve_xmlrpc=False):
+ """Initialize the server.
+
+        Default behavior is to start the HTTP server. You can start the
+        XMLRPC server instead by setting serve_xmlrpc to True. Caution: only
+        one server will be started.
+
+        static_uri_paths and static_filesystem_paths provide, respectively,
+        the HTTP paths to serve statically and where to find the matching
+        files on the filesystem.
+ """
+        # launch the server in a new dedicated thread so that it does not
+        # block the tests.
+ super(PyPIServer, self).__init__()
+ self._run = True
+ self._serve_xmlrpc = serve_xmlrpc
+ if static_filesystem_paths is None:
+ static_filesystem_paths = ["default"]
+
+        # TODO: allow serving XMLRPC and HTTP static files at the same time.
+ if not self._serve_xmlrpc:
+ self.server = HTTPServer(('127.0.0.1', 0), PyPIRequestHandler)
+ self.server.RequestHandlerClass.pypi_server = self
+
+ self.request_queue = queue.Queue()
+ self._requests = []
+ self.default_response_status = 404
+ self.default_response_headers = [('Content-type', 'text/plain')]
+        self.default_response_data = "The page does not exist"
+
+ # initialize static paths / filesystems
+ self.static_uri_paths = static_uri_paths
+
+ # append the static paths defined locally
+ if test_static_path is not None:
+ static_filesystem_paths.append(test_static_path)
+ self.static_filesystem_paths = [
+ PYPI_DEFAULT_STATIC_PATH + "/" + path
+ for path in static_filesystem_paths]
+ else:
+ # XMLRPC server
+ self.server = PyPIXMLRPCServer(('127.0.0.1', 0))
+ self.xmlrpc = XMLRPCMockIndex()
+ # register the xmlrpc methods
+ self.server.register_introspection_functions()
+ self.server.register_instance(self.xmlrpc)
+
+ self.address = ('127.0.0.1', self.server.server_port)
+        # silence request logging to avoid unwanted output.
+ self.server.RequestHandlerClass.log_request = lambda *_: None
+
+ def run(self):
+ # loop because we can't stop it otherwise, for python < 2.6
+ while self._run:
+ r, w, e = select.select([self.server], [], [], 0.5)
+ if r:
+ self.server.handle_request()
+
+ def stop(self):
+        """Stop the server (self shutdown is not supported for Python < 2.6)."""
+ self._run = False
+ if self.is_alive():
+ self.join()
+ self.server.server_close()
+
+ def get_next_response(self):
+ return (self.default_response_status,
+ self.default_response_headers,
+ self.default_response_data)
+
+ @property
+ def requests(self):
+        """Use this property to get all requests that have been made
+        to the server.
+        """
+ while True:
+ try:
+ self._requests.append(self.request_queue.get_nowait())
+ except queue.Empty:
+ break
+ return self._requests
+
+ @property
+ def full_address(self):
+ return "http://%s:%s" % self.address
+
+
+class PyPIRequestHandler(SimpleHTTPRequestHandler):
+ # we need to access the pypi server while serving the content
+ pypi_server = None
+
+ def serve_request(self):
+ """Serve the content.
+
+        Also record the requests so they can be accessed later. If the
+        requested URL matches a static URI, serve static content; otherwise
+        serve what is provided by the `get_next_response` method.
+
+        If nothing is defined there, return a 404 response.
+ """
+ # record the request. Read the input only on PUT or POST requests
+ if self.command in ("PUT", "POST"):
+ if 'content-length' in self.headers:
+ request_data = self.rfile.read(
+ int(self.headers['content-length']))
+ else:
+ request_data = self.rfile.read()
+
+ elif self.command in ("GET", "DELETE"):
+ request_data = ''
+
+ self.pypi_server.request_queue.put((self, request_data))
+
+        # serve the content from the local disk if the requested URL begins
+        # with a pattern defined in `static_uri_paths`
+ url_parts = self.path.split("/")
+ if (len(url_parts) > 1 and
+ url_parts[1] in self.pypi_server.static_uri_paths):
+ data = None
+            # check the most recently added paths first.
+ fs_paths = []
+ fs_paths.extend(self.pypi_server.static_filesystem_paths)
+ fs_paths.reverse()
+            relative_path = self.path
+            # append "index.html" once, before the loop, so that trying
+            # several filesystem paths does not append it repeatedly
+            if relative_path.endswith("/"):
+                relative_path += "index.html"
+            for fs_path in fs_paths:
+                try:
+
+ if relative_path.endswith('.tar.gz'):
+ with open(fs_path + relative_path, 'rb') as file:
+ data = file.read()
+ headers = [('Content-type', 'application/x-gtar')]
+ else:
+ with open(fs_path + relative_path) as file:
+ data = file.read().encode()
+ headers = [('Content-type', 'text/html')]
+
+ headers.append(('Content-Length', len(data)))
+                    self.make_response(data, headers=headers)
+                    # stop after the first path that could serve the file
+                    break
+
+ except IOError:
+ pass
+
+ if data is None:
+ self.make_response("Not found", 404)
+
+ # otherwise serve the content from get_next_response
+ else:
+ # send back a response
+ status, headers, data = self.pypi_server.get_next_response()
+ self.make_response(data, status, headers)
+
+ do_POST = do_GET = do_DELETE = do_PUT = serve_request
+
+ def make_response(self, data, status=200,
+ headers=[('Content-type', 'text/html')]):
+ """Send the response to the HTTP client"""
+ if not isinstance(status, int):
+ try:
+ status = int(status)
+ except ValueError:
+                # we probably got something like "404 Not Found";
+                # just keep the first 3 digits
+ status = int(status[:3])
+
+ self.send_response(status)
+ for header, value in headers:
+ self.send_header(header, value)
+ self.end_headers()
+
+ if isinstance(data, str):
+ data = data.encode('utf-8')
+
+ self.wfile.write(data)
+
+
+class PyPIXMLRPCServer(SimpleXMLRPCServer):
+ def server_bind(self):
+        """Override server_bind to store the server port."""
+ super(PyPIXMLRPCServer, self).server_bind()
+ host, port = self.socket.getsockname()[:2]
+ self.server_port = port
+
+
+class MockDist:
+ """Fake distribution, used in the Mock PyPI Server"""
+
+ def __init__(self, name, version="1.0", hidden=False, url="http://url/",
+ type="sdist", filename="", size=10000,
+ digest="123456", downloads=7, has_sig=False,
+ python_version="source", comment="comment",
+ author="John Doe", author_email="john@doe.name",
+ maintainer="Main Tayner", maintainer_email="maintainer_mail",
+ project_url="http://project_url/", homepage="http://homepage/",
+ keywords="", platform="UNKNOWN", classifiers=[], licence="",
+ description="Description", summary="Summary", stable_version="",
+ ordering="", documentation_id="", code_kwalitee_id="",
+ installability_id="", obsoletes=[], obsoletes_dist=[],
+ provides=[], provides_dist=[], requires=[], requires_dist=[],
+ requires_external=[], requires_python=""):
+
+ # basic fields
+ self.name = name
+ self.version = version
+ self.hidden = hidden
+
+ # URL infos
+ self.url = url
+ self.digest = digest
+ self.downloads = downloads
+ self.has_sig = has_sig
+ self.python_version = python_version
+ self.comment = comment
+ self.type = type
+
+ # metadata
+ self.author = author
+ self.author_email = author_email
+ self.maintainer = maintainer
+ self.maintainer_email = maintainer_email
+ self.project_url = project_url
+ self.homepage = homepage
+ self.keywords = keywords
+ self.platform = platform
+ self.classifiers = classifiers
+ self.licence = licence
+ self.description = description
+ self.summary = summary
+ self.stable_version = stable_version
+ self.ordering = ordering
+ self.cheesecake_documentation_id = documentation_id
+ self.cheesecake_code_kwalitee_id = code_kwalitee_id
+ self.cheesecake_installability_id = installability_id
+
+ self.obsoletes = obsoletes
+ self.obsoletes_dist = obsoletes_dist
+ self.provides = provides
+ self.provides_dist = provides_dist
+ self.requires = requires
+ self.requires_dist = requires_dist
+ self.requires_external = requires_external
+ self.requires_python = requires_python
+
+ def url_infos(self):
+ return {
+ 'url': self.url,
+ 'packagetype': self.type,
+ 'filename': 'filename.tar.gz',
+ 'size': '6000',
+ 'md5_digest': self.digest,
+ 'downloads': self.downloads,
+ 'has_sig': self.has_sig,
+ 'python_version': self.python_version,
+ 'comment_text': self.comment,
+ }
+
+ def metadata(self):
+ return {
+ 'maintainer': self.maintainer,
+ 'project_url': [self.project_url],
+ 'maintainer_email': self.maintainer_email,
+ 'cheesecake_code_kwalitee_id': self.cheesecake_code_kwalitee_id,
+ 'keywords': self.keywords,
+ 'obsoletes_dist': self.obsoletes_dist,
+ 'requires_external': self.requires_external,
+ 'author': self.author,
+ 'author_email': self.author_email,
+ 'download_url': self.url,
+ 'platform': self.platform,
+ 'version': self.version,
+ 'obsoletes': self.obsoletes,
+ 'provides': self.provides,
+ 'cheesecake_documentation_id': self.cheesecake_documentation_id,
+ '_pypi_hidden': self.hidden,
+ 'description': self.description,
+ '_pypi_ordering': 19,
+ 'requires_dist': self.requires_dist,
+ 'requires_python': self.requires_python,
+ 'classifiers': [],
+ 'name': self.name,
+ 'licence': self.licence, # XXX licence or license?
+ 'summary': self.summary,
+ 'home_page': self.homepage,
+ 'stable_version': self.stable_version,
+ # FIXME doesn't that reproduce the bug from 6527d3106e9f?
+ 'provides_dist': (self.provides_dist or
+ "%s (%s)" % (self.name, self.version)),
+ 'requires': self.requires,
+ 'cheesecake_installability_id': self.cheesecake_installability_id,
+ }
+
+ def search_result(self):
+ return {
+ '_pypi_ordering': 0,
+ 'version': self.version,
+ 'name': self.name,
+ 'summary': self.summary,
+ }
+
+
+class XMLRPCMockIndex:
+ """Mock XMLRPC server"""
+
+    def __init__(self, dists=None):
+        # avoid a mutable default argument shared between instances
+        self._dists = list(dists) if dists is not None else []
+ self._search_result = []
+
+ def add_distributions(self, dists):
+ for dist in dists:
+ self._dists.append(MockDist(**dist))
+
+ def set_distributions(self, dists):
+ self._dists = []
+ self.add_distributions(dists)
+
+ def set_search_result(self, result):
+ """set a predefined search result"""
+ self._search_result = result
+
+ def _get_search_results(self):
+ results = []
+ for name in self._search_result:
+ found_dist = [d for d in self._dists if d.name == name]
+ if found_dist:
+ results.append(found_dist[0])
+ else:
+ dist = MockDist(name)
+ results.append(dist)
+ self._dists.append(dist)
+ return [r.search_result() for r in results]
+
+ def list_packages(self):
+ return [d.name for d in self._dists]
+
+ def package_releases(self, package_name, show_hidden=False):
+ if show_hidden:
+ # return all
+ return [d.version for d in self._dists if d.name == package_name]
+ else:
+ # return only un-hidden
+ return [d.version for d in self._dists if d.name == package_name
+ and not d.hidden]
+
+ def release_urls(self, package_name, version):
+ return [d.url_infos() for d in self._dists
+ if d.name == package_name and d.version == version]
+
+ def release_data(self, package_name, version):
+ release = [d for d in self._dists
+ if d.name == package_name and d.version == version]
+ if release:
+ return release[0].metadata()
+ else:
+ return {}
+
+ def search(self, spec, operator="and"):
+ return self._get_search_results()
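Taken together, the pieces above let a test drive the mock server directly.
An end-to-end sketch (the URL path and response values are illustrative):

    import urllib.request
    from packaging.tests.pypi_server import PyPIServer

    server = PyPIServer()
    server.start()
    try:
        server.default_response_status = 200
        server.default_response_data = "hello"
        with urllib.request.urlopen(server.full_address + "/whatever") as r:
            print(r.read())                  # b'hello'
        handler, data = server.requests[-1]  # the recorded GET request
    finally:
        server.stop()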
diff --git a/Lib/packaging/tests/pypi_test_server.py b/Lib/packaging/tests/pypi_test_server.py
new file mode 100644
index 0000000..8c8c641
--- /dev/null
+++ b/Lib/packaging/tests/pypi_test_server.py
@@ -0,0 +1,59 @@
+"""Test PyPI Server implementation at testpypi.python.org, to use in tests.
+
+This is a drop-in replacement for the mock pypi server for testing against a
+real pypi server hosted by python.org especially for testing against.
+"""
+
+import unittest
+
+PYPI_DEFAULT_STATIC_PATH = None
+
+
+def use_xmlrpc_server(*server_args, **server_kwargs):
+ server_kwargs['serve_xmlrpc'] = True
+ return use_pypi_server(*server_args, **server_kwargs)
+
+
+def use_http_server(*server_args, **server_kwargs):
+ server_kwargs['serve_xmlrpc'] = False
+ return use_pypi_server(*server_args, **server_kwargs)
+
+
+def use_pypi_server(*server_args, **server_kwargs):
+    """Decorator providing a PyPIServer to a test method, just when
+    needed, and not for the entire duration of the test case.
+    """
+ def wrapper(func):
+ def wrapped(*args, **kwargs):
+ server = PyPIServer(*server_args, **server_kwargs)
+ func(server=server, *args, **kwargs)
+ return wrapped
+ return wrapper
+
+
+class PyPIServerTestCase(unittest.TestCase):
+
+ def setUp(self):
+ super(PyPIServerTestCase, self).setUp()
+ self.pypi = PyPIServer()
+ self.pypi.start()
+ self.addCleanup(self.pypi.stop)
+
+
+class PyPIServer:
+ """Shim to access testpypi.python.org, for testing a real server."""
+
+ def __init__(self, test_static_path=None,
+ static_filesystem_paths=["default"],
+ static_uri_paths=["simple"], serve_xmlrpc=False):
+ self.address = ('testpypi.python.org', '80')
+
+ def start(self):
+ pass
+
+ def stop(self):
+ pass
+
+ @property
+ def full_address(self):
+ return "http://%s:%s" % self.address
diff --git a/Lib/packaging/tests/pypiserver/downloads_with_md5/packages/source/f/foobar/foobar-0.1.tar.gz b/Lib/packaging/tests/pypiserver/downloads_with_md5/packages/source/f/foobar/foobar-0.1.tar.gz
new file mode 100644
index 0000000..333961e
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/downloads_with_md5/packages/source/f/foobar/foobar-0.1.tar.gz
Binary files differ
diff --git a/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/badmd5/badmd5-0.1.tar.gz b/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/badmd5/badmd5-0.1.tar.gz
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/badmd5/badmd5-0.1.tar.gz
diff --git a/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/badmd5/index.html b/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/badmd5/index.html
new file mode 100644
index 0000000..b89f1bd
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/badmd5/index.html
@@ -0,0 +1,3 @@
+<html><body>
+<a href="badmd5-0.1.tar.gz#md5=3e3d86693d6564c807272b11b3069dfe" rel="download">badmd5-0.1.tar.gz</a><br/>
+</body></html>
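The link above advertises an ``#md5=`` checksum that deliberately does not
match the (empty) archive, so checksum-verification failures can be tested.
The verification convention, sketched:

    import hashlib

    with open('badmd5-0.1.tar.gz', 'rb') as f:   # hypothetical path
        digest = hashlib.md5(f.read()).hexdigest()
    # the fixture's link advertises a different digest on purpose
    assert digest != '3e3d86693d6564c807272b11b3069dfe'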
diff --git a/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/foobar/index.html b/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/foobar/index.html
new file mode 100644
index 0000000..9e42b16
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/foobar/index.html
@@ -0,0 +1,3 @@
+<html><body>
+<a href="foobar-0.1.tar.gz#md5=fe18804c5b722ff024cabdf514924fc4" rel="download">foobar-0.1.tar.gz</a><br/>
+</body></html>
diff --git a/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/index.html b/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/index.html
new file mode 100644
index 0000000..9baee04
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/index.html
@@ -0,0 +1,2 @@
+<a href="foobar/">foobar/</a>
+<a href="badmd5/">badmd5/</a>
diff --git a/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/bar/index.html b/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/bar/index.html
new file mode 100644
index 0000000..c3d42c5
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/bar/index.html
@@ -0,0 +1,6 @@
+<html><head><title>Links for bar</title></head><body><h1>Links for bar</h1>
+<a rel="download" href="../../packages/source/F/bar/bar-1.0.tar.gz">bar-1.0.tar.gz</a><br/>
+<a rel="download" href="../../packages/source/F/bar/bar-1.0.1.tar.gz">bar-1.0.1.tar.gz</a><br/>
+<a rel="download" href="../../packages/source/F/bar/bar-2.0.tar.gz">bar-2.0.tar.gz</a><br/>
+<a rel="download" href="../../packages/source/F/bar/bar-2.0.1.tar.gz">bar-2.0.1.tar.gz</a><br/>
+</body></html>
diff --git a/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/baz/index.html b/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/baz/index.html
new file mode 100644
index 0000000..4f34312
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/baz/index.html
@@ -0,0 +1,6 @@
+<html><head><title>Links for baz</title></head><body><h1>Links for baz</h1>
+<a rel="download" href="../../packages/source/F/baz/baz-1.0.tar.gz">baz-1.0.tar.gz</a><br/>
+<a rel="download" href="../../packages/source/F/baz/baz-1.0.1.tar.gz">baz-1.0.1.tar.gz</a><br/>
+<a rel="download" href="../../packages/source/F/baz/baz-2.0.tar.gz">baz-2.0.tar.gz</a><br/>
+<a rel="download" href="../../packages/source/F/baz/baz-2.0.1.tar.gz">baz-2.0.1.tar.gz</a><br/>
+</body></html>
diff --git a/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/foo/index.html b/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/foo/index.html
new file mode 100644
index 0000000..0565e11
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/foo/index.html
@@ -0,0 +1,6 @@
+<html><head><title>Links for foo</title></head><body><h1>Links for foo</h1>
+<a rel="download" href="../../packages/source/F/foo/foo-1.0.tar.gz">foo-1.0.tar.gz</a><br/>
+<a rel="download" href="../../packages/source/F/foo/foo-1.0.1.tar.gz">foo-1.0.1.tar.gz</a><br/>
+<a rel="download" href="../../packages/source/F/foo/foo-2.0.tar.gz">foo-2.0.tar.gz</a><br/>
+<a rel="download" href="../../packages/source/F/foo/foo-2.0.1.tar.gz">foo-2.0.1.tar.gz</a><br/>
+</body></html>
diff --git a/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/index.html b/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/index.html
new file mode 100644
index 0000000..a70cfd3
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/index.html
@@ -0,0 +1,3 @@
+<a href="foo/">foo/</a>
+<a href="bar/">bar/</a>
+<a href="baz/">baz/</a>
diff --git a/Lib/packaging/tests/pypiserver/project_list/simple/index.html b/Lib/packaging/tests/pypiserver/project_list/simple/index.html
new file mode 100644
index 0000000..b36d728
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/project_list/simple/index.html
@@ -0,0 +1,5 @@
+<a class="test" href="yeah">FooBar-bar</a>
+<a class="test" href="yeah">Foobar-baz</a>
+<a class="test" href="yeah">Baz-FooBar</a>
+<a class="test" href="yeah">Baz</a>
+<a class="test" href="yeah">Foo</a>
diff --git a/Lib/packaging/tests/pypiserver/test_found_links/simple/foobar/index.html b/Lib/packaging/tests/pypiserver/test_found_links/simple/foobar/index.html
new file mode 100644
index 0000000..a282a4e
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/test_found_links/simple/foobar/index.html
@@ -0,0 +1,6 @@
+<html><head><title>Links for Foobar</title></head><body><h1>Links for Foobar</h1>
+<a rel="download" href="../../packages/source/F/Foobar/Foobar-1.0.tar.gz#md5=98fa833fdabcdd78d00245aead66c174">Foobar-1.0.tar.gz</a><br/>
+<a rel="download" href="../../packages/source/F/Foobar/Foobar-1.0.1.tar.gz#md5=2351efb20f6b7b5d9ce80fa4cb1bd9ca">Foobar-1.0.1.tar.gz</a><br/>
+<a rel="download" href="../../packages/source/F/Foobar/Foobar-2.0.tar.gz#md5=98fa833fdabcdd78d00245aead66c274">Foobar-2.0.tar.gz</a><br/>
+<a rel="download" href="../../packages/source/F/Foobar/Foobar-2.0.1.tar.gz#md5=2352efb20f6b7b5d9ce80fa4cb2bd9ca">Foobar-2.0.1.tar.gz</a><br/>
+</body></html>
diff --git a/Lib/packaging/tests/pypiserver/test_found_links/simple/index.html b/Lib/packaging/tests/pypiserver/test_found_links/simple/index.html
new file mode 100644
index 0000000..a1a7bb7
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/test_found_links/simple/index.html
@@ -0,0 +1 @@
+<a href="foobar/">foobar/</a>
diff --git a/Lib/packaging/tests/pypiserver/test_pypi_server/external/index.html b/Lib/packaging/tests/pypiserver/test_pypi_server/external/index.html
new file mode 100644
index 0000000..265ee0a
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/test_pypi_server/external/index.html
@@ -0,0 +1 @@
+index.html from external server
diff --git a/Lib/packaging/tests/pypiserver/test_pypi_server/simple/index.html b/Lib/packaging/tests/pypiserver/test_pypi_server/simple/index.html
new file mode 100644
index 0000000..6f97667
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/test_pypi_server/simple/index.html
@@ -0,0 +1 @@
+Yeah
diff --git a/Lib/packaging/tests/pypiserver/with_externals/external/external.html b/Lib/packaging/tests/pypiserver/with_externals/external/external.html
new file mode 100644
index 0000000..92e4702
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/with_externals/external/external.html
@@ -0,0 +1,3 @@
+<html><body>
+<a href="/foobar-0.1.tar.gz#md5=1__bad_md5___">bad old link</a>
+</body></html>
diff --git a/Lib/packaging/tests/pypiserver/with_externals/simple/foobar/index.html b/Lib/packaging/tests/pypiserver/with_externals/simple/foobar/index.html
new file mode 100644
index 0000000..b100a26
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/with_externals/simple/foobar/index.html
@@ -0,0 +1,4 @@
+<html><body>
+<a rel ="download" href="/foobar-0.1.tar.gz#md5=12345678901234567">foobar-0.1.tar.gz</a><br/>
+<a href="../../external/external.html" rel="homepage">external homepage</a><br/>
+</body></html>
diff --git a/Lib/packaging/tests/pypiserver/with_externals/simple/index.html b/Lib/packaging/tests/pypiserver/with_externals/simple/index.html
new file mode 100644
index 0000000..a1a7bb7
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/with_externals/simple/index.html
@@ -0,0 +1 @@
+<a href="foobar/">foobar/</a>
diff --git a/Lib/packaging/tests/pypiserver/with_norel_links/external/homepage.html b/Lib/packaging/tests/pypiserver/with_norel_links/external/homepage.html
new file mode 100644
index 0000000..1cc0c32
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/with_norel_links/external/homepage.html
@@ -0,0 +1,7 @@
+<html>
+<body>
+<p>a rel=homepage HTML page</p>
+<a href="/foobar-2.0.tar.gz">foobar 2.0</a>
+</body>
+</html>
+
diff --git a/Lib/packaging/tests/pypiserver/with_norel_links/external/nonrel.html b/Lib/packaging/tests/pypiserver/with_norel_links/external/nonrel.html
new file mode 100644
index 0000000..f6ace22
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/with_norel_links/external/nonrel.html
@@ -0,0 +1 @@
+A page linked without rel="download" or rel="homepage" link.
diff --git a/Lib/packaging/tests/pypiserver/with_norel_links/simple/foobar/index.html b/Lib/packaging/tests/pypiserver/with_norel_links/simple/foobar/index.html
new file mode 100644
index 0000000..171df93
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/with_norel_links/simple/foobar/index.html
@@ -0,0 +1,6 @@
+<html><body>
+<a rel="download" href="/foobar-0.1.tar.gz" rel="download">foobar-0.1.tar.gz</a><br/>
+<a href="../../external/homepage.html" rel="homepage">external homepage</a><br/>
+<a href="../../external/nonrel.html">unrelated link</a><br/>
+<a href="/unrelated-0.2.tar.gz">unrelated download</a></br/>
+</body></html>
diff --git a/Lib/packaging/tests/pypiserver/with_norel_links/simple/index.html b/Lib/packaging/tests/pypiserver/with_norel_links/simple/index.html
new file mode 100644
index 0000000..a1a7bb7
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/with_norel_links/simple/index.html
@@ -0,0 +1 @@
+<a href="foobar/">foobar/</a>
diff --git a/Lib/packaging/tests/pypiserver/with_real_externals/simple/foobar/index.html b/Lib/packaging/tests/pypiserver/with_real_externals/simple/foobar/index.html
new file mode 100644
index 0000000..b2885ae
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/with_real_externals/simple/foobar/index.html
@@ -0,0 +1,4 @@
+<html><body>
+<a rel="download" href="/foobar-0.1.tar.gz#md5=0_correct_md5">foobar-0.1.tar.gz</a><br/>
+<a href="http://a-really-external-website/external/external.html" rel="homepage">external homepage</a><br/>
+</body></html>
diff --git a/Lib/packaging/tests/pypiserver/with_real_externals/simple/index.html b/Lib/packaging/tests/pypiserver/with_real_externals/simple/index.html
new file mode 100644
index 0000000..a1a7bb7
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/with_real_externals/simple/index.html
@@ -0,0 +1 @@
+<a href="foobar/">foobar/</a>
diff --git a/Lib/packaging/tests/support.py b/Lib/packaging/tests/support.py
new file mode 100644
index 0000000..d76d3db
--- /dev/null
+++ b/Lib/packaging/tests/support.py
@@ -0,0 +1,400 @@
+"""Support code for packaging test cases.
+
+*This module should not be considered public: its content and API may
+change in incompatible ways.*
+
+A few helper classes are provided: LoggingCatcher, TempdirManager and
+EnvironRestorer. They are written to be used as mixins::
+
+ from packaging.tests import unittest
+ from packaging.tests.support import LoggingCatcher
+
+ class SomeTestCase(LoggingCatcher, unittest.TestCase):
+ ...
+
+If you need to define a setUp method on your test class, you have to
+call the mixin class's setUp method or it won't work (same thing for
+tearDown)::
+
+ def setUp(self):
+ super(SomeTestCase, self).setUp()
+ ... # other setup code
+
+Also provided is a DummyCommand class, useful to mock commands in the
+tests of another command that needs them, for example to fake
+compilation in build_ext (this requires that the mock build_ext command
+be injected into the distribution object's command_obj dictionary).
+
+For tests that need to compile an extension module, use the
+copy_xxmodule_c and fixup_build_ext functions.
+
+Each class or function has a docstring to explain its purpose and usage.
+Existing tests should also be used as examples.
+"""
+
+import os
+import sys
+import shutil
+import logging
+import logging.handlers  # _TestHandler below subclasses BufferingHandler
+import weakref
+import tempfile
+import sysconfig
+
+from packaging.dist import Distribution
+from packaging.util import resolve_name
+from packaging.command import set_command, _COMMANDS
+
+from packaging.tests import unittest
+from test.support import requires_zlib, unlink, skip_unless_symlink
+
+# define __all__ to make pydoc more useful
+__all__ = [
+ # TestCase mixins
+ 'LoggingCatcher', 'TempdirManager', 'EnvironRestorer',
+ # mocks
+ 'DummyCommand', 'TestDistribution', 'Inputs',
+ # misc. functions and decorators
+ 'fake_dec', 'create_distribution', 'use_command',
+ 'copy_xxmodule_c', 'fixup_build_ext',
+ 'skip_2to3_optimize',
+ # imported from this module for backport purposes
+ 'unittest', 'requires_zlib', 'skip_unless_symlink',
+]
+
+
+logger = logging.getLogger('packaging')
+logger2to3 = logging.getLogger('RefactoringTool')
+
+
+class _TestHandler(logging.handlers.BufferingHandler):
+ # stolen and adapted from test.support
+
+ def __init__(self):
+ super(_TestHandler, self).__init__(0)
+ self.setLevel(logging.DEBUG)
+
+ def shouldFlush(self):
+ return False
+
+ def emit(self, record):
+ self.buffer.append(record)
+
+
+class LoggingCatcher:
+ """TestCase-compatible mixin to receive logging calls.
+
+    Upon setUp, instances of this class get a BufferingHandler that's
+ configured to record all messages logged to the 'packaging' logger.
+
+ Use get_logs to retrieve messages and self.loghandler.flush to discard
+ them. get_logs automatically flushes the logs, unless you pass
+ *flush=False*, for example to make multiple calls to the method with
+    different level arguments. If your test calls some code that generates
+    logging messages and then you don't call get_logs, you will need to
+    flush manually before testing other code in the same test_* method;
+    otherwise get_logs in the next lines will see messages from the
+    previous lines. See the example in test_command_check.
+ """
+
+ def setUp(self):
+ super(LoggingCatcher, self).setUp()
+ self.loghandler = handler = _TestHandler()
+ self._old_levels = logger.level, logger2to3.level
+ logger.addHandler(handler)
+ logger.setLevel(logging.DEBUG) # we want all messages
+ logger2to3.setLevel(logging.CRITICAL) # we don't want 2to3 messages
+
+ def tearDown(self):
+ handler = self.loghandler
+ # All this is necessary to properly shut down the logging system and
+ # avoid a regrtest complaint. Thanks to Vinay Sajip for the help.
+ handler.close()
+ logger.removeHandler(handler)
+ for ref in weakref.getweakrefs(handler):
+ logging._removeHandlerRef(ref)
+ del self.loghandler
+ logger.setLevel(self._old_levels[0])
+ logger2to3.setLevel(self._old_levels[1])
+ super(LoggingCatcher, self).tearDown()
+
+ def get_logs(self, level=logging.WARNING, flush=True):
+ """Return all log messages with given level.
+
+ *level* defaults to logging.WARNING.
+
+        For log calls with arguments (e.g. logger.info('bla bla %r', arg)),
+ the messages will be formatted before being returned (e.g. "bla bla
+ 'thing'").
+
+ Returns a list. Automatically flushes the loghandler after being
+ called, unless *flush* is False (this is useful to get e.g. all
+ warnings then all info messages).
+ """
+ messages = [log.getMessage() for log in self.loghandler.buffer
+ if log.levelno == level]
+ if flush:
+ self.loghandler.flush()
+ return messages
+
+
+class TempdirManager:
+ """TestCase-compatible mixin to create temporary directories and files.
+
+ Directories and files created in a test_* method will be removed after it
+ has run.
+ """
+
+ def setUp(self):
+ super(TempdirManager, self).setUp()
+ self._olddir = os.getcwd()
+ self._basetempdir = tempfile.mkdtemp()
+ self._files = []
+
+ def tearDown(self):
+ for handle, name in self._files:
+ handle.close()
+ unlink(name)
+
+ os.chdir(self._olddir)
+ shutil.rmtree(self._basetempdir)
+ super(TempdirManager, self).tearDown()
+
+ def mktempfile(self):
+ """Create a read-write temporary file and return it."""
+ fd, fn = tempfile.mkstemp(dir=self._basetempdir)
+ os.close(fd)
+ fp = open(fn, 'w+')
+ self._files.append((fp, fn))
+ return fp
+
+ def mkdtemp(self):
+ """Create a temporary directory and return its path."""
+ d = tempfile.mkdtemp(dir=self._basetempdir)
+ return d
+
+ def write_file(self, path, content='xxx', encoding=None):
+ """Write a file at the given path.
+
+ path can be a string, a tuple or a list; if it's a tuple or list,
+ os.path.join will be used to produce a path.
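+
+        Example (illustrative)::
+
+            self.write_file((tmp_dir, 'README'), 'some text')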
+ """
+ if isinstance(path, (list, tuple)):
+ path = os.path.join(*path)
+ with open(path, 'w', encoding=encoding) as f:
+ f.write(content)
+
+ def create_dist(self, **kw):
+ """Create a stub distribution object and files.
+
+ This function creates a Distribution instance (use keyword arguments
+ to customize it) and a temporary directory with a project structure
+ (currently an empty directory).
+
+ It returns the path to the directory and the Distribution instance.
+ You can use self.write_file to write any file in that
+ directory, e.g. setup scripts or Python modules.
+ """
+ if 'name' not in kw:
+ kw['name'] = 'foo'
+ tmp_dir = self.mkdtemp()
+ project_dir = os.path.join(tmp_dir, kw['name'])
+ os.mkdir(project_dir)
+ dist = Distribution(attrs=kw)
+ return project_dir, dist
+
+ def assertIsFile(self, *args):
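+        """Assert that the path formed by joining *args* is an existing file."""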
+ path = os.path.join(*args)
+ dirname = os.path.dirname(path)
+ file = os.path.basename(path)
+ if os.path.isdir(dirname):
+ files = os.listdir(dirname)
+ msg = "%s not found in %s: %s" % (file, dirname, files)
+ assert os.path.isfile(path), msg
+ else:
+ raise AssertionError(
+ '%s not found. %s does not exist' % (file, dirname))
+
+ def assertIsNotFile(self, *args):
+ path = os.path.join(*args)
+ self.assertFalse(os.path.isfile(path), "%r exists" % path)
+
+
+class EnvironRestorer:
+ """TestCase-compatible mixin to restore or delete environment variables.
+
+ The variables to restore (or delete if they were not originally present)
+ must be explicitly listed in self.restore_environ. It's better to be
+ aware of what we're modifying instead of saving and restoring the whole
+ environment.
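+
+    A minimal sketch (the variable names are only illustrative)::
+
+        class SomeTestCase(EnvironRestorer, unittest.TestCase):
+            restore_environ = ['HOME', 'PLAT']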
+ """
+
+ def setUp(self):
+ super(EnvironRestorer, self).setUp()
+ self._saved = []
+ self._added = []
+ for key in self.restore_environ:
+ if key in os.environ:
+ self._saved.append((key, os.environ[key]))
+ else:
+ self._added.append(key)
+
+ def tearDown(self):
+ for key, value in self._saved:
+ os.environ[key] = value
+ for key in self._added:
+ os.environ.pop(key, None)
+ super(EnvironRestorer, self).tearDown()
+
+
+class DummyCommand:
+ """Class to store options for retrieval via set_undefined_options().
+
+ Useful for mocking one dependency command in the tests for another
+ command, see e.g. the dummy build command in test_build_scripts.
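+
+    Sketch of typical use (the attribute names are illustrative)::
+
+        dist.command_obj['build'] = DummyCommand(force=False,
+                                                 build_lib=destination)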
+ """
+ # XXX does not work with dist.reinitialize_command, which typechecks
+ # and wants a finalized attribute
+
+ def __init__(self, **kwargs):
+ for kw, val in kwargs.items():
+ setattr(self, kw, val)
+
+ def ensure_finalized(self):
+ pass
+
+
+class TestDistribution(Distribution):
+ """Distribution subclasses that avoids the default search for
+ configuration files.
+
+ The ._config_files attribute must be set before
+ .parse_config_files() is called.
+ """
+
+ def find_config_files(self):
+ return self._config_files
+
+
+class Inputs:
+ """Fakes user inputs."""
+ # TODO document usage
+ # TODO use context manager or something for auto cleanup
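+    # Sketch of intended use (assumed from __call__'s signature): create an
+    # instance with the answers in order and substitute it for the input
+    # function used by the code under test, e.g.
+    #   some_command_module.input = Inputs('y', '3')
+    # Each call then returns the next answer.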
+
+ def __init__(self, *answers):
+ self.answers = answers
+ self.index = 0
+
+ def __call__(self, prompt=''):
+ try:
+ return self.answers[self.index]
+ finally:
+ self.index += 1
+
+
+def create_distribution(configfiles=()):
+ """Prepares a distribution with given config files parsed."""
+ d = TestDistribution()
+ d.config.find_config_files = d.find_config_files
+ d._config_files = configfiles
+ d.parse_config_files()
+ d.parse_command_line()
+ return d
+
+
+def use_command(testcase, fullname):
+ """Register command at *fullname* for the duration of a test."""
+ set_command(fullname)
+ # XXX maybe set_command should return the class object
+ name = resolve_name(fullname).get_command_name()
+ # XXX maybe we need a public API to remove commands
+ testcase.addCleanup(_COMMANDS.__delitem__, name)
+
+
+def fake_dec(*args, **kw):
+ """Fake decorator"""
+ def _wrap(func):
+ def __wrap(*args, **kw):
+ return func(*args, **kw)
+ return __wrap
+ return _wrap
+
+
+def copy_xxmodule_c(directory):
+ """Helper for tests that need the xxmodule.c source file.
+
+ Example use:
+
+ def test_compile(self):
+ copy_xxmodule_c(self.tmpdir)
+ self.assertIn('xxmodule.c', os.listdir(self.tmpdir))
+
+ If the source file can be found, it will be copied to *directory*. If not,
+ the test will be skipped. Errors during copy are not caught.
+ """
+ filename = _get_xxmodule_path()
+ if filename is None:
+ raise unittest.SkipTest('cannot find xxmodule.c')
+ shutil.copy(filename, directory)
+
+
+def _get_xxmodule_path():
+ if sysconfig.is_python_build():
+ srcdir = sysconfig.get_config_var('projectbase')
+ path = os.path.join(os.getcwd(), srcdir, 'Modules', 'xxmodule.c')
+ else:
+ path = os.path.join(os.path.dirname(__file__), 'xxmodule.c')
+ if os.path.exists(path):
+ return path
+
+
+def fixup_build_ext(cmd):
+ """Function needed to make build_ext tests pass.
+
+ When Python was built with --enable-shared on Unix, -L. is not enough to
+ find libpython<blah>.so, because regrtest runs in a tempdir, not in the
+ source directory where the .so lives. (Mac OS X embeds absolute paths
+ to shared libraries into executables, so the fixup is a no-op on that
+ platform.)
+
+    When Python was built in debug mode on Windows, build_ext commands
+ need their debug attribute set, and it is not done automatically for
+ some reason.
+
+ This function handles both of these things, and also fixes
+ cmd.distribution.include_dirs if the running Python is an uninstalled
+ build. Example use:
+
+ cmd = build_ext(dist)
+ support.fixup_build_ext(cmd)
+ cmd.ensure_finalized()
+ """
+ if os.name == 'nt':
+ cmd.debug = sys.executable.endswith('_d.exe')
+ elif sysconfig.get_config_var('Py_ENABLE_SHARED'):
+ # To further add to the shared builds fun on Unix, we can't just add
+ # library_dirs to the Extension() instance because that doesn't get
+ # plumbed through to the final compiler command.
+ runshared = sysconfig.get_config_var('RUNSHARED')
+ if runshared is None:
+ cmd.library_dirs = ['.']
+ else:
+ if sys.platform == 'darwin':
+ cmd.library_dirs = []
+ else:
+ name, equals, value = runshared.partition('=')
+ cmd.library_dirs = value.split(os.pathsep)
+
+ # Allow tests to run with an uninstalled Python
+ if sysconfig.is_python_build():
+ pysrcdir = sysconfig.get_config_var('projectbase')
+ cmd.distribution.include_dirs.append(os.path.join(pysrcdir, 'Include'))
+
+
+try:
+ from test.support import skip_unless_symlink
+except ImportError:
+ skip_unless_symlink = unittest.skip(
+ 'requires test.support.skip_unless_symlink')
+
+skip_2to3_optimize = unittest.skipIf(sys.flags.optimize,
+ "2to3 doesn't work under -O")
diff --git a/Lib/packaging/tests/test_ccompiler.py b/Lib/packaging/tests/test_ccompiler.py
new file mode 100644
index 0000000..dd4bdd9
--- /dev/null
+++ b/Lib/packaging/tests/test_ccompiler.py
@@ -0,0 +1,15 @@
+"""Tests for distutils.compiler.ccompiler."""
+
+from packaging.compiler import ccompiler
+from packaging.tests import unittest, support
+
+
+class CCompilerTestCase(unittest.TestCase):
+ pass # XXX need some tests on CCompiler
+
+
+def test_suite():
+ return unittest.makeSuite(CCompilerTestCase)
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_bdist.py b/Lib/packaging/tests/test_command_bdist.py
new file mode 100644
index 0000000..7b2ea01
--- /dev/null
+++ b/Lib/packaging/tests/test_command_bdist.py
@@ -0,0 +1,61 @@
+"""Tests for distutils.command.bdist."""
+import os
+from test.support import captured_stdout
+from packaging.command.bdist import bdist, show_formats
+from packaging.tests import unittest, support
+
+
+class BuildTestCase(support.TempdirManager,
+ support.LoggingCatcher,
+ unittest.TestCase):
+
+ def test_formats(self):
+ # let's create a command and make sure
+ # we can set the format
+ dist = self.create_dist()[1]
+ cmd = bdist(dist)
+ cmd.formats = ['msi']
+ cmd.ensure_finalized()
+ self.assertEqual(cmd.formats, ['msi'])
+
+ # what formats does bdist offer?
+ # XXX hard-coded lists are not the best way to find available bdist_*
+ # commands; we should add a registry
+ formats = ['bztar', 'gztar', 'msi', 'tar', 'wininst', 'zip']
+ found = sorted(cmd.format_command)
+ self.assertEqual(found, formats)
+
+ def test_skip_build(self):
+ # bug #10946: bdist --skip-build should trickle down to subcommands
+ dist = self.create_dist()[1]
+ cmd = bdist(dist)
+ cmd.skip_build = True
+ cmd.ensure_finalized()
+ dist.command_obj['bdist'] = cmd
+
+ names = ['bdist_dumb', 'bdist_wininst']
+ if os.name == 'nt':
+ names.append('bdist_msi')
+
+ for name in names:
+ subcmd = cmd.get_finalized_command(name)
+ self.assertTrue(subcmd.skip_build,
+ '%s should take --skip-build from bdist' % name)
+
+ def test_show_formats(self):
+ with captured_stdout() as stdout:
+ show_formats()
+ stdout = stdout.getvalue()
+
+ # the output should be a header line + one line per format
+ num_formats = len(bdist.format_commands)
+ output = [line for line in stdout.split('\n')
+ if line.strip().startswith('--formats=')]
+ self.assertEqual(len(output), num_formats)
+
+
+def test_suite():
+ return unittest.makeSuite(BuildTestCase)
+
+if __name__ == '__main__':
+ unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_command_bdist_dumb.py b/Lib/packaging/tests/test_command_bdist_dumb.py
new file mode 100644
index 0000000..15cf658
--- /dev/null
+++ b/Lib/packaging/tests/test_command_bdist_dumb.py
@@ -0,0 +1,91 @@
+"""Tests for distutils.command.bdist_dumb."""
+
+import os
+import imp
+import sys
+import zipfile
+import packaging.util
+
+from packaging.dist import Distribution
+from packaging.command.bdist_dumb import bdist_dumb
+from packaging.tests import unittest, support
+from packaging.tests.support import requires_zlib
+
+
+class BuildDumbTestCase(support.TempdirManager,
+ support.LoggingCatcher,
+ unittest.TestCase):
+
+ def setUp(self):
+ super(BuildDumbTestCase, self).setUp()
+ self.old_location = os.getcwd()
+
+ def tearDown(self):
+ os.chdir(self.old_location)
+ packaging.util._path_created.clear()
+ super(BuildDumbTestCase, self).tearDown()
+
+ @requires_zlib
+ def test_simple_built(self):
+        # let's create a simple package
+ tmp_dir = self.mkdtemp()
+ pkg_dir = os.path.join(tmp_dir, 'foo')
+ os.mkdir(pkg_dir)
+ self.write_file((pkg_dir, 'foo.py'), '#')
+ self.write_file((pkg_dir, 'MANIFEST.in'), 'include foo.py')
+ self.write_file((pkg_dir, 'README'), '')
+
+ dist = Distribution({'name': 'foo', 'version': '0.1',
+ 'py_modules': ['foo'],
+ 'home_page': 'xxx', 'author': 'xxx',
+ 'author_email': 'xxx'})
+ os.chdir(pkg_dir)
+ cmd = bdist_dumb(dist)
+
+        # so the output is the same no matter
+        # what the platform is
+ cmd.format = 'zip'
+
+ cmd.ensure_finalized()
+ cmd.run()
+
+ # see what we have
+ dist_created = os.listdir(os.path.join(pkg_dir, 'dist'))
+ base = "%s.%s.zip" % (dist.get_fullname(), cmd.plat_name)
+ if os.name == 'os2':
+ base = base.replace(':', '-')
+
+ self.assertEqual(dist_created, [base])
+
+ # now let's check what we have in the zip file
+ with zipfile.ZipFile(os.path.join('dist', base)) as fp:
+ contents = fp.namelist()
+
+ contents = sorted(os.path.basename(fn) for fn in contents)
+ wanted = ['foo.py',
+ 'foo.%s.pyc' % imp.get_tag(),
+ 'METADATA', 'INSTALLER', 'REQUESTED', 'RECORD']
+ self.assertEqual(contents, sorted(wanted))
+
+ def test_finalize_options(self):
+ pkg_dir, dist = self.create_dist()
+ os.chdir(pkg_dir)
+ cmd = bdist_dumb(dist)
+ self.assertEqual(cmd.bdist_dir, None)
+ cmd.finalize_options()
+
+ # bdist_dir is initialized to bdist_base/dumb if not set
+ base = cmd.get_finalized_command('bdist').bdist_base
+ self.assertEqual(cmd.bdist_dir, os.path.join(base, 'dumb'))
+
+ # the format is set to a default value depending on the os.name
+ default = cmd.default_format[os.name]
+ self.assertEqual(cmd.format, default)
+
+
+def test_suite():
+ return unittest.makeSuite(BuildDumbTestCase)
+
+if __name__ == '__main__':
+ unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_command_bdist_msi.py b/Lib/packaging/tests/test_command_bdist_msi.py
new file mode 100644
index 0000000..86754a8
--- /dev/null
+++ b/Lib/packaging/tests/test_command_bdist_msi.py
@@ -0,0 +1,25 @@
+"""Tests for distutils.command.bdist_msi."""
+import sys
+
+from packaging.tests import unittest, support
+
+
+@unittest.skipUnless(sys.platform == 'win32', 'these tests require Windows')
+class BDistMSITestCase(support.TempdirManager,
+ support.LoggingCatcher,
+ unittest.TestCase):
+
+ def test_minimal(self):
+ # minimal test XXX need more tests
+ from packaging.command.bdist_msi import bdist_msi
+ project_dir, dist = self.create_dist()
+ cmd = bdist_msi(dist)
+ cmd.ensure_finalized()
+
+
+def test_suite():
+ return unittest.makeSuite(BDistMSITestCase)
+
+
+if __name__ == '__main__':
+ unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_command_bdist_wininst.py b/Lib/packaging/tests/test_command_bdist_wininst.py
new file mode 100644
index 0000000..09bdaad
--- /dev/null
+++ b/Lib/packaging/tests/test_command_bdist_wininst.py
@@ -0,0 +1,32 @@
+"""Tests for distutils.command.bdist_wininst."""
+
+from packaging.command.bdist_wininst import bdist_wininst
+from packaging.tests import unittest, support
+
+
+class BuildWinInstTestCase(support.TempdirManager,
+ support.LoggingCatcher,
+ unittest.TestCase):
+
+ def test_get_exe_bytes(self):
+        # issue #5731: the command was broken on non-Windows platforms;
+        # this test makes sure it now works on every platform
+        # let's create a command
+ pkg_pth, dist = self.create_dist()
+ cmd = bdist_wininst(dist)
+ cmd.ensure_finalized()
+
+ # let's run the code that finds the right wininst*.exe file
+ # and make sure it finds it and returns its content
+ # no matter what platform we have
+ exe_file = cmd.get_exe_bytes()
+ self.assertGreater(len(exe_file), 10)
+
+
+def test_suite():
+ return unittest.makeSuite(BuildWinInstTestCase)
+
+
+if __name__ == '__main__':
+ unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_command_build.py b/Lib/packaging/tests/test_command_build.py
new file mode 100644
index 0000000..280d709
--- /dev/null
+++ b/Lib/packaging/tests/test_command_build.py
@@ -0,0 +1,56 @@
+"""Tests for distutils.command.build."""
+import os
+import sys
+
+from packaging.command.build import build
+from sysconfig import get_platform
+from packaging.tests import unittest, support
+
+
+class BuildTestCase(support.TempdirManager,
+ support.LoggingCatcher,
+ unittest.TestCase):
+
+ def test_finalize_options(self):
+ pkg_dir, dist = self.create_dist()
+ cmd = build(dist)
+ cmd.finalize_options()
+
+ # if not specified, plat_name gets the current platform
+ self.assertEqual(cmd.plat_name, get_platform())
+
+ # build_purelib is build + lib
+ wanted = os.path.join(cmd.build_base, 'lib')
+ self.assertEqual(cmd.build_purelib, wanted)
+
+ # build_platlib is 'build/lib.platform-x.x[-pydebug]'
+ # examples:
+ # build/lib.macosx-10.3-i386-2.7
+ pyversion = '%s.%s' % sys.version_info[:2]
+ plat_spec = '.%s-%s' % (cmd.plat_name, pyversion)
+ if hasattr(sys, 'gettotalrefcount'):
+ self.assertTrue(cmd.build_platlib.endswith('-pydebug'))
+ plat_spec += '-pydebug'
+ wanted = os.path.join(cmd.build_base, 'lib' + plat_spec)
+ self.assertEqual(cmd.build_platlib, wanted)
+
+ # by default, build_lib = build_purelib
+ self.assertEqual(cmd.build_lib, cmd.build_purelib)
+
+ # build_temp is build/temp.<plat>
+ wanted = os.path.join(cmd.build_base, 'temp' + plat_spec)
+ self.assertEqual(cmd.build_temp, wanted)
+
+ # build_scripts is build/scripts-x.x
+ wanted = os.path.join(cmd.build_base, 'scripts-' + pyversion)
+ self.assertEqual(cmd.build_scripts, wanted)
+
+ # executable is os.path.normpath(sys.executable)
+ self.assertEqual(cmd.executable, os.path.normpath(sys.executable))
+
+
+def test_suite():
+ return unittest.makeSuite(BuildTestCase)
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_build_clib.py b/Lib/packaging/tests/test_command_build_clib.py
new file mode 100644
index 0000000..a2a8583
--- /dev/null
+++ b/Lib/packaging/tests/test_command_build_clib.py
@@ -0,0 +1,141 @@
+"""Tests for distutils.command.build_clib."""
+import os
+import sys
+
+from packaging.util import find_executable
+from packaging.command.build_clib import build_clib
+from packaging.errors import PackagingSetupError
+from packaging.tests import unittest, support
+
+
+class BuildCLibTestCase(support.TempdirManager,
+ support.LoggingCatcher,
+ unittest.TestCase):
+
+ def test_check_library_dist(self):
+ pkg_dir, dist = self.create_dist()
+ cmd = build_clib(dist)
+
+ # 'libraries' option must be a list
+ self.assertRaises(PackagingSetupError, cmd.check_library_list, 'foo')
+
+        # each element of 'libraries' must be a 2-tuple
+ self.assertRaises(PackagingSetupError, cmd.check_library_list,
+ ['foo1', 'foo2'])
+
+ # first element of each tuple in 'libraries'
+ # must be a string (the library name)
+ self.assertRaises(PackagingSetupError, cmd.check_library_list,
+ [(1, 'foo1'), ('name', 'foo2')])
+
+ # library name may not contain directory separators
+ self.assertRaises(PackagingSetupError, cmd.check_library_list,
+ [('name', 'foo1'),
+ ('another/name', 'foo2')])
+
+ # second element of each tuple must be a dictionary (build info)
+ self.assertRaises(PackagingSetupError, cmd.check_library_list,
+ [('name', {}),
+ ('another', 'foo2')])
+
+ # those work
+ libs = [('name', {}), ('name', {'ok': 'good'})]
+ cmd.check_library_list(libs)
+
+ def test_get_source_files(self):
+ pkg_dir, dist = self.create_dist()
+ cmd = build_clib(dist)
+
+ # "in 'libraries' option 'sources' must be present and must be
+ # a list of source filenames
+ cmd.libraries = [('name', {})]
+ self.assertRaises(PackagingSetupError, cmd.get_source_files)
+
+ cmd.libraries = [('name', {'sources': 1})]
+ self.assertRaises(PackagingSetupError, cmd.get_source_files)
+
+ cmd.libraries = [('name', {'sources': ['a', 'b']})]
+ self.assertEqual(cmd.get_source_files(), ['a', 'b'])
+
+ cmd.libraries = [('name', {'sources': ('a', 'b')})]
+ self.assertEqual(cmd.get_source_files(), ['a', 'b'])
+
+ cmd.libraries = [('name', {'sources': ('a', 'b')}),
+ ('name2', {'sources': ['c', 'd']})]
+ self.assertEqual(cmd.get_source_files(), ['a', 'b', 'c', 'd'])
+
+ def test_build_libraries(self):
+ pkg_dir, dist = self.create_dist()
+ cmd = build_clib(dist)
+
+ class FakeCompiler:
+ def compile(*args, **kw):
+ pass
+ create_static_lib = compile
+
+ cmd.compiler = FakeCompiler()
+
+ # build_libraries is also doing a bit of type checking
+ lib = [('name', {'sources': 'notvalid'})]
+ self.assertRaises(PackagingSetupError, cmd.build_libraries, lib)
+
+ lib = [('name', {'sources': []})]
+ cmd.build_libraries(lib)
+
+ lib = [('name', {'sources': ()})]
+ cmd.build_libraries(lib)
+
+ def test_finalize_options(self):
+ pkg_dir, dist = self.create_dist()
+ cmd = build_clib(dist)
+
+ cmd.include_dirs = 'one-dir'
+ cmd.finalize_options()
+ self.assertEqual(cmd.include_dirs, ['one-dir'])
+
+ cmd.include_dirs = None
+ cmd.finalize_options()
+ self.assertEqual(cmd.include_dirs, [])
+
+ cmd.distribution.libraries = 'WONTWORK'
+ self.assertRaises(PackagingSetupError, cmd.finalize_options)
+
+ @unittest.skipIf(sys.platform == 'win32', 'disabled on win32')
+ def test_run(self):
+ pkg_dir, dist = self.create_dist()
+ cmd = build_clib(dist)
+
+ foo_c = os.path.join(pkg_dir, 'foo.c')
+ self.write_file(foo_c, 'int main(void) { return 1;}\n')
+ cmd.libraries = [('foo', {'sources': [foo_c]})]
+
+ build_temp = os.path.join(pkg_dir, 'build')
+ os.mkdir(build_temp)
+ cmd.build_temp = build_temp
+ cmd.build_clib = build_temp
+
+        # before we run the command, we want to make sure that all the
+        # compiler executables are present on the system, by creating a
+        # compiler and checking its executables
+ from packaging.compiler import new_compiler, customize_compiler
+
+ compiler = new_compiler()
+ customize_compiler(compiler)
+ for ccmd in compiler.executables.values():
+ if ccmd is None:
+ continue
+ if find_executable(ccmd[0]) is None:
+ raise unittest.SkipTest("can't test")
+
+ # this should work
+ cmd.run()
+
+ # let's check the result
+ self.assertIn('libfoo.a', os.listdir(build_temp))
+
+
+def test_suite():
+ return unittest.makeSuite(BuildCLibTestCase)
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_build_ext.py b/Lib/packaging/tests/test_command_build_ext.py
new file mode 100644
index 0000000..9a00c11
--- /dev/null
+++ b/Lib/packaging/tests/test_command_build_ext.py
@@ -0,0 +1,394 @@
+import os
+import sys
+import site
+import sysconfig
+import textwrap
+from packaging.dist import Distribution
+from packaging.errors import (UnknownFileError, CompileError,
+ PackagingPlatformError)
+from packaging.command.build_ext import build_ext
+from packaging.compiler.extension import Extension
+
+from test.script_helper import assert_python_ok
+from packaging.tests import support, unittest
+
+
+class BuildExtTestCase(support.TempdirManager,
+ support.LoggingCatcher,
+ unittest.TestCase):
+ def setUp(self):
+ super(BuildExtTestCase, self).setUp()
+ self.tmp_dir = self.mkdtemp()
+ self.old_user_base = site.USER_BASE
+ site.USER_BASE = self.mkdtemp()
+
+ def tearDown(self):
+ site.USER_BASE = self.old_user_base
+ super(BuildExtTestCase, self).tearDown()
+
+ def test_build_ext(self):
+ support.copy_xxmodule_c(self.tmp_dir)
+ xx_c = os.path.join(self.tmp_dir, 'xxmodule.c')
+ xx_ext = Extension('xx', [xx_c])
+ dist = Distribution({'name': 'xx', 'ext_modules': [xx_ext]})
+ dist.package_dir = self.tmp_dir
+ cmd = build_ext(dist)
+ support.fixup_build_ext(cmd)
+ cmd.build_lib = self.tmp_dir
+ cmd.build_temp = self.tmp_dir
+ cmd.ensure_finalized()
+ cmd.run()
+
+ code = textwrap.dedent("""\
+ import sys
+ sys.path.insert(0, %r)
+
+ import xx
+
+ for attr in ('error', 'foo', 'new', 'roj'):
+ assert hasattr(xx, attr)
+
+ assert xx.foo(2, 5) == 7
+ assert xx.foo(13, 15) == 28
+ assert xx.new().demo() is None
+ doc = 'This is a template module just for instruction.'
+ assert xx.__doc__ == doc
+ assert isinstance(xx.Null(), xx.Null)
+ assert isinstance(xx.Str(), xx.Str)
+ """)
+ code = code % self.tmp_dir
+ assert_python_ok('-c', code)
+
+ def test_solaris_enable_shared(self):
+ dist = Distribution({'name': 'xx'})
+ cmd = build_ext(dist)
+ old = sys.platform
+
+ sys.platform = 'sunos' # fooling finalize_options
+
+ old_var = sysconfig.get_config_var('Py_ENABLE_SHARED')
+ sysconfig._CONFIG_VARS['Py_ENABLE_SHARED'] = 1
+ try:
+ cmd.ensure_finalized()
+ finally:
+ sys.platform = old
+ if old_var is None:
+ del sysconfig._CONFIG_VARS['Py_ENABLE_SHARED']
+ else:
+ sysconfig._CONFIG_VARS['Py_ENABLE_SHARED'] = old_var
+
+ # make sure we get some library dirs under solaris
+ self.assertGreater(len(cmd.library_dirs), 0)
+
+ def test_user_site(self):
+ dist = Distribution({'name': 'xx'})
+ cmd = build_ext(dist)
+
+ # making sure the user option is there
+ options = [name for name, short, label in
+ cmd.user_options]
+ self.assertIn('user', options)
+
+ # setting a value
+ cmd.user = True
+
+ # setting user based lib and include
+ lib = os.path.join(site.USER_BASE, 'lib')
+ incl = os.path.join(site.USER_BASE, 'include')
+ os.mkdir(lib)
+ os.mkdir(incl)
+
+ # let's run finalize
+ cmd.ensure_finalized()
+
+ # see if include_dirs and library_dirs
+ # were set
+ self.assertIn(lib, cmd.library_dirs)
+ self.assertIn(lib, cmd.rpath)
+ self.assertIn(incl, cmd.include_dirs)
+
+ def test_optional_extension(self):
+        # building this extension will fail, but the optional argument
+        # makes the failure non-fatal
+ modules = [Extension('foo', ['xxx'], optional=False)]
+ dist = Distribution({'name': 'xx', 'ext_modules': modules})
+ cmd = build_ext(dist)
+ cmd.ensure_finalized()
+ self.assertRaises((UnknownFileError, CompileError),
+ cmd.run) # should raise an error
+
+ modules = [Extension('foo', ['xxx'], optional=True)]
+ dist = Distribution({'name': 'xx', 'ext_modules': modules})
+ cmd = build_ext(dist)
+ cmd.ensure_finalized()
+ cmd.run() # should pass
+
+ def test_finalize_options(self):
+ # Make sure Python's include directories (for Python.h, pyconfig.h,
+ # etc.) are in the include search path.
+ modules = [Extension('foo', ['xxx'], optional=False)]
+ dist = Distribution({'name': 'xx', 'ext_modules': modules})
+ cmd = build_ext(dist)
+ cmd.finalize_options()
+
+ py_include = sysconfig.get_path('include')
+ self.assertIn(py_include, cmd.include_dirs)
+
+ plat_py_include = sysconfig.get_path('platinclude')
+ self.assertIn(plat_py_include, cmd.include_dirs)
+
+ # make sure cmd.libraries is turned into a list
+ # if it's a string
+ cmd = build_ext(dist)
+ cmd.libraries = 'my_lib, other_lib lastlib'
+ cmd.finalize_options()
+ self.assertEqual(cmd.libraries, ['my_lib', 'other_lib', 'lastlib'])
+
+ # make sure cmd.library_dirs is turned into a list
+ # if it's a string
+ cmd = build_ext(dist)
+ cmd.library_dirs = 'my_lib_dir%sother_lib_dir' % os.pathsep
+ cmd.finalize_options()
+ self.assertIn('my_lib_dir', cmd.library_dirs)
+ self.assertIn('other_lib_dir', cmd.library_dirs)
+
+ # make sure rpath is turned into a list
+ # if it's a string
+ cmd = build_ext(dist)
+ cmd.rpath = 'one%stwo' % os.pathsep
+ cmd.finalize_options()
+ self.assertEqual(cmd.rpath, ['one', 'two'])
+
+ # XXX more tests to perform for win32
+
+ # make sure define is turned into 2-tuples
+ # strings if they are ','-separated strings
+ cmd = build_ext(dist)
+ cmd.define = 'one,two'
+ cmd.finalize_options()
+ self.assertEqual(cmd.define, [('one', '1'), ('two', '1')])
+
+ # make sure undef is turned into a list of
+ # strings if they are ','-separated strings
+ cmd = build_ext(dist)
+ cmd.undef = 'one,two'
+ cmd.finalize_options()
+ self.assertEqual(cmd.undef, ['one', 'two'])
+
+ # make sure swig_opts is turned into a list
+ cmd = build_ext(dist)
+ cmd.swig_opts = None
+ cmd.finalize_options()
+ self.assertEqual(cmd.swig_opts, [])
+
+ cmd = build_ext(dist)
+ cmd.swig_opts = '1 2'
+ cmd.finalize_options()
+ self.assertEqual(cmd.swig_opts, ['1', '2'])
+
+ def test_get_source_files(self):
+ modules = [Extension('foo', ['xxx'], optional=False)]
+ dist = Distribution({'name': 'xx', 'ext_modules': modules})
+ cmd = build_ext(dist)
+ cmd.ensure_finalized()
+ self.assertEqual(cmd.get_source_files(), ['xxx'])
+
+ def test_compiler_option(self):
+ # cmd.compiler is an option and
+        # should not be overridden by a compiler instance
+ # when the command is run
+ dist = Distribution()
+ cmd = build_ext(dist)
+ cmd.compiler = 'unix'
+ cmd.ensure_finalized()
+ cmd.run()
+ self.assertEqual(cmd.compiler, 'unix')
+
+ def test_get_outputs(self):
+ tmp_dir = self.mkdtemp()
+ c_file = os.path.join(tmp_dir, 'foo.c')
+ self.write_file(c_file, 'void PyInit_foo(void) {}\n')
+ ext = Extension('foo', [c_file], optional=False)
+ dist = Distribution({'name': 'xx',
+ 'ext_modules': [ext]})
+ cmd = build_ext(dist)
+ support.fixup_build_ext(cmd)
+ cmd.ensure_finalized()
+ self.assertEqual(len(cmd.get_outputs()), 1)
+
+ cmd.build_lib = os.path.join(self.tmp_dir, 'build')
+ cmd.build_temp = os.path.join(self.tmp_dir, 'tempt')
+
+ # issue #5977 : distutils build_ext.get_outputs
+ # returns wrong result with --inplace
+ other_tmp_dir = os.path.realpath(self.mkdtemp())
+ old_wd = os.getcwd()
+ os.chdir(other_tmp_dir)
+ try:
+ cmd.inplace = True
+ cmd.run()
+ so_file = cmd.get_outputs()[0]
+ finally:
+ os.chdir(old_wd)
+ self.assertTrue(os.path.exists(so_file))
+ so_ext = sysconfig.get_config_var('SO')
+ self.assertTrue(so_file.endswith(so_ext))
+ so_dir = os.path.dirname(so_file)
+ self.assertEqual(so_dir, other_tmp_dir)
+
+ cmd.inplace = False
+ cmd.run()
+ so_file = cmd.get_outputs()[0]
+ self.assertTrue(os.path.exists(so_file))
+ self.assertTrue(so_file.endswith(so_ext))
+ so_dir = os.path.dirname(so_file)
+ self.assertEqual(so_dir, cmd.build_lib)
+
+ # inplace = False, cmd.package = 'bar'
+ build_py = cmd.get_finalized_command('build_py')
+ build_py.package_dir = 'bar'
+ path = cmd.get_ext_fullpath('foo')
+        # checking that the directory portion of the path is cmd.build_lib
+ path = os.path.split(path)[0]
+ self.assertEqual(path, cmd.build_lib)
+
+ # inplace = True, cmd.package = 'bar'
+ cmd.inplace = True
+ other_tmp_dir = os.path.realpath(self.mkdtemp())
+ old_wd = os.getcwd()
+ os.chdir(other_tmp_dir)
+ try:
+ path = cmd.get_ext_fullpath('foo')
+ finally:
+ os.chdir(old_wd)
+ # checking that the last directory is bar
+ path = os.path.split(path)[0]
+ lastdir = os.path.split(path)[-1]
+ self.assertEqual(lastdir, 'bar')
+
+ def test_ext_fullpath(self):
+ ext = sysconfig.get_config_vars()['SO']
+ # building lxml.etree inplace
+ #etree_c = os.path.join(self.tmp_dir, 'lxml.etree.c')
+ #etree_ext = Extension('lxml.etree', [etree_c])
+ #dist = Distribution({'name': 'lxml', 'ext_modules': [etree_ext]})
+ dist = Distribution()
+ cmd = build_ext(dist)
+ cmd.inplace = True
+ cmd.distribution.package_dir = 'src'
+ cmd.distribution.packages = ['lxml', 'lxml.html']
+ curdir = os.getcwd()
+ wanted = os.path.join(curdir, 'src', 'lxml', 'etree' + ext)
+ path = cmd.get_ext_fullpath('lxml.etree')
+ self.assertEqual(wanted, path)
+
+ # building lxml.etree not inplace
+ cmd.inplace = False
+ cmd.build_lib = os.path.join(curdir, 'tmpdir')
+ wanted = os.path.join(curdir, 'tmpdir', 'lxml', 'etree' + ext)
+ path = cmd.get_ext_fullpath('lxml.etree')
+ self.assertEqual(wanted, path)
+
+ # building twisted.runner.portmap not inplace
+ build_py = cmd.get_finalized_command('build_py')
+ build_py.package_dir = None
+ cmd.distribution.packages = ['twisted', 'twisted.runner.portmap']
+ path = cmd.get_ext_fullpath('twisted.runner.portmap')
+ wanted = os.path.join(curdir, 'tmpdir', 'twisted', 'runner',
+ 'portmap' + ext)
+ self.assertEqual(wanted, path)
+
+ # building twisted.runner.portmap inplace
+ cmd.inplace = True
+ path = cmd.get_ext_fullpath('twisted.runner.portmap')
+ wanted = os.path.join(curdir, 'twisted', 'runner', 'portmap' + ext)
+ self.assertEqual(wanted, path)
+
+ @unittest.skipUnless(sys.platform == 'darwin',
+ 'test only relevant for Mac OS X')
+ def test_deployment_target_default(self):
+ # Issue 9516: Test that, in the absence of the environment variable,
+ # an extension module is compiled with the same deployment target as
+ # the interpreter.
+ self._try_compile_deployment_target('==', None)
+
+ @unittest.skipUnless(sys.platform == 'darwin',
+ 'test only relevant for Mac OS X')
+ def test_deployment_target_too_low(self):
+ # Issue 9516: Test that an extension module is not allowed to be
+ # compiled with a deployment target less than that of the interpreter.
+ self.assertRaises(PackagingPlatformError,
+ self._try_compile_deployment_target, '>', '10.1')
+
+ @unittest.skipUnless(sys.platform == 'darwin',
+ 'test only relevant for Mac OS X')
+ def test_deployment_target_higher_ok(self):
+ # Issue 9516: Test that an extension module can be compiled with a
+ # deployment target higher than that of the interpreter: the ext
+ # module may depend on some newer OS feature.
+ deptarget = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
+ if deptarget:
+ # increment the minor version number (i.e. 10.6 -> 10.7)
+ deptarget = [int(x) for x in deptarget.split('.')]
+ deptarget[-1] += 1
+ deptarget = '.'.join(str(i) for i in deptarget)
+ self._try_compile_deployment_target('<', deptarget)
+
+ def _try_compile_deployment_target(self, operator, target):
+ orig_environ = os.environ
+ os.environ = orig_environ.copy()
+ self.addCleanup(setattr, os, 'environ', orig_environ)
+
+ if target is None:
+ if os.environ.get('MACOSX_DEPLOYMENT_TARGET'):
+ del os.environ['MACOSX_DEPLOYMENT_TARGET']
+ else:
+ os.environ['MACOSX_DEPLOYMENT_TARGET'] = target
+
+ deptarget_c = os.path.join(self.tmp_dir, 'deptargetmodule.c')
+
+ with open(deptarget_c, 'w') as fp:
+ fp.write(textwrap.dedent('''\
+ #include <AvailabilityMacros.h>
+
+ int dummy;
+
+ #if TARGET %s MAC_OS_X_VERSION_MIN_REQUIRED
+ #else
+ #error "Unexpected target"
+ #endif
+
+ ''' % operator))
+
+ # get the deployment target that the interpreter was built with
+ target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
+ target = tuple(map(int, target.split('.')))
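+        # e.g. a (10, 6) target becomes '1060', the numeric form used by
+        # MAC_OS_X_VERSION_MIN_REQUIRED in AvailabilityMacros.h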
+ target = '%02d%01d0' % target
+
+ deptarget_ext = Extension(
+ 'deptarget',
+ [deptarget_c],
+ extra_compile_args=['-DTARGET=%s' % (target,)],
+ )
+ dist = Distribution({
+ 'name': 'deptarget',
+ 'ext_modules': [deptarget_ext],
+ })
+ dist.package_dir = self.tmp_dir
+ cmd = build_ext(dist)
+ cmd.build_lib = self.tmp_dir
+ cmd.build_temp = self.tmp_dir
+
+ try:
+ cmd.ensure_finalized()
+ cmd.run()
+ except CompileError:
+ self.fail("Wrong deployment target during compilation")
+
+
+def test_suite():
+ return unittest.makeSuite(BuildExtTestCase)
+
+if __name__ == '__main__':
+ unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_command_build_py.py b/Lib/packaging/tests/test_command_build_py.py
new file mode 100644
index 0000000..0599bf2
--- /dev/null
+++ b/Lib/packaging/tests/test_command_build_py.py
@@ -0,0 +1,146 @@
+"""Tests for distutils.command.build_py."""
+
+import os
+import sys
+import imp
+
+from packaging.command.build_py import build_py
+from packaging.dist import Distribution
+from packaging.errors import PackagingFileError
+
+from packaging.tests import unittest, support
+
+
+class BuildPyTestCase(support.TempdirManager,
+ support.LoggingCatcher,
+ unittest.TestCase):
+
+ def test_package_data(self):
+ sources = self.mkdtemp()
+ pkg_dir = os.path.join(sources, 'pkg')
+ os.mkdir(pkg_dir)
+ f = open(os.path.join(pkg_dir, "__init__.py"), "w")
+ try:
+ f.write("# Pretend this is a package.")
+ finally:
+ f.close()
+ # let's have two files to make sure globbing works
+ f = open(os.path.join(pkg_dir, "README.txt"), "w")
+ try:
+ f.write("Info about this package")
+ finally:
+ f.close()
+ f = open(os.path.join(pkg_dir, "HACKING.txt"), "w")
+ try:
+ f.write("How to contribute")
+ finally:
+ f.close()
+
+ destination = self.mkdtemp()
+
+ dist = Distribution({"packages": ["pkg"],
+ "package_dir": sources})
+
+ dist.command_obj["build"] = support.DummyCommand(
+ force=False,
+ build_lib=destination,
+ use_2to3_fixers=None,
+ convert_2to3_doctests=None,
+ use_2to3=False)
+ dist.packages = ["pkg"]
+ dist.package_data = {"pkg": ["*.txt"]}
+ dist.package_dir = sources
+
+ cmd = build_py(dist)
+ cmd.compile = True
+ cmd.ensure_finalized()
+ self.assertEqual(cmd.package_data, dist.package_data)
+
+ cmd.run()
+
+ # This makes sure the list of outputs includes byte-compiled
+ # files for Python modules but not for package data files
+ # (there shouldn't *be* byte-code files for those!).
+ # FIXME the test below is not doing what the comment above says, and
+ # if it did it would show a code bug: if we add a demo.py file to
+ # package_data, it gets byte-compiled!
+ outputs = cmd.get_outputs()
+ self.assertEqual(len(outputs), 4, outputs)
+ pkgdest = os.path.join(destination, "pkg")
+ files = os.listdir(pkgdest)
+ pycache_dir = os.path.join(pkgdest, "__pycache__")
+ self.assertIn("__init__.py", files)
+ self.assertIn("README.txt", files)
+ self.assertIn("HACKING.txt", files)
+ pyc_files = os.listdir(pycache_dir)
+ self.assertEqual(["__init__.%s.pyc" % imp.get_tag()], pyc_files)
+
+ def test_empty_package_dir(self):
+ # See SF 1668596/1720897.
+ # create the distribution files.
+ sources = self.mkdtemp()
+ pkg = os.path.join(sources, 'pkg')
+ os.mkdir(pkg)
+ open(os.path.join(pkg, "__init__.py"), "wb").close()
+ testdir = os.path.join(pkg, "doc")
+ os.mkdir(testdir)
+ open(os.path.join(testdir, "testfile"), "wb").close()
+
+ os.chdir(sources)
+ dist = Distribution({"packages": ["pkg"],
+ "package_dir": sources,
+ "package_data": {"pkg": ["doc/*"]}})
+ dist.script_args = ["build"]
+ dist.parse_command_line()
+
+ try:
+ dist.run_commands()
+ except PackagingFileError:
+ self.fail("failed package_data test when package_dir is ''")
+
+ def test_byte_compile(self):
+ project_dir, dist = self.create_dist(py_modules=['boiledeggs'])
+ os.chdir(project_dir)
+ self.write_file('boiledeggs.py', 'import antigravity')
+ cmd = build_py(dist)
+ cmd.compile = True
+ cmd.build_lib = 'here'
+ cmd.finalize_options()
+ cmd.run()
+
+ found = os.listdir(cmd.build_lib)
+ self.assertEqual(sorted(found), ['__pycache__', 'boiledeggs.py'])
+ found = os.listdir(os.path.join(cmd.build_lib, '__pycache__'))
+ self.assertEqual(found, ['boiledeggs.%s.pyc' % imp.get_tag()])
+
+ def test_byte_compile_optimized(self):
+ project_dir, dist = self.create_dist(py_modules=['boiledeggs'])
+ os.chdir(project_dir)
+ self.write_file('boiledeggs.py', 'import antigravity')
+ cmd = build_py(dist)
+ cmd.compile = True
+ cmd.optimize = 1
+ cmd.build_lib = 'here'
+ cmd.finalize_options()
+ cmd.run()
+
+ found = os.listdir(cmd.build_lib)
+ self.assertEqual(sorted(found), ['__pycache__', 'boiledeggs.py'])
+ found = os.listdir(os.path.join(cmd.build_lib, '__pycache__'))
+ self.assertEqual(sorted(found), ['boiledeggs.%s.pyc' % imp.get_tag(),
+ 'boiledeggs.%s.pyo' % imp.get_tag()])
+
+ def test_byte_compile_under_B(self):
+ # make sure byte compilation works under -B (dont_write_bytecode)
+ self.addCleanup(setattr, sys, 'dont_write_bytecode',
+ sys.dont_write_bytecode)
+ sys.dont_write_bytecode = True
+ self.test_byte_compile()
+ self.test_byte_compile_optimized()
+
+
+def test_suite():
+ return unittest.makeSuite(BuildPyTestCase)
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_build_scripts.py b/Lib/packaging/tests/test_command_build_scripts.py
new file mode 100644
index 0000000..fd3ac24
--- /dev/null
+++ b/Lib/packaging/tests/test_command_build_scripts.py
@@ -0,0 +1,109 @@
+"""Tests for distutils.command.build_scripts."""
+
+import os
+import sys
+import sysconfig
+from packaging.dist import Distribution
+from packaging.command.build_scripts import build_scripts
+
+from packaging.tests import unittest, support
+
+
+class BuildScriptsTestCase(support.TempdirManager,
+ support.LoggingCatcher,
+ unittest.TestCase):
+
+ def test_default_settings(self):
+ cmd = self.get_build_scripts_cmd("/foo/bar", [])
+ self.assertFalse(cmd.force)
+ self.assertIs(cmd.build_dir, None)
+
+ cmd.finalize_options()
+
+ self.assertTrue(cmd.force)
+ self.assertEqual(cmd.build_dir, "/foo/bar")
+
+ def test_build(self):
+ source = self.mkdtemp()
+ target = self.mkdtemp()
+ expected = self.write_sample_scripts(source)
+
+ cmd = self.get_build_scripts_cmd(target,
+ [os.path.join(source, fn)
+ for fn in expected])
+ cmd.finalize_options()
+ cmd.run()
+
+ built = os.listdir(target)
+ for name in expected:
+ self.assertIn(name, built)
+
+ def get_build_scripts_cmd(self, target, scripts):
+ dist = Distribution()
+ dist.scripts = scripts
+ dist.command_obj["build"] = support.DummyCommand(
+ build_scripts=target,
+ force=True,
+ executable=sys.executable,
+ use_2to3=False,
+ use_2to3_fixers=None,
+ convert_2to3_doctests=None
+ )
+ return build_scripts(dist)
+
+ def write_sample_scripts(self, dir):
+ expected = []
+ expected.append("script1.py")
+ self.write_script(dir, "script1.py",
+ ("#! /usr/bin/env python2.3\n"
+ "# bogus script w/ Python sh-bang\n"
+ "pass\n"))
+ expected.append("script2.py")
+ self.write_script(dir, "script2.py",
+ ("#!/usr/bin/python\n"
+ "# bogus script w/ Python sh-bang\n"
+ "pass\n"))
+ expected.append("shell.sh")
+ self.write_script(dir, "shell.sh",
+ ("#!/bin/sh\n"
+ "# bogus shell script w/ sh-bang\n"
+ "exit 0\n"))
+ return expected
+
+ def write_script(self, dir, name, text):
+ with open(os.path.join(dir, name), "w") as f:
+ f.write(text)
+
+ def test_version_int(self):
+ source = self.mkdtemp()
+ target = self.mkdtemp()
+ expected = self.write_sample_scripts(source)
+
+ cmd = self.get_build_scripts_cmd(target,
+ [os.path.join(source, fn)
+ for fn in expected])
+ cmd.finalize_options()
+
+ # http://bugs.python.org/issue4524
+ #
+ # On linux-g++-32 with command line `./configure --enable-ipv6
+ # --with-suffix=3`, python is compiled okay but the build scripts
+ # failed when writing the name of the executable
+ old = sysconfig.get_config_vars().get('VERSION')
+ sysconfig._CONFIG_VARS['VERSION'] = 4
+ try:
+ cmd.run()
+ finally:
+ if old is not None:
+ sysconfig._CONFIG_VARS['VERSION'] = old
+
+ built = os.listdir(target)
+ for name in expected:
+ self.assertIn(name, built)
+
+
+def test_suite():
+ return unittest.makeSuite(BuildScriptsTestCase)
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_check.py b/Lib/packaging/tests/test_command_check.py
new file mode 100644
index 0000000..0b91050
--- /dev/null
+++ b/Lib/packaging/tests/test_command_check.py
@@ -0,0 +1,161 @@
+"""Tests for distutils.command.check."""
+
+from packaging.command.check import check
+from packaging.metadata import _HAS_DOCUTILS
+from packaging.errors import PackagingSetupError, MetadataMissingError
+from packaging.tests import unittest, support
+
+
+class CheckTestCase(support.LoggingCatcher,
+ support.TempdirManager,
+ unittest.TestCase):
+
+ def _run(self, metadata=None, **options):
+ if metadata is None:
+ metadata = {'name': 'xxx', 'version': '1.2'}
+ pkg_info, dist = self.create_dist(**metadata)
+ cmd = check(dist)
+ cmd.initialize_options()
+ for name, value in options.items():
+ setattr(cmd, name, value)
+ cmd.ensure_finalized()
+ cmd.run()
+ return cmd
+
+ def test_check_metadata(self):
+ # let's run the command with no metadata at all
+ # by default, check is checking the metadata
+ # should have some warnings
+ self._run()
+        # trick: using assertNotEqual with an empty list will give us a more
+        # useful error message than assertGreater(.., 0) when the code
+        # changes and the test fails
+ self.assertNotEqual(self.get_logs(), [])
+
+        # now let's add the required fields
+        # and run it again, to make sure we don't get
+        # any warnings anymore
+ metadata = {'home_page': 'xxx', 'author': 'xxx',
+ 'author_email': 'xxx',
+ 'name': 'xxx', 'version': '4.2',
+ }
+ self._run(metadata)
+ self.assertEqual(self.get_logs(), [])
+
+        # now with the strict mode, we should
+        # get an error if any metadata is missing
+ self.assertRaises(MetadataMissingError, self._run, {}, **{'strict': 1})
+ self.assertRaises(PackagingSetupError, self._run,
+ {'name': 'xxx', 'version': 'xxx'}, **{'strict': 1})
+
+ # clear warnings from the previous calls
+ self.loghandler.flush()
+
+ # and of course, no error when all metadata fields are present
+ self._run(metadata, strict=True)
+ self.assertEqual(self.get_logs(), [])
+
+ # now a test with non-ASCII characters
+ metadata = {'home_page': 'xxx', 'author': '\u00c9ric',
+ 'author_email': 'xxx', 'name': 'xxx',
+ 'version': '1.2',
+ 'summary': 'Something about esszet \u00df',
+ 'description': 'More things about esszet \u00df'}
+ self._run(metadata)
+ self.assertEqual(self.get_logs(), [])
+
+ def test_check_metadata_1_2(self):
+ # let's run the command with no metadata at all
+ # by default, check is checking the metadata
+ # should have some warnings
+ self._run()
+ self.assertNotEqual(self.get_logs(), [])
+
+        # now let's add the required fields and run it again, to make sure
+        # we don't get any warnings anymore; requires_python serves as a
+        # marker to enforce Metadata-Version 1.2
+ metadata = {'home_page': 'xxx', 'author': 'xxx',
+ 'author_email': 'xxx',
+ 'name': 'xxx', 'version': '4.2',
+ 'requires_python': '2.4',
+ }
+ self._run(metadata)
+ self.assertEqual(self.get_logs(), [])
+
+        # now with the strict mode, we should
+        # get an error if any metadata is missing
+ self.assertRaises(MetadataMissingError, self._run, {}, **{'strict': 1})
+ self.assertRaises(PackagingSetupError, self._run,
+ {'name': 'xxx', 'version': 'xxx'}, **{'strict': 1})
+
+ # complain about version format
+ metadata['version'] = 'xxx'
+ self.assertRaises(PackagingSetupError, self._run, metadata,
+ **{'strict': 1})
+
+ # clear warnings from the previous calls
+ self.loghandler.flush()
+
+ # now with correct version format again
+ metadata['version'] = '4.2'
+ self._run(metadata, strict=True)
+ self.assertEqual(self.get_logs(), [])
+
+ @unittest.skipUnless(_HAS_DOCUTILS, "requires docutils")
+ def test_check_restructuredtext(self):
+ # let's see if it detects broken rest in description
+ broken_rest = 'title\n===\n\ntest'
+ pkg_info, dist = self.create_dist(description=broken_rest)
+ cmd = check(dist)
+ cmd.check_restructuredtext()
+ self.assertEqual(len(self.get_logs()), 1)
+
+ # let's see if we have an error with strict=1
+ metadata = {'home_page': 'xxx', 'author': 'xxx',
+ 'author_email': 'xxx',
+ 'name': 'xxx', 'version': '1.2',
+ 'description': broken_rest}
+ self.assertRaises(PackagingSetupError, self._run, metadata,
+ strict=True, all=True)
+ self.loghandler.flush()
+
+ # and non-broken rest, including a non-ASCII character to test #12114
+ dist = self.create_dist(description='title\n=====\n\ntest \u00df')[1]
+ cmd = check(dist)
+ cmd.check_restructuredtext()
+ self.assertEqual(self.get_logs(), [])
+
+ def test_check_all(self):
+ self.assertRaises(PackagingSetupError, self._run,
+ {'name': 'xxx', 'version': 'xxx'}, **{'strict': 1,
+ 'all': 1})
+ self.assertRaises(MetadataMissingError, self._run,
+ {}, **{'strict': 1,
+ 'all': 1})
+
+ def test_check_hooks(self):
+ pkg_info, dist = self.create_dist()
+ dist.command_options['install_dist'] = {
+ 'pre_hook': ('file', {"a": 'some.nonextistant.hook.ghrrraarrhll'}),
+ }
+ cmd = check(dist)
+ cmd.check_hooks_resolvable()
+ self.assertEqual(len(self.get_logs()), 1)
+
+ def test_warn(self):
+ _, dist = self.create_dist()
+ cmd = check(dist)
+ self.assertEqual(self.get_logs(), [])
+ cmd.warn('hello')
+ self.assertEqual(self.get_logs(), ['check: hello'])
+ cmd.warn('hello %s', 'world')
+ self.assertEqual(self.get_logs(), ['check: hello world'])
+ cmd.warn('hello %s %s', 'beautiful', 'world')
+ self.assertEqual(self.get_logs(), ['check: hello beautiful world'])
+
+
+def test_suite():
+ return unittest.makeSuite(CheckTestCase)
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_clean.py b/Lib/packaging/tests/test_command_clean.py
new file mode 100644
index 0000000..a78c3a7
--- /dev/null
+++ b/Lib/packaging/tests/test_command_clean.py
@@ -0,0 +1,46 @@
+"""Tests for distutils.command.clean."""
+import os
+
+from packaging.command.clean import clean
+from packaging.tests import unittest, support
+
+
+class cleanTestCase(support.TempdirManager, support.LoggingCatcher,
+ unittest.TestCase):
+
+ def test_simple_run(self):
+ pkg_dir, dist = self.create_dist()
+ cmd = clean(dist)
+
+ # let's add some elements clean should remove
+ dirs = [(d, os.path.join(pkg_dir, d))
+ for d in ('build_temp', 'build_lib', 'bdist_base',
+ 'build_scripts', 'build_base')]
+
+ for name, path in dirs:
+ os.mkdir(path)
+ setattr(cmd, name, path)
+ if name == 'build_base':
+ continue
+ for f in ('one', 'two', 'three'):
+ self.write_file((path, f))
+
+ # let's run the command
+ cmd.all = True
+ cmd.ensure_finalized()
+ cmd.run()
+
+        # make sure the files were removed
+ for name, path in dirs:
+ self.assertFalse(os.path.exists(path),
+ '%r was not removed' % path)
+
+ # let's run the command again (should spit warnings but succeed)
+ cmd.run()
+
+
+def test_suite():
+ return unittest.makeSuite(cleanTestCase)
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_cmd.py b/Lib/packaging/tests/test_command_cmd.py
new file mode 100644
index 0000000..6d00ec3
--- /dev/null
+++ b/Lib/packaging/tests/test_command_cmd.py
@@ -0,0 +1,102 @@
+"""Tests for distutils.cmd."""
+import os
+import logging
+
+from packaging.command.cmd import Command
+from packaging.dist import Distribution
+from packaging.errors import PackagingOptionError
+from packaging.tests import support, unittest
+
+
+class MyCmd(Command):
+ def initialize_options(self):
+ pass
+
+
+class CommandTestCase(support.LoggingCatcher,
+ unittest.TestCase):
+
+ def setUp(self):
+ super(CommandTestCase, self).setUp()
+ dist = Distribution()
+ self.cmd = MyCmd(dist)
+
+ def test_make_file(self):
+ cmd = self.cmd
+
+ # making sure it raises when infiles is not a string or a list/tuple
+ self.assertRaises(TypeError, cmd.make_file,
+ infiles=1, outfile='', func='func', args=())
+
+ # making sure execute gets called properly
+ def _execute(func, args, exec_msg, level):
+ self.assertEqual(exec_msg, 'generating out from in')
+ cmd.force = True
+ cmd.execute = _execute
+ cmd.make_file(infiles='in', outfile='out', func='func', args=())
+
+ def test_dump_options(self):
+ cmd = self.cmd
+ cmd.option1 = 1
+ cmd.option2 = 1
+ cmd.user_options = [('option1', '', ''), ('option2', '', '')]
+ cmd.dump_options()
+
+ wanted = ["command options for 'MyCmd':", ' option1 = 1',
+ ' option2 = 1']
+ msgs = self.get_logs(logging.INFO)
+ self.assertEqual(msgs, wanted)
+
+ def test_ensure_string(self):
+ cmd = self.cmd
+ cmd.option1 = 'ok'
+ cmd.ensure_string('option1')
+
+ cmd.option2 = None
+ cmd.ensure_string('option2', 'xxx')
+ self.assertTrue(hasattr(cmd, 'option2'))
+
+ cmd.option3 = 1
+ self.assertRaises(PackagingOptionError, cmd.ensure_string, 'option3')
+
+ def test_ensure_string_list(self):
+ cmd = self.cmd
+ cmd.option1 = 'ok,dok'
+ cmd.ensure_string_list('option1')
+ self.assertEqual(cmd.option1, ['ok', 'dok'])
+
+ cmd.yes_string_list = ['one', 'two', 'three']
+ cmd.yes_string_list2 = 'ok'
+ cmd.ensure_string_list('yes_string_list')
+ cmd.ensure_string_list('yes_string_list2')
+ self.assertEqual(cmd.yes_string_list, ['one', 'two', 'three'])
+ self.assertEqual(cmd.yes_string_list2, ['ok'])
+
+ cmd.not_string_list = ['one', 2, 'three']
+ cmd.not_string_list2 = object()
+ self.assertRaises(PackagingOptionError,
+ cmd.ensure_string_list, 'not_string_list')
+
+ self.assertRaises(PackagingOptionError,
+ cmd.ensure_string_list, 'not_string_list2')
+
+ def test_ensure_filename(self):
+ cmd = self.cmd
+ cmd.option1 = __file__
+ cmd.ensure_filename('option1')
+ cmd.option2 = 'xxx'
+ self.assertRaises(PackagingOptionError, cmd.ensure_filename, 'option2')
+
+ def test_ensure_dirname(self):
+ cmd = self.cmd
+ cmd.option1 = os.path.dirname(__file__) or os.curdir
+ cmd.ensure_dirname('option1')
+ cmd.option2 = 'xxx'
+ self.assertRaises(PackagingOptionError, cmd.ensure_dirname, 'option2')
+
+
+def test_suite():
+ return unittest.makeSuite(CommandTestCase)
+
+if __name__ == '__main__':
+ unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_command_config.py b/Lib/packaging/tests/test_command_config.py
new file mode 100644
index 0000000..dae75b4
--- /dev/null
+++ b/Lib/packaging/tests/test_command_config.py
@@ -0,0 +1,76 @@
+"""Tests for distutils.command.config."""
+import os
+import sys
+import logging
+
+from packaging.command.config import dump_file, config
+from packaging.tests import unittest, support
+
+
+class ConfigTestCase(support.LoggingCatcher,
+ support.TempdirManager,
+ unittest.TestCase):
+
+ def test_dump_file(self):
+ this_file = __file__.rstrip('co')
+ with open(this_file) as f:
+ numlines = len(f.readlines())
+
+ dump_file(this_file, 'I am the header')
+
+        logs = []
+        for log in self.get_logs(logging.INFO):
+            logs.extend(log.split('\n'))
+ self.assertEqual(len(logs), numlines + 2)
+
+ @unittest.skipIf(sys.platform == 'win32', 'disabled on win32')
+ def test_search_cpp(self):
+ pkg_dir, dist = self.create_dist()
+ cmd = config(dist)
+
+ # simple pattern searches
+ match = cmd.search_cpp(pattern='xxx', body='/* xxx */')
+ self.assertEqual(match, 0)
+
+ match = cmd.search_cpp(pattern='_configtest', body='/* xxx */')
+ self.assertEqual(match, 1)
+
+ def test_finalize_options(self):
+ # finalize_options does a bit of transformation
+ # on options
+ pkg_dir, dist = self.create_dist()
+ cmd = config(dist)
+ cmd.include_dirs = 'one%stwo' % os.pathsep
+ cmd.libraries = 'one'
+ cmd.library_dirs = 'three%sfour' % os.pathsep
+ cmd.ensure_finalized()
+
+ self.assertEqual(cmd.include_dirs, ['one', 'two'])
+ self.assertEqual(cmd.libraries, ['one'])
+ self.assertEqual(cmd.library_dirs, ['three', 'four'])
+
+ def test_clean(self):
+ # _clean removes files
+ tmp_dir = self.mkdtemp()
+ f1 = os.path.join(tmp_dir, 'one')
+ f2 = os.path.join(tmp_dir, 'two')
+
+ self.write_file(f1, 'xxx')
+ self.write_file(f2, 'xxx')
+
+ for f in (f1, f2):
+ self.assertTrue(os.path.exists(f))
+
+ pkg_dir, dist = self.create_dist()
+ cmd = config(dist)
+ cmd._clean(f1, f2)
+
+ for f in (f1, f2):
+ self.assertFalse(os.path.exists(f))
+
+
+def test_suite():
+ return unittest.makeSuite(ConfigTestCase)
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_install_data.py b/Lib/packaging/tests/test_command_install_data.py
new file mode 100644
index 0000000..8d4373d
--- /dev/null
+++ b/Lib/packaging/tests/test_command_install_data.py
@@ -0,0 +1,148 @@
+"""Tests for packaging.command.install_data."""
+import os
+import sys
+import sysconfig
+import packaging.database
+from sysconfig import _get_default_scheme
+from packaging.tests import unittest, support
+from packaging.command.install_data import install_data
+from packaging.command.install_dist import install_dist
+from packaging.command.install_distinfo import install_distinfo
+
+
+class InstallDataTestCase(support.TempdirManager,
+ support.LoggingCatcher,
+ unittest.TestCase):
+
+ def setUp(self):
+ super(InstallDataTestCase, self).setUp()
+ scheme = _get_default_scheme()
+ old_items = sysconfig._SCHEMES.items(scheme)
+
+ def restore():
+ sysconfig._SCHEMES.remove_section(scheme)
+ sysconfig._SCHEMES.add_section(scheme)
+ for option, value in old_items:
+ sysconfig._SCHEMES.set(scheme, option, value)
+
+ self.addCleanup(restore)
+
+ def test_simple_run(self):
+ pkg_dir, dist = self.create_dist()
+ cmd = install_data(dist)
+ cmd.install_dir = inst = os.path.join(pkg_dir, 'inst')
+ scheme = _get_default_scheme()
+
+ sysconfig._SCHEMES.set(scheme, 'inst',
+ os.path.join(pkg_dir, 'inst'))
+ sysconfig._SCHEMES.set(scheme, 'inst2',
+ os.path.join(pkg_dir, 'inst2'))
+
+ one = os.path.join(pkg_dir, 'one')
+ self.write_file(one, 'xxx')
+ inst2 = os.path.join(pkg_dir, 'inst2')
+ two = os.path.join(pkg_dir, 'two')
+ self.write_file(two, 'xxx')
+
+    # FIXME this creates a literal {inst2} directory!
+ cmd.data_files = {one: '{inst}/one', two: '{inst2}/two'}
+ self.assertCountEqual(cmd.get_inputs(), [one, two])
+
+ # let's run the command
+ cmd.ensure_finalized()
+ cmd.run()
+
+ # let's check the result
+ self.assertEqual(len(cmd.get_outputs()), 2)
+ rtwo = os.path.split(two)[-1]
+ self.assertTrue(os.path.exists(os.path.join(inst2, rtwo)))
+ rone = os.path.split(one)[-1]
+ self.assertTrue(os.path.exists(os.path.join(inst, rone)))
+ cmd.outfiles = []
+
+    # let's try with warn_dir on
+ cmd.warn_dir = True
+ cmd.finalized = False
+ cmd.ensure_finalized()
+ cmd.run()
+
+ # let's check the result
+ self.assertEqual(len(cmd.get_outputs()), 2)
+ self.assertTrue(os.path.exists(os.path.join(inst2, rtwo)))
+ self.assertTrue(os.path.exists(os.path.join(inst, rone)))
+ cmd.outfiles = []
+
+ # now using root and empty dir
+ cmd.root = os.path.join(pkg_dir, 'root')
+ three = os.path.join(cmd.install_dir, 'three')
+ self.write_file(three, 'xx')
+
+ sysconfig._SCHEMES.set(scheme, 'inst3', cmd.install_dir)
+
+ cmd.data_files = {one: '{inst}/one', two: '{inst2}/two',
+ three: '{inst3}/three'}
+ cmd.finalized = False
+ cmd.ensure_finalized()
+ cmd.run()
+
+ # let's check the result
+ self.assertEqual(len(cmd.get_outputs()), 3)
+ self.assertTrue(os.path.exists(os.path.join(inst2, rtwo)))
+ self.assertTrue(os.path.exists(os.path.join(inst, rone)))
+
+ def test_resources(self):
+ install_dir = self.mkdtemp()
+ scripts_dir = self.mkdtemp()
+ project_dir, dist = self.create_dist(
+ name='Spamlib', version='0.1',
+ data_files={'spamd': '{scripts}/spamd'})
+
+ os.chdir(project_dir)
+ self.write_file('spamd', '# Python script')
+ sysconfig._SCHEMES.set(_get_default_scheme(), 'scripts', scripts_dir)
+ sys.path.insert(0, install_dir)
+ packaging.database.disable_cache()
+ self.addCleanup(sys.path.remove, install_dir)
+ self.addCleanup(packaging.database.enable_cache)
+
+ cmd = install_dist(dist)
+ cmd.outputs = ['spamd']
+ cmd.install_lib = install_dir
+ dist.command_obj['install_dist'] = cmd
+
+ cmd = install_data(dist)
+ cmd.install_dir = install_dir
+ cmd.ensure_finalized()
+ dist.command_obj['install_data'] = cmd
+ cmd.run()
+
+ cmd = install_distinfo(dist)
+ cmd.ensure_finalized()
+ dist.command_obj['install_distinfo'] = cmd
+ cmd.run()
+
+ # first a few sanity checks
+ self.assertEqual(os.listdir(scripts_dir), ['spamd'])
+ self.assertEqual(os.listdir(install_dir), ['Spamlib-0.1.dist-info'])
+
+ # now the real test
+ fn = os.path.join(install_dir, 'Spamlib-0.1.dist-info', 'RESOURCES')
+ with open(fn, encoding='utf-8') as fp:
+ content = fp.read().strip()
+
+ expected = 'spamd,%s' % os.path.join(scripts_dir, 'spamd')
+ self.assertEqual(content, expected)
+
+ # just to be sure, we also test that get_file works here, even though
+ # packaging.database has its own test file
+ with packaging.database.get_file('Spamlib', 'spamd') as fp:
+ content = fp.read()
+
+ self.assertEqual('# Python script', content)
+
+
+def test_suite():
+ return unittest.makeSuite(InstallDataTestCase)
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="test_suite")
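The data_files mapping in test_simple_run uses sysconfig-style path
categories: a value such as '{inst2}/two' is expanded by substituting the
scheme path registered for that category. A rough sketch of the substitution,
with resolve() as a hypothetical helper rather than the packaging code:

    import os

    def resolve(spec, categories):
        # '{inst}/one' -> categories['inst'] joined with 'one'
        category, _, rest = spec.partition('/')
        base = categories[category.strip('{}')]
        return os.path.join(base, *rest.split('/'))

    cats = {'inst': '/tmp/pkg/inst', 'inst2': '/tmp/pkg/inst2'}
    print(resolve('{inst}/one', cats))   # /tmp/pkg/inst/one
    print(resolve('{inst2}/two', cats))  # /tmp/pkg/inst2/two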
diff --git a/Lib/packaging/tests/test_command_install_dist.py b/Lib/packaging/tests/test_command_install_dist.py
new file mode 100644
index 0000000..3345d2e
--- /dev/null
+++ b/Lib/packaging/tests/test_command_install_dist.py
@@ -0,0 +1,241 @@
+"""Tests for packaging.command.install."""
+
+import os
+import imp
+import sys
+from sysconfig import (get_scheme_names, get_config_vars,
+ _SCHEMES, get_config_var, get_path)
+
+from packaging.command.build_ext import build_ext
+from packaging.command.install_dist import install_dist
+from packaging.compiler.extension import Extension
+from packaging.dist import Distribution
+from packaging.errors import PackagingOptionError
+
+from packaging.tests import unittest, support
+
+
+_CONFIG_VARS = get_config_vars()
+
+
+def _make_ext_name(modname):
+ if os.name == 'nt' and sys.executable.endswith('_d.exe'):
+ modname += '_d'
+ return modname + get_config_var('SO')
+
+
+class InstallTestCase(support.TempdirManager,
+ support.LoggingCatcher,
+ unittest.TestCase):
+
+ def test_home_installation_scheme(self):
+        # This ensures two things:
+        # - that --home generates the desired set of directory names
+        # - that --home is supported on all platforms
+ builddir = self.mkdtemp()
+ destination = os.path.join(builddir, "installation")
+
+ dist = Distribution({"name": "foopkg"})
+ dist.command_obj["build"] = support.DummyCommand(
+ build_base=builddir,
+ build_lib=os.path.join(builddir, "lib"),
+ )
+
+ old_posix_prefix = _SCHEMES.get('posix_prefix', 'platinclude')
+ old_posix_home = _SCHEMES.get('posix_home', 'platinclude')
+
+ new_path = '{platbase}/include/python{py_version_short}'
+ _SCHEMES.set('posix_prefix', 'platinclude', new_path)
+ _SCHEMES.set('posix_home', 'platinclude', '{platbase}/include/python')
+
+ try:
+ cmd = install_dist(dist)
+ cmd.home = destination
+ cmd.ensure_finalized()
+ finally:
+ _SCHEMES.set('posix_prefix', 'platinclude', old_posix_prefix)
+ _SCHEMES.set('posix_home', 'platinclude', old_posix_home)
+
+ self.assertEqual(cmd.install_base, destination)
+ self.assertEqual(cmd.install_platbase, destination)
+
+ def check_path(got, expected):
+ got = os.path.normpath(got)
+ expected = os.path.normpath(expected)
+ self.assertEqual(got, expected)
+
+ libdir = os.path.join(destination, "lib", "python")
+ check_path(cmd.install_lib, libdir)
+ check_path(cmd.install_platlib, libdir)
+ check_path(cmd.install_purelib, libdir)
+ check_path(cmd.install_headers,
+ os.path.join(destination, "include", "python", "foopkg"))
+ check_path(cmd.install_scripts, os.path.join(destination, "bin"))
+ check_path(cmd.install_data, destination)
+
+ def test_user_site(self):
+ # test install with --user
+ # preparing the environment for the test
+ self.old_user_base = get_config_var('userbase')
+ self.old_user_site = get_path('purelib', '%s_user' % os.name)
+ self.tmpdir = self.mkdtemp()
+ self.user_base = os.path.join(self.tmpdir, 'B')
+ self.user_site = os.path.join(self.tmpdir, 'S')
+ _CONFIG_VARS['userbase'] = self.user_base
+ scheme = '%s_user' % os.name
+ _SCHEMES.set(scheme, 'purelib', self.user_site)
+
+ def _expanduser(path):
+ if path[0] == '~':
+ path = os.path.normpath(self.tmpdir) + path[1:]
+ return path
+
+ self.old_expand = os.path.expanduser
+ os.path.expanduser = _expanduser
+
+ def cleanup():
+ _CONFIG_VARS['userbase'] = self.old_user_base
+ _SCHEMES.set(scheme, 'purelib', self.old_user_site)
+ os.path.expanduser = self.old_expand
+
+ self.addCleanup(cleanup)
+
+ schemes = get_scheme_names()
+ for key in ('nt_user', 'posix_user', 'os2_home'):
+ self.assertIn(key, schemes)
+
+ dist = Distribution({'name': 'xx'})
+ cmd = install_dist(dist)
+
+ # making sure the user option is there
+        options = [name for name, short, label in
+ cmd.user_options]
+ self.assertIn('user', options)
+
+ # setting a value
+ cmd.user = True
+
+ # user base and site shouldn't be created yet
+ self.assertFalse(os.path.exists(self.user_base))
+ self.assertFalse(os.path.exists(self.user_site))
+
+ # let's run finalize
+ cmd.ensure_finalized()
+
+ # now they should
+ self.assertTrue(os.path.exists(self.user_base))
+ self.assertTrue(os.path.exists(self.user_site))
+
+ self.assertIn('userbase', cmd.config_vars)
+ self.assertIn('usersite', cmd.config_vars)
+
+ def test_handle_extra_path(self):
+ dist = Distribution({'name': 'xx', 'extra_path': 'path,dirs'})
+ cmd = install_dist(dist)
+
+ # two elements
+ cmd.handle_extra_path()
+ self.assertEqual(cmd.extra_path, ['path', 'dirs'])
+ self.assertEqual(cmd.extra_dirs, 'dirs')
+ self.assertEqual(cmd.path_file, 'path')
+
+ # one element
+ cmd.extra_path = ['path']
+ cmd.handle_extra_path()
+ self.assertEqual(cmd.extra_path, ['path'])
+ self.assertEqual(cmd.extra_dirs, 'path')
+ self.assertEqual(cmd.path_file, 'path')
+
+ # none
+ dist.extra_path = cmd.extra_path = None
+ cmd.handle_extra_path()
+ self.assertEqual(cmd.extra_path, None)
+ self.assertEqual(cmd.extra_dirs, '')
+ self.assertEqual(cmd.path_file, None)
+
+        # three elements (no way!)
+ cmd.extra_path = 'path,dirs,again'
+ self.assertRaises(PackagingOptionError, cmd.handle_extra_path)
+
+ def test_finalize_options(self):
+ dist = Distribution({'name': 'xx'})
+ cmd = install_dist(dist)
+
+ # must supply either prefix/exec-prefix/home or
+ # install-base/install-platbase -- not both
+ cmd.prefix = 'prefix'
+ cmd.install_base = 'base'
+ self.assertRaises(PackagingOptionError, cmd.finalize_options)
+
+ # must supply either home or prefix/exec-prefix -- not both
+ cmd.install_base = None
+ cmd.home = 'home'
+ self.assertRaises(PackagingOptionError, cmd.finalize_options)
+
+        # can't combine user with prefix/exec_prefix/home or
+        # install_(plat)base
+ cmd.prefix = None
+ cmd.user = 'user'
+ self.assertRaises(PackagingOptionError, cmd.finalize_options)
+
+ def test_old_record(self):
+ # test pre-PEP 376 --record option (outside dist-info dir)
+ install_dir = self.mkdtemp()
+ project_dir, dist = self.create_dist(py_modules=['hello'],
+ scripts=['sayhi'])
+ os.chdir(project_dir)
+ self.write_file('hello.py', "def main(): print('o hai')")
+ self.write_file('sayhi', 'from hello import main; main()')
+
+ cmd = install_dist(dist)
+ dist.command_obj['install_dist'] = cmd
+ cmd.root = install_dir
+ cmd.record = os.path.join(project_dir, 'filelist')
+ cmd.ensure_finalized()
+ cmd.run()
+
+ with open(cmd.record) as f:
+ content = f.read()
+
+ found = [os.path.basename(line) for line in content.splitlines()]
+ expected = ['hello.py', 'hello.%s.pyc' % imp.get_tag(), 'sayhi',
+ 'METADATA', 'INSTALLER', 'REQUESTED', 'RECORD']
+ self.assertEqual(sorted(found), sorted(expected))
+
+ # XXX test that fancy_getopt is okay with options named
+ # record and no-record but unrelated
+
+ def test_old_record_extensions(self):
+ # test pre-PEP 376 --record option with ext modules
+ install_dir = self.mkdtemp()
+ project_dir, dist = self.create_dist(ext_modules=[
+ Extension('xx', ['xxmodule.c'])])
+ os.chdir(project_dir)
+ support.copy_xxmodule_c(project_dir)
+
+ buildextcmd = build_ext(dist)
+ support.fixup_build_ext(buildextcmd)
+ buildextcmd.ensure_finalized()
+
+ cmd = install_dist(dist)
+ dist.command_obj['install_dist'] = cmd
+ dist.command_obj['build_ext'] = buildextcmd
+ cmd.root = install_dir
+ cmd.record = os.path.join(project_dir, 'filelist')
+ cmd.ensure_finalized()
+ cmd.run()
+
+ with open(cmd.record) as f:
+ content = f.read()
+
+ found = [os.path.basename(line) for line in content.splitlines()]
+ expected = [_make_ext_name('xx'),
+ 'METADATA', 'INSTALLER', 'REQUESTED', 'RECORD']
+ self.assertEqual(found, expected)
+
+
+def test_suite():
+ return unittest.makeSuite(InstallTestCase)
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="test_suite")
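test_handle_extra_path pins down the extra_path rules: two comma-separated
elements split into a .pth file name and an extra directory, one element
serves as both, None disables the feature, and anything longer is an error.
The same rules restated as a standalone sketch (split_extra_path is
illustrative only, not the packaging method):

    def split_extra_path(extra_path):
        # returns (path_file, extra_dirs) following the rules above
        if extra_path is None:
            return None, ''
        parts = [p.strip() for p in extra_path.split(',')]
        if len(parts) == 1:
            return parts[0], parts[0]
        if len(parts) == 2:
            return parts[0], parts[1]
        raise ValueError("'extra_path' must have one or two elements")

    print(split_extra_path('path,dirs'))  # ('path', 'dirs')
    print(split_extra_path('path'))       # ('path', 'path')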
diff --git a/Lib/packaging/tests/test_command_install_distinfo.py b/Lib/packaging/tests/test_command_install_distinfo.py
new file mode 100644
index 0000000..33153e7
--- /dev/null
+++ b/Lib/packaging/tests/test_command_install_distinfo.py
@@ -0,0 +1,252 @@
+"""Tests for ``packaging.command.install_distinfo``.
+
+Writing of the RESOURCES file is tested in test_command_install_data.
+"""
+
+import os
+import csv
+import hashlib
+import sysconfig
+
+from packaging.command.install_distinfo import install_distinfo
+from packaging.command.cmd import Command
+from packaging.compiler.extension import Extension
+from packaging.metadata import Metadata
+from packaging.tests import unittest, support
+
+
+class DummyInstallCmd(Command):
+
+ def __init__(self, dist=None):
+ self.outputs = []
+ self.distribution = dist
+
+ def __getattr__(self, name):
+ return None
+
+ def ensure_finalized(self):
+ pass
+
+ def get_outputs(self):
+ return (self.outputs +
+ self.get_finalized_command('install_distinfo').get_outputs())
+
+
+class InstallDistinfoTestCase(support.TempdirManager,
+ support.LoggingCatcher,
+ unittest.TestCase):
+
+ checkLists = lambda self, x, y: self.assertListEqual(sorted(x), sorted(y))
+
+ def test_empty_install(self):
+ pkg_dir, dist = self.create_dist(name='foo',
+ version='1.0')
+ install_dir = self.mkdtemp()
+
+ install = DummyInstallCmd(dist)
+ dist.command_obj['install_dist'] = install
+
+ cmd = install_distinfo(dist)
+ dist.command_obj['install_distinfo'] = cmd
+
+ cmd.install_dir = install_dir
+ cmd.ensure_finalized()
+ cmd.run()
+
+ self.checkLists(os.listdir(install_dir), ['foo-1.0.dist-info'])
+
+ dist_info = os.path.join(install_dir, 'foo-1.0.dist-info')
+ self.checkLists(os.listdir(dist_info),
+ ['METADATA', 'RECORD', 'REQUESTED', 'INSTALLER'])
+ with open(os.path.join(dist_info, 'INSTALLER')) as fp:
+ self.assertEqual(fp.read(), 'distutils')
+ with open(os.path.join(dist_info, 'REQUESTED')) as fp:
+ self.assertEqual(fp.read(), '')
+ meta_path = os.path.join(dist_info, 'METADATA')
+ self.assertTrue(Metadata(path=meta_path).check())
+
+ def test_installer(self):
+ pkg_dir, dist = self.create_dist(name='foo',
+ version='1.0')
+ install_dir = self.mkdtemp()
+
+ install = DummyInstallCmd(dist)
+ dist.command_obj['install_dist'] = install
+
+ cmd = install_distinfo(dist)
+ dist.command_obj['install_distinfo'] = cmd
+
+ cmd.install_dir = install_dir
+ cmd.installer = 'bacon-python'
+ cmd.ensure_finalized()
+ cmd.run()
+
+ dist_info = os.path.join(install_dir, 'foo-1.0.dist-info')
+ with open(os.path.join(dist_info, 'INSTALLER')) as fp:
+ self.assertEqual(fp.read(), 'bacon-python')
+
+ def test_requested(self):
+ pkg_dir, dist = self.create_dist(name='foo',
+ version='1.0')
+ install_dir = self.mkdtemp()
+
+ install = DummyInstallCmd(dist)
+ dist.command_obj['install_dist'] = install
+
+ cmd = install_distinfo(dist)
+ dist.command_obj['install_distinfo'] = cmd
+
+ cmd.install_dir = install_dir
+ cmd.requested = False
+ cmd.ensure_finalized()
+ cmd.run()
+
+ dist_info = os.path.join(install_dir, 'foo-1.0.dist-info')
+ self.checkLists(os.listdir(dist_info),
+ ['METADATA', 'RECORD', 'INSTALLER'])
+
+ def test_no_record(self):
+ pkg_dir, dist = self.create_dist(name='foo',
+ version='1.0')
+ install_dir = self.mkdtemp()
+
+ install = DummyInstallCmd(dist)
+ dist.command_obj['install_dist'] = install
+
+ cmd = install_distinfo(dist)
+ dist.command_obj['install_distinfo'] = cmd
+
+ cmd.install_dir = install_dir
+ cmd.no_record = True
+ cmd.ensure_finalized()
+ cmd.run()
+
+ dist_info = os.path.join(install_dir, 'foo-1.0.dist-info')
+ self.checkLists(os.listdir(dist_info),
+ ['METADATA', 'REQUESTED', 'INSTALLER'])
+
+ def test_record_basic(self):
+ install_dir = self.mkdtemp()
+ modules_dest = os.path.join(install_dir, 'lib')
+ scripts_dest = os.path.join(install_dir, 'bin')
+ project_dir, dist = self.create_dist(
+ name='Spamlib', version='0.1',
+ py_modules=['spam'], scripts=['spamd'],
+ ext_modules=[Extension('_speedspam', ['_speedspam.c'])])
+
+ # using a real install_dist command is too painful, so we use a mock
+ # class that's only a holder for options to be used by install_distinfo
+ # and we create placeholder files manually instead of using build_*.
+ # the install_* commands will still be consulted by install_distinfo.
+ os.chdir(project_dir)
+ self.write_file('spam', '# Python module')
+ self.write_file('spamd', '# Python script')
+ extmod = '_speedspam' + sysconfig.get_config_var('SO')
+ self.write_file(extmod, '')
+
+ install = DummyInstallCmd(dist)
+ install.outputs = ['spam', 'spamd', extmod]
+ install.install_lib = modules_dest
+ install.install_scripts = scripts_dest
+ dist.command_obj['install_dist'] = install
+
+ cmd = install_distinfo(dist)
+ cmd.ensure_finalized()
+ dist.command_obj['install_distinfo'] = cmd
+ cmd.run()
+
+ # checksum and size are not hard-coded for METADATA as it is
+ # platform-dependent (line endings)
+ metadata = os.path.join(modules_dest, 'Spamlib-0.1.dist-info',
+ 'METADATA')
+ with open(metadata, 'rb') as fp:
+ content = fp.read()
+
+ metadata_size = str(len(content))
+ metadata_md5 = hashlib.md5(content).hexdigest()
+
+ record = os.path.join(modules_dest, 'Spamlib-0.1.dist-info', 'RECORD')
+ with open(record, encoding='utf-8') as fp:
+ content = fp.read()
+
+ found = []
+ for line in content.splitlines():
+ filename, checksum, size = line.split(',')
+ filename = os.path.basename(filename)
+ found.append((filename, checksum, size))
+
+ expected = [
+ ('spam', '6ab2f288ef2545868effe68757448b45', '15'),
+ ('spamd', 'd13e6156ce78919a981e424b2fdcd974', '15'),
+ (extmod, 'd41d8cd98f00b204e9800998ecf8427e', '0'),
+ ('METADATA', metadata_md5, metadata_size),
+ ('INSTALLER', '44e3fde05f3f537ed85831969acf396d', '9'),
+ ('REQUESTED', 'd41d8cd98f00b204e9800998ecf8427e', '0'),
+ ('RECORD', '', ''),
+ ]
+ self.assertEqual(found, expected)
+
+ def test_record(self):
+ pkg_dir, dist = self.create_dist(name='foo',
+ version='1.0')
+ install_dir = self.mkdtemp()
+
+ install = DummyInstallCmd(dist)
+ dist.command_obj['install_dist'] = install
+
+ fake_dists = os.path.join(os.path.dirname(__file__), 'fake_dists')
+ fake_dists = os.path.realpath(fake_dists)
+
+        # for testing, we simply add all files from the fake_dists directory
+ dirs = []
+ for dir in os.listdir(fake_dists):
+ full_path = os.path.join(fake_dists, dir)
+ if (not dir.endswith('.egg') or dir.endswith('.egg-info') or
+ dir.endswith('.dist-info')) and os.path.isdir(full_path):
+ dirs.append(full_path)
+
+ for dir in dirs:
+ for path, subdirs, files in os.walk(dir):
+ install.outputs += [os.path.join(path, f) for f in files]
+                install.outputs += [os.path.join(path, f + 'c')
+ for f in files if f.endswith('.py')]
+
+ cmd = install_distinfo(dist)
+ dist.command_obj['install_distinfo'] = cmd
+
+ cmd.install_dir = install_dir
+ cmd.ensure_finalized()
+ cmd.run()
+
+ dist_info = os.path.join(install_dir, 'foo-1.0.dist-info')
+
+ expected = []
+ for f in install.get_outputs():
+ if (f.endswith(('.pyc', '.pyo')) or f == os.path.join(
+ install_dir, 'foo-1.0.dist-info', 'RECORD')):
+ expected.append([f, '', ''])
+ else:
+ size = os.path.getsize(f)
+ md5 = hashlib.md5()
+ with open(f, 'rb') as fp:
+ md5.update(fp.read())
+ hash = md5.hexdigest()
+ expected.append([f, hash, str(size)])
+
+ parsed = []
+ with open(os.path.join(dist_info, 'RECORD'), 'r') as f:
+ reader = csv.reader(f, delimiter=',',
+ lineterminator=os.linesep,
+ quotechar='"')
+ parsed = list(reader)
+
+ self.maxDiff = None
+ self.checkLists(parsed, expected)
+
+
+def test_suite():
+ return unittest.makeSuite(InstallDistinfoTestCase)
+
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="test_suite")
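The RECORD checks above follow the PEP 376 layout of this era: one CSV row
per installed file with an MD5 checksum and a byte size, and empty fields for
RECORD itself, since the file cannot contain its own hash. A small sketch of
producing rows; record_row is a hypothetical helper:

    import csv
    import hashlib
    import sys

    def record_row(path):
        # hash and size for one regular installed file
        with open(path, 'rb') as fp:
            content = fp.read()
        return [path, hashlib.md5(content).hexdigest(), str(len(content))]

    writer = csv.writer(sys.stdout, delimiter=',', quotechar='"')
    writer.writerow(record_row(__file__))  # path,md5,size
    writer.writerow(['RECORD', '', ''])    # RECORD lists itself without hash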
diff --git a/Lib/packaging/tests/test_command_install_headers.py b/Lib/packaging/tests/test_command_install_headers.py
new file mode 100644
index 0000000..f2906a7
--- /dev/null
+++ b/Lib/packaging/tests/test_command_install_headers.py
@@ -0,0 +1,38 @@
+"""Tests for packaging.command.install_headers."""
+import os
+
+from packaging.command.install_headers import install_headers
+from packaging.tests import unittest, support
+
+
+class InstallHeadersTestCase(support.TempdirManager,
+ support.LoggingCatcher,
+ unittest.TestCase):
+
+ def test_simple_run(self):
+ # we have two headers
+ header_list = self.mkdtemp()
+ header1 = os.path.join(header_list, 'header1')
+ header2 = os.path.join(header_list, 'header2')
+ self.write_file(header1)
+ self.write_file(header2)
+ headers = [header1, header2]
+
+ pkg_dir, dist = self.create_dist(headers=headers)
+ cmd = install_headers(dist)
+ self.assertEqual(cmd.get_inputs(), headers)
+
+ # let's run the command
+ cmd.install_dir = os.path.join(pkg_dir, 'inst')
+ cmd.ensure_finalized()
+ cmd.run()
+
+ # let's check the results
+ self.assertEqual(len(cmd.get_outputs()), 2)
+
+
+def test_suite():
+ return unittest.makeSuite(InstallHeadersTestCase)
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_install_lib.py b/Lib/packaging/tests/test_command_install_lib.py
new file mode 100644
index 0000000..79e8fa8
--- /dev/null
+++ b/Lib/packaging/tests/test_command_install_lib.py
@@ -0,0 +1,110 @@
+"""Tests for packaging.command.install_data."""
+import os
+import sys
+import imp
+
+from packaging.tests import unittest, support
+from packaging.command.install_lib import install_lib
+from packaging.compiler.extension import Extension
+from packaging.errors import PackagingOptionError
+
+
+class InstallLibTestCase(support.TempdirManager,
+ support.LoggingCatcher,
+ support.EnvironRestorer,
+ unittest.TestCase):
+
+ restore_environ = ['PYTHONPATH']
+
+ def test_finalize_options(self):
+ dist = self.create_dist()[1]
+ cmd = install_lib(dist)
+
+ cmd.finalize_options()
+ self.assertTrue(cmd.compile)
+ self.assertEqual(cmd.optimize, 0)
+
+ # optimize must be 0, 1, or 2
+ cmd.optimize = 'foo'
+ self.assertRaises(PackagingOptionError, cmd.finalize_options)
+ cmd.optimize = '4'
+ self.assertRaises(PackagingOptionError, cmd.finalize_options)
+
+ cmd.optimize = '2'
+ cmd.finalize_options()
+ self.assertEqual(cmd.optimize, 2)
+
+ def test_byte_compile(self):
+ project_dir, dist = self.create_dist()
+ os.chdir(project_dir)
+ cmd = install_lib(dist)
+ cmd.compile = True
+ cmd.optimize = 1
+
+ f = os.path.join(project_dir, 'foo.py')
+ self.write_file(f, '# python file')
+ cmd.byte_compile([f])
+ pyc_file = imp.cache_from_source('foo.py', True)
+ pyo_file = imp.cache_from_source('foo.py', False)
+ self.assertTrue(os.path.exists(pyc_file))
+ self.assertTrue(os.path.exists(pyo_file))
+
+ def test_byte_compile_under_B(self):
+ # make sure byte compilation works under -B (dont_write_bytecode)
+ self.addCleanup(setattr, sys, 'dont_write_bytecode',
+ sys.dont_write_bytecode)
+ sys.dont_write_bytecode = True
+ self.test_byte_compile()
+
+ def test_get_outputs(self):
+ project_dir, dist = self.create_dist()
+ os.chdir(project_dir)
+ os.mkdir('spam')
+ cmd = install_lib(dist)
+
+ # setting up a dist environment
+ cmd.compile = True
+ cmd.optimize = 1
+ cmd.install_dir = self.mkdtemp()
+ f = os.path.join(project_dir, 'spam', '__init__.py')
+ self.write_file(f, '# python package')
+ cmd.distribution.ext_modules = [Extension('foo', ['xxx'])]
+ cmd.distribution.packages = ['spam']
+
+        # make sure the build_lib is set to the temp dir  # XXX what? this
+        # is not needed in the same distutils test and should work without
+        # manual intervention
+ build_dir = os.path.split(project_dir)[0]
+ cmd.get_finalized_command('build_py').build_lib = build_dir
+
+ # get_outputs should return 4 elements: spam/__init__.py, .pyc and
+ # .pyo, foo.import-tag-abiflags.so / foo.pyd
+ outputs = cmd.get_outputs()
+ self.assertEqual(len(outputs), 4, outputs)
+
+ def test_get_inputs(self):
+ project_dir, dist = self.create_dist()
+ os.chdir(project_dir)
+ os.mkdir('spam')
+ cmd = install_lib(dist)
+
+ # setting up a dist environment
+ cmd.compile = True
+ cmd.optimize = 1
+ cmd.install_dir = self.mkdtemp()
+ f = os.path.join(project_dir, 'spam', '__init__.py')
+ self.write_file(f, '# python package')
+ cmd.distribution.ext_modules = [Extension('foo', ['xxx'])]
+ cmd.distribution.packages = ['spam']
+
+ # get_inputs should return 2 elements: spam/__init__.py and
+ # foo.import-tag-abiflags.so / foo.pyd
+ inputs = cmd.get_inputs()
+ self.assertEqual(len(inputs), 2, inputs)
+
+
+def test_suite():
+ return unittest.makeSuite(InstallLibTestCase)
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="test_suite")
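test_byte_compile leans on imp.cache_from_source(), where the boolean picks
the debug (.pyc) or optimized (.pyo) cache path under __pycache__. For
instance, on a CPython of this vintage:

    import imp

    print(imp.cache_from_source('foo.py', True))
    # __pycache__/foo.cpython-XY.pyc
    print(imp.cache_from_source('foo.py', False))
    # __pycache__/foo.cpython-XY.pyo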
diff --git a/Lib/packaging/tests/test_command_install_scripts.py b/Lib/packaging/tests/test_command_install_scripts.py
new file mode 100644
index 0000000..6452a34
--- /dev/null
+++ b/Lib/packaging/tests/test_command_install_scripts.py
@@ -0,0 +1,75 @@
+"""Tests for packaging.command.install_scripts."""
+import os
+
+from packaging.tests import unittest, support
+from packaging.command.install_scripts import install_scripts
+from packaging.dist import Distribution
+
+
+class InstallScriptsTestCase(support.TempdirManager,
+ support.LoggingCatcher,
+ unittest.TestCase):
+
+ def test_default_settings(self):
+ dist = Distribution()
+ dist.command_obj["build"] = support.DummyCommand(
+ build_scripts="/foo/bar")
+ dist.command_obj["install_dist"] = support.DummyCommand(
+ install_scripts="/splat/funk",
+ force=True,
+ skip_build=True,
+ )
+ cmd = install_scripts(dist)
+ self.assertFalse(cmd.force)
+ self.assertFalse(cmd.skip_build)
+ self.assertIs(cmd.build_dir, None)
+ self.assertIs(cmd.install_dir, None)
+
+ cmd.finalize_options()
+
+ self.assertTrue(cmd.force)
+ self.assertTrue(cmd.skip_build)
+ self.assertEqual(cmd.build_dir, "/foo/bar")
+ self.assertEqual(cmd.install_dir, "/splat/funk")
+
+ def test_installation(self):
+ source = self.mkdtemp()
+ expected = []
+
+ def write_script(name, text):
+ expected.append(name)
+ with open(os.path.join(source, name), "w") as f:
+ f.write(text)
+
+ write_script("script1.py", ("#! /usr/bin/env python2.3\n"
+ "# bogus script w/ Python sh-bang\n"
+ "pass\n"))
+ write_script("script2.py", ("#!/usr/bin/python\n"
+ "# bogus script w/ Python sh-bang\n"
+ "pass\n"))
+ write_script("shell.sh", ("#!/bin/sh\n"
+ "# bogus shell script w/ sh-bang\n"
+ "exit 0\n"))
+
+ target = self.mkdtemp()
+ dist = Distribution()
+ dist.command_obj["build"] = support.DummyCommand(build_scripts=source)
+ dist.command_obj["install_dist"] = support.DummyCommand(
+ install_scripts=target,
+ force=True,
+ skip_build=True,
+ )
+ cmd = install_scripts(dist)
+ cmd.finalize_options()
+ cmd.run()
+
+ installed = os.listdir(target)
+ for name in expected:
+ self.assertIn(name, installed)
+
+
+def test_suite():
+ return unittest.makeSuite(InstallScriptsTestCase)
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="test_suite")
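test_default_settings exercises command option inheritance: options left
unset on install_scripts are filled from the finalized build and install_dist
commands during finalize_options. A toy version of that lookup, with NS and
inherit() as stand-ins for the real command machinery:

    class NS:
        """Bare attribute holder standing in for a finalized command."""
        def __init__(self, **kw):
            self.__dict__.update(kw)

    def inherit(child, parent, pairs):
        # copy parent options into any child option still unset (None)
        for child_attr, parent_attr in pairs:
            if getattr(child, child_attr) is None:
                setattr(child, child_attr, getattr(parent, parent_attr))

    build = NS(build_scripts='/foo/bar')
    cmd = NS(build_dir=None)
    inherit(cmd, build, [('build_dir', 'build_scripts')])
    print(cmd.build_dir)  # /foo/bar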
diff --git a/Lib/packaging/tests/test_command_register.py b/Lib/packaging/tests/test_command_register.py
new file mode 100644
index 0000000..07fad89
--- /dev/null
+++ b/Lib/packaging/tests/test_command_register.py
@@ -0,0 +1,260 @@
+"""Tests for packaging.command.register."""
+import os
+import getpass
+import urllib.request
+import urllib.error
+import urllib.parse
+
+try:
+ import docutils
+ DOCUTILS_SUPPORT = True
+except ImportError:
+ DOCUTILS_SUPPORT = False
+
+from packaging.tests import unittest, support
+from packaging.tests.support import Inputs
+from packaging.command import register as register_module
+from packaging.command.register import register
+from packaging.errors import PackagingSetupError
+
+
+PYPIRC_NOPASSWORD = """\
+[distutils]
+
+index-servers =
+ server1
+
+[server1]
+username:me
+"""
+
+WANTED_PYPIRC = """\
+[distutils]
+index-servers =
+ pypi
+
+[pypi]
+username:tarek
+password:password
+"""
+
+
+class FakeOpener:
+ """Fakes a PyPI server"""
+ def __init__(self):
+ self.reqs = []
+
+ def __call__(self, *args):
+ return self
+
+ def open(self, req):
+ self.reqs.append(req)
+ return self
+
+ def read(self):
+ return 'xxx'
+
+
+class RegisterTestCase(support.TempdirManager,
+ support.EnvironRestorer,
+ support.LoggingCatcher,
+ unittest.TestCase):
+
+ restore_environ = ['HOME']
+
+ def setUp(self):
+ super(RegisterTestCase, self).setUp()
+ self.tmp_dir = self.mkdtemp()
+ self.rc = os.path.join(self.tmp_dir, '.pypirc')
+ os.environ['HOME'] = self.tmp_dir
+
+ # patching the password prompt
+ self._old_getpass = getpass.getpass
+
+ def _getpass(prompt):
+ return 'password'
+
+ getpass.getpass = _getpass
+ self.old_opener = urllib.request.build_opener
+ self.conn = urllib.request.build_opener = FakeOpener()
+
+ def tearDown(self):
+ getpass.getpass = self._old_getpass
+ urllib.request.build_opener = self.old_opener
+ if hasattr(register_module, 'input'):
+ del register_module.input
+ super(RegisterTestCase, self).tearDown()
+
+ def _get_cmd(self, metadata=None):
+ if metadata is None:
+ metadata = {'home_page': 'xxx', 'author': 'xxx',
+ 'author_email': 'xxx',
+ 'name': 'xxx', 'version': 'xxx'}
+ pkg_info, dist = self.create_dist(**metadata)
+ return register(dist)
+
+ def test_create_pypirc(self):
+ # this test makes sure a .pypirc file
+ # is created when requested.
+
+ # let's create a register instance
+ cmd = self._get_cmd()
+
+ # we shouldn't have a .pypirc file yet
+ self.assertFalse(os.path.exists(self.rc))
+
+ # patching input and getpass.getpass
+ # so register gets happy
+        # Here's what we are faking:
+ # use your existing login (choice 1.)
+ # Username : 'tarek'
+ # Password : 'password'
+ # Save your login (y/N)? : 'y'
+ inputs = Inputs('1', 'tarek', 'y')
+ register_module.input = inputs
+ cmd.ensure_finalized()
+ cmd.run()
+
+ # we should have a brand new .pypirc file
+ self.assertTrue(os.path.exists(self.rc))
+
+ # with the content similar to WANTED_PYPIRC
+ with open(self.rc) as fp:
+ content = fp.read()
+ self.assertEqual(content, WANTED_PYPIRC)
+
+ # now let's make sure the .pypirc file generated
+        # really works: we shouldn't be asked anything
+ # if we run the command again
+ def _no_way(prompt=''):
+ raise AssertionError(prompt)
+
+ register_module.input = _no_way
+ cmd.show_response = True
+ cmd.finalized = False
+ cmd.ensure_finalized()
+ cmd.run()
+
+        # let's see what the server received: we should
+ # have 2 similar requests
+ self.assertEqual(len(self.conn.reqs), 2)
+ req1 = dict(self.conn.reqs[0].headers)
+ req2 = dict(self.conn.reqs[1].headers)
+ self.assertEqual(req2['Content-length'], req1['Content-length'])
+ self.assertIn(b'xxx', self.conn.reqs[1].data)
+
+ def test_password_not_in_file(self):
+
+ self.write_file(self.rc, PYPIRC_NOPASSWORD)
+ cmd = self._get_cmd()
+ cmd.finalize_options()
+ cmd._set_config()
+ cmd.send_metadata()
+
+ # dist.password should be set
+ # therefore used afterwards by other commands
+ self.assertEqual(cmd.distribution.password, 'password')
+
+ def test_registration(self):
+ # this test runs choice 2
+ cmd = self._get_cmd()
+ inputs = Inputs('2', 'tarek', 'tarek@ziade.org')
+ register_module.input = inputs
+ # let's run the command
+ # FIXME does this send a real request? use a mock server
+ cmd.ensure_finalized()
+ cmd.run()
+
+        # we should have sent a request
+ self.assertEqual(len(self.conn.reqs), 1)
+ req = self.conn.reqs[0]
+ headers = dict(req.headers)
+ self.assertEqual(headers['Content-length'], '628')
+ self.assertIn(b'tarek', req.data)
+
+ def test_password_reset(self):
+ # this test runs choice 3
+ cmd = self._get_cmd()
+ inputs = Inputs('3', 'tarek@ziade.org')
+ register_module.input = inputs
+ cmd.ensure_finalized()
+ cmd.run()
+
+        # we should have sent a request
+ self.assertEqual(len(self.conn.reqs), 1)
+ req = self.conn.reqs[0]
+ headers = dict(req.headers)
+ self.assertEqual(headers['Content-length'], '298')
+ self.assertIn(b'tarek', req.data)
+
+ @unittest.skipUnless(DOCUTILS_SUPPORT, 'needs docutils')
+ def test_strict(self):
+        # testing the strict option: when on, the register command stops if
+        # the metadata is incomplete or if the description contains bad reST
+
+        # empty metadata  # XXX this is not really empty...
+ cmd = self._get_cmd({'name': 'xxx', 'version': 'xxx'})
+ cmd.ensure_finalized()
+ cmd.strict = True
+ inputs = Inputs('1', 'tarek', 'y')
+ register_module.input = inputs
+ self.assertRaises(PackagingSetupError, cmd.run)
+
+ # metadata is OK but description is broken
+ metadata = {'home_page': 'xxx', 'author': 'xxx',
+ 'author_email': 'éxéxé',
+ 'name': 'xxx', 'version': '4.2',
+ 'description': 'title\n==\n\ntext'}
+
+ cmd = self._get_cmd(metadata)
+ cmd.ensure_finalized()
+ cmd.strict = True
+ self.assertRaises(PackagingSetupError, cmd.run)
+
+ # now something that works
+ metadata['description'] = 'title\n=====\n\ntext'
+ cmd = self._get_cmd(metadata)
+ cmd.ensure_finalized()
+ cmd.strict = True
+ inputs = Inputs('1', 'tarek', 'y')
+ register_module.input = inputs
+ cmd.ensure_finalized()
+ cmd.run()
+
+        # strict is off by default
+ cmd = self._get_cmd()
+ cmd.ensure_finalized()
+ inputs = Inputs('1', 'tarek', 'y')
+ register_module.input = inputs
+ cmd.ensure_finalized()
+ cmd.run()
+
+ # and finally a Unicode test (bug #12114)
+ metadata = {'home_page': 'xxx', 'author': '\u00c9ric',
+ 'author_email': 'xxx', 'name': 'xxx',
+ 'version': 'xxx',
+ 'summary': 'Something about esszet \u00df',
+ 'description': 'More things about esszet \u00df'}
+
+ cmd = self._get_cmd(metadata)
+ cmd.ensure_finalized()
+ cmd.strict = True
+ inputs = Inputs('1', 'tarek', 'y')
+ register_module.input = inputs
+ cmd.ensure_finalized()
+ cmd.run()
+
+ def test_register_pep345(self):
+ cmd = self._get_cmd({})
+ cmd.ensure_finalized()
+ cmd.distribution.metadata['Requires-Dist'] = ['lxml']
+ data = cmd.build_post_data('submit')
+ self.assertEqual(data['metadata_version'], '1.2')
+ self.assertEqual(data['requires_dist'], ['lxml'])
+
+
+def test_suite():
+ return unittest.makeSuite(RegisterTestCase)
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="test_suite")
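The .pypirc fixtures above are plain INI files, so reading one back is just
configparser; this sketch mirrors WANTED_PYPIRC and is not the actual
packaging config reader:

    import configparser

    SAMPLE = '\n'.join([
        '[distutils]',
        'index-servers =',
        '    pypi',
        '',
        '[pypi]',
        'username:tarek',
        'password:password',
    ])

    parser = configparser.RawConfigParser()
    parser.read_string(SAMPLE)
    print(parser.get('pypi', 'username'))  # tarek
    print(parser.get('pypi', 'password'))  # password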
diff --git a/Lib/packaging/tests/test_command_sdist.py b/Lib/packaging/tests/test_command_sdist.py
new file mode 100644
index 0000000..d974718
--- /dev/null
+++ b/Lib/packaging/tests/test_command_sdist.py
@@ -0,0 +1,394 @@
+"""Tests for packaging.command.sdist."""
+import os
+import tarfile
+import zipfile
+
+try:
+ import grp
+ import pwd
+ UID_GID_SUPPORT = True
+except ImportError:
+ UID_GID_SUPPORT = False
+
+from shutil import get_archive_formats
+from os.path import join
+from packaging.dist import Distribution
+from packaging.util import find_executable
+from packaging.errors import PackagingOptionError
+from packaging.command.sdist import sdist, show_formats
+
+from test.support import captured_stdout
+from packaging.tests import support, unittest
+from packaging.tests.support import requires_zlib
+
+
+MANIFEST = """\
+# file GENERATED by packaging, do NOT edit
+inroot.txt
+setup.cfg
+data%(sep)sdata.dt
+scripts%(sep)sscript.py
+some%(sep)sfile.txt
+some%(sep)sother_file.txt
+somecode%(sep)s__init__.py
+somecode%(sep)sdoc.dat
+somecode%(sep)sdoc.txt
+"""
+
+
+def builder(dist, filelist):
+ filelist.append('bah')
+
+
+class SDistTestCase(support.TempdirManager,
+ support.LoggingCatcher,
+ support.EnvironRestorer,
+ unittest.TestCase):
+
+ restore_environ = ['HOME']
+
+ def setUp(self):
+ super(SDistTestCase, self).setUp()
+ self.tmp_dir = self.mkdtemp()
+ os.environ['HOME'] = self.tmp_dir
+ # setting up an environment
+ self.old_path = os.getcwd()
+ os.mkdir(join(self.tmp_dir, 'somecode'))
+ os.mkdir(join(self.tmp_dir, 'dist'))
+ # a package, and a README
+ self.write_file((self.tmp_dir, 'README'), 'xxx')
+ self.write_file((self.tmp_dir, 'somecode', '__init__.py'), '#')
+ os.chdir(self.tmp_dir)
+
+ def tearDown(self):
+ # back to normal
+ os.chdir(self.old_path)
+ super(SDistTestCase, self).tearDown()
+
+ def get_cmd(self, metadata=None):
+ """Returns a cmd"""
+ if metadata is None:
+ metadata = {'name': 'fake', 'version': '1.0',
+ 'home_page': 'xxx', 'author': 'xxx',
+ 'author_email': 'xxx'}
+ dist = Distribution(metadata)
+ dist.packages = ['somecode']
+ cmd = sdist(dist)
+ cmd.dist_dir = 'dist'
+ return dist, cmd
+
+ @requires_zlib
+ def test_prune_file_list(self):
+ # this test creates a package with some vcs dirs in it
+        # and launches sdist to make sure they get pruned
+ # on all systems
+
+ # creating VCS directories with some files in them
+ os.mkdir(join(self.tmp_dir, 'somecode', '.svn'))
+ self.write_file((self.tmp_dir, 'somecode', '.svn', 'ok.py'), 'xxx')
+
+ os.mkdir(join(self.tmp_dir, 'somecode', '.hg'))
+ self.write_file((self.tmp_dir, 'somecode', '.hg',
+ 'ok'), 'xxx')
+
+ os.mkdir(join(self.tmp_dir, 'somecode', '.git'))
+ self.write_file((self.tmp_dir, 'somecode', '.git',
+ 'ok'), 'xxx')
+
+ # now building a sdist
+ dist, cmd = self.get_cmd()
+
+ # zip is available universally
+ # (tar might not be installed under win32)
+ cmd.formats = ['zip']
+
+ cmd.ensure_finalized()
+ cmd.run()
+
+ # now let's check what we have
+ dist_folder = join(self.tmp_dir, 'dist')
+ files = os.listdir(dist_folder)
+ self.assertEqual(files, ['fake-1.0.zip'])
+
+ with zipfile.ZipFile(join(dist_folder, 'fake-1.0.zip')) as zip_file:
+ content = zip_file.namelist()
+
+ # making sure everything has been pruned correctly
+ self.assertEqual(len(content), 2)
+
+ @requires_zlib
+ @unittest.skipIf(find_executable('tar') is None or
+ find_executable('gzip') is None,
+ 'requires tar and gzip programs')
+ def test_make_distribution(self):
+ # building a sdist
+ dist, cmd = self.get_cmd()
+
+ # creating a gztar then a tar
+ cmd.formats = ['gztar', 'tar']
+ cmd.ensure_finalized()
+ cmd.run()
+
+ # making sure we have two files
+ dist_folder = join(self.tmp_dir, 'dist')
+ result = sorted(os.listdir(dist_folder))
+ self.assertEqual(result, ['fake-1.0.tar', 'fake-1.0.tar.gz'])
+
+ os.remove(join(dist_folder, 'fake-1.0.tar'))
+ os.remove(join(dist_folder, 'fake-1.0.tar.gz'))
+
+ # now trying a tar then a gztar
+ cmd.formats = ['tar', 'gztar']
+ cmd.finalized = False
+ cmd.ensure_finalized()
+ cmd.run()
+
+ result = sorted(os.listdir(dist_folder))
+ self.assertEqual(result, ['fake-1.0.tar', 'fake-1.0.tar.gz'])
+
+ @requires_zlib
+ def test_add_defaults(self):
+
+ # http://bugs.python.org/issue2279
+
+        # add_defaults should also include
+        # data_files and package_data
+ dist, cmd = self.get_cmd()
+
+ # filling data_files by pointing files
+ # in package_data
+ dist.package_data = {'': ['*.cfg', '*.dat'],
+ 'somecode': ['*.txt']}
+ self.write_file((self.tmp_dir, 'setup.cfg'), '#')
+ self.write_file((self.tmp_dir, 'somecode', 'doc.txt'), '#')
+ self.write_file((self.tmp_dir, 'somecode', 'doc.dat'), '#')
+
+ # adding some data in data_files
+ data_dir = join(self.tmp_dir, 'data')
+ os.mkdir(data_dir)
+ self.write_file((data_dir, 'data.dt'), '#')
+ some_dir = join(self.tmp_dir, 'some')
+ os.mkdir(some_dir)
+ self.write_file((self.tmp_dir, 'inroot.txt'), '#')
+ self.write_file((some_dir, 'file.txt'), '#')
+ self.write_file((some_dir, 'other_file.txt'), '#')
+
+ dist.data_files = {'data/data.dt': '{appdata}/data.dt',
+ 'inroot.txt': '{appdata}/inroot.txt',
+ 'some/file.txt': '{appdata}/file.txt',
+ 'some/other_file.txt': '{appdata}/other_file.txt'}
+
+ # adding a script
+ script_dir = join(self.tmp_dir, 'scripts')
+ os.mkdir(script_dir)
+ self.write_file((script_dir, 'script.py'), '#')
+ dist.scripts = [join('scripts', 'script.py')]
+
+ cmd.formats = ['zip']
+ cmd.use_defaults = True
+
+ cmd.ensure_finalized()
+ cmd.run()
+
+ # now let's check what we have
+ dist_folder = join(self.tmp_dir, 'dist')
+ files = os.listdir(dist_folder)
+ self.assertEqual(files, ['fake-1.0.zip'])
+
+ with zipfile.ZipFile(join(dist_folder, 'fake-1.0.zip')) as zip_file:
+ content = zip_file.namelist()
+
+ # Making sure everything was added. This includes 8 code and data
+ # files in addition to PKG-INFO and setup.cfg
+ self.assertEqual(len(content), 10)
+
+ # Checking the MANIFEST
+ with open(join(self.tmp_dir, 'MANIFEST')) as fp:
+ manifest = fp.read()
+ self.assertEqual(manifest, MANIFEST % {'sep': os.sep})
+
+ @requires_zlib
+ def test_metadata_check_option(self):
+ # testing the `check-metadata` option
+ dist, cmd = self.get_cmd(metadata={'name': 'xxx', 'version': 'xxx'})
+
+ # this should cause the check subcommand to log two warnings:
+ # version is invalid, home-page and author are missing
+ cmd.ensure_finalized()
+ cmd.run()
+ warnings = self.get_logs()
+ check_warnings = [msg for msg in warnings if
+ not msg.startswith('sdist:')]
+ self.assertEqual(len(check_warnings), 2, warnings)
+
+ # trying with a complete set of metadata
+ self.loghandler.flush()
+ dist, cmd = self.get_cmd()
+ cmd.ensure_finalized()
+ cmd.metadata_check = False
+ cmd.run()
+ warnings = self.get_logs()
+ self.assertEqual(len(warnings), 2)
+ self.assertIn('using default file list', warnings[0])
+ self.assertIn("'setup.cfg' file not found", warnings[1])
+
+ def test_show_formats(self):
+ with captured_stdout() as stdout:
+ show_formats()
+ stdout = stdout.getvalue()
+
+ # the output should be a header line + one line per format
+ num_formats = len(get_archive_formats())
+ output = [line for line in stdout.split('\n')
+ if line.strip().startswith('--formats=')]
+ self.assertEqual(len(output), num_formats)
+
+ def test_finalize_options(self):
+ dist, cmd = self.get_cmd()
+ cmd.finalize_options()
+
+ # default options set by finalize
+ self.assertEqual(cmd.manifest, 'MANIFEST')
+ self.assertEqual(cmd.dist_dir, 'dist')
+
+        # formats has to be a string splittable on (' ', ',') or
+        # a list of strings
+ cmd.formats = 1
+ self.assertRaises(PackagingOptionError, cmd.finalize_options)
+ cmd.formats = ['zip']
+ cmd.finalize_options()
+
+ # formats has to be known
+ cmd.formats = 'supazipa'
+ self.assertRaises(PackagingOptionError, cmd.finalize_options)
+
+ @requires_zlib
+ def test_template(self):
+ dist, cmd = self.get_cmd()
+ dist.extra_files = ['include yeah']
+ cmd.ensure_finalized()
+ self.write_file((self.tmp_dir, 'yeah'), 'xxx')
+ cmd.run()
+ with open(cmd.manifest) as f:
+ content = f.read()
+
+ self.assertIn('yeah', content)
+
+ @requires_zlib
+ @unittest.skipUnless(UID_GID_SUPPORT, "requires grp and pwd support")
+ @unittest.skipIf(find_executable('tar') is None or
+ find_executable('gzip') is None,
+ 'requires tar and gzip programs')
+ def test_make_distribution_owner_group(self):
+ # building a sdist
+ dist, cmd = self.get_cmd()
+
+ # creating a gztar and specifying the owner+group
+ cmd.formats = ['gztar']
+ cmd.owner = pwd.getpwuid(0)[0]
+ cmd.group = grp.getgrgid(0)[0]
+ cmd.ensure_finalized()
+ cmd.run()
+
+ # making sure we have the good rights
+ archive_name = join(self.tmp_dir, 'dist', 'fake-1.0.tar.gz')
+ with tarfile.open(archive_name) as archive:
+ for member in archive.getmembers():
+ self.assertEqual(member.uid, 0)
+ self.assertEqual(member.gid, 0)
+
+ # building a sdist again
+ dist, cmd = self.get_cmd()
+
+ # creating a gztar
+ cmd.formats = ['gztar']
+ cmd.ensure_finalized()
+ cmd.run()
+
+ # making sure we have the good rights
+ archive_name = join(self.tmp_dir, 'dist', 'fake-1.0.tar.gz')
+ with tarfile.open(archive_name) as archive:
+
+            # note that we are not testing the group ownership here
+            # because it depends on the platform and on the rights of
+            # the user running the test (see #7408)
+ for member in archive.getmembers():
+ self.assertEqual(member.uid, os.getuid())
+
+ @requires_zlib
+ def test_get_file_list(self):
+ # make sure MANIFEST is recalculated
+ dist, cmd = self.get_cmd()
+ # filling data_files by pointing files in package_data
+ dist.package_data = {'somecode': ['*.txt']}
+ self.write_file((self.tmp_dir, 'somecode', 'doc.txt'), '#')
+ cmd.ensure_finalized()
+ cmd.run()
+
+        # Should produce three lines: one comment and two package files
+        # (somecode/__init__.py and somecode/doc.txt).
+ with open(cmd.manifest) as f:
+ manifest = [line.strip() for line in f.read().split('\n')
+ if line.strip() != '']
+ self.assertEqual(len(manifest), 3)
+
+ # Adding a file
+ self.write_file((self.tmp_dir, 'somecode', 'doc2.txt'), '#')
+
+ # make sure build_py is reinitialized, like a fresh run
+ build_py = dist.get_command_obj('build_py')
+ build_py.finalized = False
+ build_py.ensure_finalized()
+
+ cmd.run()
+
+ with open(cmd.manifest) as f:
+ manifest2 = [line.strip() for line in f.read().split('\n')
+ if line.strip() != '']
+
+ # Do we have the new file in MANIFEST?
+ self.assertEqual(len(manifest2), 4)
+ self.assertIn('doc2.txt', manifest2[-1])
+
+ @requires_zlib
+ def test_manifest_marker(self):
+ # check that autogenerated MANIFESTs have a marker
+ dist, cmd = self.get_cmd()
+ cmd.ensure_finalized()
+ cmd.run()
+
+ with open(cmd.manifest) as f:
+ manifest = [line.strip() for line in f.read().split('\n')
+ if line.strip() != '']
+
+ self.assertEqual(manifest[0],
+ '# file GENERATED by packaging, do NOT edit')
+
+ @requires_zlib
+ def test_manual_manifest(self):
+ # check that a MANIFEST without a marker is left alone
+ dist, cmd = self.get_cmd()
+ cmd.ensure_finalized()
+ self.write_file((self.tmp_dir, cmd.manifest), 'README.manual')
+ cmd.run()
+
+ with open(cmd.manifest) as f:
+ manifest = [line.strip() for line in f.read().split('\n')
+ if line.strip() != '']
+
+ self.assertEqual(manifest, ['README.manual'])
+
+ @requires_zlib
+ def test_manifest_builder(self):
+ dist, cmd = self.get_cmd()
+ cmd.manifest_builders = 'packaging.tests.test_command_sdist.builder'
+ cmd.ensure_finalized()
+ cmd.run()
+ self.assertIn('bah', cmd.filelist.files)
+
+
+def test_suite():
+ return unittest.makeSuite(SDistTestCase)
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="test_suite")
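test_prune_file_list asserts that files under version-control directories
never reach the archive. The idea reduced to a standalone filter; prune() is
illustrative, the real command uses its file-list machinery:

    VCS_DIRS = ('.svn', '.hg', '.git', 'CVS')

    def prune(paths):
        # drop any path with a VCS control directory as a component
        return [p for p in paths
                if not any(part in VCS_DIRS for part in p.split('/'))]

    files = ['somecode/__init__.py', 'somecode/.svn/ok.py',
             'somecode/.hg/ok', 'somecode/.git/ok', 'README']
    print(prune(files))  # ['somecode/__init__.py', 'README']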
diff --git a/Lib/packaging/tests/test_command_test.py b/Lib/packaging/tests/test_command_test.py
new file mode 100644
index 0000000..7aa1f79
--- /dev/null
+++ b/Lib/packaging/tests/test_command_test.py
@@ -0,0 +1,224 @@
+import os
+import re
+import sys
+import shutil
+import unittest as ut1
+import packaging.database
+
+from os.path import join
+from operator import getitem, setitem, delitem
+from packaging.command.build import build
+from packaging.tests import unittest
+from packaging.tests.support import (TempdirManager, EnvironRestorer,
+ LoggingCatcher)
+from packaging.command.test import test
+from packaging.command import set_command
+from packaging.dist import Distribution
+
+
+EXPECTED_OUTPUT_RE = r'''FAIL: test_blah \(myowntestmodule.SomeTest\)
+----------------------------------------------------------------------
+Traceback \(most recent call last\):
+ File ".+/myowntestmodule.py", line \d+, in test_blah
+ self.fail\("horribly"\)
+AssertionError: horribly
+'''
+
+here = os.path.dirname(os.path.abspath(__file__))
+
+
+class MockBuildCmd(build):
+ build_lib = "mock build lib"
+ command_name = 'build'
+ plat_name = 'whatever'
+
+ def initialize_options(self):
+ pass
+
+ def finalize_options(self):
+ pass
+
+ def run(self):
+ self._record.append("build has run")
+
+
+class TestTest(TempdirManager,
+ EnvironRestorer,
+ LoggingCatcher,
+ unittest.TestCase):
+
+ restore_environ = ['PYTHONPATH']
+
+ def setUp(self):
+ super(TestTest, self).setUp()
+ self.addCleanup(packaging.database.clear_cache)
+ new_pythonpath = os.path.dirname(os.path.dirname(here))
+ pythonpath = os.environ.get('PYTHONPATH')
+ if pythonpath is not None:
+ new_pythonpath = os.pathsep.join((new_pythonpath, pythonpath))
+ os.environ['PYTHONPATH'] = new_pythonpath
+
+ def assert_re_match(self, pattern, string):
+ def quote(s):
+ lines = ['## ' + line for line in s.split('\n')]
+ sep = ["#" * 60]
+ return [''] + sep + lines + sep
+ msg = quote(pattern) + ["didn't match"] + quote(string)
+ msg = "\n".join(msg)
+ if not re.search(pattern, string):
+ self.fail(msg)
+
+ def prepare_dist(self, dist_name):
+ pkg_dir = join(os.path.dirname(__file__), "dists", dist_name)
+ temp_pkg_dir = join(self.mkdtemp(), dist_name)
+ shutil.copytree(pkg_dir, temp_pkg_dir)
+ return temp_pkg_dir
+
+ def safely_replace(self, obj, attr,
+ new_val=None, delete=False, dictionary=False):
+ """Replace a object's attribute returning to its original state at the
+ end of the test run. Creates the attribute if not present before
+ (deleting afterwards). When delete=True, makes sure the value is del'd
+ for the test run. If dictionary is set to True, operates of its items
+ rather than attributes."""
+ if dictionary:
+ _setattr, _getattr, _delattr = setitem, getitem, delitem
+
+ def _hasattr(_dict, value):
+ return value in _dict
+ else:
+ _setattr, _getattr, _delattr, _hasattr = (setattr, getattr,
+ delattr, hasattr)
+
+ orig_has_attr = _hasattr(obj, attr)
+ if orig_has_attr:
+ orig_val = _getattr(obj, attr)
+
+ if delete is False:
+ _setattr(obj, attr, new_val)
+ elif orig_has_attr:
+ _delattr(obj, attr)
+
+ def do_cleanup():
+ if orig_has_attr:
+ _setattr(obj, attr, orig_val)
+ elif _hasattr(obj, attr):
+ _delattr(obj, attr)
+
+ self.addCleanup(do_cleanup)
+
+ def test_runs_unittest(self):
+ module_name, a_module = self.prepare_a_module()
+ record = []
+ a_module.recorder = lambda *args: record.append("suite")
+
+ class MockTextTestRunner:
+ def __init__(*_, **__):
+ pass
+
+ def run(_self, suite):
+ record.append("run")
+
+ self.safely_replace(ut1, "TextTestRunner", MockTextTestRunner)
+
+ dist = Distribution()
+ cmd = test(dist)
+ cmd.suite = "%s.recorder" % module_name
+ cmd.run()
+ self.assertEqual(record, ["suite", "run"])
+
+ def test_builds_before_running_tests(self):
+ self.addCleanup(set_command, 'packaging.command.build.build')
+ set_command('packaging.tests.test_command_test.MockBuildCmd')
+
+ dist = Distribution()
+ dist.get_command_obj('build')._record = record = []
+ cmd = test(dist)
+ cmd.runner = self.prepare_named_function(lambda: None)
+ cmd.ensure_finalized()
+ cmd.run()
+ self.assertEqual(['build has run'], record)
+
+ @unittest.skip('needs to be written')
+ def test_works_with_2to3(self):
+ pass
+
+ def test_checks_requires(self):
+ dist = Distribution()
+ cmd = test(dist)
+ phony_project = 'ohno_ohno-impossible_1234-name_stop-that!'
+ cmd.tests_require = [phony_project]
+ cmd.ensure_finalized()
+ logs = self.get_logs()
+ self.assertIn(phony_project, logs[-1])
+
+ def prepare_a_module(self):
+ tmp_dir = self.mkdtemp()
+ sys.path.append(tmp_dir)
+ self.addCleanup(sys.path.remove, tmp_dir)
+
+ self.write_file((tmp_dir, 'packaging_tests_a.py'), '')
+ import packaging_tests_a as a_module
+ return "packaging_tests_a", a_module
+
+ def prepare_named_function(self, func):
+ module_name, a_module = self.prepare_a_module()
+ a_module.recorder = func
+ return "%s.recorder" % module_name
+
+ def test_custom_runner(self):
+ dist = Distribution()
+ cmd = test(dist)
+ record = []
+ cmd.runner = self.prepare_named_function(
+ lambda: record.append("runner called"))
+ cmd.ensure_finalized()
+ cmd.run()
+ self.assertEqual(["runner called"], record)
+
+ def prepare_mock_ut2(self):
+ class MockUTClass:
+ def __init__(*_, **__):
+ pass
+
+ def discover(self):
+ pass
+
+ def run(self, _):
+ pass
+
+ class MockUTModule:
+ TestLoader = MockUTClass
+ TextTestRunner = MockUTClass
+
+ mock_ut2 = MockUTModule()
+ self.safely_replace(sys.modules, "unittest2",
+ mock_ut2, dictionary=True)
+ return mock_ut2
+
+ def test_gets_unittest_discovery(self):
+ mock_ut2 = self.prepare_mock_ut2()
+ dist = Distribution()
+ cmd = test(dist)
+ self.safely_replace(ut1.TestLoader, "discover", lambda: None)
+ self.assertEqual(cmd.get_ut_with_discovery(), ut1)
+
+ del ut1.TestLoader.discover
+ self.assertEqual(cmd.get_ut_with_discovery(), mock_ut2)
+
+ def test_calls_discover(self):
+ self.safely_replace(ut1.TestLoader, "discover", delete=True)
+ mock_ut2 = self.prepare_mock_ut2()
+ record = []
+ mock_ut2.TestLoader.discover = lambda self, path: record.append(path)
+ dist = Distribution()
+ cmd = test(dist)
+ cmd.run()
+ self.assertEqual([os.curdir], record)
+
+
+def test_suite():
+ return unittest.makeSuite(TestTest)
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="test_suite")
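get_ut_with_discovery(), exercised by the last two tests, reduces to a
fallback: use the stdlib unittest when its TestLoader has discover(),
otherwise the unittest2 backport. A sketch under that assumption:

    import unittest

    def get_ut_with_discovery():
        if hasattr(unittest.TestLoader, 'discover'):
            return unittest
        import unittest2  # third-party backport providing discovery
        return unittest2

    print(get_ut_with_discovery().__name__)  # 'unittest' on 2.7/3.2+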
diff --git a/Lib/packaging/tests/test_command_upload.py b/Lib/packaging/tests/test_command_upload.py
new file mode 100644
index 0000000..1f68c1d
--- /dev/null
+++ b/Lib/packaging/tests/test_command_upload.py
@@ -0,0 +1,159 @@
+"""Tests for packaging.command.upload."""
+import os
+
+from packaging.command.upload import upload
+from packaging.dist import Distribution
+from packaging.errors import PackagingOptionError
+
+from packaging.tests import unittest, support
+try:
+ import threading
+ from packaging.tests.pypi_server import PyPIServerTestCase
+except ImportError:
+ threading = None
+ PyPIServerTestCase = unittest.TestCase
+
+
+PYPIRC_NOPASSWORD = """\
+[distutils]
+
+index-servers =
+ server1
+
+[server1]
+username:me
+"""
+
+PYPIRC = """\
+[distutils]
+
+index-servers =
+ server1
+ server2
+
+[server1]
+username:me
+password:secret
+
+[server2]
+username:meagain
+password: secret
+realm:acme
+repository:http://another.pypi/
+"""
+
+
+@unittest.skipIf(threading is None, 'needs threading')
+class UploadTestCase(support.TempdirManager, support.EnvironRestorer,
+ support.LoggingCatcher, PyPIServerTestCase):
+
+ restore_environ = ['HOME']
+
+ def setUp(self):
+ super(UploadTestCase, self).setUp()
+ self.tmp_dir = self.mkdtemp()
+ self.rc = os.path.join(self.tmp_dir, '.pypirc')
+ os.environ['HOME'] = self.tmp_dir
+
+ def test_finalize_options(self):
+ # new format
+ self.write_file(self.rc, PYPIRC)
+ dist = Distribution()
+ cmd = upload(dist)
+ cmd.finalize_options()
+ for attr, expected in (('username', 'me'), ('password', 'secret'),
+ ('realm', 'pypi'),
+ ('repository', 'http://pypi.python.org/pypi')):
+ self.assertEqual(getattr(cmd, attr), expected)
+
+ def test_finalize_options_unsigned_identity_raises_exception(self):
+ self.write_file(self.rc, PYPIRC)
+ dist = Distribution()
+ cmd = upload(dist)
+ cmd.identity = True
+ cmd.sign = False
+ self.assertRaises(PackagingOptionError, cmd.finalize_options)
+
+ def test_saved_password(self):
+ # file with no password
+ self.write_file(self.rc, PYPIRC_NOPASSWORD)
+
+ # make sure it passes
+ dist = Distribution()
+ cmd = upload(dist)
+ cmd.ensure_finalized()
+ self.assertEqual(cmd.password, None)
+
+ # make sure we get it as well, if another command
+ # initialized it at the dist level
+ dist.password = 'xxx'
+ cmd = upload(dist)
+ cmd.finalize_options()
+ self.assertEqual(cmd.password, 'xxx')
+
+ def test_upload_without_files_raises_exception(self):
+ dist = Distribution()
+ cmd = upload(dist)
+ self.assertRaises(PackagingOptionError, cmd.run)
+
+ def test_upload(self):
+ path = os.path.join(self.tmp_dir, 'xxx')
+ self.write_file(path)
+ command, pyversion, filename = 'xxx', '3.3', path
+ dist_files = [(command, pyversion, filename)]
+
+ # let's run it
+ dist = self.create_dist(dist_files=dist_files, author='dédé')[1]
+ cmd = upload(dist)
+ cmd.ensure_finalized()
+ cmd.repository = self.pypi.full_address
+ cmd.run()
+
+ # what did we send?
+ handler, request_data = self.pypi.requests[-1]
+ headers = handler.headers
+ self.assertIn('dédé'.encode('utf-8'), request_data)
+ self.assertIn(b'xxx', request_data)
+
+ self.assertEqual(int(headers['content-length']), len(request_data))
+ self.assertLess(int(headers['content-length']), 2500)
+ self.assertTrue(headers['content-type'].startswith(
+ 'multipart/form-data'))
+ self.assertEqual(handler.command, 'POST')
+ self.assertNotIn('\n', headers['authorization'])
+
+ def test_upload_docs(self):
+ path = os.path.join(self.tmp_dir, 'xxx')
+ self.write_file(path)
+ command, pyversion, filename = 'xxx', '3.3', path
+ dist_files = [(command, pyversion, filename)]
+ docs_path = os.path.join(self.tmp_dir, "build", "docs")
+ os.makedirs(docs_path)
+ self.write_file((docs_path, "index.html"), "yellow")
+ self.write_file(self.rc, PYPIRC)
+
+ # let's run it
+ dist = self.create_dist(dist_files=dist_files, author='dédé')[1]
+
+ cmd = upload(dist)
+ cmd.get_finalized_command("build").run()
+ cmd.upload_docs = True
+ cmd.ensure_finalized()
+ cmd.repository = self.pypi.full_address
+ os.chdir(self.tmp_dir)
+ cmd.run()
+
+ handler, request_data = self.pypi.requests[-1]
+ action, name, content = request_data.split(
+ "----------------GHSKFJDLGDS7543FJKLFHRE75642756743254"
+ .encode())[1:4]
+
+ self.assertIn(b'name=":action"', action)
+ self.assertIn(b'doc_upload', action)
+
+
+def test_suite():
+ return unittest.makeSuite(UploadTestCase)
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="test_suite")
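These upload tests take apart the request body by splitting on the MIME
boundary the command embeds, yielding one chunk per form field. A toy
demonstration of the same trick; the body literal is hand-built, not captured
traffic:

    BOUNDARY = b'----------------GHSKFJDLGDS7543FJKLFHRE75642756743254'

    body = (b'--' + BOUNDARY +
            b'\r\nContent-Disposition: form-data; name=":action"\r\n\r\n'
            b'doc_upload\r\n--' + BOUNDARY + b'--\r\n')

    chunks = body.split(BOUNDARY)
    print(len(chunks))                 # 3: prefix, the field, the epilogue
    print(b'doc_upload' in chunks[1])  # True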
diff --git a/Lib/packaging/tests/test_command_upload_docs.py b/Lib/packaging/tests/test_command_upload_docs.py
new file mode 100644
index 0000000..803e733
--- /dev/null
+++ b/Lib/packaging/tests/test_command_upload_docs.py
@@ -0,0 +1,186 @@
+"""Tests for packaging.command.upload_docs."""
+import os
+import shutil
+import logging
+import zipfile
+try:
+ import _ssl
+except ImportError:
+ _ssl = None
+
+from packaging.command import upload_docs as upload_docs_mod
+from packaging.command.upload_docs import upload_docs, zip_dir
+from packaging.dist import Distribution
+from packaging.errors import PackagingFileError, PackagingOptionError
+
+from packaging.tests import unittest, support
+try:
+ import threading
+ from packaging.tests.pypi_server import PyPIServerTestCase
+except ImportError:
+ threading = None
+ PyPIServerTestCase = unittest.TestCase
+
+
+PYPIRC = """\
+[distutils]
+index-servers = server1
+
+[server1]
+repository = %s
+username = real_slim_shady
+password = long_island
+"""
+
+
+@unittest.skipIf(threading is None, "Needs threading")
+class UploadDocsTestCase(support.TempdirManager,
+ support.EnvironRestorer,
+ support.LoggingCatcher,
+ PyPIServerTestCase):
+
+ restore_environ = ['HOME']
+
+ def setUp(self):
+ super(UploadDocsTestCase, self).setUp()
+ self.tmp_dir = self.mkdtemp()
+ self.rc = os.path.join(self.tmp_dir, '.pypirc')
+ os.environ['HOME'] = self.tmp_dir
+ self.dist = Distribution()
+ self.dist.metadata['Name'] = "distr-name"
+ self.cmd = upload_docs(self.dist)
+
+ def test_default_uploaddir(self):
+ sandbox = self.mkdtemp()
+ os.chdir(sandbox)
+ os.mkdir("build")
+ self.prepare_sample_dir("build")
+ self.cmd.ensure_finalized()
+ self.assertEqual(self.cmd.upload_dir, os.path.join("build", "docs"))
+
+ def test_default_uploaddir_looks_for_doc_also(self):
+ sandbox = self.mkdtemp()
+ os.chdir(sandbox)
+ os.mkdir("build")
+ self.prepare_sample_dir("build")
+ os.rename(os.path.join("build", "docs"), os.path.join("build", "doc"))
+ self.cmd.ensure_finalized()
+ self.assertEqual(self.cmd.upload_dir, os.path.join("build", "doc"))
+
+ def prepare_sample_dir(self, sample_dir=None):
+ if sample_dir is None:
+ sample_dir = self.mkdtemp()
+ os.mkdir(os.path.join(sample_dir, "docs"))
+ self.write_file((sample_dir, "docs", "index.html"), "Ce mortel ennui")
+ self.write_file((sample_dir, "index.html"), "Oh la la")
+ return sample_dir
+
+ def test_zip_dir(self):
+ source_dir = self.prepare_sample_dir()
+ compressed = zip_dir(source_dir)
+
+ zip_f = zipfile.ZipFile(compressed)
+ self.assertEqual(zip_f.namelist(), ['index.html', 'docs/index.html'])
+
+ def prepare_command(self):
+ self.cmd.upload_dir = self.prepare_sample_dir()
+ self.cmd.ensure_finalized()
+ self.cmd.repository = self.pypi.full_address
+ self.cmd.username = "username"
+ self.cmd.password = "password"
+
+ def test_upload(self):
+ self.prepare_command()
+ self.cmd.run()
+
+ self.assertEqual(len(self.pypi.requests), 1)
+ handler, request_data = self.pypi.requests[-1]
+ self.assertIn(b"content", request_data)
+ self.assertIn("Basic", handler.headers['authorization'])
+ self.assertTrue(handler.headers['content-type']
+ .startswith('multipart/form-data;'))
+
+ action, name, version, content = request_data.split(
+ b'----------------GHSKFJDLGDS7543FJKLFHRE75642756743254')[1:5]
+
+ # check that we picked the right chunks
+ self.assertIn(b'name=":action"', action)
+ self.assertIn(b'name="name"', name)
+ self.assertIn(b'name="version"', version)
+ self.assertIn(b'name="content"', content)
+
+ # check their contents
+ self.assertIn(b'doc_upload', action)
+ self.assertIn(b'distr-name', name)
+ self.assertIn(b'docs/index.html', content)
+ self.assertIn(b'Ce mortel ennui', content)
+
+ @unittest.skipIf(_ssl is None, 'Needs SSL support')
+ def test_https_connection(self):
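+ # swap in a wrapper that records HTTPS use but opens a plain HTTP
+ # connection, since the test server only speaks HTTP; the cleanup
+ # registered below restores the real HTTPSConnection class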
+ self.https_called = False
+ self.addCleanup(
+ setattr, upload_docs_mod.http.client, 'HTTPSConnection',
+ upload_docs_mod.http.client.HTTPSConnection)
+
+ def https_conn_wrapper(*args):
+ self.https_called = True
+ # the testing server is http
+ return upload_docs_mod.http.client.HTTPConnection(*args)
+
+ upload_docs_mod.http.client.HTTPSConnection = https_conn_wrapper
+
+ self.prepare_command()
+ self.cmd.run()
+ self.assertFalse(self.https_called)
+
+ self.cmd.repository = self.cmd.repository.replace("http", "https")
+ self.cmd.run()
+ self.assertTrue(self.https_called)
+
+ def test_handling_response(self):
+ self.pypi.default_response_status = '403 Forbidden'
+ self.prepare_command()
+ self.cmd.run()
+ errors = self.get_logs(logging.ERROR)
+ self.assertEqual(len(errors), 1)
+ self.assertIn('Upload failed (403): Forbidden', errors[0])
+
+ self.pypi.default_response_status = '301 Moved Permanently'
+ self.pypi.default_response_headers.append(
+ ("Location", "brand_new_location"))
+ self.cmd.run()
+ lastlog = self.get_logs(logging.INFO)[-1]
+ self.assertIn('brand_new_location', lastlog)
+
+ def test_reads_pypirc_data(self):
+ self.write_file(self.rc, PYPIRC % self.pypi.full_address)
+ self.cmd.repository = self.pypi.full_address
+ self.cmd.upload_dir = self.prepare_sample_dir()
+ self.cmd.ensure_finalized()
+ self.assertEqual(self.cmd.username, "real_slim_shady")
+ self.assertEqual(self.cmd.password, "long_island")
+
+ def test_checks_index_html_presence(self):
+ self.cmd.upload_dir = self.prepare_sample_dir()
+ os.remove(os.path.join(self.cmd.upload_dir, "index.html"))
+ self.assertRaises(PackagingFileError, self.cmd.ensure_finalized)
+
+ def test_checks_upload_dir(self):
+ self.cmd.upload_dir = self.prepare_sample_dir()
+ shutil.rmtree(os.path.join(self.cmd.upload_dir))
+ self.assertRaises(PackagingOptionError, self.cmd.ensure_finalized)
+
+ def test_show_response(self):
+ self.prepare_command()
+ self.cmd.show_response = True
+ self.cmd.run()
+ record = self.get_logs(logging.INFO)[-1]
+ self.assertTrue(record, "should report the response")
+ self.assertIn(self.pypi.default_response_data, record)
+
+
+def test_suite():
+ return unittest.makeSuite(UploadDocsTestCase)
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_compiler.py b/Lib/packaging/tests/test_compiler.py
new file mode 100644
index 0000000..2c620cb
--- /dev/null
+++ b/Lib/packaging/tests/test_compiler.py
@@ -0,0 +1,66 @@
+"""Tests for distutils.compiler."""
+import os
+
+from packaging.compiler import (get_default_compiler, customize_compiler,
+ gen_lib_options)
+from packaging.tests import unittest, support
+
+
+class FakeCompiler:
+
+ name = 'fake'
+ description = 'Fake'
+
+ def library_dir_option(self, dir):
+ return "-L" + dir
+
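+ # a list return value is flattened into the options by
+ # gen_lib_options (see test_gen_lib_options below)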
+ def runtime_library_dir_option(self, dir):
+ return ["-cool", "-R" + dir]
+
+ def find_library_file(self, dirs, lib, debug=False):
+ return 'found'
+
+ def library_option(self, lib):
+ return "-l" + lib
+
+
+class CompilerTestCase(support.EnvironRestorer, unittest.TestCase):
+
+ restore_environ = ['AR', 'ARFLAGS']
+
+ @unittest.skipUnless(get_default_compiler() == 'unix',
+ 'irrelevant if default compiler is not unix')
+ def test_customize_compiler(self):
+
+ os.environ['AR'] = 'my_ar'
+ os.environ['ARFLAGS'] = '-arflags'
+
+ # make sure AR gets caught
+ class compiler:
+ name = 'unix'
+
+ def set_executables(self, **kw):
+ self.exes = kw
+
+ comp = compiler()
+ customize_compiler(comp)
+ self.assertEqual(comp.exes['archiver'], 'my_ar -arflags')
+
+ def test_gen_lib_options(self):
+ compiler = FakeCompiler()
+ libdirs = ['lib1', 'lib2']
+ runlibdirs = ['runlib1']
+ libs = [os.path.join('dir', 'name'), 'name2']
+
+ opts = gen_lib_options(compiler, libdirs, runlibdirs, libs)
+ wanted = ['-Llib1', '-Llib2', '-cool', '-Rrunlib1', 'found',
+ '-lname2']
+ self.assertEqual(opts, wanted)
+
+
+def test_suite():
+ return unittest.makeSuite(CompilerTestCase)
+
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_config.py b/Lib/packaging/tests/test_config.py
new file mode 100644
index 0000000..0d76b29
--- /dev/null
+++ b/Lib/packaging/tests/test_config.py
@@ -0,0 +1,519 @@
+"""Tests for packaging.config."""
+import os
+import sys
+
+from packaging import command
+from packaging.dist import Distribution
+from packaging.errors import PackagingFileError, PackagingOptionError
+from packaging.compiler import new_compiler, _COMPILERS
+from packaging.command.sdist import sdist
+
+from packaging.tests import unittest, support
+from packaging.tests.support import requires_zlib
+
+
+SETUP_CFG = """
+[metadata]
+name = RestingParrot
+version = 0.6.4
+author = Carl Meyer
+author_email = carl@oddbird.net
+maintainer = Éric Araujo
+maintainer_email = merwok@netwok.org
+summary = A sample project demonstrating packaging
+description-file = %(description-file)s
+keywords = packaging, sample project
+
+classifier =
+ Development Status :: 4 - Beta
+ Environment :: Console (Text Based)
+ Environment :: X11 Applications :: GTK; python_version < '3'
+ License :: OSI Approved :: MIT License
+ Programming Language :: Python
+ Programming Language :: Python :: 2
+ Programming Language :: Python :: 3
+
+requires_python = >=2.4, <3.2
+
+requires_dist =
+ PetShoppe
+ MichaelPalin (> 1.1)
+ pywin32; sys.platform == 'win32'
+ pysqlite2; python_version < '2.5'
+ inotify (0.0.1); sys.platform == 'linux2'
+
+requires_external = libxml2
+
+provides_dist = packaging-sample-project (0.2)
+ unittest2-sample-project
+
+project_url =
+ Main repository, http://bitbucket.org/carljm/sample-distutils2-project
+ Fork in progress, http://bitbucket.org/Merwok/sample-distutils2-project
+
+[files]
+packages_root = src
+
+packages = one
+ two
+ three
+
+modules = haven
+
+scripts =
+ script1.py
+ scripts/find-coconuts
+ bin/taunt
+
+package_data =
+ cheese = data/templates/* doc/*
+ doc/images/*.png
+
+
+extra_files = %(extra-files)s
+
+# Replaces MANIFEST.in
+# FIXME no, it's extra_files
+# (but sdist_extra is a better name, should use it)
+sdist_extra =
+ include THANKS HACKING
+ recursive-include examples *.txt *.py
+ prune examples/sample?/build
+
+resources=
+ bm/ {b1,b2}.gif = {icon}
+ Cf*/ *.CFG = {config}/baBar/
+ init_script = {script}/JunGle/
+
+[global]
+commands =
+ packaging.tests.test_config.FooBarBazTest
+
+compilers =
+ packaging.tests.test_config.DCompiler
+
+setup_hooks = %(setup-hooks)s
+
+
+
+[install_dist]
+sub_commands = foo
+"""
+
+SETUP_CFG_PKGDATA_BUGGY_1 = """
+[files]
+package_data = foo.*
+"""
+
+SETUP_CFG_PKGDATA_BUGGY_2 = """
+[files]
+package_data =
+ foo.*
+"""
+
+# Cannot be merged with SETUP_CFG, else the install_dist
+# command will fail when trying to compile C sources
+# TODO use a DummyCommand to mock build_ext
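+# In the option values below, a trailing "-- marker" restricts that
+# line to platforms where the environment marker holds (exercised in
+# test_parse_extensions_in_config).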
+EXT_SETUP_CFG = """
+[files]
+packages = one
+ two
+ parent.undeclared
+
+[extension:one.speed_coconuts]
+sources = c_src/speed_coconuts.c
+extra_link_args = "`gcc -print-file-name=libgcc.a`" -shared
+define_macros = HAVE_CAIRO HAVE_GTK2
+libraries = gecodeint gecodekernel -- sys.platform != 'win32'
+ GecodeInt GecodeKernel -- sys.platform == 'win32'
+
+[extension: two.fast_taunt]
+sources = cxx_src/utils_taunt.cxx
+ cxx_src/python_module.cxx
+include_dirs = /usr/include/gecode
+ /usr/include/blitz
+extra_compile_args = -fPIC -O2
+ -DGECODE_VERSION=$(./gecode_version) -- sys.platform != 'win32'
+ /DGECODE_VERSION=win32 -- sys.platform == 'win32'
+language = cxx
+
+# corner case: if the parent package of an extension is declared but
+# not its grandparent, it's legal
+[extension: parent.undeclared._speed]
+sources = parent/undeclared/_speed.c
+"""
+
+EXT_SETUP_CFG_BUGGY_1 = """
+[extension: realname]
+name = crash_here
+"""
+
+EXT_SETUP_CFG_BUGGY_2 = """
+[files]
+packages = ham
+
+[extension: spam.eggs]
+"""
+
+EXT_SETUP_CFG_BUGGY_3 = """
+[files]
+packages = ok
+ ok.works
+
+[extension: ok.works.breaks._ext]
+"""
+
+HOOKS_MODULE = """
+import logging
+
+logger = logging.getLogger('packaging')
+
+def logging_hook(config):
+ logger.warning('logging_hook called')
+"""
+
+
+class DCompiler:
+ name = 'd'
+ description = 'D Compiler'
+
+ def __init__(self, *args):
+ pass
+
+
+def version_hook(config):
+ config['metadata']['version'] += '.dev1'
+
+
+def first_hook(config):
+ config['files']['modules'] += '\n first'
+
+
+def third_hook(config):
+ config['files']['modules'] += '\n third'
+
+
+class FooBarBazTest:
+
+ def __init__(self, dist):
+ self.distribution = dist
+ self._record = []
+
+ @classmethod
+ def get_command_name(cls):
+ return 'foo'
+
+ def run(self):
+ self._record.append('foo has run')
+
+ def nothing(self):
+ pass
+
+ def get_source_files(self):
+ return []
+
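+ # no-op stand-ins for the methods the command machinery calls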
+ ensure_finalized = finalize_options = initialize_options = nothing
+
+
+class ConfigTestCase(support.TempdirManager,
+ support.EnvironRestorer,
+ support.LoggingCatcher,
+ unittest.TestCase):
+
+ restore_environ = ['PLAT']
+
+ def setUp(self):
+ super(ConfigTestCase, self).setUp()
+ tempdir = self.mkdtemp()
+ self.working_dir = os.getcwd()
+ os.chdir(tempdir)
+ self.tempdir = tempdir
+
+ def write_setup(self, kwargs=None):
+ opts = {'description-file': 'README', 'extra-files': '',
+ 'setup-hooks': 'packaging.tests.test_config.version_hook'}
+ if kwargs:
+ opts.update(kwargs)
+ self.write_file('setup.cfg', SETUP_CFG % opts, encoding='utf-8')
+
+ def get_dist(self):
+ dist = Distribution()
+ dist.parse_config_files()
+ return dist
+
+ def test_config(self):
+ self.write_setup()
+ self.write_file('README', 'yeah')
+ os.mkdir('bm')
+ self.write_file(('bm', 'b1.gif'), '')
+ self.write_file(('bm', 'b2.gif'), '')
+ os.mkdir('Cfg')
+ self.write_file(('Cfg', 'data.CFG'), '')
+ self.write_file('init_script', '')
+
+ # try to load the metadata now
+ dist = self.get_dist()
+
+ # check what was done
+ self.assertEqual(dist.metadata['Author'], 'Carl Meyer')
+ self.assertEqual(dist.metadata['Author-Email'], 'carl@oddbird.net')
+
+ # the hook adds .dev1
+ self.assertEqual(dist.metadata['Version'], '0.6.4.dev1')
+
+ wanted = [
+ 'Development Status :: 4 - Beta',
+ 'Environment :: Console (Text Based)',
+ "Environment :: X11 Applications :: GTK; python_version < '3'",
+ 'License :: OSI Approved :: MIT License',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 3']
+ self.assertEqual(dist.metadata['Classifier'], wanted)
+
+ wanted = ['packaging', 'sample project']
+ self.assertEqual(dist.metadata['Keywords'], wanted)
+
+ self.assertEqual(dist.metadata['Requires-Python'], '>=2.4, <3.2')
+
+ wanted = ['PetShoppe',
+ 'MichaelPalin (> 1.1)',
+ "pywin32; sys.platform == 'win32'",
+ "pysqlite2; python_version < '2.5'",
+ "inotify (0.0.1); sys.platform == 'linux2'"]
+
+ self.assertEqual(dist.metadata['Requires-Dist'], wanted)
+ urls = [('Main repository',
+ 'http://bitbucket.org/carljm/sample-distutils2-project'),
+ ('Fork in progress',
+ 'http://bitbucket.org/Merwok/sample-distutils2-project')]
+ self.assertEqual(dist.metadata['Project-Url'], urls)
+
+ self.assertEqual(dist.packages, ['one', 'two', 'three'])
+ self.assertEqual(dist.py_modules, ['haven'])
+ self.assertEqual(dist.package_data,
+ {'cheese': ['data/templates/*', 'doc/*',
+ 'doc/images/*.png']})
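+ # each resources rule "base/ glob = {category}" expands matched
+ # files to their destination under the category placeholder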
+ self.assertEqual(dist.data_files,
+ {'bm/b1.gif': '{icon}/b1.gif',
+ 'bm/b2.gif': '{icon}/b2.gif',
+ 'Cfg/data.CFG': '{config}/baBar/data.CFG',
+ 'init_script': '{script}/JunGle/init_script'})
+
+ self.assertEqual(dist.package_dir, 'src')
+
+ # Make sure we get the foo command loaded. We use a string comparison
+ # instead of assertIsInstance because the class is not the same when
+ # this test is run directly: foo is packaging.tests.test_config.Foo
+ # because get_command_class uses the full name, but a bare "Foo" in
+ # this file would be __main__.Foo when run as "python test_config.py".
+ # The name FooBarBazTest should be unique enough to prevent
+ # collisions.
+ self.assertEqual(dist.get_command_obj('foo').__class__.__name__,
+ 'FooBarBazTest')
+
+ # was the README loaded?
+ self.assertEqual(dist.metadata['description'], 'yeah')
+
+ # is the D Compiler enabled?
+ self.assertIn('d', _COMPILERS)
+ d = new_compiler(compiler='d')
+ self.assertEqual(d.description, 'D Compiler')
+
+ # check error reporting for invalid package_data value
+ self.write_file('setup.cfg', SETUP_CFG_PKGDATA_BUGGY_1)
+ self.assertRaises(PackagingOptionError, self.get_dist)
+
+ self.write_file('setup.cfg', SETUP_CFG_PKGDATA_BUGGY_2)
+ self.assertRaises(PackagingOptionError, self.get_dist)
+
+ def test_multiple_description_file(self):
+ self.write_setup({'description-file': 'README CHANGES'})
+ self.write_file('README', 'yeah')
+ self.write_file('CHANGES', 'changelog2')
+ dist = self.get_dist()
+ self.assertEqual(dist.metadata.requires_files, ['README', 'CHANGES'])
+
+ def test_multiline_description_file(self):
+ self.write_setup({'description-file': 'README\n CHANGES'})
+ self.write_file('README', 'yeah')
+ self.write_file('CHANGES', 'changelog')
+ dist = self.get_dist()
+ self.assertEqual(dist.metadata['description'], 'yeah\nchangelog')
+ self.assertEqual(dist.metadata.requires_files, ['README', 'CHANGES'])
+
+ def test_parse_extensions_in_config(self):
+ self.write_file('setup.cfg', EXT_SETUP_CFG)
+ dist = self.get_dist()
+
+ ext_modules = dict((mod.name, mod) for mod in dist.ext_modules)
+ self.assertEqual(len(ext_modules), 3)
+ ext = ext_modules.get('one.speed_coconuts')
+ self.assertEqual(ext.sources, ['c_src/speed_coconuts.c'])
+ self.assertEqual(ext.define_macros, ['HAVE_CAIRO', 'HAVE_GTK2'])
+ libs = ['gecodeint', 'gecodekernel']
+ if sys.platform == 'win32':
+ libs = ['GecodeInt', 'GecodeKernel']
+ self.assertEqual(ext.libraries, libs)
+ self.assertEqual(ext.extra_link_args,
+ ['`gcc -print-file-name=libgcc.a`', '-shared'])
+
+ ext = ext_modules.get('two.fast_taunt')
+ self.assertEqual(ext.sources,
+ ['cxx_src/utils_taunt.cxx', 'cxx_src/python_module.cxx'])
+ self.assertEqual(ext.include_dirs,
+ ['/usr/include/gecode', '/usr/include/blitz'])
+ cargs = ['-fPIC', '-O2']
+ if sys.platform == 'win32':
+ cargs.append("/DGECODE_VERSION=win32")
+ else:
+ cargs.append('-DGECODE_VERSION=$(./gecode_version)')
+ self.assertEqual(ext.extra_compile_args, cargs)
+ self.assertEqual(ext.language, 'cxx')
+
+ self.write_file('setup.cfg', EXT_SETUP_CFG_BUGGY_1)
+ self.assertRaises(PackagingOptionError, self.get_dist)
+
+ self.write_file('setup.cfg', EXT_SETUP_CFG_BUGGY_2)
+ self.assertRaises(PackagingOptionError, self.get_dist)
+
+ self.write_file('setup.cfg', EXT_SETUP_CFG_BUGGY_3)
+ self.assertRaises(PackagingOptionError, self.get_dist)
+
+ def test_project_setup_hook_works(self):
+ # Bug #11637: ensure the project directory is on sys.path to allow
+ # project-specific hooks
+ self.write_setup({'setup-hooks': 'hooks.logging_hook'})
+ self.write_file('README', 'yeah')
+ self.write_file('hooks.py', HOOKS_MODULE)
+ self.get_dist()
+ self.assertEqual(['logging_hook called'], self.get_logs())
+ self.assertIn('hooks', sys.modules)
+
+ def test_missing_setup_hook_warns(self):
+ self.write_setup({'setup-hooks': 'does._not.exist'})
+ self.write_file('README', 'yeah')
+ self.get_dist()
+ logs = self.get_logs()
+ self.assertEqual(1, len(logs))
+ self.assertIn('cannot find setup hook', logs[0])
+
+ def test_multiple_setup_hooks(self):
+ self.write_setup({
+ 'setup-hooks': '\n packaging.tests.test_config.first_hook'
+ '\n packaging.tests.test_config.missing_hook'
+ '\n packaging.tests.test_config.third_hook',
+ })
+ self.write_file('README', 'yeah')
+ dist = self.get_dist()
+
+ self.assertEqual(['haven', 'first', 'third'], dist.py_modules)
+ logs = self.get_logs()
+ self.assertEqual(1, len(logs))
+ self.assertIn('cannot find setup hook', logs[0])
+
+ def test_metadata_requires_description_files_missing(self):
+ self.write_setup({'description-file': 'README README2'})
+ self.write_file('README', 'yeah')
+ self.write_file('README2', 'yeah')
+ os.mkdir('src')
+ self.write_file(('src', 'haven.py'), '#')
+ self.write_file('script1.py', '#')
+ os.mkdir('scripts')
+ self.write_file(('scripts', 'find-coconuts'), '#')
+ os.mkdir('bin')
+ self.write_file(('bin', 'taunt'), '#')
+
+ for pkg in ('one', 'two', 'three'):
+ pkg = os.path.join('src', pkg)
+ os.mkdir(pkg)
+ self.write_file((pkg, '__init__.py'), '#')
+
+ dist = self.get_dist()
+ cmd = sdist(dist)
+ cmd.finalize_options()
+ cmd.get_file_list()
+ self.assertRaises(PackagingFileError, cmd.make_distribution)
+
+ @requires_zlib
+ def test_metadata_requires_description_files(self):
+ # Create the following file structure:
+ # README
+ # README2
+ # script1.py
+ # scripts/
+ # find-coconuts
+ # bin/
+ # taunt
+ # src/
+ # haven.py
+ # one/__init__.py
+ # two/__init__.py
+ # three/__init__.py
+
+ self.write_setup({'description-file': 'README\n README2',
+ 'extra-files': '\n README3'})
+ self.write_file('README', 'yeah 1')
+ self.write_file('README2', 'yeah 2')
+ self.write_file('README3', 'yeah 3')
+ os.mkdir('src')
+ self.write_file(('src', 'haven.py'), '#')
+ self.write_file('script1.py', '#')
+ os.mkdir('scripts')
+ self.write_file(('scripts', 'find-coconuts'), '#')
+ os.mkdir('bin')
+ self.write_file(('bin', 'taunt'), '#')
+
+ for pkg in ('one', 'two', 'three'):
+ pkg = os.path.join('src', pkg)
+ os.mkdir(pkg)
+ self.write_file((pkg, '__init__.py'), '#')
+
+ dist = self.get_dist()
+ self.assertIn('yeah 1\nyeah 2', dist.metadata['description'])
+
+ cmd = sdist(dist)
+ cmd.finalize_options()
+ cmd.get_file_list()
+ self.assertRaises(PackagingFileError, cmd.make_distribution)
+
+ self.write_setup({'description-file': 'README\n README2',
+ 'extra-files': '\n README2\n README'})
+ dist = self.get_dist()
+ cmd = sdist(dist)
+ cmd.finalize_options()
+ cmd.get_file_list()
+ cmd.make_distribution()
+ with open('MANIFEST') as fp:
+ self.assertIn('README\nREADME2\n', fp.read())
+
+ def test_sub_commands(self):
+ self.write_setup()
+ self.write_file('README', 'yeah')
+ os.mkdir('src')
+ self.write_file(('src', 'haven.py'), '#')
+ self.write_file('script1.py', '#')
+ os.mkdir('scripts')
+ self.write_file(('scripts', 'find-coconuts'), '#')
+ os.mkdir('bin')
+ self.write_file(('bin', 'taunt'), '#')
+
+ for pkg in ('one', 'two', 'three'):
+ pkg = os.path.join('src', pkg)
+ os.mkdir(pkg)
+ self.write_file((pkg, '__init__.py'), '#')
+
+ # try to run the install command to see if foo is called
+ self.addCleanup(command._COMMANDS.__delitem__, 'foo')
+ dist = self.get_dist()
+ dist.run_command('install_dist')
+ cmd = dist.get_command_obj('foo')
+ self.assertEqual(cmd.__class__.__name__, 'FooBarBazTest')
+ self.assertEqual(cmd._record, ['foo has run'])
+
+
+def test_suite():
+ return unittest.makeSuite(ConfigTestCase)
+
+if __name__ == '__main__':
+ unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_create.py b/Lib/packaging/tests/test_create.py
new file mode 100644
index 0000000..76bc331
--- /dev/null
+++ b/Lib/packaging/tests/test_create.py
@@ -0,0 +1,233 @@
+"""Tests for packaging.create."""
+import os
+import sys
+import sysconfig
+from textwrap import dedent
+from packaging import create
+from packaging.create import MainProgram, ask_yn, ask, main
+
+from packaging.tests import support, unittest
+from packaging.tests.support import Inputs
+
+
+class CreateTestCase(support.TempdirManager,
+ support.EnvironRestorer,
+ support.LoggingCatcher,
+ unittest.TestCase):
+
+ maxDiff = None
+ restore_environ = ['PLAT']
+
+ def setUp(self):
+ super(CreateTestCase, self).setUp()
+ self.wdir = self.mkdtemp()
+ os.chdir(self.wdir)
+ # patch sysconfig
+ self._old_get_paths = sysconfig.get_paths
+ sysconfig.get_paths = lambda *args, **kwargs: {
+ 'man': sys.prefix + '/share/man',
+ 'doc': sys.prefix + '/share/doc/pyxfoil', }
+
+ def tearDown(self):
+ sysconfig.get_paths = self._old_get_paths
+ if hasattr(create, 'input'):
+ del create.input
+ super(CreateTestCase, self).tearDown()
+
+ def test_ask_yn(self):
+ create.input = Inputs('y')
+ self.assertEqual('y', ask_yn('is this a test'))
+
+ def test_ask(self):
+ create.input = Inputs('a', 'b')
+ self.assertEqual('a', ask('is this a test'))
+ self.assertEqual('b', ask(str(list(range(0, 70))), default='c',
+ lengthy=True))
+
+ def test_set_multi(self):
+ mainprogram = MainProgram()
+ create.input = Inputs('aaaaa')
+ mainprogram.data['author'] = []
+ mainprogram._set_multi('_set_multi test', 'author')
+ self.assertEqual(['aaaaa'], mainprogram.data['author'])
+
+ def test_find_files(self):
+ # making sure we scan a project dir correctly
+ mainprogram = MainProgram()
+
+ # building the structure
+ tempdir = self.wdir
+ dirs = ['pkg1', 'data', 'pkg2', 'pkg2/sub']
+ files = [
+ 'README',
+ 'data/data1',
+ 'foo.py',
+ 'pkg1/__init__.py',
+ 'pkg1/bar.py',
+ 'pkg2/__init__.py',
+ 'pkg2/sub/__init__.py',
+ ]
+
+ for dir_ in dirs:
+ os.mkdir(os.path.join(tempdir, dir_))
+
+ for file_ in files:
+ self.write_file((tempdir, file_), 'xxx')
+
+ mainprogram._find_files()
+ mainprogram.data['packages'].sort()
+
+ # do we have what we want?
+ self.assertEqual(mainprogram.data['packages'],
+ ['pkg1', 'pkg2', 'pkg2.sub'])
+ self.assertEqual(mainprogram.data['modules'], ['foo'])
+ data_fn = os.path.join('data', 'data1')
+ self.assertEqual(mainprogram.data['extra_files'],
+ ['README', data_fn])
+
+ def test_convert_setup_py_to_cfg(self):
+ self.write_file((self.wdir, 'setup.py'),
+ dedent("""
+ # coding: utf-8
+ from distutils.core import setup
+
+ long_description = '''My super Death-scription
+ barbar is now in the public domain,
+ ho, baby!'''
+
+ setup(name='pyxfoil',
+ version='0.2',
+ description='Python bindings for the Xfoil engine',
+ long_description=long_description,
+ maintainer='André Espaze',
+ maintainer_email='andre.espaze@logilab.fr',
+ url='http://www.python-science.org/project/pyxfoil',
+ license='GPLv2',
+ packages=['pyxfoil', 'babar', 'me'],
+ data_files=[
+ ('share/doc/pyxfoil', ['README.rst']),
+ ('share/man', ['pyxfoil.1']),
+ ],
+ py_modules=['my_lib', 'mymodule'],
+ package_dir={
+ 'babar': '',
+ 'me': 'Martinique/Lamentin',
+ },
+ package_data={
+ 'babar': ['Pom', 'Flora', 'Alexander'],
+ 'me': ['dady', 'mumy', 'sys', 'bro'],
+ 'pyxfoil': ['fengine.so'],
+ },
+ scripts=['my_script', 'bin/run'],
+ )
+ """), encoding='utf-8')
+ create.input = Inputs('y')
+ main()
+
+ path = os.path.join(self.wdir, 'setup.cfg')
+ with open(path, encoding='utf-8') as fp:
+ contents = fp.read()
+
+ self.assertEqual(contents, dedent("""\
+ [metadata]
+ name = pyxfoil
+ version = 0.2
+ summary = Python bindings for the Xfoil engine
+ download_url = UNKNOWN
+ home_page = http://www.python-science.org/project/pyxfoil
+ maintainer = André Espaze
+ maintainer_email = andre.espaze@logilab.fr
+ description = My super Death-scription
+ |barbar is now in the public domain,
+ |ho, baby!
+
+ [files]
+ packages = pyxfoil
+ babar
+ me
+ modules = my_lib
+ mymodule
+ scripts = my_script
+ bin/run
+ package_data =
+ babar = Pom
+ Flora
+ Alexander
+ me = dady
+ mumy
+ sys
+ bro
+ pyxfoil = fengine.so
+
+ resources =
+ README.rst = {doc}
+ pyxfoil.1 = {man}
+
+ """))
+
+ def test_convert_setup_py_to_cfg_with_description_in_readme(self):
+ self.write_file((self.wdir, 'setup.py'),
+ dedent("""
+ # coding: utf-8
+ from distutils.core import setup
+ with open('README.txt') as fp:
+ long_description = fp.read()
+
+ setup(name='pyxfoil',
+ version='0.2',
+ description='Python bindings for the Xfoil engine',
+ long_description=long_description,
+ maintainer='André Espaze',
+ maintainer_email='andre.espaze@logilab.fr',
+ url='http://www.python-science.org/project/pyxfoil',
+ license='GPLv2',
+ packages=['pyxfoil'],
+ package_data={'pyxfoil': ['fengine.so', 'babar.so']},
+ data_files=[
+ ('share/doc/pyxfoil', ['README.rst']),
+ ('share/man', ['pyxfoil.1']),
+ ],
+ )
+ """), encoding='utf-8')
+ self.write_file((self.wdir, 'README.txt'),
+ dedent('''
+My super Death-scription
+barbar is now in the public domain,
+ho, baby!
+ '''))
+ create.input = Inputs('y')
+ main()
+
+ path = os.path.join(self.wdir, 'setup.cfg')
+ with open(path, encoding='utf-8') as fp:
+ contents = fp.read()
+
+ self.assertEqual(contents, dedent("""\
+ [metadata]
+ name = pyxfoil
+ version = 0.2
+ summary = Python bindings for the Xfoil engine
+ download_url = UNKNOWN
+ home_page = http://www.python-science.org/project/pyxfoil
+ maintainer = André Espaze
+ maintainer_email = andre.espaze@logilab.fr
+ description-file = README.txt
+
+ [files]
+ packages = pyxfoil
+ package_data =
+ pyxfoil = fengine.so
+ babar.so
+
+ resources =
+ README.rst = {doc}
+ pyxfoil.1 = {man}
+
+ """))
+
+
+def test_suite():
+ return unittest.makeSuite(CreateTestCase)
+
+if __name__ == '__main__':
+ unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_cygwinccompiler.py b/Lib/packaging/tests/test_cygwinccompiler.py
new file mode 100644
index 0000000..17c43cd
--- /dev/null
+++ b/Lib/packaging/tests/test_cygwinccompiler.py
@@ -0,0 +1,88 @@
+"""Tests for packaging.cygwinccompiler."""
+import os
+import sys
+import sysconfig
+from packaging.compiler.cygwinccompiler import (
+ check_config_h, get_msvcr,
+ CONFIG_H_OK, CONFIG_H_NOTOK, CONFIG_H_UNCERTAIN)
+
+from packaging.tests import unittest, support
+
+
+class CygwinCCompilerTestCase(support.TempdirManager,
+ unittest.TestCase):
+
+ def setUp(self):
+ super(CygwinCCompilerTestCase, self).setUp()
+ self.version = sys.version
+ self.python_h = os.path.join(self.mkdtemp(), 'python.h')
+ self.old_get_config_h_filename = sysconfig.get_config_h_filename
+ sysconfig.get_config_h_filename = self._get_config_h_filename
+
+ def tearDown(self):
+ sys.version = self.version
+ sysconfig.get_config_h_filename = self.old_get_config_h_filename
+ super(CygwinCCompilerTestCase, self).tearDown()
+
+ def _get_config_h_filename(self):
+ return self.python_h
+
+ def test_check_config_h(self):
+ # check_config_h looks for "GCC" in sys.version first
+ # returns CONFIG_H_OK if found
+ sys.version = ('2.6.1 (r261:67515, Dec 6 2008, 16:42:21) \n[GCC '
+ '4.0.1 (Apple Computer, Inc. build 5370)]')
+
+ self.assertEqual(check_config_h()[0], CONFIG_H_OK)
+
+ # then it tries to see if it can find "__GNUC__" in pyconfig.h
+ sys.version = 'something without the *CC word'
+
+ # if the file doesn't exist it returns CONFIG_H_UNCERTAIN
+ self.assertEqual(check_config_h()[0], CONFIG_H_UNCERTAIN)
+
+ # if it exists but does not contain __GNUC__, it returns CONFIG_H_NOTOK
+ self.write_file(self.python_h, 'xxx')
+ self.assertEqual(check_config_h()[0], CONFIG_H_NOTOK)
+
+ # and CONFIG_H_OK if __GNUC__ is found
+ self.write_file(self.python_h, 'xxx __GNUC__ xxx')
+ self.assertEqual(check_config_h()[0], CONFIG_H_OK)
+
+ def test_get_msvcr(self):
+ # none
+ sys.version = ('2.6.1 (r261:67515, Dec 6 2008, 16:42:21) '
+ '\n[GCC 4.0.1 (Apple Computer, Inc. build 5370)]')
+ self.assertEqual(get_msvcr(), None)
+
+ # MSVC 7.0
+ sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
+ '[MSC v.1300 32 bits (Intel)]')
+ self.assertEqual(get_msvcr(), ['msvcr70'])
+
+ # MSVC 7.1
+ sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
+ '[MSC v.1310 32 bits (Intel)]')
+ self.assertEqual(get_msvcr(), ['msvcr71'])
+
+ # VS2005 / MSVC 8.0
+ sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
+ '[MSC v.1400 32 bits (Intel)]')
+ self.assertEqual(get_msvcr(), ['msvcr80'])
+
+ # VS2008 / MSVC 9.0
+ sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
+ '[MSC v.1500 32 bits (Intel)]')
+ self.assertEqual(get_msvcr(), ['msvcr90'])
+
+ # unknown
+ sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
+ '[MSC v.1999 32 bits (Intel)]')
+ self.assertRaises(ValueError, get_msvcr)
+
+
+def test_suite():
+ return unittest.makeSuite(CygwinCCompilerTestCase)
+
+if __name__ == '__main__':
+ unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_database.py b/Lib/packaging/tests/test_database.py
new file mode 100644
index 0000000..ad91b94
--- /dev/null
+++ b/Lib/packaging/tests/test_database.py
@@ -0,0 +1,686 @@
+import os
+import io
+import csv
+import sys
+import shutil
+import tempfile
+from hashlib import md5
+from textwrap import dedent
+
+from packaging.tests.test_util import GlobTestCaseBase
+from packaging.tests.support import requires_zlib
+
+import packaging.database
+from packaging.config import get_resources_dests
+from packaging.errors import PackagingError
+from packaging.metadata import Metadata
+from packaging.tests import unittest, support
+from packaging.database import (
+ Distribution, EggInfoDistribution, get_distribution, get_distributions,
+ provides_distribution, obsoletes_distribution, get_file_users,
+ enable_cache, disable_cache, distinfo_dirname, _yield_distributions,
+ get_file, get_file_path)
+
+# TODO Add a test for getting a distribution provided by another distribution
+# TODO Add a test for absolute paths in RECORD items (e.g. /etc/myapp/config.ini)
+# TODO Add tests from the former pep376 project (zipped site-packages, etc.)
+
+
+def get_hexdigest(filename):
+ with open(filename, 'rb') as file:
+ checksum = md5(file.read())
+ return checksum.hexdigest()
+
+
+def record_pieces(path):
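+ """Return the (path, md5, size) triple for one RECORD row."""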
+ path = os.path.join(*path)
+ digest = get_hexdigest(path)
+ size = os.path.getsize(path)
+ return path, digest, size
+
+
+class FakeDistsMixin:
+
+ def setUp(self):
+ super(FakeDistsMixin, self).setUp()
+ self.addCleanup(enable_cache)
+ disable_cache()
+
+ # make a copy that we can write into for our fake installed
+ # distributions
+ tmpdir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, tmpdir)
+ self.fake_dists_path = os.path.realpath(
+ os.path.join(tmpdir, 'fake_dists'))
+ fake_dists_src = os.path.abspath(
+ os.path.join(os.path.dirname(__file__), 'fake_dists'))
+ shutil.copytree(fake_dists_src, self.fake_dists_path)
+ # XXX ugly workaround: revert copystat calls done by shutil behind our
+ # back (to avoid getting a read-only copy of a read-only file). we
+ # could pass a custom copy_function to change the mode of files, but
+ # shutil gives no control over the mode of directories :(
+ # see http://bugs.python.org/issue1666318
+ for root, dirs, files in os.walk(self.fake_dists_path):
+ os.chmod(root, 0o755)
+ for f in files:
+ os.chmod(os.path.join(root, f), 0o644)
+ for d in dirs:
+ os.chmod(os.path.join(root, d), 0o755)
+
+
+class CommonDistributionTests(FakeDistsMixin):
+ """Mixin used to test the interface common to both Distribution classes.
+
+ Derived classes define cls, sample_dist, dirs and records. These
+ attributes are used in test methods. See source code for details.
+ """
+
+ def test_instantiation(self):
+ # check that useful attributes are here
+ name, version, distdir = self.sample_dist
+ here = os.path.abspath(os.path.dirname(__file__))
+ dist_path = os.path.join(here, 'fake_dists', distdir)
+
+ dist = self.dist = self.cls(dist_path)
+ self.assertEqual(dist.path, dist_path)
+ self.assertEqual(dist.name, name)
+ self.assertEqual(dist.metadata['Name'], name)
+ self.assertIsInstance(dist.metadata, Metadata)
+ self.assertEqual(dist.version, version)
+ self.assertEqual(dist.metadata['Version'], version)
+
+ @requires_zlib
+ def test_repr(self):
+ dist = self.cls(self.dirs[0])
+ # just check that the class name is in the repr
+ self.assertIn(self.cls.__name__, repr(dist))
+
+ @requires_zlib
+ def test_comparison(self):
+ # tests for __eq__ and __hash__
+ dist = self.cls(self.dirs[0])
+ dist2 = self.cls(self.dirs[0])
+ dist3 = self.cls(self.dirs[1])
+ self.assertIn(dist, {dist: True})
+ self.assertEqual(dist, dist)
+
+ self.assertIsNot(dist, dist2)
+ self.assertEqual(dist, dist2)
+ self.assertNotEqual(dist, dist3)
+ self.assertNotEqual(dist, ())
+
+ def test_list_installed_files(self):
+ for dir_ in self.dirs:
+ dist = self.cls(dir_)
+ for path, md5_, size in dist.list_installed_files():
+ record_data = self.records[dist.path]
+ self.assertIn(path, record_data)
+ self.assertEqual(md5_, record_data[path][0])
+ self.assertEqual(size, record_data[path][1])
+
+
+class TestDistribution(CommonDistributionTests, unittest.TestCase):
+
+ cls = Distribution
+ sample_dist = 'choxie', '2.0.0.9', 'choxie-2.0.0.9.dist-info'
+
+ def setUp(self):
+ super(TestDistribution, self).setUp()
+ self.dirs = [os.path.join(self.fake_dists_path, f)
+ for f in os.listdir(self.fake_dists_path)
+ if f.endswith('.dist-info')]
+
+ self.records = {}
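+ # write a RECORD file for each dist-info dir, then read it back
+ # into self.records for the list_installed_files comparisons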
+ for distinfo_dir in self.dirs:
+
+ record_file = os.path.join(distinfo_dir, 'RECORD')
+ with open(record_file, 'w') as file:
+ record_writer = csv.writer(
+ file, delimiter=',', quoting=csv.QUOTE_NONE,
+ lineterminator='\n')
+
+ dist_location = distinfo_dir.replace('.dist-info', '')
+
+ for path, dirs, files in os.walk(dist_location):
+ for f in files:
+ record_writer.writerow(record_pieces((path, f)))
+ for file in ('INSTALLER', 'METADATA', 'REQUESTED'):
+ record_writer.writerow(record_pieces((distinfo_dir, file)))
+ record_writer.writerow([record_file])
+
+ with open(record_file) as file:
+ record_reader = csv.reader(file, lineterminator='\n')
+ record_data = {}
+ for row in record_reader:
+ if row == []:
+ continue
+ path, md5_, size = (row[:] +
+ [None for i in range(len(row), 3)])
+ record_data[path] = md5_, size
+ self.records[distinfo_dir] = record_data
+
+ def test_instantiation(self):
+ super(TestDistribution, self).test_instantiation()
+ self.assertIsInstance(self.dist.requested, bool)
+
+ def test_uses(self):
+ # Test to determine if a distribution uses a specified file.
+ # Criteria to test against
+ distinfo_name = 'grammar-1.0a4'
+ distinfo_dir = os.path.join(self.fake_dists_path,
+ distinfo_name + '.dist-info')
+ true_path = [self.fake_dists_path, distinfo_name,
+ 'grammar', 'utils.py']
+ true_path = os.path.join(*true_path)
+ false_path = [self.fake_dists_path, 'towel_stuff-0.1', 'towel_stuff',
+ '__init__.py']
+ false_path = os.path.join(*false_path)
+
+ # Test if the distribution uses the file in question
+ dist = Distribution(distinfo_dir)
+ self.assertTrue(dist.uses(true_path), 'dist %r is supposed to use %r' %
+ (dist, true_path))
+ self.assertFalse(dist.uses(false_path), 'dist %r is not supposed to '
+ 'use %r' % (dist, false_path))
+
+ def test_get_distinfo_file(self):
+ # Test the retrieval of dist-info file objects.
+ distinfo_name = 'choxie-2.0.0.9'
+ other_distinfo_name = 'grammar-1.0a4'
+ distinfo_dir = os.path.join(self.fake_dists_path,
+ distinfo_name + '.dist-info')
+ dist = Distribution(distinfo_dir)
+ # Test for known good file matches
+ distinfo_files = [
+ # Relative paths
+ 'INSTALLER', 'METADATA',
+ # Absolute paths
+ os.path.join(distinfo_dir, 'RECORD'),
+ os.path.join(distinfo_dir, 'REQUESTED'),
+ ]
+
+ for distfile in distinfo_files:
+ with dist.get_distinfo_file(distfile) as value:
+ self.assertIsInstance(value, io.TextIOWrapper)
+ # Is it the correct file?
+ self.assertEqual(value.name,
+ os.path.join(distinfo_dir, distfile))
+
+ # Test an absolute path that is part of another distribution's dist-info
+ other_distinfo_file = os.path.join(
+ self.fake_dists_path, other_distinfo_name + '.dist-info',
+ 'REQUESTED')
+ self.assertRaises(PackagingError, dist.get_distinfo_file,
+ other_distinfo_file)
+ # Test for a file that should not exist
+ self.assertRaises(PackagingError, dist.get_distinfo_file,
+ 'MAGICFILE')
+
+ def test_list_distinfo_files(self):
+ distinfo_name = 'towel_stuff-0.1'
+ distinfo_dir = os.path.join(self.fake_dists_path,
+ distinfo_name + '.dist-info')
+ dist = Distribution(distinfo_dir)
+ # Test for the iteration of the raw path
+ distinfo_files = [os.path.join(distinfo_dir, filename) for filename in
+ os.listdir(distinfo_dir)]
+ found = dist.list_distinfo_files()
+ self.assertEqual(sorted(found), sorted(distinfo_files))
+ # Test for the iteration of local absolute paths
+ distinfo_files = [os.path.join(sys.prefix, distinfo_dir, path) for
+ path in distinfo_files]
+ found = sorted(dist.list_distinfo_files(local=True))
+ if os.sep != '/':
+ self.assertNotIn('/', found[0])
+ self.assertIn(os.sep, found[0])
+ self.assertEqual(found, sorted(distinfo_files))
+
+ def test_get_resources_path(self):
+ distinfo_name = 'babar-0.1'
+ distinfo_dir = os.path.join(self.fake_dists_path,
+ distinfo_name + '.dist-info')
+ dist = Distribution(distinfo_dir)
+ resource_path = dist.get_resource_path('babar.png')
+ self.assertEqual(resource_path, 'babar.png')
+ self.assertRaises(KeyError, dist.get_resource_path, 'notexist')
+
+
+class TestEggInfoDistribution(CommonDistributionTests,
+ support.LoggingCatcher,
+ unittest.TestCase):
+
+ cls = EggInfoDistribution
+ sample_dist = 'bacon', '0.1', 'bacon-0.1.egg-info'
+
+ def setUp(self):
+ super(TestEggInfoDistribution, self).setUp()
+
+ self.dirs = [os.path.join(self.fake_dists_path, f)
+ for f in os.listdir(self.fake_dists_path)
+ if f.endswith('.egg') or f.endswith('.egg-info')]
+
+ self.records = {}
+
+ @unittest.skip('not implemented yet')
+ def test_list_installed_files(self):
+ # EggInfoDistribution defines list_installed_files but there is no
+ # test for it yet; someone with setuptools expertise needs to add a
+ # file with the list of installed files for one of the egg fake dists
+ # and write the support code to populate self.records (and then delete
+ # this method)
+ pass
+
+
+class TestDatabase(support.LoggingCatcher,
+ FakeDistsMixin,
+ unittest.TestCase):
+
+ def setUp(self):
+ super(TestDatabase, self).setUp()
+ sys.path.insert(0, self.fake_dists_path)
+ self.addCleanup(sys.path.remove, self.fake_dists_path)
+
+ def test_caches(self):
+ # sanity check for internal caches
+ for name in ('_cache_name', '_cache_name_egg',
+ '_cache_path', '_cache_path_egg'):
+ self.assertEqual(getattr(packaging.database, name), {})
+
+ def test_distinfo_dirname(self):
+ # Given a name and a version, we expect the distinfo_dirname function
+ # to return a standard distribution information directory name.
+
+ items = [
+ # (name, version, standard_dirname)
+ # Test for a very simple single word name and decimal version
+ # number
+ ('docutils', '0.5', 'docutils-0.5.dist-info'),
+ # Test another name, this time with a '-', which is
+ # transformed to '_' during the name lookup
+ ('python-ldap', '2.5', 'python_ldap-2.5.dist-info'),
+ # Test for both '-' in the name and a funky version number
+ ('python-ldap', '2.5 a---5', 'python_ldap-2.5 a---5.dist-info'),
+ ]
+
+ # Loop through the items to validate the results
+ for name, version, standard_dirname in items:
+ dirname = distinfo_dirname(name, version)
+ self.assertEqual(dirname, standard_dirname)
+
+ @requires_zlib
+ def test_get_distributions(self):
+ # Lookup all distributions found in the ``sys.path``.
+ # This test could potentially pick up other installed distributions
+ fake_dists = [('grammar', '1.0a4'), ('choxie', '2.0.0.9'),
+ ('towel-stuff', '0.1'), ('babar', '0.1')]
+ found_dists = []
+
+ # Verify the fake dists have been found.
+ dists = [dist for dist in get_distributions()]
+ for dist in dists:
+ self.assertIsInstance(dist, Distribution)
+ if (dist.name in dict(fake_dists) and
+ dist.path.startswith(self.fake_dists_path)):
+ found_dists.append((dist.name, dist.version))
+ else:
+ # check that it doesn't find anything more than this
+ self.assertFalse(dist.path.startswith(self.fake_dists_path))
+ # otherwise we don't care what other distributions are found
+
+ # Finally, test that we found all that we were looking for
+ self.assertEqual(sorted(found_dists), sorted(fake_dists))
+
+ # Now, test if the egg-info distributions are found correctly as well
+ fake_dists += [('bacon', '0.1'), ('cheese', '2.0.2'),
+ ('coconuts-aster', '10.3'),
+ ('banana', '0.4'), ('strawberry', '0.6'),
+ ('truffles', '5.0'), ('nut', 'funkyversion')]
+ found_dists = []
+
+ dists = [dist for dist in get_distributions(use_egg_info=True)]
+ for dist in dists:
+ self.assertIsInstance(dist, (Distribution, EggInfoDistribution))
+ if (dist.name in dict(fake_dists) and
+ dist.path.startswith(self.fake_dists_path)):
+ found_dists.append((dist.name, dist.version))
+ else:
+ self.assertFalse(dist.path.startswith(self.fake_dists_path))
+
+ self.assertEqual(sorted(fake_dists), sorted(found_dists))
+
+ @requires_zlib
+ def test_get_distribution(self):
+ # Test for looking up a distribution by name.
+ # Test the lookup of the towel-stuff distribution
+ name = 'towel-stuff' # Note: This is different from the directory name
+
+ # Lookup the distribution
+ dist = get_distribution(name)
+ self.assertIsInstance(dist, Distribution)
+ self.assertEqual(dist.name, name)
+
+ # Verify that an unknown distribution returns None
+ self.assertIsNone(get_distribution('bogus'))
+
+ # Verify partial name matching doesn't work
+ self.assertIsNone(get_distribution('towel'))
+
+ # Verify that it does not find egg-info distributions when not
+ # instructed to
+ self.assertIsNone(get_distribution('bacon'))
+ self.assertIsNone(get_distribution('cheese'))
+ self.assertIsNone(get_distribution('strawberry'))
+ self.assertIsNone(get_distribution('banana'))
+
+ # Now check that it works in both situations: when egg-info
+ # is a file and when it is a directory.
+ dist = get_distribution('cheese', use_egg_info=True)
+ self.assertIsInstance(dist, EggInfoDistribution)
+ self.assertEqual(dist.name, 'cheese')
+
+ dist = get_distribution('bacon', use_egg_info=True)
+ self.assertIsInstance(dist, EggInfoDistribution)
+ self.assertEqual(dist.name, 'bacon')
+
+ dist = get_distribution('banana', use_egg_info=True)
+ self.assertIsInstance(dist, EggInfoDistribution)
+ self.assertEqual(dist.name, 'banana')
+
+ dist = get_distribution('strawberry', use_egg_info=True)
+ self.assertIsInstance(dist, EggInfoDistribution)
+ self.assertEqual(dist.name, 'strawberry')
+
+ def test_get_file_users(self):
+ # Test the iteration of distributions that use a file.
+ name = 'towel_stuff-0.1'
+ path = os.path.join(self.fake_dists_path, name,
+ 'towel_stuff', '__init__.py')
+ for dist in get_file_users(path):
+ self.assertIsInstance(dist, Distribution)
+ self.assertEqual(dist.name, name)
+
+ @requires_zlib
+ def test_provides(self):
+ # Test for looking up distributions by what they provide
+ checkLists = lambda x, y: self.assertEqual(sorted(x), sorted(y))
+
+ l = [dist.name for dist in provides_distribution('truffles')]
+ checkLists(l, ['choxie', 'towel-stuff'])
+
+ l = [dist.name for dist in provides_distribution('truffles', '1.0')]
+ checkLists(l, ['choxie'])
+
+ l = [dist.name for dist in provides_distribution('truffles', '1.0',
+ use_egg_info=True)]
+ checkLists(l, ['choxie', 'cheese'])
+
+ l = [dist.name for dist in provides_distribution('truffles', '1.1.2')]
+ checkLists(l, ['towel-stuff'])
+
+ l = [dist.name for dist in provides_distribution('truffles', '1.1')]
+ checkLists(l, ['towel-stuff'])
+
+ l = [dist.name for dist in provides_distribution('truffles',
+ '!=1.1,<=2.0')]
+ checkLists(l, ['choxie'])
+
+ l = [dist.name for dist in provides_distribution('truffles',
+ '!=1.1,<=2.0',
+ use_egg_info=True)]
+ checkLists(l, ['choxie', 'bacon', 'cheese'])
+
+ l = [dist.name for dist in provides_distribution('truffles', '>1.0')]
+ checkLists(l, ['towel-stuff'])
+
+ l = [dist.name for dist in provides_distribution('truffles', '>1.5')]
+ checkLists(l, [])
+
+ l = [dist.name for dist in provides_distribution('truffles', '>1.5',
+ use_egg_info=True)]
+ checkLists(l, ['bacon'])
+
+ l = [dist.name for dist in provides_distribution('truffles', '>=1.0')]
+ checkLists(l, ['choxie', 'towel-stuff'])
+
+ l = [dist.name for dist in provides_distribution('strawberry', '0.6',
+ use_egg_info=True)]
+ checkLists(l, ['coconuts-aster'])
+
+ l = [dist.name for dist in provides_distribution('strawberry', '>=0.5',
+ use_egg_info=True)]
+ checkLists(l, ['coconuts-aster'])
+
+ l = [dist.name for dist in provides_distribution('strawberry', '>0.6',
+ use_egg_info=True)]
+ checkLists(l, [])
+
+ l = [dist.name for dist in provides_distribution('banana', '0.4',
+ use_egg_info=True)]
+ checkLists(l, ['coconuts-aster'])
+
+ l = [dist.name for dist in provides_distribution('banana', '>=0.3',
+ use_egg_info=True)]
+ checkLists(l, ['coconuts-aster'])
+
+ l = [dist.name for dist in provides_distribution('banana', '!=0.4',
+ use_egg_info=True)]
+ checkLists(l, [])
+
+ @requires_zlib
+ def test_obsoletes(self):
+ # Test looking for distributions based on what they obsolete
+ checkLists = lambda x, y: self.assertEqual(sorted(x), sorted(y))
+
+ l = [dist.name for dist in obsoletes_distribution('truffles', '1.0')]
+ checkLists(l, [])
+
+ l = [dist.name for dist in obsoletes_distribution('truffles', '1.0',
+ use_egg_info=True)]
+ checkLists(l, ['cheese', 'bacon'])
+
+ l = [dist.name for dist in obsoletes_distribution('truffles', '0.8')]
+ checkLists(l, ['choxie'])
+
+ l = [dist.name for dist in obsoletes_distribution('truffles', '0.8',
+ use_egg_info=True)]
+ checkLists(l, ['choxie', 'cheese'])
+
+ l = [dist.name for dist in obsoletes_distribution('truffles', '0.9.6')]
+ checkLists(l, ['choxie', 'towel-stuff'])
+
+ l = [dist.name for dist in obsoletes_distribution('truffles',
+ '0.5.2.3')]
+ checkLists(l, ['choxie', 'towel-stuff'])
+
+ l = [dist.name for dist in obsoletes_distribution('truffles', '0.2')]
+ checkLists(l, ['towel-stuff'])
+
+ @requires_zlib
+ def test_yield_distribution(self):
+ # tests the internal function _yield_distributions
+ checkLists = lambda x, y: self.assertEqual(sorted(x), sorted(y))
+
+ eggs = [('bacon', '0.1'), ('banana', '0.4'), ('strawberry', '0.6'),
+ ('truffles', '5.0'), ('cheese', '2.0.2'),
+ ('coconuts-aster', '10.3'), ('nut', 'funkyversion')]
+ dists = [('choxie', '2.0.0.9'), ('grammar', '1.0a4'),
+ ('towel-stuff', '0.1'), ('babar', '0.1')]
+
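+ # the two boolean arguments select .dist-info and egg-info
+ # distributions respectively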
+ checkLists([], _yield_distributions(False, False, sys.path))
+
+ found = [(dist.name, dist.version)
+ for dist in _yield_distributions(False, True, sys.path)
+ if dist.path.startswith(self.fake_dists_path)]
+ checkLists(eggs, found)
+
+ found = [(dist.name, dist.version)
+ for dist in _yield_distributions(True, False, sys.path)
+ if dist.path.startswith(self.fake_dists_path)]
+ checkLists(dists, found)
+
+ found = [(dist.name, dist.version)
+ for dist in _yield_distributions(True, True, sys.path)
+ if dist.path.startswith(self.fake_dists_path)]
+ checkLists(dists + eggs, found)
+
+
+class DataFilesTestCase(GlobTestCaseBase):
+
+ def assertRulesMatch(self, rules, spec):
+ tempdir = self.build_files_tree(spec)
+ expected = self.clean_tree(spec)
+ result = get_resources_dests(tempdir, rules)
+ self.assertEqual(expected, result)
+
+ def clean_tree(self, spec):
+ files = {}
+ for path, value in spec.items():
+ if value is not None:
+ files[path] = value
+ return files
+
+ def test_simple_glob(self):
+ rules = [('', '*.tpl', '{data}')]
+ spec = {'coucou.tpl': '{data}/coucou.tpl',
+ 'Donotwant': None}
+ self.assertRulesMatch(rules, spec)
+
+ def test_multiple_match(self):
+ rules = [('scripts', '*.bin', '{appdata}'),
+ ('scripts', '*', '{appscript}')]
+ spec = {'scripts/script.bin': '{appscript}/script.bin',
+ 'Babarlikestrawberry': None}
+ self.assertRulesMatch(rules, spec)
+
+ def test_set_match(self):
+ rules = [('scripts', '*.{bin,sh}', '{appscript}')]
+ spec = {'scripts/script.bin': '{appscript}/script.bin',
+ 'scripts/babar.sh': '{appscript}/babar.sh',
+ 'Babarlikestrawberry': None}
+ self.assertRulesMatch(rules, spec)
+
+ def test_set_match_multiple(self):
+ rules = [('scripts', 'script{s,}.{bin,sh}', '{appscript}')]
+ spec = {'scripts/scripts.bin': '{appscript}/scripts.bin',
+ 'scripts/script.sh': '{appscript}/script.sh',
+ 'Babarlikestrawberry': None}
+ self.assertRulesMatch(rules, spec)
+
+ def test_set_match_exclude(self):
+ rules = [('scripts', '*', '{appscript}'),
+ ('', os.path.join('**', '*.sh'), None)]
+ spec = {'scripts/scripts.bin': '{appscript}/scripts.bin',
+ 'scripts/script.sh': None,
+ 'Babarlikestrawberry': None}
+ self.assertRulesMatch(rules, spec)
+
+ def test_glob_in_base(self):
+ rules = [('scrip*', '*.bin', '{appscript}')]
+ spec = {'scripts/scripts.bin': '{appscript}/scripts.bin',
+ 'scripouille/babar.bin': '{appscript}/babar.bin',
+ 'scriptortu/lotus.bin': '{appscript}/lotus.bin',
+ 'Babarlikestrawberry': None}
+ self.assertRulesMatch(rules, spec)
+
+ def test_recursive_glob(self):
+ rules = [('', os.path.join('**', '*.bin'), '{binary}')]
+ spec = {'binary0.bin': '{binary}/binary0.bin',
+ 'scripts/binary1.bin': '{binary}/scripts/binary1.bin',
+ 'scripts/bin/binary2.bin': '{binary}/scripts/bin/binary2.bin',
+ 'you/kill/pandabear.guy': None}
+ self.assertRulesMatch(rules, spec)
+
+ def test_final_exemple_glob(self):
+ rules = [
+ ('mailman/database/schemas/', '*', '{appdata}/schemas'),
+ ('', os.path.join('**', '*.tpl'), '{appdata}/templates'),
+ ('', os.path.join('developer-docs', '**', '*.txt'), '{doc}'),
+ ('', 'README', '{doc}'),
+ ('mailman/etc/', '*', '{config}'),
+ ('mailman/foo/', os.path.join('**', 'bar', '*.cfg'),
+ '{config}/baz'),
+ ('mailman/foo/', os.path.join('**', '*.cfg'), '{config}/hmm'),
+ ('', 'some-new-semantic.sns', '{funky-crazy-category}'),
+ ]
+ spec = {
+ 'README': '{doc}/README',
+ 'some.tpl': '{appdata}/templates/some.tpl',
+ 'some-new-semantic.sns':
+ '{funky-crazy-category}/some-new-semantic.sns',
+ 'mailman/database/mailman.db': None,
+ 'mailman/database/schemas/blah.schema':
+ '{appdata}/schemas/blah.schema',
+ 'mailman/etc/my.cnf': '{config}/my.cnf',
+ 'mailman/foo/some/path/bar/my.cfg':
+ '{config}/hmm/some/path/bar/my.cfg',
+ 'mailman/foo/some/path/other.cfg':
+ '{config}/hmm/some/path/other.cfg',
+ 'developer-docs/index.txt': '{doc}/developer-docs/index.txt',
+ 'developer-docs/api/toc.txt': '{doc}/developer-docs/api/toc.txt',
+ }
+ self.maxDiff = None
+ self.assertRulesMatch(rules, spec)
+
+ def test_get_file(self):
+ # Create a fake dist
+ temp_site_packages = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, temp_site_packages)
+
+ dist_name = 'test'
+ dist_info = os.path.join(temp_site_packages, 'test-0.1.dist-info')
+ os.mkdir(dist_info)
+
+ metadata_path = os.path.join(dist_info, 'METADATA')
+ resources_path = os.path.join(dist_info, 'RESOURCES')
+
+ with open(metadata_path, 'w') as fp:
+ fp.write(dedent("""\
+ Metadata-Version: 1.2
+ Name: test
+ Version: 0.1
+ Summary: test
+ Author: me
+ """))
+
+ test_path = 'test.cfg'
+
+ fd, test_resource_path = tempfile.mkstemp()
+ os.close(fd)
+ self.addCleanup(os.remove, test_resource_path)
+
+ with open(test_resource_path, 'w') as fp:
+ fp.write('Config')
+
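+ # RESOURCES maps each resource name to its installed path, one
+ # "name,path" pair per line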
+ with open(resources_path, 'w') as fp:
+ fp.write('%s,%s' % (test_path, test_resource_path))
+
+ # Add fake site-packages to sys.path to retrieve fake dist
+ self.addCleanup(sys.path.remove, temp_site_packages)
+ sys.path.insert(0, temp_site_packages)
+
+ # Force packaging.database to rescan the sys.path
+ self.addCleanup(enable_cache)
+ disable_cache()
+
+ # Try to retrieve resources paths and files
+ self.assertEqual(get_file_path(dist_name, test_path),
+ test_resource_path)
+ self.assertRaises(KeyError, get_file_path, dist_name, 'i-dont-exist')
+
+ with get_file(dist_name, test_path) as fp:
+ self.assertEqual(fp.read(), 'Config')
+ self.assertRaises(KeyError, get_file, dist_name, 'i-dont-exist')
+
+
+def test_suite():
+ suite = unittest.TestSuite()
+ load = unittest.defaultTestLoader.loadTestsFromTestCase
+ suite.addTest(load(TestDistribution))
+ suite.addTest(load(TestEggInfoDistribution))
+ suite.addTest(load(TestDatabase))
+ suite.addTest(load(DataFilesTestCase))
+ return suite
+
+
+if __name__ == "__main__":
+ unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_depgraph.py b/Lib/packaging/tests/test_depgraph.py
new file mode 100644
index 0000000..8833302
--- /dev/null
+++ b/Lib/packaging/tests/test_depgraph.py
@@ -0,0 +1,310 @@
+"""Tests for packaging.depgraph """
+import os
+import re
+import sys
+from io import StringIO
+
+from packaging import depgraph
+from packaging.database import get_distribution, enable_cache, disable_cache
+
+from packaging.tests import unittest, support
+from packaging.tests.support import requires_zlib
+
+
+class DepGraphTestCase(support.LoggingCatcher,
+ unittest.TestCase):
+
+ DISTROS_DIST = ('choxie', 'grammar', 'towel-stuff')
+ DISTROS_EGG = ('bacon', 'banana', 'strawberry', 'cheese')
+ BAD_EGGS = ('nut',)
+
+ EDGE = re.compile(
+ r'"(?P<from>.*)" -> "(?P<to>.*)" \[label="(?P<label>.*)"\]')
+
+ def checkLists(self, l1, l2):
+ """ Compare two lists without taking the order into consideration """
+ self.assertListEqual(sorted(l1), sorted(l2))
+
+ def setUp(self):
+ super(DepGraphTestCase, self).setUp()
+ path = os.path.join(os.path.dirname(__file__), 'fake_dists')
+ path = os.path.abspath(path)
+ sys.path.insert(0, path)
+ self.addCleanup(sys.path.remove, path)
+ self.addCleanup(enable_cache)
+ disable_cache()
+
+ def test_generate_graph(self):
+ dists = []
+ for name in self.DISTROS_DIST:
+ dist = get_distribution(name)
+ self.assertNotEqual(dist, None)
+ dists.append(dist)
+
+ choxie, grammar, towel = dists
+
+ graph = depgraph.generate_graph(dists)
+
+ deps = [(x.name, y) for x, y in graph.adjacency_list[choxie]]
+ self.checkLists([('towel-stuff', 'towel-stuff (0.1)')], deps)
+ self.assertIn(choxie, graph.reverse_list[towel])
+ self.checkLists(graph.missing[choxie], ['nut'])
+
+ deps = [(x.name, y) for x, y in graph.adjacency_list[grammar]]
+ self.checkLists([], deps)
+ self.checkLists(graph.missing[grammar], ['truffles (>=1.2)'])
+
+ deps = [(x.name, y) for x, y in graph.adjacency_list[towel]]
+ self.checkLists([], deps)
+ self.checkLists(graph.missing[towel], ['bacon (<=0.2)'])
+
+ @requires_zlib
+ def test_generate_graph_egg(self):
+ dists = []
+ for name in self.DISTROS_DIST + self.DISTROS_EGG:
+ dist = get_distribution(name, use_egg_info=True)
+ self.assertNotEqual(dist, None)
+ dists.append(dist)
+
+ choxie, grammar, towel, bacon, banana, strawberry, cheese = dists
+
+ graph = depgraph.generate_graph(dists)
+
+ deps = [(x.name, y) for x, y in graph.adjacency_list[choxie]]
+ self.checkLists([('towel-stuff', 'towel-stuff (0.1)')], deps)
+ self.assertIn(choxie, graph.reverse_list[towel])
+ self.checkLists(graph.missing[choxie], ['nut'])
+
+ deps = [(x.name, y) for x, y in graph.adjacency_list[grammar]]
+ self.checkLists([('bacon', 'truffles (>=1.2)')], deps)
+ self.checkLists(graph.missing[grammar], [])
+ self.assertIn(grammar, graph.reverse_list[bacon])
+
+ deps = [(x.name, y) for x, y in graph.adjacency_list[towel]]
+ self.checkLists([('bacon', 'bacon (<=0.2)')], deps)
+ self.checkLists(graph.missing[towel], [])
+ self.assertIn(towel, graph.reverse_list[bacon])
+
+ deps = [(x.name, y) for x, y in graph.adjacency_list[bacon]]
+ self.checkLists([], deps)
+ self.checkLists(graph.missing[bacon], [])
+
+ deps = [(x.name, y) for x, y in graph.adjacency_list[banana]]
+ self.checkLists([('strawberry', 'strawberry (>=0.5)')], deps)
+ self.checkLists(graph.missing[banana], [])
+ self.assertIn(banana, graph.reverse_list[strawberry])
+
+ deps = [(x.name, y) for x, y in graph.adjacency_list[strawberry]]
+ self.checkLists([], deps)
+ self.checkLists(graph.missing[strawberry], [])
+
+ deps = [(x.name, y) for x, y in graph.adjacency_list[cheese]]
+ self.checkLists([], deps)
+ self.checkLists(graph.missing[cheese], [])
+
+ def test_dependent_dists(self):
+ dists = []
+ for name in self.DISTROS_DIST:
+ dist = get_distribution(name)
+ self.assertNotEqual(dist, None)
+ dists.append(dist)
+
+ choxie, grammar, towel = dists
+
+ deps = [d.name for d in depgraph.dependent_dists(dists, choxie)]
+ self.checkLists([], deps)
+
+ deps = [d.name for d in depgraph.dependent_dists(dists, grammar)]
+ self.checkLists([], deps)
+
+ deps = [d.name for d in depgraph.dependent_dists(dists, towel)]
+ self.checkLists(['choxie'], deps)
+
+ @requires_zlib
+ def test_dependent_dists_egg(self):
+ dists = []
+ for name in self.DISTROS_DIST + self.DISTROS_EGG:
+ dist = get_distribution(name, use_egg_info=True)
+ self.assertNotEqual(dist, None)
+ dists.append(dist)
+
+ choxie, grammar, towel, bacon, banana, strawberry, cheese = dists
+
+ deps = [d.name for d in depgraph.dependent_dists(dists, choxie)]
+ self.checkLists([], deps)
+
+ deps = [d.name for d in depgraph.dependent_dists(dists, grammar)]
+ self.checkLists([], deps)
+
+ deps = [d.name for d in depgraph.dependent_dists(dists, towel)]
+ self.checkLists(['choxie'], deps)
+
+ deps = [d.name for d in depgraph.dependent_dists(dists, bacon)]
+ self.checkLists(['choxie', 'towel-stuff', 'grammar'], deps)
+
+ deps = [d.name for d in depgraph.dependent_dists(dists, strawberry)]
+ self.checkLists(['banana'], deps)
+
+ deps = [d.name for d in depgraph.dependent_dists(dists, cheese)]
+ self.checkLists([], deps)
+
+ @requires_zlib
+ def test_graph_to_dot(self):
+ expected = (
+ ('towel-stuff', 'bacon', 'bacon (<=0.2)'),
+ ('grammar', 'bacon', 'truffles (>=1.2)'),
+ ('choxie', 'towel-stuff', 'towel-stuff (0.1)'),
+ ('banana', 'strawberry', 'strawberry (>=0.5)'),
+ )
+
+ dists = []
+ for name in self.DISTROS_DIST + self.DISTROS_EGG:
+ dist = get_distribution(name, use_egg_info=True)
+ self.assertNotEqual(dist, None)
+ dists.append(dist)
+
+ graph = depgraph.generate_graph(dists)
+ buf = StringIO()
+ depgraph.graph_to_dot(graph, buf)
+ buf.seek(0)
+ matches = []
+ lines = buf.readlines()
+ for line in lines[1:-1]: # skip the first and the last lines
+ if line[-1] == '\n':
+ line = line[:-1]
+ match = self.EDGE.match(line.strip())
+ self.assertIsNot(match, None)
+ matches.append(match.groups())
+
+ self.checkLists(matches, expected)
+
+ @requires_zlib
+ def test_graph_disconnected_to_dot(self):
+ dependencies_expected = (
+ ('towel-stuff', 'bacon', 'bacon (<=0.2)'),
+ ('grammar', 'bacon', 'truffles (>=1.2)'),
+ ('choxie', 'towel-stuff', 'towel-stuff (0.1)'),
+ ('banana', 'strawberry', 'strawberry (>=0.5)'),
+ )
+ disconnected_expected = ('cheese', 'bacon', 'strawberry')
+
+ dists = []
+ for name in self.DISTROS_DIST + self.DISTROS_EGG:
+ dist = get_distribution(name, use_egg_info=True)
+ self.assertNotEqual(dist, None)
+ dists.append(dist)
+
+ graph = depgraph.generate_graph(dists)
+ buf = StringIO()
+ depgraph.graph_to_dot(graph, buf, skip_disconnected=False)
+ buf.seek(0)
+ lines = buf.readlines()
+
+ dependencies_lines = []
+ disconnected_lines = []
+
+ # First sort output lines into dependencies and disconnected lines.
+ # We also skip the attribute lines, and don't include the "{" and "}"
+ # lines.
+ disconnected_active = False
+ for line in lines[1:-1]: # Skip first and last line
+ if line.startswith('subgraph disconnected'):
+ disconnected_active = True
+ continue
+ if line.startswith('}') and disconnected_active:
+ disconnected_active = False
+ continue
+
+ if disconnected_active:
+ # Skip the 'label = "Disconnected"', etc. attribute lines.
+ if ' = ' not in line:
+ disconnected_lines.append(line)
+ else:
+ dependencies_lines.append(line)
+
+ dependencies_matches = []
+ for line in dependencies_lines:
+ if line[-1] == '\n':
+ line = line[:-1]
+ match = self.EDGE.match(line.strip())
+ self.assertIsNot(match, None)
+ dependencies_matches.append(match.groups())
+
+ disconnected_matches = []
+ for line in disconnected_lines:
+ if line[-1] == '\n':
+ line = line[:-1]
+ line = line.strip('"')
+ disconnected_matches.append(line)
+
+ self.checkLists(dependencies_matches, dependencies_expected)
+ self.checkLists(disconnected_matches, disconnected_expected)
+
+ @requires_zlib
+ def test_graph_bad_version_to_dot(self):
+ expected = (
+ ('towel-stuff', 'bacon', 'bacon (<=0.2)'),
+ ('grammar', 'bacon', 'truffles (>=1.2)'),
+ ('choxie', 'towel-stuff', 'towel-stuff (0.1)'),
+ ('banana', 'strawberry', 'strawberry (>=0.5)'),
+ )
+
+ dists = []
+ for name in self.DISTROS_DIST + self.DISTROS_EGG + self.BAD_EGGS:
+ dist = get_distribution(name, use_egg_info=True)
+ self.assertNotEqual(dist, None)
+ dists.append(dist)
+
+ graph = depgraph.generate_graph(dists)
+ buf = StringIO()
+ depgraph.graph_to_dot(graph, buf)
+ buf.seek(0)
+ matches = []
+ lines = buf.readlines()
+ for line in lines[1:-1]: # skip the first and the last lines
+ if line[-1] == '\n':
+ line = line[:-1]
+ match = self.EDGE.match(line.strip())
+ self.assertIsNot(match, None)
+ matches.append(match.groups())
+
+ self.checkLists(matches, expected)
+
+ @requires_zlib
+ def test_repr(self):
+ dists = []
+ for name in self.DISTROS_DIST + self.DISTROS_EGG + self.BAD_EGGS:
+ dist = get_distribution(name, use_egg_info=True)
+ self.assertNotEqual(dist, None)
+ dists.append(dist)
+
+ graph = depgraph.generate_graph(dists)
+ self.assertTrue(repr(graph))
+
+ @requires_zlib
+ def test_main(self):
+ tempout = StringIO()
+ old = sys.stdout
+ sys.stdout = tempout
+ oldargv = sys.argv[:]
+ sys.argv[:] = ['script.py']
+ try:
+ try:
+ depgraph.main()
+ except SystemExit:
+ pass
+ finally:
+ sys.stdout = old
+ sys.argv[:] = oldargv
+
+ # check what main did; XXX we could do more checks here
+ tempout.seek(0)
+ res = tempout.read()
+ self.assertIn('towel', res)
+
+
+def test_suite():
+ return unittest.makeSuite(DepGraphTestCase)
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_dist.py b/Lib/packaging/tests/test_dist.py
new file mode 100644
index 0000000..0623990
--- /dev/null
+++ b/Lib/packaging/tests/test_dist.py
@@ -0,0 +1,264 @@
+"""Tests for packaging.dist."""
+import os
+import sys
+import textwrap
+
+import packaging.dist
+
+from packaging.dist import Distribution
+from packaging.command.cmd import Command
+from packaging.errors import PackagingModuleError, PackagingOptionError
+from packaging.tests import support, unittest
+from packaging.tests.support import create_distribution, use_command
+from test.support import unload
+
+
+class test_dist(Command):
+ """Custom command used for testing."""
+
+ user_options = [
+ ('sample-option=', 'S',
+ "help text"),
+ ]
+
+ def initialize_options(self):
+ self.sample_option = None
+ self._record = []
+
+ def finalize_options(self):
+ if self.sample_option is None:
+ self.sample_option = 'default value'
+
+ def run(self):
+ self._record.append('test_dist has run')
+
+
+class DistributionTestCase(support.TempdirManager,
+ support.LoggingCatcher,
+ support.EnvironRestorer,
+ unittest.TestCase):
+
+ restore_environ = ['HOME', 'PLAT']
+
+ def setUp(self):
+ super(DistributionTestCase, self).setUp()
+ # XXX this is ugly, we should fix the functions to accept args
+ # (defaulting to sys.argv)
+ self.argv = sys.argv, sys.argv[:]
+ del sys.argv[1:]
+
+ def tearDown(self):
+ sys.argv = self.argv[0]
+ sys.argv[:] = self.argv[1]
+ super(DistributionTestCase, self).tearDown()
+
+ @unittest.skip('needs to be updated')
+ def test_debug_mode(self):
+ tmpdir = self.mkdtemp()
+ setupcfg = os.path.join(tmpdir, 'setup.cfg')
+ with open(setupcfg, "w") as f:
+ f.write("[global]\n")
+ f.write("command_packages = foo.bar, splat")
+
+ files = [setupcfg]
+ sys.argv.append("build")
+ __, stdout = captured_stdout(create_distribution, files)
+ self.assertEqual(stdout, '')
+ # XXX debug mode does not exist anymore, test logging levels in this
+ # test instead
+ packaging.dist.DEBUG = True
+ try:
+ __, stdout = captured_stdout(create_distribution, files)
+ self.assertEqual(stdout, '')
+ finally:
+ packaging.dist.DEBUG = False
+
+ def test_bad_attr(self):
+ Distribution(attrs={'author': 'xxx',
+ 'name': 'xxx',
+ 'version': '1.2',
+ 'home_page': 'xxxx',
+ 'badoptname': 'xxx'})
+ logs = self.get_logs()
+ self.assertEqual(len(logs), 1)
+ self.assertIn('unknown argument', logs[0])
+
+ def test_empty_options(self):
+ # an empty options dictionary should not stay in the
+ # list of attributes
+ dist = Distribution(attrs={'author': 'xxx', 'name': 'xxx',
+ 'version': '1.2', 'home_page': 'xxxx',
+ 'options': {}})
+
+ self.assertEqual(self.get_logs(), [])
+ self.assertNotIn('options', dir(dist))
+
+ def test_non_empty_options(self):
+ # TODO: how to actually use options is not documented except
+ # for a few cryptic comments in dist.py. If this is to stay
+ # in the public API, it deserves some better documentation.
+
+ # Here is an example of how it's used out there:
+ # http://svn.pythonmac.org/py2app/py2app/trunk/doc/
+ # index.html#specifying-customizations
+ dist = Distribution(attrs={'author': 'xxx',
+ 'name': 'xxx',
+ 'version': 'xxx',
+ 'home_page': 'xxxx',
+ 'options': {'sdist': {'owner': 'root'}}})
+
+ self.assertIn('owner', dist.get_option_dict('sdist'))
+
+ def test_finalize_options(self):
+ attrs = {'keywords': 'one,two',
+ 'platform': 'one,two'}
+
+ dist = Distribution(attrs=attrs)
+ dist.finalize_options()
+
+ # finalize_options splits platforms and keywords
+ self.assertEqual(dist.metadata['platform'], ['one', 'two'])
+ self.assertEqual(dist.metadata['keywords'], ['one', 'two'])
+
+ def test_custom_pydistutils(self):
+ # Bug #2166: make sure pydistutils.cfg is found
+ if os.name == 'posix':
+ user_filename = ".pydistutils.cfg"
+ else:
+ user_filename = "pydistutils.cfg"
+
+ temp_dir = self.mkdtemp()
+ user_filename = os.path.join(temp_dir, user_filename)
+ with open(user_filename, 'w') as f:
+ f.write('.')
+
+ dist = Distribution()
+
+ os.environ['HOME'] = temp_dir
+ files = dist.find_config_files()
+ self.assertIn(user_filename, files)
+
+ def test_find_config_files_disable(self):
+ # Bug #1180: Allow users to disable their own config file.
+ temp_home = self.mkdtemp()
+ if os.name == 'posix':
+ user_filename = os.path.join(temp_home, ".pydistutils.cfg")
+ else:
+ user_filename = os.path.join(temp_home, "pydistutils.cfg")
+
+ with open(user_filename, 'w') as f:
+ f.write('[distutils2]\n')
+
+ def _expander(path):
+ return temp_home
+
+ old_expander = os.path.expanduser
+ os.path.expanduser = _expander
+ try:
+ d = packaging.dist.Distribution()
+ all_files = d.find_config_files()
+
+ d = packaging.dist.Distribution(attrs={'script_args':
+ ['--no-user-cfg']})
+ files = d.find_config_files()
+ finally:
+ os.path.expanduser = old_expander
+
+ # make sure --no-user-cfg disables the user cfg file
+ self.assertEqual((len(all_files) - 1), len(files))
+
+ def test_special_hooks_parsing(self):
+ temp_home = self.mkdtemp()
+ config_files = [os.path.join(temp_home, "config1.cfg"),
+ os.path.join(temp_home, "config2.cfg")]
+
+ # Store two aliased hooks in config files
+ self.write_file((temp_home, "config1.cfg"),
+ '[test_dist]\npre-hook.a = type')
+ self.write_file((temp_home, "config2.cfg"),
+ '[test_dist]\npre-hook.b = type')
+
+ use_command(self, 'packaging.tests.test_dist.test_dist')
+
+ dist = create_distribution(config_files)
+ cmd = dist.get_command_obj("test_dist")
+ self.assertEqual(cmd.pre_hook, {"a": 'type', "b": 'type'})
+
+ def test_hooks_get_run(self):
+ temp_home = self.mkdtemp()
+ module_name = os.path.split(temp_home)[-1]
+ pyname = '%s.py' % module_name
+ config_file = os.path.join(temp_home, "config1.cfg")
+ hooks_module = os.path.join(temp_home, pyname)
+
+ self.write_file(config_file, textwrap.dedent('''\
+ [test_dist]
+ pre-hook.test = %(modname)s.log_pre_call
+ post-hook.test = %(modname)s.log_post_call'''
+ % {'modname': module_name}))
+
+ self.write_file(hooks_module, textwrap.dedent('''\
+ record = []
+
+ def log_pre_call(cmd):
+ record.append('pre-%s' % cmd.get_command_name())
+
+ def log_post_call(cmd):
+ record.append('post-%s' % cmd.get_command_name())
+ '''))
+
+ use_command(self, 'packaging.tests.test_dist.test_dist')
+ d = create_distribution([config_file])
+ cmd = d.get_command_obj("test_dist")
+
+ # prepare the call recorders
+ sys.path.append(temp_home)
+ self.addCleanup(sys.path.remove, temp_home)
+ self.addCleanup(unload, module_name)
+ record = __import__(module_name).record
+
+ cmd.run = lambda: record.append('run')
+ cmd.finalize_options = lambda: record.append('finalize')
+ d.run_command('test_dist')
+
+ self.assertEqual(record, ['finalize',
+ 'pre-test_dist',
+ 'run',
+ 'post-test_dist'])
+
+ def test_hooks_importable(self):
+ temp_home = self.mkdtemp()
+ config_file = os.path.join(temp_home, "config1.cfg")
+
+ self.write_file(config_file, textwrap.dedent('''\
+ [test_dist]
+ pre-hook.test = nonexistent.dotted.name'''))
+
+ use_command(self, 'packaging.tests.test_dist.test_dist')
+ d = create_distribution([config_file])
+ cmd = d.get_command_obj("test_dist")
+ cmd.ensure_finalized()
+
+ self.assertRaises(PackagingModuleError, d.run_command, 'test_dist')
+
+ def test_hooks_callable(self):
+ temp_home = self.mkdtemp()
+ config_file = os.path.join(temp_home, "config1.cfg")
+
+ self.write_file(config_file, textwrap.dedent('''\
+ [test_dist]
+ pre-hook.test = packaging.tests.test_dist.__doc__'''))
+
+ use_command(self, 'packaging.tests.test_dist.test_dist')
+ d = create_distribution([config_file])
+ cmd = d.get_command_obj("test_dist")
+ cmd.ensure_finalized()
+
+ self.assertRaises(PackagingOptionError, d.run_command, 'test_dist')
+
+
+def test_suite():
+ return unittest.makeSuite(DistributionTestCase)
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_extension.py b/Lib/packaging/tests/test_extension.py
new file mode 100644
index 0000000..41182e5
--- /dev/null
+++ b/Lib/packaging/tests/test_extension.py
@@ -0,0 +1,15 @@
+"""Tests for packaging.extension."""
+import os
+
+from packaging.compiler.extension import Extension
+from packaging.tests import unittest
+
+class ExtensionTestCase(unittest.TestCase):
+
+ pass
+
+def test_suite():
+ return unittest.makeSuite(ExtensionTestCase)
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_install.py b/Lib/packaging/tests/test_install.py
new file mode 100644
index 0000000..cc1f5d3
--- /dev/null
+++ b/Lib/packaging/tests/test_install.py
@@ -0,0 +1,391 @@
+"""Tests for the packaging.install module."""
+import os
+import logging
+from tempfile import mkstemp
+from sysconfig import is_python_build
+
+from packaging import install
+from packaging.pypi.xmlrpc import Client
+from packaging.metadata import Metadata
+from packaging.tests.support import (LoggingCatcher, TempdirManager, unittest,
+ fake_dec)
+try:
+ import threading
+ from packaging.tests.pypi_server import use_xmlrpc_server
+except ImportError:
+ threading = None
+ use_xmlrpc_server = fake_dec
+
+
+class InstalledDist:
+ """Distribution object, represent distributions currently installed on the
+ system"""
+ def __init__(self, name, version, deps):
+ self.metadata = Metadata()
+ self.name = name
+ self.version = version
+ self.metadata['Name'] = name
+ self.metadata['Version'] = version
+ self.metadata['Requires-Dist'] = deps
+
+ def __repr__(self):
+ return '<InstalledDist %r>' % self.metadata['Name']
+
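+# For example, InstalledDist('bacon', '0.1', []) stands in for an installed
+# distribution exposing the metadata attributes the installer inspects.
+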
+
+class ToInstallDist:
+ """Distribution that will be installed"""
+
+ def __init__(self, files=False):
+ self._files = files
+ self.install_called = False
+ self.install_called_with = {}
+ self.uninstall_called = False
+ self._real_files = []
+ self.name = "fake"
+ self.version = "fake"
+ if files:
+ for f in range(0, 3):
+ fp, fn = mkstemp()
+ os.close(fp)
+ self._real_files.append(fn)
+
+ def _unlink_installed_files(self):
+ if self._files:
+ for fn in self._real_files:
+ os.unlink(fn)
+
+ def list_installed_files(self, **args):
+ if self._files:
+ return self._real_files
+
+ def get_install(self, **args):
+ return self.list_installed_files()
+
+
+class MagicMock:
+ def __init__(self, return_value=None, raise_exception=False):
+ self.called = False
+ self._times_called = 0
+ self._called_with = []
+ self._return_value = return_value
+ self._raise = raise_exception
+
+ def __call__(self, *args, **kwargs):
+ self.called = True
+ self._times_called = self._times_called + 1
+ self._called_with.append((args, kwargs))
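+ # _raise may be a single flag or a per-call sequence of flags;
+ # when the flag for this call is true, simulate a failure.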
+ iterable = hasattr(self._raise, '__iter__')
+ if self._raise:
+ if ((not iterable and self._raise)
+ or self._raise[self._times_called - 1]):
+ raise Exception
+ return self._return_value
+
+ def called_with(self, *args, **kwargs):
+ return (args, kwargs) in self._called_with
+
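+# Minimal usage sketch for the MagicMock helper above (illustrative only;
+# not exercised by the tests in this module):
+#
+#     mock = MagicMock(return_value=42)
+#     mock('a', key='b')                      # records the call, returns 42
+#     assert mock.called_with('a', key='b')
+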
+
+def get_installed_dists(dists):
+ """Return a list of fake installed dists.
+ The list is name, version, deps"""
+ objects = []
+ for name, version, deps in dists:
+ objects.append(InstalledDist(name, version, deps))
+ return objects
+
+
+class TestInstall(LoggingCatcher, TempdirManager, unittest.TestCase):
+ def _get_client(self, server, *args, **kwargs):
+ return Client(server.full_address, *args, **kwargs)
+
+ def _get_results(self, output):
+ """return a list of results"""
+ installed = [(o.name, str(o.version)) for o in output['install']]
+ remove = [(o.name, str(o.version)) for o in output['remove']]
+ conflict = [(o.name, str(o.version)) for o in output['conflict']]
+ return installed, remove, conflict
+
+ @unittest.skipIf(threading is None, 'needs threading')
+ @use_xmlrpc_server()
+ def test_existing_deps(self, server):
+ # Test that the installer gets the dependencies from the metadata
+ # and asks the index for those dependencies.
+ # In this test case, choxie depends on towel-stuff 0.1, which in turn
+ # depends on bacon <= 0.2:
+ # choxie -> towel-stuff -> bacon.
+ # None of the releases provides its metadata in the metadata 1.2 format.
+ client = self._get_client(server)
+ archive_path = '%s/distribution.tar.gz' % server.full_address
+ server.xmlrpc.set_distributions([
+ {'name': 'choxie',
+ 'version': '2.0.0.9',
+ 'requires_dist': ['towel-stuff (0.1)'],
+ 'url': archive_path},
+ {'name': 'towel-stuff',
+ 'version': '0.1',
+ 'requires_dist': ['bacon (<= 0.2)'],
+ 'url': archive_path},
+ {'name': 'bacon',
+ 'version': '0.1',
+ 'requires_dist': [],
+ 'url': archive_path},
+ ])
+ installed = get_installed_dists([('bacon', '0.1', [])])
+ output = install.get_infos("choxie", index=client,
+ installed=installed)
+
+ # bacon is not scheduled for installation, as it's already installed
+ # system-wide
+ self.assertEqual(0, len(output['remove']))
+ self.assertEqual(2, len(output['install']))
+ readable_output = [(o.name, str(o.version))
+ for o in output['install']]
+ self.assertIn(('towel-stuff', '0.1'), readable_output)
+ self.assertIn(('choxie', '2.0.0.9'), readable_output)
+
+ @unittest.skipIf(threading is None, 'needs threading')
+ @use_xmlrpc_server()
+ def test_upgrade_existing_deps(self, server):
+ client = self._get_client(server)
+ archive_path = '%s/distribution.tar.gz' % server.full_address
+ server.xmlrpc.set_distributions([
+ {'name': 'choxie',
+ 'version': '2.0.0.9',
+ 'requires_dist': ['towel-stuff (0.1)'],
+ 'url': archive_path},
+ {'name': 'towel-stuff',
+ 'version': '0.1',
+ 'requires_dist': ['bacon (>= 0.2)'],
+ 'url': archive_path},
+ {'name': 'bacon',
+ 'version': '0.2',
+ 'requires_dist': [],
+ 'url': archive_path},
+ ])
+
+ output = install.get_infos("choxie", index=client,
+ installed=get_installed_dists([('bacon', '0.1', [])]))
+ installed = [(o.name, str(o.version)) for o in output['install']]
+
+ # we need bacon 0.2, but 0.1 is installed.
+ # So we expect to remove 0.1 and to install 0.2 instead.
+ remove = [(o.name, str(o.version)) for o in output['remove']]
+ self.assertIn(('choxie', '2.0.0.9'), installed)
+ self.assertIn(('towel-stuff', '0.1'), installed)
+ self.assertIn(('bacon', '0.2'), installed)
+ self.assertIn(('bacon', '0.1'), remove)
+ self.assertEqual(0, len(output['conflict']))
+
+ @unittest.skipIf(threading is None, 'needs threading')
+ @use_xmlrpc_server()
+ def test_conflicts(self, server):
+ # Tests that conflicts are detected
+ client = self._get_client(server)
+ archive_path = '%s/distribution.tar.gz' % server.full_address
+
+ # choxie depends on towel-stuff, which depends on bacon.
+ server.xmlrpc.set_distributions([
+ {'name': 'choxie',
+ 'version': '2.0.0.9',
+ 'requires_dist': ['towel-stuff (0.1)'],
+ 'url': archive_path},
+ {'name': 'towel-stuff',
+ 'version': '0.1',
+ 'requires_dist': ['bacon (>= 0.2)'],
+ 'url': archive_path},
+ {'name': 'bacon',
+ 'version': '0.2',
+ 'requires_dist': [],
+ 'url': archive_path},
+ ])
+
+ # name, version, deps.
+ already_installed = [('bacon', '0.1', []),
+ ('chicken', '1.1', ['bacon (0.1)'])]
+ output = install.get_infos(
+ 'choxie', index=client,
+ installed=get_installed_dists(already_installed))
+
+ # we need bacon 0.2, but 0.1 is installed.
+ # So we expect to remove 0.1 and to install 0.2 instead.
+ installed, remove, conflict = self._get_results(output)
+ self.assertIn(('choxie', '2.0.0.9'), installed)
+ self.assertIn(('towel-stuff', '0.1'), installed)
+ self.assertIn(('bacon', '0.2'), installed)
+ self.assertIn(('bacon', '0.1'), remove)
+ self.assertIn(('chicken', '1.1'), conflict)
+
+ @unittest.skipIf(threading is None, 'needs threading')
+ @use_xmlrpc_server()
+ def test_installation_unexisting_project(self, server):
+ # Test that the installer raises an exception if the project does
+ # not exist.
+ client = self._get_client(server)
+ self.assertRaises(install.InstallationException,
+ install.get_infos,
+ 'unexisting project', index=client)
+
+ def test_move_files(self):
+ # test that the files are really moved, and that the new path is
+ # returned.
+ path = self.mkdtemp()
+ newpath = self.mkdtemp()
+ files = [os.path.join(path, str(x)) for x in range(1, 20)]
+ for f in files:
+ open(f, 'ab+').close()
+ output = [o for o in install._move_files(files, newpath)]
+
+ # check that the output lists the (old, new) locations
+ for file_ in files:
+ name = os.path.split(file_)[-1]
+ newloc = os.path.join(newpath, name)
+ self.assertIn((file_, newloc), output)
+
+ # remove the files
+ for f in [o[1] for o in output]: # o[1] is the new place
+ os.remove(f)
+
+ def test_update_infos(self):
+ tests = [[
+ {'foo': ['foobar', 'foo', 'baz'], 'baz': ['foo', 'foo']},
+ {'foo': ['additional_content', 'yeah'], 'baz': ['test', 'foo']},
+ {'foo': ['foobar', 'foo', 'baz', 'additional_content', 'yeah'],
+ 'baz': ['foo', 'foo', 'test', 'foo']},
+ ]]
+
+ for dict1, dict2, expect in tests:
+ install._update_infos(dict1, dict2)
+ for key in expect:
+ self.assertEqual(expect[key], dict1[key])
+
+ def test_install_dists_rollback(self):
+ # if the installation of one distribution fails, uninstall must be
+ # called on all the previously installed distributions.
+
+ old_install_dist = install._install_dist
+ old_uninstall = getattr(install, 'uninstall', None)
+
+ install._install_dist = MagicMock(return_value=[],
+ raise_exception=(False, True))
+ install.remove = MagicMock()
+ try:
+ d1 = ToInstallDist()
+ d2 = ToInstallDist()
+ path = self.mkdtemp()
+ self.assertRaises(Exception, install.install_dists, [d1, d2], path)
+ self.assertTrue(install._install_dist.called_with(d1, path))
+ self.assertTrue(install.remove.called)
+ finally:
+ install._install_dist = old_install_dist
+ install.remove = old_uninstall
+
+ def test_install_dists_success(self):
+ old_install_dist = install._install_dist
+ install._install_dist = MagicMock(return_value=[])
+ try:
+ # test that the install method is called on each distribution
+ d1 = ToInstallDist()
+ d2 = ToInstallDist()
+
+ # should call install
+ path = self.mkdtemp()
+ install.install_dists([d1, d2], path)
+ for dist in (d1, d2):
+ self.assertTrue(install._install_dist.called_with(dist, path))
+ finally:
+ install._install_dist = old_install_dist
+
+ def test_install_from_infos_conflict(self):
+ # assert conflicts raise an exception
+ self.assertRaises(install.InstallationConflict,
+ install.install_from_infos,
+ conflicts=[ToInstallDist()])
+
+ def test_install_from_infos_remove_success(self):
+ old_install_dists = install.install_dists
+ install.install_dists = lambda x, y=None: None
+ try:
+ dists = []
+ for i in range(2):
+ dists.append(ToInstallDist(files=True))
+ install.install_from_infos(remove=dists)
+
+ # assert that the files have been removed
+ for dist in dists:
+ for f in dist.list_installed_files():
+ self.assertFalse(os.path.exists(f))
+ finally:
+ install.install_dists = old_install_dists
+
+ def test_install_from_infos_remove_rollback(self):
+ old_install_dist = install._install_dist
+ old_uninstall = getattr(install, 'uninstall', None)
+
+ install._install_dist = MagicMock(return_value=[],
+ raise_exception=(False, True))
+ install.uninstall = MagicMock()
+ try:
+ # assert that if an error occurs, the removed files are restored.
+ remove = []
+ for i in range(2):
+ remove.append(ToInstallDist(files=True))
+ to_install = [ToInstallDist(), ToInstallDist()]
+ temp_dir = self.mkdtemp()
+
+ self.assertRaises(Exception, install.install_from_infos,
+ install_path=temp_dir, install=to_install,
+ remove=remove)
+ # assert that the files are in the same place
+ # assert that the files have been removed
+ for dist in remove:
+ for f in dist.list_installed_files():
+ self.assertTrue(os.path.exists(f))
+ dist._unlink_installed_files()
+ finally:
+ install._install_dist = old_install_dist
+ install.uninstall = old_uninstall
+
+ def test_install_from_infos_install_success(self):
+ old_install_dist = install._install_dist
+ install._install_dist = MagicMock([])
+ try:
+ # assert that the distribution can be installed
+ install_path = "my_install_path"
+ to_install = [ToInstallDist(), ToInstallDist()]
+
+ install.install_from_infos(install_path, install=to_install)
+ for dist in to_install:
+ install._install_dist.called_with(install_path)
+ finally:
+ install._install_dist = old_install_dist
+
+ def test_install_permission_denied(self):
+ # if we don't have access to the installation path, we should abort
+ # immediately
+ project = os.path.join(os.path.dirname(__file__), 'package.tgz')
+
+ # when running from an uninstalled build, a warning is emitted and the
+ # installation is not attempted
+ if is_python_build():
+ self.assertFalse(install.install(project))
+ self.assertEqual(1, len(self.get_logs(logging.ERROR)))
+ return
+
+ install_path = self.mkdtemp()
+ old_get_path = install.get_path
+ install.get_path = lambda path: install_path
+ old_mod = os.stat(install_path).st_mode
+ os.chmod(install_path, 0)
+ try:
+ self.assertFalse(install.install(project))
+ finally:
+ os.chmod(install_path, old_mod)
+ install.get_path = old_get_path
+
+
+def test_suite():
+ suite = unittest.TestSuite()
+ suite.addTest(unittest.makeSuite(TestInstall))
+ return suite
+
+if __name__ == '__main__':
+ unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_manifest.py b/Lib/packaging/tests/test_manifest.py
new file mode 100644
index 0000000..7aa59c1
--- /dev/null
+++ b/Lib/packaging/tests/test_manifest.py
@@ -0,0 +1,331 @@
+"""Tests for packaging.manifest."""
+import os
+import re
+from io import StringIO
+from packaging.errors import PackagingTemplateError
+from packaging.manifest import Manifest, _translate_pattern, _glob_to_re
+
+from packaging.tests import unittest, support
+
+MANIFEST_IN = """\
+include ok
+include xo
+exclude xo
+include foo.tmp
+include buildout.cfg
+global-include *.x
+global-include *.txt
+global-exclude *.tmp
+recursive-include f *.oo
+recursive-exclude global *.x
+graft dir
+prune dir3
+"""
+
+MANIFEST_IN_2 = """\
+recursive-include foo *.py # ok
+# nothing here
+
+#
+
+recursive-include bar \\
+ *.dat *.txt
+"""
+
+MANIFEST_IN_3 = """\
+README
+file1
+"""
+
+
+def make_local_path(s):
+ """Converts '/' in a string to os.sep"""
+ return s.replace('/', os.sep)
+
+
+class ManifestTestCase(support.TempdirManager,
+ support.LoggingCatcher,
+ unittest.TestCase):
+
+ def assertNoWarnings(self):
+ self.assertEqual(self.get_logs(), [])
+
+ def assertWarnings(self):
+ self.assertNotEqual(self.get_logs(), [])
+
+ def test_manifest_reader(self):
+ tmpdir = self.mkdtemp()
+ MANIFEST = os.path.join(tmpdir, 'MANIFEST.in')
+ with open(MANIFEST, 'w') as f:
+ f.write(MANIFEST_IN_2)
+
+ manifest = Manifest()
+ manifest.read_template(MANIFEST)
+
+ warnings = self.get_logs()
+ # the manifest should have been read and 3 warnings issued
+ # (we didn't provide the files)
+ self.assertEqual(3, len(warnings))
+ for warning in warnings:
+ self.assertIn('no files found matching', warning)
+
+ # manifest also accepts file-like objects
+ with open(MANIFEST) as f:
+ manifest.read_template(f)
+
+ # the manifest should have been read and 3 warnings issued
+ # (we didn't provide the files)
+ self.assertEqual(3, len(warnings))
+
+ def test_default_actions(self):
+ tmpdir = self.mkdtemp()
+ self.addCleanup(os.chdir, os.getcwd())
+ os.chdir(tmpdir)
+ self.write_file('README', 'xxx')
+ self.write_file('file1', 'xxx')
+ content = StringIO(MANIFEST_IN_3)
+ manifest = Manifest()
+ manifest.read_template(content)
+ self.assertEqual(['README', 'file1'], manifest.files)
+
+ def test_glob_to_re(self):
+ sep = os.sep
+ if os.sep == '\\':
+ sep = r'\\'
+
+ for glob, regex in (
+ # simple cases
+ ('foo*', r'foo[^%(sep)s]*\Z(?ms)'),
+ ('foo?', r'foo[^%(sep)s]\Z(?ms)'),
+ ('foo??', r'foo[^%(sep)s][^%(sep)s]\Z(?ms)'),
+ # special cases
+ (r'foo\\*', r'foo\\\\[^%(sep)s]*\Z(?ms)'),
+ (r'foo\\\*', r'foo\\\\\\[^%(sep)s]*\Z(?ms)'),
+ ('foo????', r'foo[^%(sep)s][^%(sep)s][^%(sep)s][^%(sep)s]\Z(?ms)'),
+ (r'foo\\??', r'foo\\\\[^%(sep)s][^%(sep)s]\Z(?ms)'),
+ ):
+ regex = regex % {'sep': sep}
+ self.assertEqual(_glob_to_re(glob), regex)
+
+ def test_process_template_line(self):
+ # testing all MANIFEST.in template patterns
+ manifest = Manifest()
+ l = make_local_path
+
+ # simulated file list
+ manifest.allfiles = ['foo.tmp', 'ok', 'xo', 'four.txt',
+ 'buildout.cfg',
+ # filelist does not filter out VCS directories,
+ # it's sdist that does
+ l('.hg/last-message.txt'),
+ l('global/one.txt'),
+ l('global/two.txt'),
+ l('global/files.x'),
+ l('global/here.tmp'),
+ l('f/o/f.oo'),
+ l('dir/graft-one'),
+ l('dir/dir2/graft2'),
+ l('dir3/ok'),
+ l('dir3/sub/ok.txt'),
+ ]
+
+ for line in MANIFEST_IN.split('\n'):
+ if line.strip() == '':
+ continue
+ manifest._process_template_line(line)
+
+ wanted = ['ok',
+ 'buildout.cfg',
+ 'four.txt',
+ l('.hg/last-message.txt'),
+ l('global/one.txt'),
+ l('global/two.txt'),
+ l('f/o/f.oo'),
+ l('dir/graft-one'),
+ l('dir/dir2/graft2'),
+ ]
+
+ self.assertEqual(manifest.files, wanted)
+
+ def test_remove_duplicates(self):
+ manifest = Manifest()
+ manifest.files = ['a', 'b', 'a', 'g', 'c', 'g']
+ # files must be sorted beforehand (like sdist does)
+ manifest.sort()
+ manifest.remove_duplicates()
+ self.assertEqual(manifest.files, ['a', 'b', 'c', 'g'])
+
+ def test_translate_pattern(self):
+ # blackbox test of a private function
+
+ # not regex
+ pattern = _translate_pattern('a', anchor=True, is_regex=False)
+ self.assertTrue(hasattr(pattern, 'search'))
+
+ # is a regex
+ regex = re.compile('a')
+ pattern = _translate_pattern(regex, anchor=True, is_regex=True)
+ self.assertEqual(pattern, regex)
+
+ # plain string flagged as regex
+ pattern = _translate_pattern('a', anchor=True, is_regex=True)
+ self.assertTrue(hasattr(pattern, 'search'))
+
+ # glob support
+ pattern = _translate_pattern('*.py', anchor=True, is_regex=False)
+ self.assertTrue(pattern.search('filelist.py'))
+
+ def test_exclude_pattern(self):
+ # return False if no match
+ manifest = Manifest()
+ self.assertFalse(manifest.exclude_pattern('*.py'))
+
+ # return True if files match
+ manifest = Manifest()
+ manifest.files = ['a.py', 'b.py']
+ self.assertTrue(manifest.exclude_pattern('*.py'))
+
+ # test excludes
+ manifest = Manifest()
+ manifest.files = ['a.py', 'a.txt']
+ manifest.exclude_pattern('*.py')
+ self.assertEqual(manifest.files, ['a.txt'])
+
+ def test_include_pattern(self):
+ # return False if no match
+ manifest = Manifest()
+ manifest.allfiles = []
+ self.assertFalse(manifest._include_pattern('*.py'))
+
+ # return True if files match
+ manifest = Manifest()
+ manifest.allfiles = ['a.py', 'b.txt']
+ self.assertTrue(manifest._include_pattern('*.py'))
+
+ # test * matches all files
+ manifest = Manifest()
+ self.assertIsNone(manifest.allfiles)
+ manifest.allfiles = ['a.py', 'b.txt']
+ manifest._include_pattern('*')
+ self.assertEqual(manifest.allfiles, ['a.py', 'b.txt'])
+
+ def test_process_template(self):
+ l = make_local_path
+ # invalid lines
+ manifest = Manifest()
+ for action in ('include', 'exclude', 'global-include',
+ 'global-exclude', 'recursive-include',
+ 'recursive-exclude', 'graft', 'prune'):
+ self.assertRaises(PackagingTemplateError,
+ manifest._process_template_line, action)
+
+ # implicit include
+ manifest = Manifest()
+ manifest.allfiles = ['a.py', 'b.txt', l('d/c.py')]
+
+ manifest._process_template_line('*.py')
+ self.assertEqual(manifest.files, ['a.py'])
+ self.assertNoWarnings()
+
+ # include
+ manifest = Manifest()
+ manifest.allfiles = ['a.py', 'b.txt', l('d/c.py')]
+
+ manifest._process_template_line('include *.py')
+ self.assertEqual(manifest.files, ['a.py'])
+ self.assertNoWarnings()
+
+ manifest._process_template_line('include *.rb')
+ self.assertEqual(manifest.files, ['a.py'])
+ self.assertWarnings()
+
+ # exclude
+ manifest = Manifest()
+ manifest.files = ['a.py', 'b.txt', l('d/c.py')]
+
+ manifest._process_template_line('exclude *.py')
+ self.assertEqual(manifest.files, ['b.txt', l('d/c.py')])
+ self.assertNoWarnings()
+
+ manifest._process_template_line('exclude *.rb')
+ self.assertEqual(manifest.files, ['b.txt', l('d/c.py')])
+ self.assertWarnings()
+
+ # global-include
+ manifest = Manifest()
+ manifest.allfiles = ['a.py', 'b.txt', l('d/c.py')]
+
+ manifest._process_template_line('global-include *.py')
+ self.assertEqual(manifest.files, ['a.py', l('d/c.py')])
+ self.assertNoWarnings()
+
+ manifest._process_template_line('global-include *.rb')
+ self.assertEqual(manifest.files, ['a.py', l('d/c.py')])
+ self.assertWarnings()
+
+ # global-exclude
+ manifest = Manifest()
+ manifest.files = ['a.py', 'b.txt', l('d/c.py')]
+
+ manifest._process_template_line('global-exclude *.py')
+ self.assertEqual(manifest.files, ['b.txt'])
+ self.assertNoWarnings()
+
+ manifest._process_template_line('global-exclude *.rb')
+ self.assertEqual(manifest.files, ['b.txt'])
+ self.assertWarnings()
+
+ # recursive-include
+ manifest = Manifest()
+ manifest.allfiles = ['a.py', l('d/b.py'), l('d/c.txt'), l('d/d/e.py')]
+
+ manifest._process_template_line('recursive-include d *.py')
+ self.assertEqual(manifest.files, [l('d/b.py'), l('d/d/e.py')])
+ self.assertNoWarnings()
+
+ manifest._process_template_line('recursive-include e *.py')
+ self.assertEqual(manifest.files, [l('d/b.py'), l('d/d/e.py')])
+ self.assertWarnings()
+
+ # recursive-exclude
+ manifest = Manifest()
+ manifest.files = ['a.py', l('d/b.py'), l('d/c.txt'), l('d/d/e.py')]
+
+ manifest._process_template_line('recursive-exclude d *.py')
+ self.assertEqual(manifest.files, ['a.py', l('d/c.txt')])
+ self.assertNoWarnings()
+
+ manifest._process_template_line('recursive-exclude e *.py')
+ self.assertEqual(manifest.files, ['a.py', l('d/c.txt')])
+ self.assertWarnings()
+
+ # graft
+ manifest = Manifest()
+ manifest.allfiles = ['a.py', l('d/b.py'), l('d/d/e.py'), l('f/f.py')]
+
+ manifest._process_template_line('graft d')
+ self.assertEqual(manifest.files, [l('d/b.py'), l('d/d/e.py')])
+ self.assertNoWarnings()
+
+ manifest._process_template_line('graft e')
+ self.assertEqual(manifest.files, [l('d/b.py'), l('d/d/e.py')])
+ self.assertWarnings()
+
+ # prune
+ manifest = Manifest()
+ manifest.files = ['a.py', l('d/b.py'), l('d/d/e.py'), l('f/f.py')]
+
+ manifest._process_template_line('prune d')
+ self.assertEqual(manifest.files, ['a.py', l('f/f.py')])
+ self.assertNoWarnings()
+
+ manifest._process_template_line('prune e')
+ self.assertEqual(manifest.files, ['a.py', l('f/f.py')])
+ self.assertWarnings()
+
+
+def test_suite():
+ return unittest.makeSuite(ManifestTestCase)
+
+if __name__ == '__main__':
+ unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_markers.py b/Lib/packaging/tests/test_markers.py
new file mode 100644
index 0000000..a494c6b
--- /dev/null
+++ b/Lib/packaging/tests/test_markers.py
@@ -0,0 +1,75 @@
+"""Tests for packaging.markers."""
+import os
+import sys
+import platform
+from packaging.markers import interpret
+
+from packaging.tests import unittest
+from packaging.tests.support import LoggingCatcher
+
+
+class MarkersTestCase(LoggingCatcher,
+ unittest.TestCase):
+
+ def test_interpret(self):
+ sys_platform = sys.platform
+ version = sys.version.split()[0]
+ os_name = os.name
+ platform_version = platform.version()
+ platform_machine = platform.machine()
+ platform_python_implementation = platform.python_implementation()
+
+ self.assertTrue(interpret("sys.platform == '%s'" % sys_platform))
+ self.assertTrue(interpret(
+ "sys.platform == '%s' and python_full_version == '%s'" %
+ (sys_platform, version)))
+ self.assertTrue(interpret("'%s' == sys.platform" % sys_platform))
+ self.assertTrue(interpret('os.name == "%s"' % os_name))
+ self.assertTrue(interpret(
+ 'platform.version == "%s" and platform.machine == "%s"' %
+ (platform_version, platform_machine)))
+ self.assertTrue(interpret('platform.python_implementation == "%s"' %
+ platform_python_implementation))
+
+ # expressions that need to raise a syntax error
+ ops = ('os.name == os.name', 'os.name == 2', "'2' == '2'",
+ 'okpjonon', '', 'os.name ==', 'python_version == 2.4')
+ for op in ops:
+ self.assertRaises(SyntaxError, interpret, op)
+
+ # combined operations
+ OP = 'os.name == "%s"' % os_name
+ FALSEOP = 'os.name == "buuuu"'
+ AND = ' and '
+ OR = ' or '
+ self.assertTrue(interpret(OP + AND + OP))
+ self.assertTrue(interpret(OP + AND + OP + AND + OP))
+ self.assertTrue(interpret(OP + OR + OP))
+ self.assertTrue(interpret(OP + OR + FALSEOP))
+ self.assertTrue(interpret(OP + OR + OP + OR + FALSEOP))
+ self.assertTrue(interpret(OP + OR + FALSEOP + OR + FALSEOP))
+ self.assertTrue(interpret(FALSEOP + OR + OP))
+ self.assertFalse(interpret(FALSEOP + AND + FALSEOP))
+ self.assertFalse(interpret(FALSEOP + OR + FALSEOP))
+
+ # other operators
+ self.assertTrue(interpret("os.name != 'buuuu'"))
+ self.assertTrue(interpret("python_version > '1.0'"))
+ self.assertTrue(interpret("python_version < '5.0'"))
+ self.assertTrue(interpret("python_version <= '5.0'"))
+ self.assertTrue(interpret("python_version >= '1.0'"))
+ self.assertTrue(interpret("'%s' in os.name" % os_name))
+ self.assertTrue(interpret("'buuuu' not in os.name"))
+ self.assertTrue(interpret(
+ "'buuuu' not in os.name and '%s' in os.name" % os_name))
+
+ # execution context
+ self.assertTrue(interpret('python_version == "0.1"',
+ {'python_version': '0.1'}))
+
+
+def test_suite():
+ return unittest.makeSuite(MarkersTestCase)
+
+if __name__ == '__main__':
+ unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_metadata.py b/Lib/packaging/tests/test_metadata.py
new file mode 100644
index 0000000..54a7af3
--- /dev/null
+++ b/Lib/packaging/tests/test_metadata.py
@@ -0,0 +1,454 @@
+"""Tests for packaging.metadata."""
+import os
+import sys
+from textwrap import dedent
+from io import StringIO
+
+from packaging.errors import (MetadataConflictError, MetadataMissingError,
+ MetadataUnrecognizedVersionError)
+from packaging.metadata import Metadata, PKG_INFO_PREFERRED_VERSION
+
+from packaging.tests import unittest
+from packaging.tests.support import (LoggingCatcher, TempdirManager,
+ EnvironRestorer)
+
+
+class MetadataTestCase(LoggingCatcher,
+ TempdirManager,
+ EnvironRestorer,
+ unittest.TestCase):
+
+ maxDiff = None
+ restore_environ = ['HOME']
+
+ def setUp(self):
+ super(MetadataTestCase, self).setUp()
+ self.argv = sys.argv, sys.argv[:]
+
+ def tearDown(self):
+ sys.argv = self.argv[0]
+ sys.argv[:] = self.argv[1]
+ super(MetadataTestCase, self).tearDown()
+
+ #### Test various methods of the Metadata class
+
+ def test_instantiation(self):
+ PKG_INFO = os.path.join(os.path.dirname(__file__), 'PKG-INFO')
+ with open(PKG_INFO, 'r', encoding='utf-8') as f:
+ contents = f.read()
+ fp = StringIO(contents)
+
+ m = Metadata()
+ self.assertRaises(MetadataUnrecognizedVersionError, m.items)
+
+ m = Metadata(PKG_INFO)
+ self.assertEqual(len(m.items()), 22)
+
+ m = Metadata(fileobj=fp)
+ self.assertEqual(len(m.items()), 22)
+
+ m = Metadata(mapping=dict(name='Test', version='1.0'))
+ self.assertEqual(len(m.items()), 11)
+
+ d = dict(m.items())
+ self.assertRaises(TypeError, Metadata,
+ PKG_INFO, fileobj=fp)
+ self.assertRaises(TypeError, Metadata,
+ PKG_INFO, mapping=d)
+ self.assertRaises(TypeError, Metadata,
+ fileobj=fp, mapping=d)
+ self.assertRaises(TypeError, Metadata,
+ PKG_INFO, mapping=m, fileobj=fp)
+
+ def test_metadata_markers(self):
+ # see if we can be platform-aware
+ PKG_INFO = os.path.join(os.path.dirname(__file__), 'PKG-INFO')
+ with open(PKG_INFO, 'r', encoding='utf-8') as f:
+ content = f.read() % sys.platform
+ metadata = Metadata(platform_dependent=True)
+
+ metadata.read_file(StringIO(content))
+ self.assertEqual(metadata['Requires-Dist'], ['bar'])
+ metadata['Name'] = "baz; sys.platform == 'blah'"
+ # FIXME is None or 'UNKNOWN' correct here?
+ # where is that documented?
+ self.assertEqual(metadata['Name'], None)
+
+ # test with context
+ context = {'sys.platform': 'okook'}
+ metadata = Metadata(platform_dependent=True, execution_context=context)
+ metadata.read_file(StringIO(content))
+ self.assertEqual(metadata['Requires-Dist'], ['foo'])
+
+ def test_mapping_api(self):
+ PKG_INFO = os.path.join(os.path.dirname(__file__), 'PKG-INFO')
+ with open(PKG_INFO, 'r', encoding='utf-8') as f:
+ content = f.read() % sys.platform
+ metadata = Metadata(fileobj=StringIO(content))
+ self.assertIn('Version', metadata.keys())
+ self.assertIn('0.5', metadata.values())
+ self.assertIn(('Version', '0.5'), metadata.items())
+
+ metadata.update({'version': '0.6'})
+ self.assertEqual(metadata['Version'], '0.6')
+ metadata.update([('version', '0.7')])
+ self.assertEqual(metadata['Version'], '0.7')
+
+ # make sure update method checks values like the set method does
+ metadata.update({'version': '1--2'})
+ self.assertEqual(len(self.get_logs()), 1)
+
+ # XXX caveat: the keys method and friends are not 3.x-style views
+ # should be changed or documented
+ self.assertEqual(list(metadata), metadata.keys())
+
+ def test_read_metadata(self):
+ fields = {'name': 'project',
+ 'version': '1.0',
+ 'description': 'desc',
+ 'summary': 'xxx',
+ 'download_url': 'http://example.com',
+ 'keywords': ['one', 'two'],
+ 'requires_dist': ['foo']}
+
+ metadata = Metadata(mapping=fields)
+ PKG_INFO = StringIO()
+ metadata.write_file(PKG_INFO)
+ PKG_INFO.seek(0)
+
+ metadata = Metadata(fileobj=PKG_INFO)
+
+ self.assertEqual(metadata['name'], 'project')
+ self.assertEqual(metadata['version'], '1.0')
+ self.assertEqual(metadata['summary'], 'xxx')
+ self.assertEqual(metadata['download_url'], 'http://example.com')
+ self.assertEqual(metadata['keywords'], ['one', 'two'])
+ self.assertEqual(metadata['platform'], [])
+ self.assertEqual(metadata['obsoletes'], [])
+ self.assertEqual(metadata['requires-dist'], ['foo'])
+
+ def test_write_metadata(self):
+ # check support of non-ASCII values
+ tmp_dir = self.mkdtemp()
+ my_file = os.path.join(tmp_dir, 'f')
+
+ metadata = Metadata(mapping={'author': 'Café Junior',
+ 'name': 'my.project',
+ 'summary': 'Café torréfié',
+ 'description': 'Héhéhé',
+ 'keywords': ['café', 'coffee']})
+ metadata.write(my_file)
+
+ # the file should use UTF-8
+ metadata2 = Metadata()
+ with open(my_file, encoding='utf-8') as fp:
+ metadata2.read_file(fp)
+
+ # XXX when keywords are not defined, metadata will have
+ # 'Keywords': [] but metadata2 will have 'Keywords': ['']
+ # because of a value.split(',') in Metadata.get
+ self.assertEqual(metadata.items(), metadata2.items())
+
+ # ASCII also works, it's a subset of UTF-8
+ metadata = Metadata(mapping={'author': 'Cafe Junior',
+ 'name': 'my.project',
+ 'summary': 'Cafe torrefie',
+ 'description': 'Hehehe'})
+ metadata.write(my_file)
+
+ metadata2 = Metadata()
+ with open(my_file, encoding='utf-8') as fp:
+ metadata2.read_file(fp)
+
+ def test_metadata_read_write(self):
+ PKG_INFO = os.path.join(os.path.dirname(__file__), 'PKG-INFO')
+ metadata = Metadata(PKG_INFO)
+ out = StringIO()
+ metadata.write_file(out)
+
+ out.seek(0)
+ res = Metadata()
+ res.read_file(out)
+ self.assertEqual(metadata.values(), res.values())
+
+ #### Test checks
+
+ def test_check_version(self):
+ metadata = Metadata()
+ metadata['Name'] = 'vimpdb'
+ metadata['Home-page'] = 'http://pypi.python.org'
+ metadata['Author'] = 'Monty Python'
+ metadata.docutils_support = False
+ missing, warnings = metadata.check()
+ self.assertEqual(missing, ['Version'])
+
+ def test_check_version_strict(self):
+ metadata = Metadata()
+ metadata['Name'] = 'vimpdb'
+ metadata['Home-page'] = 'http://pypi.python.org'
+ metadata['Author'] = 'Monty Python'
+ metadata.docutils_support = False
+ self.assertRaises(MetadataMissingError, metadata.check, strict=True)
+
+ def test_check_name(self):
+ metadata = Metadata()
+ metadata['Version'] = '1.0'
+ metadata['Home-page'] = 'http://pypi.python.org'
+ metadata['Author'] = 'Monty Python'
+ metadata.docutils_support = False
+ missing, warnings = metadata.check()
+ self.assertEqual(missing, ['Name'])
+
+ def test_check_name_strict(self):
+ metadata = Metadata()
+ metadata['Version'] = '1.0'
+ metadata['Home-page'] = 'http://pypi.python.org'
+ metadata['Author'] = 'Monty Python'
+ metadata.docutils_support = False
+ self.assertRaises(MetadataMissingError, metadata.check, strict=True)
+
+ def test_check_author(self):
+ metadata = Metadata()
+ metadata['Version'] = '1.0'
+ metadata['Name'] = 'vimpdb'
+ metadata['Home-page'] = 'http://pypi.python.org'
+ metadata.docutils_support = False
+ missing, warnings = metadata.check()
+ self.assertEqual(missing, ['Author'])
+
+ def test_check_homepage(self):
+ metadata = Metadata()
+ metadata['Version'] = '1.0'
+ metadata['Name'] = 'vimpdb'
+ metadata['Author'] = 'Monty Python'
+ metadata.docutils_support = False
+ missing, warnings = metadata.check()
+ self.assertEqual(missing, ['Home-page'])
+
+ def test_check_predicates(self):
+ metadata = Metadata()
+ metadata['Version'] = 'rr'
+ metadata['Name'] = 'vimpdb'
+ metadata['Home-page'] = 'http://pypi.python.org'
+ metadata['Author'] = 'Monty Python'
+ metadata['Requires-dist'] = ['Foo (a)']
+ metadata['Obsoletes-dist'] = ['Foo (a)']
+ metadata['Provides-dist'] = ['Foo (a)']
+ missing, warnings = metadata.check()
+ self.assertEqual(len(warnings), 4)
+
+ #### Test fields and metadata versions
+
+ def test_metadata_versions(self):
+ metadata = Metadata(mapping={'name': 'project', 'version': '1.0'})
+ self.assertEqual(metadata['Metadata-Version'],
+ PKG_INFO_PREFERRED_VERSION)
+ self.assertNotIn('Provides', metadata)
+ self.assertNotIn('Requires', metadata)
+ self.assertNotIn('Obsoletes', metadata)
+
+ metadata['Classifier'] = ['ok']
+ self.assertEqual(metadata['Metadata-Version'], '1.1')
+
+ metadata = Metadata()
+ metadata['Download-URL'] = 'ok'
+ self.assertEqual(metadata['Metadata-Version'], '1.1')
+
+ metadata = Metadata()
+ metadata['Obsoletes'] = 'ok'
+ self.assertEqual(metadata['Metadata-Version'], '1.1')
+
+ del metadata['Obsoletes']
+ metadata['Obsoletes-Dist'] = 'ok'
+ self.assertEqual(metadata['Metadata-Version'], '1.2')
+
+ self.assertRaises(MetadataConflictError, metadata.set,
+ 'Obsoletes', 'ok')
+
+ del metadata['Obsoletes']
+ del metadata['Obsoletes-Dist']
+ metadata['Version'] = '1'
+ self.assertEqual(metadata['Metadata-Version'], '1.0')
+
+ # make sure the _best_version function works okay with
+ # non-conflicting fields from 1.1 and 1.2 (i.e. we want only the
+ # requires/requires-dist and co. pairs to cause a conflict, not all
+ # fields in _314_MARKERS)
+ metadata = Metadata()
+ metadata['Requires-Python'] = '3'
+ metadata['Classifier'] = ['Programming language :: Python :: 3']
+ self.assertEqual(metadata['Metadata-Version'], '1.2')
+
+ PKG_INFO = os.path.join(os.path.dirname(__file__),
+ 'SETUPTOOLS-PKG-INFO')
+ metadata = Metadata(PKG_INFO)
+ self.assertEqual(metadata['Metadata-Version'], '1.0')
+
+ PKG_INFO = os.path.join(os.path.dirname(__file__),
+ 'SETUPTOOLS-PKG-INFO2')
+ metadata = Metadata(PKG_INFO)
+ self.assertEqual(metadata['Metadata-Version'], '1.1')
+
+ # Update the _fields dict directly to prevent 'Metadata-Version'
+ # from being updated by the _set_best_version() method.
+ metadata._fields['Metadata-Version'] = '1.618'
+ self.assertRaises(MetadataUnrecognizedVersionError, metadata.keys)
+
+ def test_version(self):
+ Metadata(mapping={'author': 'xxx',
+ 'name': 'xxx',
+ 'version': 'xxx',
+ 'home_page': 'xxxx'})
+ logs = self.get_logs()
+ self.assertEqual(1, len(logs))
+ self.assertIn('not a valid version', logs[0])
+
+ def test_description(self):
+ PKG_INFO = os.path.join(os.path.dirname(__file__), 'PKG-INFO')
+ with open(PKG_INFO, 'r', encoding='utf-8') as f:
+ content = f.read() % sys.platform
+ metadata = Metadata()
+ metadata.read_file(StringIO(content))
+
+ # see if we can read the description now
+ DESC = os.path.join(os.path.dirname(__file__), 'LONG_DESC.txt')
+ with open(DESC) as f:
+ wanted = f.read()
+ self.assertEqual(wanted, metadata['Description'])
+
+ # save the file somewhere and make sure we can read it back
+ out = StringIO()
+ metadata.write_file(out)
+ out.seek(0)
+
+ metadata = Metadata()
+ metadata.read_file(out)
+ self.assertEqual(wanted, metadata['Description'])
+
+ def test_description_folding(self):
+ # make sure the indentation is preserved
+ out = StringIO()
+ desc = dedent("""\
+ example::
+ We start here
+ and continue here
+ and end here.
+ """)
+
+ metadata = Metadata()
+ metadata['description'] = desc
+ metadata.write_file(out)
+
+ folded_desc = desc.replace('\n', '\n' + (7 * ' ') + '|')
+ self.assertIn(folded_desc, out.getvalue())
+
+ def test_project_url(self):
+ metadata = Metadata()
+ metadata['Project-URL'] = [('one', 'http://ok')]
+ self.assertEqual(metadata['Project-URL'], [('one', 'http://ok')])
+ self.assertEqual(metadata['Metadata-Version'], '1.2')
+
+ # make sure this particular field is handled properly when written
+ fp = StringIO()
+ metadata.write_file(fp)
+ self.assertIn('Project-URL: one,http://ok', fp.getvalue().split('\n'))
+
+ fp.seek(0)
+ metadata = Metadata()
+ metadata.read_file(fp)
+ self.assertEqual(metadata['Project-Url'], [('one', 'http://ok')])
+
+ # TODO copy tests for v1.1 requires, obsoletes and provides from distutils
+ # (they're useless but we support them so we should test them anyway)
+
+ def test_provides_dist(self):
+ fields = {'name': 'project',
+ 'version': '1.0',
+ 'provides_dist': ['project', 'my.project']}
+ metadata = Metadata(mapping=fields)
+ self.assertEqual(metadata['Provides-Dist'],
+ ['project', 'my.project'])
+ self.assertEqual(metadata['Metadata-Version'], '1.2', metadata)
+ self.assertNotIn('Requires', metadata)
+ self.assertNotIn('Obsoletes', metadata)
+
+ @unittest.skip('needs to be implemented')
+ def test_provides_illegal(self):
+ # TODO check the versions (like distutils does for old provides field)
+ self.assertRaises(ValueError, Metadata,
+ mapping={'name': 'project',
+ 'version': '1.0',
+ 'provides_dist': ['my.pkg (splat)']})
+
+ def test_requires_dist(self):
+ fields = {'name': 'project',
+ 'version': '1.0',
+ 'requires_dist': ['other', 'another (==1.0)']}
+ metadata = Metadata(mapping=fields)
+ self.assertEqual(metadata['Requires-Dist'],
+ ['other', 'another (==1.0)'])
+ self.assertEqual(metadata['Metadata-Version'], '1.2')
+ self.assertNotIn('Provides', metadata)
+ self.assertEqual(metadata['Requires-Dist'],
+ ['other', 'another (==1.0)'])
+ self.assertNotIn('Obsoletes', metadata)
+
+ # make sure write_file uses one RFC 822 header per item
+ fp = StringIO()
+ metadata.write_file(fp)
+ lines = fp.getvalue().split('\n')
+ self.assertIn('Requires-Dist: other', lines)
+ self.assertIn('Requires-Dist: another (==1.0)', lines)
+
+ # test warnings for invalid version predicates
+ # XXX this would cause no warnings if we used update (or the mapping
+ # argument of the constructor), see comment in Metadata.update
+ metadata = Metadata()
+ metadata['Requires-Dist'] = 'Funky (Groovie)'
+ metadata['Requires-Python'] = '1-4'
+ self.assertEqual(len(self.get_logs()), 2)
+
+ # test multiple version predicates
+ metadata = Metadata()
+
+ # XXX check PEP and see if 3 == 3.0
+ metadata['Requires-Python'] = '>=2.6, <3.0'
+ metadata['Requires-Dist'] = ['Foo (>=2.6, <3.0)']
+ self.assertEqual(self.get_logs(), [])
+
+ @unittest.skip('needs to be implemented')
+ def test_requires_illegal(self):
+ self.assertRaises(ValueError, Metadata,
+ mapping={'name': 'project',
+ 'version': '1.0',
+ 'requires': ['my.pkg (splat)']})
+
+ def test_obsoletes_dist(self):
+ fields = {'name': 'project',
+ 'version': '1.0',
+ 'obsoletes_dist': ['other', 'another (<1.0)']}
+ metadata = Metadata(mapping=fields)
+ self.assertEqual(metadata['Obsoletes-Dist'],
+ ['other', 'another (<1.0)'])
+ self.assertEqual(metadata['Metadata-Version'], '1.2')
+ self.assertNotIn('Provides', metadata)
+ self.assertNotIn('Requires', metadata)
+ self.assertEqual(metadata['Obsoletes-Dist'],
+ ['other', 'another (<1.0)'])
+
+ @unittest.skip('needs to be implemented')
+ def test_obsoletes_illegal(self):
+ self.assertRaises(ValueError, Metadata,
+ mapping={'name': 'project',
+ 'version': '1.0',
+ 'obsoletes': ['my.pkg (splat)']})
+
+
+def test_suite():
+ return unittest.makeSuite(MetadataTestCase)
+
+if __name__ == '__main__':
+ unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_mixin2to3.py b/Lib/packaging/tests/test_mixin2to3.py
new file mode 100644
index 0000000..08a102b
--- /dev/null
+++ b/Lib/packaging/tests/test_mixin2to3.py
@@ -0,0 +1,87 @@
+"""Tests for packaging.compat.Mixin2to3."""
+import textwrap
+
+from packaging.tests import unittest, support
+from packaging.compat import Mixin2to3
+
+
+class Mixin2to3TestCase(support.TempdirManager,
+ support.LoggingCatcher,
+ unittest.TestCase):
+
+ def setUp(self):
+ super(Mixin2to3TestCase, self).setUp()
+ self.filename = self.mktempfile().name
+
+ def check(self, source, wanted, **kwargs):
+ source = textwrap.dedent(source)
+ with open(self.filename, 'w') as fp:
+ fp.write(source)
+
+ Mixin2to3()._run_2to3(**kwargs)
+
+ wanted = textwrap.dedent(wanted)
+ with open(self.filename) as fp:
+ converted = fp.read()
+ self.assertMultiLineEqual(converted, wanted)
+
+ def test_conversion(self):
+ # check that code and doctests get converted
+ self.check('''\
+ """Example docstring.
+
+ >>> print test
+ test
+
+ It works.
+ """
+ print 'test'
+ ''',
+ '''\
+ """Example docstring.
+
+ >>> print(test)
+ test
+
+ It works.
+ """
+ print('test')
+
+ ''', # 2to3 adds a newline here
+ files=[self.filename])
+
+ def test_doctests_conversion(self):
+ # check that doctest files are converted
+ self.check('''\
+ Welcome to the doc.
+
+ >>> print test
+ test
+ ''',
+ '''\
+ Welcome to the doc.
+
+ >>> print(test)
+ test
+
+ ''',
+ doctests=[self.filename])
+
+ def test_additional_fixers(self):
+ # make sure the fixers argument works
+ self.check("""\
+ echo('42')
+ echo2('oh no')
+ """,
+ """\
+ print('42')
+ print('oh no')
+ """,
+ files=[self.filename],
+ fixers=['packaging.tests.fixer'])
+
+
+def test_suite():
+ return unittest.makeSuite(Mixin2to3TestCase)
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_msvc9compiler.py b/Lib/packaging/tests/test_msvc9compiler.py
new file mode 100644
index 0000000..dc3ae65
--- /dev/null
+++ b/Lib/packaging/tests/test_msvc9compiler.py
@@ -0,0 +1,140 @@
+"""Tests for packaging.compiler.msvc9compiler."""
+import os
+import sys
+
+from packaging.errors import PackagingPlatformError
+
+from packaging.tests import unittest, support
+
+_MANIFEST = """\
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<assembly xmlns="urn:schemas-microsoft-com:asm.v1"
+ manifestVersion="1.0">
+ <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
+ <security>
+ <requestedPrivileges>
+ <requestedExecutionLevel level="asInvoker" uiAccess="false">
+ </requestedExecutionLevel>
+ </requestedPrivileges>
+ </security>
+ </trustInfo>
+ <dependency>
+ <dependentAssembly>
+ <assemblyIdentity type="win32" name="Microsoft.VC90.CRT"
+ version="9.0.21022.8" processorArchitecture="x86"
+ publicKeyToken="XXXX">
+ </assemblyIdentity>
+ </dependentAssembly>
+ </dependency>
+ <dependency>
+ <dependentAssembly>
+ <assemblyIdentity type="win32" name="Microsoft.VC90.MFC"
+ version="9.0.21022.8" processorArchitecture="x86"
+ publicKeyToken="XXXX"></assemblyIdentity>
+ </dependentAssembly>
+ </dependency>
+</assembly>
+"""
+
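+# expected result of _remove_visual_c_ref: the Microsoft.VC90.CRT dependency
+# is stripped while the Microsoft.VC90.MFC one is preserved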
+_CLEANED_MANIFEST = """\
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<assembly xmlns="urn:schemas-microsoft-com:asm.v1"
+ manifestVersion="1.0">
+ <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
+ <security>
+ <requestedPrivileges>
+ <requestedExecutionLevel level="asInvoker" uiAccess="false">
+ </requestedExecutionLevel>
+ </requestedPrivileges>
+ </security>
+ </trustInfo>
+ <dependency>
+
+ </dependency>
+ <dependency>
+ <dependentAssembly>
+ <assemblyIdentity type="win32" name="Microsoft.VC90.MFC"
+ version="9.0.21022.8" processorArchitecture="x86"
+ publicKeyToken="XXXX"></assemblyIdentity>
+ </dependentAssembly>
+ </dependency>
+</assembly>"""
+
+
+class msvc9compilerTestCase(support.TempdirManager,
+ unittest.TestCase):
+
+ @unittest.skipUnless(sys.platform == "win32", "runs only on win32")
+ def test_no_compiler(self):
+ # make sure query_vcvarsall raises a PackagingPlatformError if
+ # the compiler is not found
+ from packaging.compiler.msvccompiler import get_build_version
+ if get_build_version() < 8.0:
+ raise unittest.SkipTest('only for MSVC8.0 or above')
+
+ from packaging.compiler import msvc9compiler
+ from packaging.compiler.msvc9compiler import query_vcvarsall
+
+ def _find_vcvarsall(version):
+ return None
+
+ old_find_vcvarsall = msvc9compiler.find_vcvarsall
+ msvc9compiler.find_vcvarsall = _find_vcvarsall
+ try:
+ self.assertRaises(PackagingPlatformError, query_vcvarsall,
+ 'wont find this version')
+ finally:
+ msvc9compiler.find_vcvarsall = old_find_vcvarsall
+
+ @unittest.skipUnless(sys.platform == "win32", "runs only on win32")
+ def test_reg_class(self):
+ from packaging.compiler.msvccompiler import get_build_version
+ if get_build_version() < 8.0:
+ raise unittest.SkipTest("requires MSVC 8.0 or later")
+
+ from packaging.compiler.msvc9compiler import Reg
+ self.assertRaises(KeyError, Reg.get_value, 'xxx', 'xxx')
+
+ # look for values that should exist in all
+ # Windows registry versions
+ path = r'Control Panel\Desktop'
+ v = Reg.get_value(path, 'dragfullwindows')
+ self.assertIn(v, ('0', '1', '2'))
+
+ import winreg
+ HKCU = winreg.HKEY_CURRENT_USER
+ keys = Reg.read_keys(HKCU, 'xxxx')
+ self.assertEqual(keys, None)
+
+ keys = Reg.read_keys(HKCU, r'Control Panel')
+ self.assertIn('Desktop', keys)
+
+ @unittest.skipUnless(sys.platform == "win32", "runs only on win32")
+ def test_remove_visual_c_ref(self):
+ from packaging.compiler.msvccompiler import get_build_version
+ if get_build_version() < 8.0:
+ raise unittest.SkipTest("requires MSVC 8.0 or later")
+
+ from packaging.compiler.msvc9compiler import MSVCCompiler
+ tempdir = self.mkdtemp()
+ manifest = os.path.join(tempdir, 'manifest')
+ with open(manifest, 'w') as f:
+ f.write(_MANIFEST)
+
+ compiler = MSVCCompiler()
+ compiler._remove_visual_c_ref(manifest)
+
+ # see what we got
+ with open(manifest) as f:
+ # removing trailing spaces
+ content = '\n'.join(line.rstrip() for line in f.readlines())
+
+ # makes sure the manifest was properly cleaned
+ self.assertEqual(content, _CLEANED_MANIFEST)
+
+
+def test_suite():
+ return unittest.makeSuite(msvc9compilerTestCase)
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_pypi_dist.py b/Lib/packaging/tests/test_pypi_dist.py
new file mode 100644
index 0000000..43c8cbe
--- /dev/null
+++ b/Lib/packaging/tests/test_pypi_dist.py
@@ -0,0 +1,287 @@
+"""Tests for the packaging.pypi.dist module."""
+
+import os
+import shutil
+from packaging.version import VersionPredicate
+from packaging.pypi.dist import (ReleaseInfo, ReleasesList, DistInfo,
+ split_archive_name, get_infos_from_url)
+from packaging.pypi.errors import HashDoesNotMatch, UnsupportedHashName
+
+from packaging.tests import unittest
+from packaging.tests.support import TempdirManager, requires_zlib, fake_dec
+try:
+ import threading
+ from packaging.tests.pypi_server import use_pypi_server
+except ImportError:
+ threading = None
+ use_pypi_server = fake_dec
+
+
+def Dist(*args, **kwargs):
+ # DistInfo takes a release as a first parameter, avoid this in tests.
+ return DistInfo(None, *args, **kwargs)
+
+
+class TestReleaseInfo(unittest.TestCase):
+
+ def test_instantiation(self):
+ # Test that the ReleaseInfo class provides the right attributes
+ # when instantiated
+ release = ReleaseInfo("FooBar", "1.1")
+ self.assertEqual("FooBar", release.name)
+ self.assertEqual("1.1", "%s" % release.version)
+
+ def test_add_dist(self):
+ # empty distribution type should assume "sdist"
+ release = ReleaseInfo("FooBar", "1.1")
+ release.add_distribution(url="http://example.org/")
+ # should not fail
+ release['sdist']
+
+ def test_get_unknown_distribution(self):
+ # should raise a KeyError
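+ # a sketch of the intended assertion, assuming dict-style access on
+ # ReleaseInfo (hypothetical, still to be implemented):
+ # release = ReleaseInfo("FooBar", "1.1")
+ # self.assertRaises(KeyError, release.__getitem__, 'bdist')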
+ pass
+
+ def test_get_infos_from_url(self):
+ # Test that the URLs are parsed the right way
+ url_list = {
+ 'FooBar-1.1.0.tar.gz': {
+ 'name': 'foobar', # lowercase the name
+ 'version': '1.1.0',
+ },
+ 'Foo-Bar-1.1.0.zip': {
+ 'name': 'foo-bar', # keep the dash
+ 'version': '1.1.0',
+ },
+ 'foobar-1.1b2.tar.gz#md5=123123123123123': {
+ 'name': 'foobar',
+ 'version': '1.1b2',
+ 'url': 'http://example.org/foobar-1.1b2.tar.gz', # no hash
+ 'hashval': '123123123123123',
+ 'hashname': 'md5',
+ },
+ 'foobar-1.1-rc2.tar.gz': { # use suggested name
+ 'name': 'foobar',
+ 'version': '1.1c2',
+ 'url': 'http://example.org/foobar-1.1-rc2.tar.gz',
+ }
+ }
+
+ for url, attributes in url_list.items():
+ # for each url
+ infos = get_infos_from_url("http://example.org/" + url)
+ for attribute, expected in attributes.items():
+ got = infos.get(attribute)
+ if attribute == "version":
+ self.assertEqual("%s" % got, expected)
+ else:
+ self.assertEqual(got, expected)
+
+ def test_split_archive_name(self):
+ # Test we can split the archive names
+ names = {
+ 'foo-bar-baz-1.0-rc2': ('foo-bar-baz', '1.0c2'),
+ 'foo-bar-baz-1.0': ('foo-bar-baz', '1.0'),
+ 'foobarbaz-1.0': ('foobarbaz', '1.0'),
+ }
+ for name, results in names.items():
+ self.assertEqual(results, split_archive_name(name))
+
+
+class TestDistInfo(TempdirManager, unittest.TestCase):
+ srcpath = "/packages/source/f/foobar/foobar-0.1.tar.gz"
+
+ def test_get_url(self):
+ # Test that the url property works well
+
+ d = Dist(url="test_url")
+ self.assertDictEqual(d.url, {
+ "url": "test_url",
+ "is_external": True,
+ "hashname": None,
+ "hashval": None,
+ })
+
+ # add a new url
+ d.add_url(url="internal_url", is_external=False)
+ self.assertEqual(d._url, None)
+ self.assertDictEqual(d.url, {
+ "url": "internal_url",
+ "is_external": False,
+ "hashname": None,
+ "hashval": None,
+ })
+ self.assertEqual(2, len(d.urls))
+
+ def test_comparison(self):
+ # Test that we can compare ReleaseInfo objects
+ foo1 = ReleaseInfo("foo", "1.0")
+ foo2 = ReleaseInfo("foo", "2.0")
+ bar = ReleaseInfo("bar", "2.0")
+ # assert we use the version to compare
+ self.assertTrue(foo1 < foo2)
+ self.assertFalse(foo1 > foo2)
+ self.assertFalse(foo1 == foo2)
+
+ # assert we can't compare dists with different names
+ self.assertRaises(TypeError, foo1.__eq__, bar)
+
+ @unittest.skipIf(threading is None, 'needs threading')
+ @use_pypi_server("downloads_with_md5")
+ def test_download(self, server):
+ # Download is possible, and the md5 is checked if given
+
+ url = server.full_address + self.srcpath
+
+ # the download succeeds when the right md5 is given
+ dist = Dist(url=url, hashname="md5",
+ hashval="fe18804c5b722ff024cabdf514924fc4")
+ dist.download(self.mkdtemp())
+
+ # a wrong md5 fails
+ dist2 = Dist(url=url, hashname="md5", hashval="wrongmd5")
+
+ self.assertRaises(HashDoesNotMatch, dist2.download, self.mkdtemp())
+
+ # we can omit the md5 hash
+ dist3 = Dist(url=url)
+ dist3.download(self.mkdtemp())
+
+ # and specify a temporary location
+ # for an already downloaded dist
+ path1 = self.mkdtemp()
+ dist3.download(path=path1)
+ # and for a new one
+ path2_base = self.mkdtemp()
+ dist4 = Dist(url=url)
+ path2 = dist4.download(path=path2_base)
+ self.assertIn(path2_base, path2)
+
+ def test_hashname(self):
+ # an invalid hash name raises an exception at creation time
+ Dist(hashname="md5", hashval="value")
+
+ self.assertRaises(UnsupportedHashName, Dist,
+ hashname="invalid_hashname",
+ hashval="value")
+
+ @unittest.skipIf(threading is None, 'needs threading')
+ @requires_zlib
+ @use_pypi_server('downloads_with_md5')
+ def test_unpack(self, server):
+ url = server.full_address + self.srcpath
+ dist1 = Dist(url=url)
+
+ # unpack the distribution into a specified folder
+ dist1_here = self.mkdtemp()
+ dist1_there = dist1.unpack(path=dist1_here)
+
+ # assert we unpack to the path provided
+ self.assertEqual(dist1_here, dist1_there)
+ dist1_result = os.listdir(dist1_there)
+ self.assertIn('paf', dist1_result)
+ os.remove(os.path.join(dist1_there, 'paf'))
+
+ # Test unpack works without a path argument
+ dist2 = Dist(url=url)
+ # doing an unpack
+ dist2_there = dist2.unpack()
+ self.addCleanup(shutil.rmtree, dist2_there)
+ dist2_result = os.listdir(dist2_there)
+ self.assertIn('paf', dist2_result)
+ os.remove(os.path.join(dist2_there, 'paf'))
+
+
+class TestReleasesList(unittest.TestCase):
+
+ def test_filter(self):
+ # Test we filter the distributions the right way, using version
+ # predicate match method
+ releases = ReleasesList('FooBar', (
+ ReleaseInfo("FooBar", "1.1"),
+ ReleaseInfo("FooBar", "1.1.1"),
+ ReleaseInfo("FooBar", "1.2"),
+ ReleaseInfo("FooBar", "1.2.1"),
+ ))
+ filtered = releases.filter(VersionPredicate("FooBar (<1.2)"))
+ self.assertNotIn(releases[2], filtered)
+ self.assertNotIn(releases[3], filtered)
+ self.assertIn(releases[0], filtered)
+ self.assertIn(releases[1], filtered)
+
+ def test_append(self):
+ # When adding a new item to the list, the behavior is to test if
+ # a release with the same name and version number already exists,
+ # and if so, to add a new distribution for it. If the distribution
+ # type is already defined too, add the url information to the
+ # existing DistInfo object.
+
+ releases = ReleasesList("FooBar", [
+ ReleaseInfo("FooBar", "1.1", url="external_url",
+ dist_type="sdist"),
+ ])
+ self.assertEqual(1, len(releases))
+ releases.add_release(release=ReleaseInfo("FooBar", "1.1",
+ url="internal_url",
+ is_external=False,
+ dist_type="sdist"))
+ self.assertEqual(1, len(releases))
+ self.assertEqual(2, len(releases[0]['sdist'].urls))
+
+ releases.add_release(release=ReleaseInfo("FooBar", "1.1.1",
+ dist_type="sdist"))
+ self.assertEqual(2, len(releases))
+
+ # when adding a distribution with a different type, a new
+ # distribution has to be added.
+ releases.add_release(release=ReleaseInfo("FooBar", "1.1.1",
+ dist_type="bdist"))
+ self.assertEqual(2, len(releases))
+ self.assertEqual(2, len(releases[1].dists))
+
+ def test_prefer_final(self):
+ # Can order the distributions using prefer_final
+ fb10 = ReleaseInfo("FooBar", "1.0") # final distribution
+ fb11a = ReleaseInfo("FooBar", "1.1a1") # alpha
+ fb12a = ReleaseInfo("FooBar", "1.2a1") # alpha
+ fb12b = ReleaseInfo("FooBar", "1.2b1") # beta
+ dists = ReleasesList("FooBar", [fb10, fb11a, fb12a, fb12b])
+
+ dists.sort_releases(prefer_final=True)
+ self.assertEqual(fb10, dists[0])
+
+ dists.sort_releases(prefer_final=False)
+ self.assertEqual(fb12b, dists[0])
+
+ @unittest.skip('method not implemented yet')
+ def test_prefer_source(self):
+ # Ordering supports prefer_source
+ fb_source = Dist("FooBar", "1.0", type="source")
+ fb_binary = Dist("FooBar", "1.0", type="binary")
+ fb2_binary = Dist("FooBar", "2.0", type="binary")
+ dists = ReleasesList([fb_binary, fb_source])
+
+ dists.sort_distributions(prefer_source=True)
+ self.assertEqual(fb_source, dists[0])
+
+ dists.sort_distributions(prefer_source=False)
+ self.assertEqual(fb_binary, dists[0])
+
+ dists.append(fb2_binary)
+ dists.sort_distributions(prefer_source=True)
+ self.assertEqual(fb2_binary, dists[0])
+
+ def test_get_last(self):
+ dists = ReleasesList('Foo')
+ self.assertEqual(dists.get_last('Foo 1.0'), None)
+
+
+def test_suite():
+ suite = unittest.TestSuite()
+ suite.addTest(unittest.makeSuite(TestDistInfo))
+ suite.addTest(unittest.makeSuite(TestReleaseInfo))
+ suite.addTest(unittest.makeSuite(TestReleasesList))
+ return suite
+
+if __name__ == '__main__':
+ unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_pypi_server.py b/Lib/packaging/tests/test_pypi_server.py
new file mode 100644
index 0000000..057c494
--- /dev/null
+++ b/Lib/packaging/tests/test_pypi_server.py
@@ -0,0 +1,88 @@
+"""Tests for packaging.command.bdist."""
+import urllib.request
+import urllib.parse
+import urllib.error
+
+try:
+ import threading
+ from packaging.tests.pypi_server import (
+ PyPIServer, PYPI_DEFAULT_STATIC_PATH)
+except ImportError:
+ threading = None
+ PyPIServer = None
+ PYPI_DEFAULT_STATIC_PATH = None
+
+from packaging.tests import unittest
+
+
+@unittest.skipIf(threading is None, "Needs threading")
+class PyPIServerTest(unittest.TestCase):
+
+ def test_records_requests(self):
+ # We expect that PyPIServer can log our requests
+ server = PyPIServer()
+ server.default_response_status = 200
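+ # answer 200 to every request so the urlopen call below succeeds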
+
+ try:
+ server.start()
+ self.assertEqual(len(server.requests), 0)
+
+ data = b'Rock Around The Bunker'
+
+ headers = {"X-test-header": "Mister Iceberg"}
+
+ request = urllib.request.Request(
+ server.full_address, data, headers)
+ urllib.request.urlopen(request)
+ self.assertEqual(len(server.requests), 1)
+ handler, request_data = server.requests[-1]
+ self.assertIn(data, request_data)
+ self.assertIn("x-test-header", handler.headers)
+ self.assertEqual(handler.headers["x-test-header"],
+ "Mister Iceberg")
+
+ finally:
+ server.stop()
+
+ def test_serve_static_content(self):
+ # The mocked PyPI server can serve static content from disk.
+
+ def uses_local_files_for(server, url_path):
+ """Test that files are served statically (eg. the output from the
+ server is the same than the one made by a simple file read.
+ """
+ url = server.full_address + url_path
+ request = urllib.request.Request(url)
+ response = urllib.request.urlopen(request)
+ with open(PYPI_DEFAULT_STATIC_PATH + "/test_pypi_server"
+ + url_path) as file:
+ return response.read().decode() == file.read()
+
+ server = PyPIServer(static_uri_paths=["simple", "external"],
+ static_filesystem_paths=["test_pypi_server"])
+ server.start()
+ try:
+ # the file does not exist on disk, so it must not be served
+ url = server.full_address + "/simple/unexisting_page"
+ request = urllib.request.Request(url)
+ try:
+ urllib.request.urlopen(request)
+ except urllib.error.HTTPError as e:
+ self.assertEqual(e.code, 404)
+ else:
+ self.fail('expected a 404 error for a nonexistent file')
+
+ # now try serving content that does exist
+ self.assertTrue(uses_local_files_for(server, "/simple/index.html"))
+
+ # and another one in another root path
+ self.assertTrue(uses_local_files_for(server,
+ "/external/index.html"))
+
+ finally:
+ server.stop()
+
+
+def test_suite():
+ return unittest.makeSuite(PyPIServerTest)
+
+if __name__ == '__main__':
+ unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_pypi_simple.py b/Lib/packaging/tests/test_pypi_simple.py
new file mode 100644
index 0000000..59204c4
--- /dev/null
+++ b/Lib/packaging/tests/test_pypi_simple.py
@@ -0,0 +1,353 @@
+"""Tests for the packaging.pypi.simple module."""
+import re
+import os
+import sys
+import http.client
+import urllib.error
+import urllib.parse
+import urllib.request
+
+from packaging.pypi.simple import Crawler
+
+from packaging.tests import unittest
+from packaging.tests.support import (TempdirManager, LoggingCatcher,
+ fake_dec)
+
+try:
+ import _thread
+ from packaging.tests.pypi_server import (use_pypi_server, PyPIServer,
+ PYPI_DEFAULT_STATIC_PATH)
+except ImportError:
+ _thread = None
+ use_pypi_server = fake_dec
+ PYPI_DEFAULT_STATIC_PATH = os.path.join(
+ os.path.dirname(os.path.abspath(__file__)), 'pypiserver')
+
+
+class SimpleCrawlerTestCase(TempdirManager,
+ LoggingCatcher,
+ unittest.TestCase):
+
+ def _get_simple_crawler(self, server, base_url="/simple/", hosts=None,
+ *args, **kwargs):
+ """Build and return a SimpleIndex with the test server urls"""
+ if hosts is None:
+ hosts = (server.full_address.replace("http://", ""),)
+ kwargs['hosts'] = hosts
+ return Crawler(server.full_address + base_url, *args,
+ **kwargs)
+
+ @unittest.skipIf(_thread is None, 'needs threads')
+ @use_pypi_server()
+ def test_bad_urls(self, server):
+ crawler = Crawler()
+ url = 'http://127.0.0.1:0/nonesuch/test_simple'
+ try:
+ v = crawler._open_url(url)
+ except Exception as v:
+ self.assertIn(url, str(v))
+ else:
+ v.close()
+ self.assertIsInstance(v, urllib.error.HTTPError)
+
+ # issue 16
+ # easy_install inquant.contentmirror.plone breaks because of a typo
+ # in its home URL
+ crawler = Crawler(hosts=('example.org',))
+ url = ('url:%20https://svn.plone.org/svn/collective/'
+ 'inquant.contentmirror.plone/trunk')
+ try:
+ v = crawler._open_url(url)
+ except Exception as v:
+ self.assertIn(url, str(v))
+ else:
+ v.close()
+ self.assertIsInstance(v, urllib.error.HTTPError)
+
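+ # monkey-patch urlopen to simulate a low-level protocol failure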
+ def _urlopen(*args):
+ raise http.client.BadStatusLine('line')
+
+ old_urlopen = urllib.request.urlopen
+ urllib.request.urlopen = _urlopen
+ url = 'http://example.org'
+ try:
+ v = crawler._open_url(url)
+ except Exception as v:
+ self.assertIn('line', str(v))
+ else:
+ v.close()
+ # TODO use self.assertRaises
+ raise AssertionError('Should have raised here!')
+ finally:
+ urllib.request.urlopen = old_urlopen
+
+ # issue 20
+ url = 'http://http://svn.pythonpaste.org/Paste/wphp/trunk'
+ try:
+ crawler._open_url(url)
+ except Exception as v:
+ self.assertIn('Download error', str(v))
+
+ # issue #160
+ url = server.full_address
+ page = ('<a href="http://www.famfamfam.com]('
+ 'http://www.famfamfam.com/">')
+ crawler._process_url(url, page)
+
+ @unittest.skipIf(_thread is None, 'needs threads')
+ @use_pypi_server("test_found_links")
+ def test_found_links(self, server):
+ # Browse the index, asking for a specified release version
+ # The PyPI index contains links for version 1.0, 1.1, 2.0 and 2.0.1
+ crawler = self._get_simple_crawler(server)
+ last_release = crawler.get_release("foobar")
+
+ # we have scanned the index page
+ self.assertIn(server.full_address + "/simple/foobar/",
+ crawler._processed_urls)
+
+ # we have found 4 releases in this page
+ self.assertEqual(len(crawler._projects["foobar"]), 4)
+
+ # and returned the most recent one
+ self.assertEqual("%s" % last_release.version, '2.0.1')
+
+ def test_is_browsable(self):
+ crawler = Crawler(follow_externals=False)
+ self.assertTrue(crawler._is_browsable(crawler.index_url + "test"))
+
+ # Now, when following externals, we can give a list of hosts to
+ # trust; external links outside those hosts are not followed.
+ crawler = Crawler(hosts=["pypi.python.org", "example.org"],
+ follow_externals=True)
+ good_urls = (
+ "http://pypi.python.org/foo/bar",
+ "http://pypi.python.org/simple/foobar",
+ "http://example.org",
+ "http://example.org/",
+ "http://example.org/simple/",
+ )
+ bad_urls = (
+ "http://python.org",
+ "http://example.tld",
+ )
+
+ for url in good_urls:
+ self.assertTrue(crawler._is_browsable(url))
+
+ for url in bad_urls:
+ self.assertFalse(crawler._is_browsable(url))
+
+ # allow all hosts
+ crawler = Crawler(follow_externals=True, hosts=("*",))
+ self.assertTrue(crawler._is_browsable("http://an-external.link/path"))
+ self.assertTrue(crawler._is_browsable("pypi.example.org/a/path"))
+
+ # specify a list of hosts we want to allow
+ crawler = Crawler(follow_externals=True,
+ hosts=("*.example.org",))
+ self.assertFalse(crawler._is_browsable("http://an-external.link/path"))
+ self.assertTrue(
+ crawler._is_browsable("http://pypi.example.org/a/path"))
+
+ @unittest.skipIf(_thread is None, 'needs threads')
+ @use_pypi_server("with_externals")
+ def test_follow_externals(self, server):
+ # Include external pages
+ # Try to request the package index, which contains links to "external"
+ # resources. They have to be scanned too.
+ crawler = self._get_simple_crawler(server, follow_externals=True)
+ crawler.get_release("foobar")
+ self.assertIn(server.full_address + "/external/external.html",
+ crawler._processed_urls)
+
+ @unittest.skipIf(_thread is None, 'needs threads')
+ @use_pypi_server("with_real_externals")
+ def test_restrict_hosts(self, server):
+ # Using only a list of allowed hosts is possible.
+ # Test that telling the simple PyPI client not to retrieve externals
+ # works
+ crawler = self._get_simple_crawler(server, follow_externals=False)
+ crawler.get_release("foobar")
+ self.assertNotIn(server.full_address + "/external/external.html",
+ crawler._processed_urls)
+
+ @unittest.skipIf(_thread is None, 'needs threads')
+ @use_pypi_server(static_filesystem_paths=["with_externals"],
+ static_uri_paths=["simple", "external"])
+ def test_links_priority(self, server):
+ # Download links from the pypi simple index should be used before
+ # external download links.
+ # http://bitbucket.org/tarek/distribute/issue/163/md5-validation-error
+ #
+ # Use case:
+ # - someone uploads a package on pypi, an md5 is generated
+ # - someone manually copies this link (with the md5 in the url) onto
+ # an external page accessible from the package page.
+ # - someone reuploads the package (with a different md5)
+ # - while easy_installing, an MD5 error occurs because the external
+ # link is used
+ # -> The index should use the link from pypi, not the external one.
+
+ # start an index server
+ index_url = server.full_address + '/simple/'
+
+ # scan a test index
+ crawler = Crawler(index_url, follow_externals=True)
+ releases = crawler.get_releases("foobar")
+ server.stop()
+
+ # we have only one link, because links are compared without md5
+ self.assertEqual(1, len(releases))
+ self.assertEqual(1, len(releases[0].dists))
+ # the link should be from the index
+ self.assertEqual(2, len(releases[0].dists['sdist'].urls))
+ self.assertEqual('12345678901234567',
+ releases[0].dists['sdist'].url['hashval'])
+ self.assertEqual('md5', releases[0].dists['sdist'].url['hashname'])
+
+ @unittest.skipIf(_thread is None, 'needs threads')
+ @use_pypi_server(static_filesystem_paths=["with_norel_links"],
+ static_uri_paths=["simple", "external"])
+ def test_not_scan_all_links(self, server):
+ # Do not follow all index page links.
+ # The links not tagged with rel="download" and rel="homepage" have
+ # to not be processed by the package index, while processing "pages".
+
+ # process the pages
+ crawler = self._get_simple_crawler(server, follow_externals=True)
+ crawler.get_releases("foobar")
+ # now it should have processed only pages with links rel="download"
+ # and rel="homepage"
+ self.assertIn("%s/simple/foobar/" % server.full_address,
+ crawler._processed_urls) # it's the simple index page
+ self.assertIn("%s/external/homepage.html" % server.full_address,
+ crawler._processed_urls) # the external homepage is rel="homepage"
+ self.assertNotIn("%s/external/nonrel.html" % server.full_address,
+ crawler._processed_urls) # this link contains no rel=*
+ self.assertNotIn("%s/unrelated-0.2.tar.gz" % server.full_address,
+ crawler._processed_urls) # linked from simple index (no rel)
+ self.assertIn("%s/foobar-0.1.tar.gz" % server.full_address,
+ crawler._processed_urls) # linked from simple index (rel)
+ self.assertIn("%s/foobar-2.0.tar.gz" % server.full_address,
+ crawler._processed_urls) # linked from external homepage (rel)
+
+ @unittest.skipIf(_thread is None, 'needs threads')
+ def test_uses_mirrors(self):
+ # When the main repository seems down, try using the given mirrors
+ server = PyPIServer("foo_bar_baz")
+ mirror = PyPIServer("foo_bar_baz")
+ mirror.start() # the main server stays down; only the mirror runs
+
+ try:
+ # create the index using both servers
+ crawler = Crawler(server.full_address + "/simple/", hosts=('*',),
+ # set the timeout to 1s for the tests
+ timeout=1, mirrors=[mirror.full_address])
+
+ # this should not raise a timeout
+ self.assertEqual(4, len(crawler.get_releases("foo")))
+ finally:
+ mirror.stop()
+ server.stop()
+
+ def test_simple_link_matcher(self):
+ # Test that the simple link matcher finds the right links
+ crawler = Crawler(follow_externals=False)
+
+ # Here, we define:
+ # 1. one link that must be followed, because it's a download link
+ # 2. one link that must *not* be followed, because _is_browsable
+ # returns False for it
+ # 3. one link that must be followed, because it's a homepage that is
+ # browsable
+ # 4. one link that must be followed, because it contains an md5 hash
+ self.assertTrue(crawler._is_browsable("%stest" % crawler.index_url))
+ self.assertFalse(crawler._is_browsable("http://dl-link2"))
+ content = """
+ <a href="http://dl-link1" rel="download">download_link1</a>
+ <a href="http://dl-link2" rel="homepage">homepage_link1</a>
+ <a href="%(index_url)stest" rel="homepage">homepage_link2</a>
+ <a href="%(index_url)stest/foobar-1.tar.gz#md5=abcdef>download_link2</a>
+ """ % {'index_url': crawler.index_url}
+
+ # Test that the simple link matcher yields the right links.
+ generator = crawler._simple_link_matcher(content, crawler.index_url)
+ self.assertEqual(('%stest/foobar-1.tar.gz#md5=abcdef' %
+ crawler.index_url, True), next(generator))
+ self.assertEqual(('http://dl-link1', True), next(generator))
+ self.assertEqual(('%stest' % crawler.index_url, False),
+ next(generator))
+ self.assertRaises(StopIteration, generator.__next__)
+
+ # Following external links is possible (e.g. homepages)
+ crawler.follow_externals = True
+ generator = crawler._simple_link_matcher(content, crawler.index_url)
+ self.assertEqual(('%stest/foobar-1.tar.gz#md5=abcdef' %
+ crawler.index_url, True), next(generator))
+ self.assertEqual(('http://dl-link1', True), next(generator))
+ self.assertEqual(('http://dl-link2', False), next(generator))
+ self.assertEqual(('%stest' % crawler.index_url, False),
+ next(generator))
+ self.assertRaises(StopIteration, generator.__next__)
+
+ def test_browse_local_files(self):
+ # Test that we can browse local files
+ index_url = "file://" + PYPI_DEFAULT_STATIC_PATH
+ if sys.platform == 'win32':
+ # under windows the correct syntax is:
+ # file:///C|\the\path\here
+ # instead of
+ # file://C:\the\path\here
+ fix = re.compile(r'^(file://)([A-Za-z])(:)')
+ index_url = fix.sub('\\1/\\2|', index_url)
+
+ index_path = os.sep.join([index_url, "test_found_links", "simple"])
+ crawler = Crawler(index_path)
+ dists = crawler.get_releases("foobar")
+ self.assertEqual(4, len(dists))
+
+ def test_get_link_matcher(self):
+ crawler = Crawler("http://example.org")
+ self.assertEqual('_simple_link_matcher', crawler._get_link_matcher(
+ "http://example.org/some/file").__name__)
+ self.assertEqual('_default_link_matcher', crawler._get_link_matcher(
+ "http://other-url").__name__)
+
+ def test_default_link_matcher(self):
+ crawler = Crawler("http://example.org", mirrors=[])
+ crawler.follow_externals = True
+ crawler._is_browsable = lambda *args: True
+ base_url = "http://example.org/some/file/"
+ content = """
+<a href="../homepage" rel="homepage">link</a>
+<a href="../download" rel="download">link2</a>
+<a href="../simpleurl">link2</a>
+ """
+ found_links = set(uri for uri, _ in
+ crawler._default_link_matcher(content, base_url))
+ self.assertIn('http://example.org/some/homepage', found_links)
+ self.assertIn('http://example.org/some/simpleurl', found_links)
+ self.assertIn('http://example.org/some/download', found_links)
+
+ @unittest.skipIf(_thread is None, 'needs threads')
+ @use_pypi_server("project_list")
+ def test_search_projects(self, server):
+ # we can search the index for projects by name;
+ # the case used does not matter here
+ crawler = self._get_simple_crawler(server)
+ tests = (('Foobar', ['FooBar-bar', 'Foobar-baz', 'Baz-FooBar']),
+ ('foobar*', ['FooBar-bar', 'Foobar-baz']),
+ ('*foobar', ['Baz-FooBar']))
+
+ for search, expected in tests:
+ projects = [p.name for p in crawler.search_projects(search)]
+ self.assertListEqual(expected, projects)
+
+
+def test_suite():
+ return unittest.makeSuite(SimpleCrawlerTestCase)
+
+if __name__ == '__main__':
+ unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_pypi_xmlrpc.py b/Lib/packaging/tests/test_pypi_xmlrpc.py
new file mode 100644
index 0000000..b7b382d
--- /dev/null
+++ b/Lib/packaging/tests/test_pypi_xmlrpc.py
@@ -0,0 +1,101 @@
+"""Tests for the packaging.pypi.xmlrpc module."""
+
+from packaging.pypi.xmlrpc import Client, InvalidSearchField, ProjectNotFound
+
+from packaging.tests import unittest
+from packaging.tests.support import fake_dec
+
+try:
+ import threading
+ from packaging.tests.pypi_server import use_xmlrpc_server
+except ImportError:
+ threading = None
+ use_xmlrpc_server = fake_dec
+
+
+@unittest.skipIf(threading is None, "Needs threading")
+class TestXMLRPCClient(unittest.TestCase):
+ def _get_client(self, server, *args, **kwargs):
+ return Client(server.full_address, *args, **kwargs)
+
+ @use_xmlrpc_server()
+ def test_search_projects(self, server):
+ client = self._get_client(server)
+ server.xmlrpc.set_search_result(['FooBar', 'Foo', 'FooFoo'])
+ results = [r.name for r in client.search_projects(name='Foo')]
+ self.assertEqual(3, len(results))
+ self.assertIn('FooBar', results)
+ self.assertIn('Foo', results)
+ self.assertIn('FooFoo', results)
+
+ def test_search_projects_bad_fields(self):
+ client = Client()
+ self.assertRaises(InvalidSearchField, client.search_projects,
+ invalid="test")
+
+ @use_xmlrpc_server()
+ def test_get_releases(self, server):
+ client = self._get_client(server)
+ server.xmlrpc.set_distributions([
+ {'name': 'FooBar', 'version': '1.1'},
+ {'name': 'FooBar', 'version': '1.2', 'url': 'http://some/url/'},
+ {'name': 'FooBar', 'version': '1.3', 'url': 'http://other/url/'},
+ ])
+
+ # use a lambda here to avoid a useless mock call
+ server.xmlrpc.list_releases = lambda *a, **k: ['1.1', '1.2', '1.3']
+
+ releases = client.get_releases('FooBar (<=1.2)')
+ # don't call release_data and release_url; just return name and version.
+ self.assertEqual(2, len(releases))
+ versions = releases.get_versions()
+ self.assertIn('1.1', versions)
+ self.assertIn('1.2', versions)
+ self.assertNotIn('1.3', versions)
+
+ self.assertRaises(ProjectNotFound, client.get_releases, 'Foo')
+
+ @use_xmlrpc_server()
+ def test_get_distributions(self, server):
+ client = self._get_client(server)
+ server.xmlrpc.set_distributions([
+ {'name': 'FooBar', 'version': '1.1',
+ 'url': 'http://example.org/foobar-1.1-sdist.tar.gz',
+ 'digest': '1234567',
+ 'type': 'sdist', 'python_version': 'source'},
+ {'name': 'FooBar', 'version': '1.1',
+ 'url': 'http://example.org/foobar-1.1-bdist.tar.gz',
+ 'digest': '8912345', 'type': 'bdist'},
+ ])
+
+ releases = client.get_releases('FooBar', '1.1')
+ client.get_distributions('FooBar', '1.1')
+ release = releases.get_release('1.1')
+ self.assertEqual('http://example.org/foobar-1.1-sdist.tar.gz',
+ release['sdist'].url['url'])
+ self.assertEqual('http://example.org/foobar-1.1-bdist.tar.gz',
+ release['bdist'].url['url'])
+ self.assertEqual(release['sdist'].python_version, 'source')
+
+ @use_xmlrpc_server()
+ def test_get_metadata(self, server):
+ client = self._get_client(server)
+ server.xmlrpc.set_distributions([
+ {'name': 'FooBar',
+ 'version': '1.1',
+ 'keywords': '',
+ 'obsoletes_dist': ['FooFoo'],
+ 'requires_external': ['Foo'],
+ }])
+ release = client.get_metadata('FooBar', '1.1')
+ self.assertEqual(['Foo'], release.metadata['requires_external'])
+ self.assertEqual(['FooFoo'], release.metadata['obsoletes_dist'])
+
+
+def test_suite():
+ suite = unittest.TestSuite()
+ suite.addTest(unittest.makeSuite(TestXMLRPCClient))
+ return suite
+
+if __name__ == '__main__':
+ unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_run.py b/Lib/packaging/tests/test_run.py
new file mode 100644
index 0000000..14e7b07
--- /dev/null
+++ b/Lib/packaging/tests/test_run.py
@@ -0,0 +1,92 @@
+"""Tests for packaging.run."""
+
+import os
+import sys
+from io import StringIO
+
+from packaging import install
+from packaging.tests import unittest, support
+from packaging.run import main
+
+from test.script_helper import assert_python_ok
+
+# setup script that uses __file__
+setup_using___file__ = """\
+__file__
+
+from packaging.run import setup
+setup()
+"""
+
+setup_prints_cwd = """\
+import os
+print(os.getcwd())
+
+from packaging.run import setup
+setup()
+"""
+
+
+class RunTestCase(support.TempdirManager,
+ support.LoggingCatcher,
+ unittest.TestCase):
+
+ def setUp(self):
+ super(RunTestCase, self).setUp()
+ self.old_argv = sys.argv, sys.argv[:]
+
+ def tearDown(self):
+ sys.argv = self.old_argv[0]
+ sys.argv[:] = self.old_argv[1]
+ super(RunTestCase, self).tearDown()
+
+ # TODO restore the tests removed six months ago and port them to pysetup
+
+ def test_install(self):
+ # making sure install returns 0 or 1 exit codes
+ project = os.path.join(os.path.dirname(__file__), 'package.tgz')
+ install_path = self.mkdtemp()
+ old_get_path = install.get_path
+ install.get_path = lambda path: install_path
+ old_mod = os.stat(install_path).st_mode
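+ # drop all permissions on the target dir so the install must fail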
+ os.chmod(install_path, 0)
+ old_stderr = sys.stderr
+ sys.stderr = StringIO()
+ try:
+ self.assertFalse(install.install(project))
+ self.assertEqual(main(['install', 'blabla']), 1)
+ finally:
+ sys.stderr = old_stderr
+ os.chmod(install_path, old_mod)
+ install.get_path = old_get_path
+
+ def test_show_help(self):
+ # smoke test, just makes sure some help is displayed
+ status, out, err = assert_python_ok('-m', 'packaging.run', '--help')
+ self.assertEqual(status, 0)
+ self.assertGreater(out, b'')
+ self.assertEqual(err, b'')
+
+ def test_list_commands(self):
+ status, out, err = assert_python_ok('-m', 'packaging.run', 'run',
+ '--list-commands')
+ # check that something is displayed
+ self.assertEqual(status, 0)
+ self.assertGreater(out, b'')
+ self.assertEqual(err, b'')
+
+ # make sure the manual grouping of commands is respected
+ check_position = out.find(b' check: ')
+ build_position = out.find(b' build: ')
+ # str.find returns -1 when not found, and -1 is truthy; check explicitly
+ self.assertNotEqual(check_position, -1, out) # "out" shown on failure
+ self.assertNotEqual(build_position, -1, out)
+ self.assertLess(check_position, build_position, out)
+
+ # TODO test that custom commands don't break --list-commands
+
+
+def test_suite():
+ return unittest.makeSuite(RunTestCase)
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_support.py b/Lib/packaging/tests/test_support.py
new file mode 100644
index 0000000..0ae9e1b
--- /dev/null
+++ b/Lib/packaging/tests/test_support.py
@@ -0,0 +1,78 @@
+import os
+import tempfile
+
+from packaging.dist import Distribution
+from packaging.tests import support, unittest
+
+
+class TestingSupportTestCase(unittest.TestCase):
+
+ def test_fake_dec(self):
+ @support.fake_dec(1, 2, k=3)
+ def func(arg0, *args, **kargs):
+ return arg0, args, kargs
+ self.assertEqual(func(-1, -2, k=-3), (-1, (-2,), {'k': -3}))
+
+ def test_TempdirManager(self):
+ files = {}
+
+ class Tester(support.TempdirManager, unittest.TestCase):
+
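+ # 'self2' is the inner test instance; assertions go through the
+ # outer 'self' so failures are reported by this test case
+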
+ def test_mktempfile(self2):
+ tmpfile = self2.mktempfile()
+ files['test_mktempfile'] = tmpfile.name
+ self.assertTrue(os.path.isfile(tmpfile.name))
+
+ def test_mkdtemp(self2):
+ tmpdir = self2.mkdtemp()
+ files['test_mkdtemp'] = tmpdir
+ self.assertTrue(os.path.isdir(tmpdir))
+
+ def test_write_file(self2):
+ tmpdir = self2.mkdtemp()
+ files['test_write_file'] = tmpdir
+ self2.write_file((tmpdir, 'file1'), 'me file 1')
+ file1 = os.path.join(tmpdir, 'file1')
+ self.assertTrue(os.path.isfile(file1))
+ text = ''
+ with open(file1, 'r') as f:
+ text = f.read()
+ self.assertEqual(text, 'me file 1')
+
+ def test_create_dist(self2):
+ project_dir, dist = self2.create_dist()
+ files['test_create_dist'] = project_dir
+ self.assertTrue(os.path.isdir(project_dir))
+ self.assertIsInstance(dist, Distribution)
+
+ def test_assertIsFile(self2):
+ fd, fn = tempfile.mkstemp()
+ os.close(fd)
+ self.addCleanup(support.unlink, fn)
+ self2.assertIsFile(fn)
+ self.assertRaises(AssertionError, self2.assertIsFile, 'foO')
+
+ def test_assertIsNotFile(self2):
+ tmpdir = self2.mkdtemp()
+ self2.assertIsNotFile(tmpdir)
+
+ tester = Tester()
+ for name in ('test_mktempfile', 'test_mkdtemp', 'test_write_file',
+ 'test_create_dist', 'test_assertIsFile',
+ 'test_assertIsNotFile'):
+ tester.setUp()
+ try:
+ getattr(tester, name)()
+ finally:
+ tester.tearDown()
+
+ # check clean-up
+ if name in files:
+ self.assertFalse(os.path.exists(files[name]))
+
+
+def test_suite():
+ return unittest.makeSuite(TestingSupportTestCase)
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_uninstall.py b/Lib/packaging/tests/test_uninstall.py
new file mode 100644
index 0000000..b0d9ba7
--- /dev/null
+++ b/Lib/packaging/tests/test_uninstall.py
@@ -0,0 +1,124 @@
+"""Tests for the packaging.uninstall module."""
+import os
+import logging
+import packaging.util
+
+from packaging.errors import PackagingError
+from packaging.install import remove
+from packaging.database import disable_cache, enable_cache
+
+from packaging.tests import unittest, support
+
+SETUP_CFG = """
+[metadata]
+name = %(name)s
+version = %(version)s
+
+[files]
+packages =
+ %(pkg)s
+ %(pkg)s.sub
+"""
+
+
+class UninstallTestCase(support.TempdirManager,
+ support.LoggingCatcher,
+ support.EnvironRestorer,
+ unittest.TestCase):
+
+ restore_environ = ['PLAT']
+
+ def setUp(self):
+ super(UninstallTestCase, self).setUp()
+ self.addCleanup(enable_cache)
+ self.addCleanup(packaging.util._path_created.clear)
+ disable_cache()
+
+ def get_path(self, dist, name):
+ # the dist argument must contain an install_dist command correctly
+ # initialized with a prefix option and finalized before this method
+ # can be called successfully; practically, this means that you should
+ # call self.install_dist before self.get_path
+ cmd = dist.get_command_obj('install_dist')
+ return getattr(cmd, 'install_' + name)
+
+ def make_dist(self, name='Foo', **kw):
+ kw['name'] = name
+ pkg = name.lower()
+ if 'version' not in kw:
+ kw['version'] = '0.1'
+ project_dir, dist = self.create_dist(**kw)
+ kw['pkg'] = pkg
+
+ pkg_dir = os.path.join(project_dir, pkg)
+ os.makedirs(os.path.join(pkg_dir, 'sub'))
+
+ self.write_file((project_dir, 'setup.cfg'), SETUP_CFG % kw)
+ self.write_file((pkg_dir, '__init__.py'), '#')
+ self.write_file((pkg_dir, pkg + '_utils.py'), '#')
+ self.write_file((pkg_dir, 'sub', '__init__.py'), '#')
+ self.write_file((pkg_dir, 'sub', pkg + '_utils.py'), '#')
+
+ return project_dir
+
+ def install_dist(self, name='Foo', dirname=None, **kw):
+ if not dirname:
+ dirname = self.make_dist(name, **kw)
+ os.chdir(dirname)
+
+ dist = support.TestDistribution()
+ # for some unfathomable reason, the tests will fail horribly if the
+ # parse_config_files method is not called, even if it doesn't do
+ # anything useful; trying to build and use a command object manually
+ # also fails
+ dist.parse_config_files()
+ dist.finalize_options()
+ dist.run_command('install_dist',
+ {'prefix': ('command line', self.mkdtemp())})
+
+ site_packages = self.get_path(dist, 'purelib')
+ return dist, site_packages
+
+ def test_uninstall_unknown_distribution(self):
+ dist, site_packages = self.install_dist('Foospam')
+ self.assertRaises(PackagingError, remove, 'Foo',
+ paths=[site_packages])
+
+ def test_uninstall(self):
+ dist, site_packages = self.install_dist()
+ self.assertIsFile(site_packages, 'foo', '__init__.py')
+ self.assertIsFile(site_packages, 'foo', 'sub', '__init__.py')
+ self.assertIsFile(site_packages, 'Foo-0.1.dist-info', 'RECORD')
+ self.assertTrue(remove('Foo', paths=[site_packages]))
+ self.assertIsNotFile(site_packages, 'foo', 'sub', '__init__.py')
+ self.assertIsNotFile(site_packages, 'Foo-0.1.dist-info', 'RECORD')
+
+ def test_uninstall_error_handling(self):
+ # make sure that if an OSError occurs (like permission denied),
+ # remove() stops and displays a clean error
+ dist, site_packages = self.install_dist('Meh')
+
+ # breaking os.rename
+ old = os.rename
+
+ def _rename(source, target):
+ raise OSError(42, 'impossible operation')
+
+ os.rename = _rename
+ try:
+ self.assertFalse(remove('Meh', paths=[site_packages]))
+ finally:
+ os.rename = old
+
+ logs = [log for log in self.get_logs(logging.INFO)
+ if log.startswith('Error:')]
+ self.assertEqual(logs, ['Error: [Errno 42] impossible operation'])
+
+ self.assertTrue(remove('Meh', paths=[site_packages]))
+
+
+def test_suite():
+ return unittest.makeSuite(UninstallTestCase)
+
+if __name__ == '__main__':
+ unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_unixccompiler.py b/Lib/packaging/tests/test_unixccompiler.py
new file mode 100644
index 0000000..16a1af3
--- /dev/null
+++ b/Lib/packaging/tests/test_unixccompiler.py
@@ -0,0 +1,132 @@
+"""Tests for packaging.unixccompiler."""
+import sys
+
+import sysconfig
+from packaging.compiler.unixccompiler import UnixCCompiler
+from packaging.tests import unittest
+
+
+class UnixCCompilerTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self._backup_platform = sys.platform
+ self._backup_get_config_var = sysconfig.get_config_var
+
+ class CompilerWrapper(UnixCCompiler):
+ def rpath_foo(self):
+ return self.runtime_library_dir_option('/foo')
+ self.cc = CompilerWrapper()
+
+ def tearDown(self):
+ sys.platform = self._backup_platform
+ sysconfig.get_config_var = self._backup_get_config_var
+
+ @unittest.skipIf(sys.platform == 'win32', 'irrelevant on win32')
+ def test_runtime_libdir_option(self):
+
+ # Issue #5900: Ensure RUNPATH is added to extension
+ # modules with RPATH if GNU ld is used
+
+ # darwin
+ sys.platform = 'darwin'
+ self.assertEqual(self.cc.rpath_foo(), '-L/foo')
+
+ # hp-ux
+ sys.platform = 'hp-ux'
+ old_gcv = sysconfig.get_config_var
+
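+ # stub sysconfig.get_config_var: 'xxx' simulates a non-gcc compiler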
+ def gcv(v):
+ return 'xxx'
+ sysconfig.get_config_var = gcv
+ self.assertEqual(self.cc.rpath_foo(), ['+s', '-L/foo'])
+
+ def gcv(v):
+ return 'gcc'
+ sysconfig.get_config_var = gcv
+ self.assertEqual(self.cc.rpath_foo(), ['-Wl,+s', '-L/foo'])
+
+ def gcv(v):
+ return 'g++'
+ sysconfig.get_config_var = gcv
+ self.assertEqual(self.cc.rpath_foo(), ['-Wl,+s', '-L/foo'])
+
+ sysconfig.get_config_var = old_gcv
+
+ # irix646
+ sys.platform = 'irix646'
+ self.assertEqual(self.cc.rpath_foo(), ['-rpath', '/foo'])
+
+ # osf1V5
+ sys.platform = 'osf1V5'
+ self.assertEqual(self.cc.rpath_foo(), ['-rpath', '/foo'])
+
+ # GCC GNULD
+ sys.platform = 'bar'
+
+ def gcv(v):
+ if v == 'CC':
+ return 'gcc'
+ elif v == 'GNULD':
+ return 'yes'
+ sysconfig.get_config_var = gcv
+ self.assertEqual(self.cc.rpath_foo(), '-Wl,--enable-new-dtags,-R/foo')
+
+ # GCC non-GNULD
+ sys.platform = 'bar'
+
+ def gcv(v):
+ if v == 'CC':
+ return 'gcc'
+ elif v == 'GNULD':
+ return 'no'
+ sysconfig.get_config_var = gcv
+ self.assertEqual(self.cc.rpath_foo(), '-Wl,-R/foo')
+
+ # GCC GNULD with fully qualified configuration prefix
+ # see #7617
+ sys.platform = 'bar'
+
+ def gcv(v):
+ if v == 'CC':
+ return 'x86_64-pc-linux-gnu-gcc-4.4.2'
+ elif v == 'GNULD':
+ return 'yes'
+ sysconfig.get_config_var = gcv
+ self.assertEqual(self.cc.rpath_foo(), '-Wl,--enable-new-dtags,-R/foo')
+
+ # non-GCC GNULD
+ sys.platform = 'bar'
+
+ def gcv(v):
+ if v == 'CC':
+ return 'cc'
+ elif v == 'GNULD':
+ return 'yes'
+ sysconfig.get_config_var = gcv
+ self.assertEqual(self.cc.rpath_foo(), '-R/foo')
+
+ # non-GCC non-GNULD
+ sys.platform = 'bar'
+
+ def gcv(v):
+ if v == 'CC':
+ return 'cc'
+ elif v == 'GNULD':
+ return 'no'
+ sysconfig.get_config_var = gcv
+ self.assertEqual(self.cc.rpath_foo(), '-R/foo')
+
+ # AIX C/C++ linker
+ sys.platform = 'aix'
+
+ def gcv(v):
+ return 'xxx'
+ sysconfig.get_config_var = gcv
+ self.assertEqual(self.cc.rpath_foo(), '-blibpath:/foo')
+
+
+def test_suite():
+ return unittest.makeSuite(UnixCCompilerTestCase)
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_util.py b/Lib/packaging/tests/test_util.py
new file mode 100644
index 0000000..7f7ed18
--- /dev/null
+++ b/Lib/packaging/tests/test_util.py
@@ -0,0 +1,1013 @@
+"""Tests for packaging.util."""
+import os
+import sys
+import time
+import logging
+import tempfile
+import textwrap
+import warnings
+import subprocess
+from io import StringIO
+
+from packaging.errors import (
+ PackagingPlatformError, PackagingFileError,
+ PackagingExecError, InstallationException)
+from packaging import util
+from packaging.dist import Distribution
+from packaging.util import (
+ convert_path, change_root, split_quoted, strtobool, run_2to3,
+ get_compiler_versions, _MAC_OS_X_LD_VERSION, byte_compile, find_packages,
+ spawn, get_pypirc_path, generate_pypirc, read_pypirc, resolve_name, iglob,
+ RICH_GLOB, egginfo_to_distinfo, is_setuptools, is_distutils, is_packaging,
+ get_install_method, cfg_to_args, generate_setup_py, encode_multipart)
+
+from packaging.tests import support, unittest
+from packaging.tests.test_config import SETUP_CFG
+from test.script_helper import assert_python_ok, assert_python_failure
+
+
+PYPIRC = """\
+[distutils]
+index-servers =
+ pypi
+ server1
+
+[pypi]
+username:me
+password:xxxx
+
+[server1]
+repository:http://example.com
+username:tarek
+password:secret
+"""
+
+PYPIRC_OLD = """\
+[server-login]
+username:tarek
+password:secret
+"""
+
+WANTED = """\
+[distutils]
+index-servers =
+ pypi
+
+[pypi]
+username:tarek
+password:xxx
+"""
+
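+# body that encode_multipart is expected to produce for two form fields and
+# one file, presumably with '-x' as the boundary ('--' + boundary = '---x')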
+EXPECTED_MULTIPART_OUTPUT = [
+ b'---x',
+ b'Content-Disposition: form-data; name="username"',
+ b'',
+ b'wok',
+ b'---x',
+ b'Content-Disposition: form-data; name="password"',
+ b'',
+ b'secret',
+ b'---x',
+ b'Content-Disposition: form-data; name="picture"; filename="wok.png"',
+ b'',
+ b'PNG89',
+ b'---x--',
+ b'',
+]
+
+
+class FakePopen:
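+ # minimal subprocess.Popen stand-in: serves canned output for commands
+ # registered in the test's _exes mapping, and empty output otherwise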
+ test_class = None
+
+ def __init__(self, args, bufsize=0, executable=None,
+ stdin=None, stdout=None, stderr=None,
+ preexec_fn=None, close_fds=False,
+ shell=False, cwd=None, env=None, universal_newlines=False,
+ startupinfo=None, creationflags=0,
+ restore_signals=True, start_new_session=False,
+ pass_fds=()):
+ if isinstance(args, str):
+ args = args.split()
+ self.cmd = args[0]
+ exes = self.test_class._exes
+ if self.cmd not in exes:
+ # we don't want to call the system; return empty
+ # output so it doesn't match
+ self.stdout = StringIO()
+ self.stderr = StringIO()
+ else:
+ self.stdout = StringIO(exes[self.cmd])
+ self.stderr = StringIO()
+
+ def communicate(self, input=None, timeout=None):
+ return self.stdout.read(), self.stderr.read()
+
+ def wait(self, timeout=None):
+ return 0
+
+
+class UtilTestCase(support.EnvironRestorer,
+ support.TempdirManager,
+ support.LoggingCatcher,
+ unittest.TestCase):
+
+ restore_environ = ['HOME', 'PLAT']
+
+ def setUp(self):
+ super(UtilTestCase, self).setUp()
+ self.addCleanup(os.chdir, os.getcwd())
+ tempdir = self.mkdtemp()
+ self.rc = os.path.join(tempdir, '.pypirc')
+ os.environ['HOME'] = tempdir
+ os.chdir(tempdir)
+ # saving the environment
+ self.name = os.name
+ self.platform = sys.platform
+ self.version = sys.version
+ self.sep = os.sep
+ self.join = os.path.join
+ self.isabs = os.path.isabs
+ self.splitdrive = os.path.splitdrive
+
+ # patching os.uname
+ if hasattr(os, 'uname'):
+ self.uname = os.uname
+ self._uname = os.uname()
+ else:
+ self.uname = None
+ self._uname = None
+ os.uname = self._get_uname
+
+ def _get_uname(self):
+ return self._uname
+
+ def tearDown(self):
+ # restore the environment
+ os.name = self.name
+ sys.platform = self.platform
+ sys.version = self.version
+ os.sep = self.sep
+ os.path.join = self.join
+ os.path.isabs = self.isabs
+ os.path.splitdrive = self.splitdrive
+ if self.uname is not None:
+ os.uname = self.uname
+ else:
+ del os.uname
+ super(UtilTestCase, self).tearDown()
+
+ def mock_popen(self):
+ self.old_find_executable = util.find_executable
+ util.find_executable = self._find_executable
+ self._exes = {}
+ self.old_popen = subprocess.Popen
+ self.old_stdout = sys.stdout
+ self.old_stderr = sys.stderr
+ FakePopen.test_class = self
+ subprocess.Popen = FakePopen
+ self.addCleanup(self.unmock_popen)
+
+ def unmock_popen(self):
+ util.find_executable = self.old_find_executable
+ subprocess.Popen = self.old_popen
+ sys.stdout = self.old_stdout
+ sys.stderr = self.old_stderr
+
+ def test_set_platform(self):
+ self.addCleanup(util.set_platform, util.get_platform())
+ util.set_platform("fake")
+ self.assertEqual("fake", util.get_platform())
+
+ def test_convert_path(self):
+ # linux/mac
+ os.sep = '/'
+
+ def _join(path):
+ return '/'.join(path)
+ os.path.join = _join
+
+ self.assertEqual(convert_path('/home/to/my/stuff'),
+ '/home/to/my/stuff')
+
+ # win
+ os.sep = '\\'
+
+ def _join(*path):
+ return '\\'.join(path)
+ os.path.join = _join
+
+ self.assertRaises(ValueError, convert_path, '/home/to/my/stuff')
+ self.assertRaises(ValueError, convert_path, 'home/to/my/stuff/')
+
+ self.assertEqual(convert_path('home/to/my/stuff'),
+ 'home\\to\\my\\stuff')
+ self.assertEqual(convert_path('.'),
+ os.curdir)
+
+ def test_change_root(self):
+ # linux/mac
+ os.name = 'posix'
+
+ def _isabs(path):
+ return path[0] == '/'
+ os.path.isabs = _isabs
+
+ def _join(*path):
+ return '/'.join(path)
+ os.path.join = _join
+
+ self.assertEqual(change_root('/root', '/old/its/here'),
+ '/root/old/its/here')
+ self.assertEqual(change_root('/root', 'its/here'),
+ '/root/its/here')
+
+ # windows
+ os.name = 'nt'
+
+ def _isabs(path):
+ return path.startswith('c:\\')
+ os.path.isabs = _isabs
+
+ def _splitdrive(path):
+ if path.startswith('c:'):
+ return '', path.replace('c:', '')
+ return '', path
+ os.path.splitdrive = _splitdrive
+
+ def _join(*path):
+ return '\\'.join(path)
+ os.path.join = _join
+
+ self.assertEqual(change_root('c:\\root', 'c:\\old\\its\\here'),
+ 'c:\\root\\old\\its\\here')
+ self.assertEqual(change_root('c:\\root', 'its\\here'),
+ 'c:\\root\\its\\here')
+
+ # BugsBunny os (it's a great os)
+ os.name = 'BugsBunny'
+ self.assertRaises(PackagingPlatformError,
+ change_root, 'c:\\root', 'its\\here')
+
+ # XXX platforms to be covered: os2, mac
+
+ def test_split_quoted(self):
+ self.assertEqual(split_quoted('""one"" "two" \'three\' \\four'),
+ ['one', 'two', 'three', 'four'])
+
+ def test_strtobool(self):
+ yes = ('y', 'Y', 'yes', 'True', 't', 'true', 'True', 'On', 'on', '1')
+ no = ('n', 'no', 'f', 'false', 'off', '0', 'Off', 'No', 'N')
+
+ for y in yes:
+ self.assertTrue(strtobool(y))
+
+ for n in no:
+ self.assertFalse(strtobool(n))
+
+ def test_find_exe_version(self):
+ # the ld version scheme under MAC OS is:
+ # ^@(#)PROGRAM:ld PROJECT:ld64-VERSION
+ #
+ # where VERSION is a 2-digit number for major
+ # revisions. For instance under Leopard, it's
+ # currently 77
+ #
+ # Dots are used when branching is done.
+ #
+ # The SnowLeopard ld64 is currently 95.2.12
+
+ for output, version in (('@(#)PROGRAM:ld PROJECT:ld64-77', '77'),
+ ('@(#)PROGRAM:ld PROJECT:ld64-95.2.12',
+ '95.2.12')):
+ result = _MAC_OS_X_LD_VERSION.search(output)
+ self.assertEqual(result.group(1), version)
+
+ def _find_executable(self, name):
+ if name in self._exes:
+ return name
+ return None
+
+ def test_get_compiler_versions(self):
+ self.mock_popen()
+ # get_compiler_versions calls util.find_executable on
+ # 'gcc', 'ld' and 'dllwrap'
+ self.assertEqual(get_compiler_versions(), (None, None, None))
+
+ # Let's fake we have 'gcc' and it returns '3.4.5'
+ self._exes['gcc'] = 'gcc (GCC) 3.4.5 (mingw special)\nFSF'
+ res = get_compiler_versions()
+ self.assertEqual(str(res[0]), '3.4.5')
+
+ # and let's see what happens when the version
+ # doesn't match the regular expression
+ # (\d+\.\d+(\.\d+)*)
+ self._exes['gcc'] = 'very strange output'
+ res = get_compiler_versions()
+ self.assertEqual(res[0], None)
+
+ # same thing for ld
+ if sys.platform != 'darwin':
+ self._exes['ld'] = 'GNU ld version 2.17.50 20060824'
+ res = get_compiler_versions()
+ self.assertEqual(str(res[1]), '2.17.50')
+ self._exes['ld'] = '@(#)PROGRAM:ld PROJECT:ld64-77'
+ res = get_compiler_versions()
+ self.assertEqual(res[1], None)
+ else:
+ self._exes['ld'] = 'GNU ld version 2.17.50 20060824'
+ res = get_compiler_versions()
+ self.assertEqual(res[1], None)
+ self._exes['ld'] = '@(#)PROGRAM:ld PROJECT:ld64-77'
+ res = get_compiler_versions()
+ self.assertEqual(str(res[1]), '77')
+
+ # and dllwrap
+ self._exes['dllwrap'] = 'GNU dllwrap 2.17.50 20060824\nFSF'
+ res = get_compiler_versions()
+ self.assertEqual(str(res[2]), '2.17.50')
+ self._exes['dllwrap'] = 'Cheese Wrap'
+ res = get_compiler_versions()
+ self.assertEqual(res[2], None)
+
+ def test_byte_compile_under_B(self):
+ # make sure byte compilation works under -B (dont_write_bytecode)
+ self.addCleanup(setattr, sys, 'dont_write_bytecode',
+ sys.dont_write_bytecode)
+ sys.dont_write_bytecode = True
+ byte_compile([])
+
+ def test_newer(self):
+ self.assertRaises(PackagingFileError, util.newer, 'xxx', 'xxx')
+ self.newer_f1 = self.mktempfile()
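+ # sleep so the two temp files get distinct mtimes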
+ time.sleep(1)
+ self.newer_f2 = self.mktempfile()
+ self.assertTrue(util.newer(self.newer_f2.name, self.newer_f1.name))
+
+ def test_find_packages(self):
+ # let's create a structure we want to scan:
+ #
+ # pkg1
+ # __init__
+ # pkg2
+ # __init__
+ # pkg3
+ # __init__
+ # pkg6
+ # __init__
+ # pkg4 <--- not a pkg
+ # pkg8
+ # __init__
+ # pkg5
+ # __init__
+ #
+ root = self.mkdtemp()
+ pkg1 = os.path.join(root, 'pkg1')
+ os.makedirs(os.path.join(pkg1, 'pkg2'))
+ os.makedirs(os.path.join(pkg1, 'pkg3', 'pkg6'))
+ os.makedirs(os.path.join(pkg1, 'pkg4', 'pkg8'))
+ os.makedirs(os.path.join(root, 'pkg5'))
+ self.write_file((pkg1, '__init__.py'))
+ self.write_file((pkg1, 'pkg2', '__init__.py'))
+ self.write_file((pkg1, 'pkg3', '__init__.py'))
+ self.write_file((pkg1, 'pkg3', 'pkg6', '__init__.py'))
+ self.write_file((pkg1, 'pkg4', 'pkg8', '__init__.py'))
+ self.write_file((root, 'pkg5', '__init__.py'))
+
+ res = find_packages([root], ['pkg1.pkg2'])
+ self.assertEqual(sorted(res),
+ ['pkg1', 'pkg1.pkg3', 'pkg1.pkg3.pkg6', 'pkg5'])
+
+ def test_resolve_name(self):
+ # test raw module name
+ tmpdir = self.mkdtemp()
+ sys.path.append(tmpdir)
+ self.addCleanup(sys.path.remove, tmpdir)
+ self.write_file((tmpdir, 'hello.py'), '')
+
+ os.makedirs(os.path.join(tmpdir, 'a', 'b'))
+ self.write_file((tmpdir, 'a', '__init__.py'), '')
+ self.write_file((tmpdir, 'a', 'b', '__init__.py'), '')
+ self.write_file((tmpdir, 'a', 'b', 'c.py'), 'class Foo: pass')
+ self.write_file((tmpdir, 'a', 'b', 'd.py'), textwrap.dedent("""\
+ class FooBar:
+ class Bar:
+ def baz(self):
+ pass
+ """))
+
+ # check Python, C and built-in module
+ self.assertEqual(resolve_name('hello').__name__, 'hello')
+ self.assertEqual(resolve_name('_csv').__name__, '_csv')
+ self.assertEqual(resolve_name('sys').__name__, 'sys')
+
+ # test module.attr
+ self.assertIs(resolve_name('builtins.str'), str)
+ self.assertIsNone(resolve_name('hello.__doc__'))
+ self.assertEqual(resolve_name('a.b.c.Foo').__name__, 'Foo')
+ self.assertEqual(resolve_name('a.b.d.FooBar.Bar.baz').__name__, 'baz')
+
+ # error if module not found
+ self.assertRaises(ImportError, resolve_name, 'nonexistent')
+ self.assertRaises(ImportError, resolve_name, 'non.existent')
+ self.assertRaises(ImportError, resolve_name, 'a.no')
+ self.assertRaises(ImportError, resolve_name, 'a.b.no')
+ self.assertRaises(ImportError, resolve_name, 'a.b.no.no')
+ self.assertRaises(ImportError, resolve_name, 'inva-lid')
+
+ # looking up built-in names is not supported
+ self.assertRaises(ImportError, resolve_name, 'str')
+
+ # error if module found but not attr
+ self.assertRaises(ImportError, resolve_name, 'a.b.Spam')
+ self.assertRaises(ImportError, resolve_name, 'a.b.c.Spam')
+
+ @support.skip_2to3_optimize
+ def test_run_2to3_on_code(self):
+ content = "print 'test'"
+ converted_content = "print('test')"
+ file_handle = self.mktempfile()
+ file_name = file_handle.name
+ file_handle.write(content)
+ file_handle.flush()
+ file_handle.seek(0)
+ run_2to3([file_name])
+ new_content = "".join(file_handle.read())
+ file_handle.close()
+ self.assertEqual(new_content, converted_content)
+
+ @support.skip_2to3_optimize
+ def test_run_2to3_on_doctests(self):
+ # check that doctests in text files get converted (doctests_only mode)
+ content = ">>> print 'test'\ntest\n"
+ converted_content = ">>> print('test')\ntest\n\n"
+ file_handle = self.mktempfile()
+ file_name = file_handle.name
+ file_handle.write(content)
+ file_handle.flush()
+ file_handle.seek(0)
+ run_2to3([file_name], doctests_only=True)
+        new_content = file_handle.read()
+ file_handle.close()
+ self.assertEqual(new_content, converted_content)
+
+ @unittest.skipUnless(os.name in ('nt', 'posix'),
+ 'runs only under posix or nt')
+ def test_spawn(self):
+ tmpdir = self.mkdtemp()
+
+ # creating something executable
+ # through the shell that returns 1
+        if os.name == 'posix':
+            exe = os.path.join(tmpdir, 'foo.sh')
+            self.write_file(exe, '#!/bin/sh\nexit 1')
+        else:
+            exe = os.path.join(tmpdir, 'foo.bat')
+            self.write_file(exe, 'exit 1')
+
+        os.chmod(exe, 0o777)
+ self.assertRaises(PackagingExecError, spawn, [exe])
+
+ # now something that works
+        if os.name == 'posix':
+            exe = os.path.join(tmpdir, 'foo.sh')
+            self.write_file(exe, '#!/bin/sh\nexit 0')
+        else:
+            exe = os.path.join(tmpdir, 'foo.bat')
+            self.write_file(exe, 'exit 0')
+
+        os.chmod(exe, 0o777)
+ spawn([exe]) # should work without any error
+
+ def test_server_registration(self):
+ # This test makes sure we know how to:
+ # 1. handle several sections in .pypirc
+ # 2. handle the old format
+
+ # new format
+ self.write_file(self.rc, PYPIRC)
+ config = read_pypirc()
+
+ config = sorted(config.items())
+ expected = [('password', 'xxxx'), ('realm', 'pypi'),
+ ('repository', 'http://pypi.python.org/pypi'),
+ ('server', 'pypi'), ('username', 'me')]
+ self.assertEqual(config, expected)
+
+ # old format
+ self.write_file(self.rc, PYPIRC_OLD)
+ config = read_pypirc()
+ config = sorted(config.items())
+ expected = [('password', 'secret'), ('realm', 'pypi'),
+ ('repository', 'http://pypi.python.org/pypi'),
+ ('server', 'server-login'), ('username', 'tarek')]
+ self.assertEqual(config, expected)
+
+ def test_server_empty_registration(self):
+ rc = get_pypirc_path()
+ self.assertFalse(os.path.exists(rc))
+ generate_pypirc('tarek', 'xxx')
+ self.assertTrue(os.path.exists(rc))
+ with open(rc) as f:
+ content = f.read()
+ self.assertEqual(content, WANTED)
+
+ def test_cfg_to_args(self):
+ opts = {'description-file': 'README', 'extra-files': '',
+ 'setup-hooks': 'packaging.tests.test_config.version_hook'}
+ self.write_file('setup.cfg', SETUP_CFG % opts, encoding='utf-8')
+ self.write_file('README', 'loooong description')
+
+ with warnings.catch_warnings():
+ warnings.simplefilter('ignore', DeprecationWarning)
+ args = cfg_to_args()
+ # use Distribution to get the contents of the setup.cfg file
+ dist = Distribution()
+ dist.parse_config_files()
+ metadata = dist.metadata
+
+ self.assertEqual(args['name'], metadata['Name'])
+ # + .dev1 because the test SETUP_CFG also tests a hook function in
+ # test_config.py for appending to the version string
+ self.assertEqual(args['version'] + '.dev1', metadata['Version'])
+ self.assertEqual(args['author'], metadata['Author'])
+ self.assertEqual(args['author_email'], metadata['Author-Email'])
+ self.assertEqual(args['maintainer'], metadata['Maintainer'])
+ self.assertEqual(args['maintainer_email'],
+ metadata['Maintainer-Email'])
+ self.assertEqual(args['description'], metadata['Summary'])
+ self.assertEqual(args['long_description'], metadata['Description'])
+ self.assertEqual(args['classifiers'], metadata['Classifier'])
+ self.assertEqual(args['requires'], metadata['Requires-Dist'])
+ self.assertEqual(args['provides'], metadata['Provides-Dist'])
+
+ self.assertEqual(args['package_dir'].get(''), dist.package_dir)
+ self.assertEqual(args['packages'], dist.packages)
+ self.assertEqual(args['scripts'], dist.scripts)
+ self.assertEqual(args['py_modules'], dist.py_modules)
+
+ def test_generate_setup_py(self):
+ os.chdir(self.mkdtemp())
+ self.write_file('setup.cfg', textwrap.dedent("""\
+ [metadata]
+ name = SPAM
+ classifier = Programming Language :: Python
+ """))
+ generate_setup_py()
+ self.assertTrue(os.path.exists('setup.py'), 'setup.py not created')
+ rc, out, err = assert_python_ok('setup.py', '--name')
+ self.assertEqual(out, b'SPAM\n')
+ self.assertEqual(err, b'')
+
+ # a generated setup.py should complain if no setup.cfg is present
+ os.unlink('setup.cfg')
+ rc, out, err = assert_python_failure('setup.py', '--name')
+ self.assertIn(b'setup.cfg', err)
+
+ def test_encode_multipart(self):
+ fields = [('username', 'wok'), ('password', 'secret')]
+ files = [('picture', 'wok.png', b'PNG89')]
+ content_type, body = encode_multipart(fields, files, b'-x')
+ self.assertEqual(b'multipart/form-data; boundary=-x', content_type)
+ self.assertEqual(EXPECTED_MULTIPART_OUTPUT, body.split(b'\r\n'))
+
+
+class GlobTestCaseBase(support.TempdirManager,
+ support.LoggingCatcher,
+ unittest.TestCase):
+
+ def build_files_tree(self, files):
+ tempdir = self.mkdtemp()
+ for filepath in files:
+ is_dir = filepath.endswith('/')
+ filepath = os.path.join(tempdir, *filepath.split('/'))
+ if is_dir:
+ dirname = filepath
+ else:
+ dirname = os.path.dirname(filepath)
+ if dirname and not os.path.exists(dirname):
+ os.makedirs(dirname)
+ if not is_dir:
+ self.write_file(filepath, 'babar')
+ return tempdir
+
+ @staticmethod
+ def os_dependent_path(path):
+ path = path.rstrip('/').split('/')
+ return os.path.join(*path)
+
+ def clean_tree(self, spec):
+ files = []
+ for path, includes in spec.items():
+ if includes:
+ files.append(self.os_dependent_path(path))
+ return files
+
+
+class GlobTestCase(GlobTestCaseBase):
+
+ def assertGlobMatch(self, glob, spec):
+ tempdir = self.build_files_tree(spec)
+ expected = self.clean_tree(spec)
+ os.chdir(tempdir)
+ result = list(iglob(glob))
+ self.assertCountEqual(expected, result)
+
+ def test_regex_rich_glob(self):
+ matches = RICH_GLOB.findall(
+ r"babar aime les {fraises} est les {huitres}")
+ self.assertEqual(["fraises", "huitres"], matches)
+
+ def test_simple_glob(self):
+ glob = '*.tp?'
+ spec = {'coucou.tpl': True,
+ 'coucou.tpj': True,
+ 'Donotwant': False}
+ self.assertGlobMatch(glob, spec)
+
+ def test_simple_glob_in_dir(self):
+ glob = os.path.join('babar', '*.tp?')
+ spec = {'babar/coucou.tpl': True,
+ 'babar/coucou.tpj': True,
+ 'babar/toto.bin': False,
+ 'Donotwant': False}
+ self.assertGlobMatch(glob, spec)
+
+ def test_recursive_glob_head(self):
+ glob = os.path.join('**', 'tip', '*.t?l')
+ spec = {'babar/zaza/zuzu/tip/coucou.tpl': True,
+ 'babar/z/tip/coucou.tpl': True,
+ 'babar/tip/coucou.tpl': True,
+ 'babar/zeop/tip/babar/babar.tpl': False,
+ 'babar/z/tip/coucou.bin': False,
+ 'babar/toto.bin': False,
+ 'zozo/zuzu/tip/babar.tpl': True,
+ 'zozo/tip/babar.tpl': True,
+ 'Donotwant': False}
+ self.assertGlobMatch(glob, spec)
+
+ def test_recursive_glob_tail(self):
+ glob = os.path.join('babar', '**')
+ spec = {'babar/zaza/': True,
+ 'babar/zaza/zuzu/': True,
+ 'babar/zaza/zuzu/babar.xml': True,
+ 'babar/zaza/zuzu/toto.xml': True,
+ 'babar/zaza/zuzu/toto.csv': True,
+ 'babar/zaza/coucou.tpl': True,
+ 'babar/bubu.tpl': True,
+ 'zozo/zuzu/tip/babar.tpl': False,
+ 'zozo/tip/babar.tpl': False,
+ 'Donotwant': False}
+ self.assertGlobMatch(glob, spec)
+
+ def test_recursive_glob_middle(self):
+ glob = os.path.join('babar', '**', 'tip', '*.t?l')
+ spec = {'babar/zaza/zuzu/tip/coucou.tpl': True,
+ 'babar/z/tip/coucou.tpl': True,
+ 'babar/tip/coucou.tpl': True,
+ 'babar/zeop/tip/babar/babar.tpl': False,
+ 'babar/z/tip/coucou.bin': False,
+ 'babar/toto.bin': False,
+ 'zozo/zuzu/tip/babar.tpl': False,
+ 'zozo/tip/babar.tpl': False,
+ 'Donotwant': False}
+ self.assertGlobMatch(glob, spec)
+
+ def test_glob_set_tail(self):
+ glob = os.path.join('bin', '*.{bin,sh,exe}')
+ spec = {'bin/babar.bin': True,
+ 'bin/zephir.sh': True,
+ 'bin/celestine.exe': True,
+ 'bin/cornelius.bat': False,
+ 'bin/cornelius.xml': False,
+ 'toto/yurg': False,
+ 'Donotwant': False}
+ self.assertGlobMatch(glob, spec)
+
+ def test_glob_set_middle(self):
+ glob = os.path.join('xml', '{babar,toto}.xml')
+ spec = {'xml/babar.xml': True,
+ 'xml/toto.xml': True,
+ 'xml/babar.xslt': False,
+ 'xml/cornelius.sgml': False,
+ 'xml/zephir.xml': False,
+ 'toto/yurg.xml': False,
+ 'Donotwant': False}
+ self.assertGlobMatch(glob, spec)
+
+ def test_glob_set_head(self):
+ glob = os.path.join('{xml,xslt}', 'babar.*')
+ spec = {'xml/babar.xml': True,
+ 'xml/toto.xml': False,
+ 'xslt/babar.xslt': True,
+ 'xslt/toto.xslt': False,
+ 'toto/yurg.xml': False,
+ 'Donotwant': False}
+ self.assertGlobMatch(glob, spec)
+
+ def test_glob_all(self):
+ dirs = '{%s,%s}' % (os.path.join('xml', '*'),
+ os.path.join('xslt', '**'))
+ glob = os.path.join(dirs, 'babar.xml')
+ spec = {'xml/a/babar.xml': True,
+ 'xml/b/babar.xml': True,
+ 'xml/a/c/babar.xml': False,
+ 'xslt/a/babar.xml': True,
+ 'xslt/b/babar.xml': True,
+ 'xslt/a/c/babar.xml': True,
+ 'toto/yurg.xml': False,
+ 'Donotwant': False}
+ self.assertGlobMatch(glob, spec)
+
+ def test_invalid_glob_pattern(self):
+ invalids = [
+ 'ppooa**',
+ 'azzaeaz4**/',
+ '/**ddsfs',
+ '**##1e"&e',
+ 'DSFb**c009',
+ '{',
+ '{aaQSDFa',
+ '}',
+ 'aQSDFSaa}',
+ '{**a,',
+ ',**a}',
+ '{a**,',
+ ',b**}',
+ '{a**a,babar}',
+ '{bob,b**z}',
+ ]
+ for pattern in invalids:
+ self.assertRaises(ValueError, iglob, pattern)
+
+
+class EggInfoToDistInfoTestCase(support.TempdirManager,
+ support.LoggingCatcher,
+ unittest.TestCase):
+
+ def get_metadata_file_paths(self, distinfo_path):
+ req_metadata_files = ['METADATA', 'RECORD', 'INSTALLER']
+ metadata_file_paths = []
+ for metadata_file in req_metadata_files:
+ path = os.path.join(distinfo_path, metadata_file)
+ metadata_file_paths.append(path)
+ return metadata_file_paths
+
+ def test_egginfo_to_distinfo_setuptools(self):
+ distinfo = 'hello-0.1.1-py3.3.dist-info'
+ egginfo = 'hello-0.1.1-py3.3.egg-info'
+ dirs = [egginfo]
+ files = ['hello.py', 'hello.pyc']
+ extra_metadata = ['dependency_links.txt', 'entry_points.txt',
+ 'not-zip-safe', 'PKG-INFO', 'top_level.txt',
+ 'SOURCES.txt']
+ for f in extra_metadata:
+ files.append(os.path.join(egginfo, f))
+
+ tempdir, record_file = self.build_dist_tree(files, dirs)
+ distinfo_path = os.path.join(tempdir, distinfo)
+ egginfo_path = os.path.join(tempdir, egginfo)
+ metadata_file_paths = self.get_metadata_file_paths(distinfo_path)
+
+ egginfo_to_distinfo(record_file)
+ # test that directories and files get created
+ self.assertTrue(os.path.isdir(distinfo_path))
+ self.assertTrue(os.path.isdir(egginfo_path))
+
+ for mfile in metadata_file_paths:
+ self.assertTrue(os.path.isfile(mfile))
+
+ def test_egginfo_to_distinfo_distutils(self):
+ distinfo = 'hello-0.1.1-py3.3.dist-info'
+ egginfo = 'hello-0.1.1-py3.3.egg-info'
+ # egginfo is a file in distutils which contains the metadata
+ files = ['hello.py', 'hello.pyc', egginfo]
+
+ tempdir, record_file = self.build_dist_tree(files, dirs=[])
+ distinfo_path = os.path.join(tempdir, distinfo)
+ egginfo_path = os.path.join(tempdir, egginfo)
+ metadata_file_paths = self.get_metadata_file_paths(distinfo_path)
+
+ egginfo_to_distinfo(record_file)
+ # test that directories and files get created
+ self.assertTrue(os.path.isdir(distinfo_path))
+ self.assertTrue(os.path.isfile(egginfo_path))
+
+ for mfile in metadata_file_paths:
+ self.assertTrue(os.path.isfile(mfile))
+
+ def build_dist_tree(self, files, dirs):
+ tempdir = self.mkdtemp()
+ record_file_path = os.path.join(tempdir, 'RECORD')
+ file_paths, dir_paths = ([], [])
+ for d in dirs:
+ path = os.path.join(tempdir, d)
+ os.makedirs(path)
+ dir_paths.append(path)
+ for f in files:
+ path = os.path.join(tempdir, f)
+ with open(path, 'w') as _f:
+ _f.write(f)
+ file_paths.append(path)
+
+ with open(record_file_path, 'w') as record_file:
+ for fpath in file_paths:
+ record_file.write(fpath + '\n')
+ for dpath in dir_paths:
+ record_file.write(dpath + '\n')
+
+ return (tempdir, record_file_path)
+
+
+class PackagingLibChecks(support.TempdirManager,
+ support.LoggingCatcher,
+ unittest.TestCase):
+
+ def setUp(self):
+ super(PackagingLibChecks, self).setUp()
+ self._empty_dir = self.mkdtemp()
+
+ def test_empty_package_is_not_based_on_anything(self):
+ self.assertFalse(is_setuptools(self._empty_dir))
+ self.assertFalse(is_distutils(self._empty_dir))
+ self.assertFalse(is_packaging(self._empty_dir))
+
+ def test_setup_py_importing_setuptools_is_setuptools_based(self):
+ self.assertTrue(is_setuptools(self._setuptools_setup_py_pkg()))
+
+ def test_egg_info_dir_and_setup_py_is_setuptools_based(self):
+ self.assertTrue(is_setuptools(self._setuptools_egg_info_pkg()))
+
+ def test_egg_info_and_non_setuptools_setup_py_is_setuptools_based(self):
+ self.assertTrue(is_setuptools(self._egg_info_with_no_setuptools()))
+
+ def test_setup_py_not_importing_setuptools_is_not_setuptools_based(self):
+ self.assertFalse(is_setuptools(self._random_setup_py_pkg()))
+
+ def test_setup_py_importing_distutils_is_distutils_based(self):
+ self.assertTrue(is_distutils(self._distutils_setup_py_pkg()))
+
+ def test_pkg_info_file_and_setup_py_is_distutils_based(self):
+ self.assertTrue(is_distutils(self._distutils_pkg_info()))
+
+ def test_pkg_info_and_non_distutils_setup_py_is_distutils_based(self):
+ self.assertTrue(is_distutils(self._pkg_info_with_no_distutils()))
+
+ def test_setup_py_not_importing_distutils_is_not_distutils_based(self):
+ self.assertFalse(is_distutils(self._random_setup_py_pkg()))
+
+ def test_setup_cfg_with_no_metadata_section_is_not_packaging_based(self):
+ self.assertFalse(is_packaging(self._setup_cfg_with_no_metadata_pkg()))
+
+ def test_setup_cfg_with_valid_metadata_section_is_packaging_based(self):
+ self.assertTrue(is_packaging(self._valid_setup_cfg_pkg()))
+
+    def test_setup_cfg_with_invalid_metadata_is_not_packaging_based(self):
+ self.assertFalse(is_packaging(self._invalid_setup_cfg_pkg()))
+
+ def test_get_install_method_with_setuptools_pkg(self):
+ path = self._setuptools_setup_py_pkg()
+ self.assertEqual("setuptools", get_install_method(path))
+
+ def test_get_install_method_with_distutils_pkg(self):
+ path = self._distutils_pkg_info()
+ self.assertEqual("distutils", get_install_method(path))
+
+ def test_get_install_method_with_packaging_pkg(self):
+ path = self._valid_setup_cfg_pkg()
+ self.assertEqual("packaging", get_install_method(path))
+
+ def test_get_install_method_with_unknown_pkg(self):
+ path = self._invalid_setup_cfg_pkg()
+ self.assertRaises(InstallationException, get_install_method, path)
+
+ def test_is_setuptools_logs_setup_py_text_found(self):
+ is_setuptools(self._setuptools_setup_py_pkg())
+ expected = ['setup.py file found.',
+ 'No egg-info directory found.',
+ 'Found setuptools text in setup.py.']
+ self.assertEqual(expected, self.get_logs(logging.DEBUG))
+
+ def test_is_setuptools_logs_setup_py_text_not_found(self):
+ directory = self._random_setup_py_pkg()
+ is_setuptools(directory)
+ expected = ['setup.py file found.', 'No egg-info directory found.',
+ 'No setuptools text found in setup.py.']
+ self.assertEqual(expected, self.get_logs(logging.DEBUG))
+
+ def test_is_setuptools_logs_egg_info_dir_found(self):
+ is_setuptools(self._setuptools_egg_info_pkg())
+ expected = ['setup.py file found.', 'Found egg-info directory.']
+ self.assertEqual(expected, self.get_logs(logging.DEBUG))
+
+ def test_is_distutils_logs_setup_py_text_found(self):
+ is_distutils(self._distutils_setup_py_pkg())
+ expected = ['setup.py file found.',
+ 'No PKG-INFO file found.',
+ 'Found distutils text in setup.py.']
+ self.assertEqual(expected, self.get_logs(logging.DEBUG))
+
+ def test_is_distutils_logs_setup_py_text_not_found(self):
+ directory = self._random_setup_py_pkg()
+ is_distutils(directory)
+ expected = ['setup.py file found.', 'No PKG-INFO file found.',
+ 'No distutils text found in setup.py.']
+ self.assertEqual(expected, self.get_logs(logging.DEBUG))
+
+ def test_is_distutils_logs_pkg_info_file_found(self):
+ is_distutils(self._distutils_pkg_info())
+ expected = ['setup.py file found.', 'PKG-INFO file found.']
+ self.assertEqual(expected, self.get_logs(logging.DEBUG))
+
+ def test_is_packaging_logs_setup_cfg_found(self):
+ is_packaging(self._valid_setup_cfg_pkg())
+ expected = ['setup.cfg file found.']
+ self.assertEqual(expected, self.get_logs(logging.DEBUG))
+
+ def test_is_packaging_logs_setup_cfg_not_found(self):
+ is_packaging(self._empty_dir)
+ expected = ['No setup.cfg file found.']
+ self.assertEqual(expected, self.get_logs(logging.DEBUG))
+
+ def _write_setuptools_setup_py(self, directory):
+ self.write_file((directory, 'setup.py'),
+ "from setuptools import setup")
+
+ def _write_distutils_setup_py(self, directory):
+ self.write_file([directory, 'setup.py'],
+ "from distutils.core import setup")
+
+ def _write_packaging_setup_cfg(self, directory):
+ self.write_file([directory, 'setup.cfg'],
+ ("[metadata]\n"
+ "name = mypackage\n"
+ "version = 0.1.0\n"))
+
+ def _setuptools_setup_py_pkg(self):
+ tmp = self.mkdtemp()
+ self._write_setuptools_setup_py(tmp)
+ return tmp
+
+ def _distutils_setup_py_pkg(self):
+ tmp = self.mkdtemp()
+ self._write_distutils_setup_py(tmp)
+ return tmp
+
+ def _valid_setup_cfg_pkg(self):
+ tmp = self.mkdtemp()
+ self._write_packaging_setup_cfg(tmp)
+ return tmp
+
+ def _setuptools_egg_info_pkg(self):
+ tmp = self.mkdtemp()
+ self._write_setuptools_setup_py(tmp)
+ tempfile.mkdtemp(suffix='.egg-info', dir=tmp)
+ return tmp
+
+ def _distutils_pkg_info(self):
+ tmp = self._distutils_setup_py_pkg()
+ self.write_file([tmp, 'PKG-INFO'], '', encoding='UTF-8')
+ return tmp
+
+ def _setup_cfg_with_no_metadata_pkg(self):
+ tmp = self.mkdtemp()
+ self.write_file([tmp, 'setup.cfg'],
+ ("[othersection]\n"
+ "foo = bar\n"))
+ return tmp
+
+ def _invalid_setup_cfg_pkg(self):
+ tmp = self.mkdtemp()
+ self.write_file([tmp, 'setup.cfg'],
+ ("[metadata]\n"
+ "name = john\n"
+ "last_name = doe\n"))
+ return tmp
+
+ def _egg_info_with_no_setuptools(self):
+ tmp = self._random_setup_py_pkg()
+ tempfile.mkdtemp(suffix='.egg-info', dir=tmp)
+ return tmp
+
+ def _pkg_info_with_no_distutils(self):
+ tmp = self._random_setup_py_pkg()
+ self.write_file([tmp, 'PKG-INFO'], '', encoding='UTF-8')
+ return tmp
+
+ def _random_setup_py_pkg(self):
+ tmp = self.mkdtemp()
+ self.write_file((tmp, 'setup.py'), "from mypackage import setup")
+ return tmp
+
+
+def test_suite():
+ suite = unittest.makeSuite(UtilTestCase)
+ suite.addTest(unittest.makeSuite(GlobTestCase))
+ suite.addTest(unittest.makeSuite(EggInfoToDistInfoTestCase))
+ suite.addTest(unittest.makeSuite(PackagingLibChecks))
+ return suite
+
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_version.py b/Lib/packaging/tests/test_version.py
new file mode 100644
index 0000000..2d178bf
--- /dev/null
+++ b/Lib/packaging/tests/test_version.py
@@ -0,0 +1,271 @@
+"""Tests for packaging.version."""
+import doctest
+
+from packaging.version import NormalizedVersion as V
+from packaging.version import HugeMajorVersionNumError, IrrationalVersionError
+from packaging.version import suggest_normalized_version as suggest
+from packaging.version import VersionPredicate
+from packaging.tests import unittest
+
+
+class VersionTestCase(unittest.TestCase):
+
+ versions = ((V('1.0'), '1.0'),
+ (V('1.1'), '1.1'),
+ (V('1.2.3'), '1.2.3'),
+ (V('1.2'), '1.2'),
+ (V('1.2.3a4'), '1.2.3a4'),
+ (V('1.2c4'), '1.2c4'),
+ (V('4.17rc2'), '4.17rc2'),
+ (V('1.2.3.4'), '1.2.3.4'),
+ (V('1.2.3.4.0b3'), '1.2.3.4b3'),
+ (V('1.2.0.0.0'), '1.2'),
+ (V('1.0.dev345'), '1.0.dev345'),
+ (V('1.0.post456.dev623'), '1.0.post456.dev623'))
+
+ def test_repr(self):
+
+ self.assertEqual(repr(V('1.0')), "NormalizedVersion('1.0')")
+
+ def test_basic_versions(self):
+
+ for v, s in self.versions:
+ self.assertEqual(str(v), s)
+
+ def test_hash(self):
+
+ for v, s in self.versions:
+ self.assertEqual(hash(v), hash(V(s)))
+
+ versions = set([v for v, s in self.versions])
+ for v, s in self.versions:
+ self.assertIn(v, versions)
+
+ self.assertEqual(set([V('1.0')]), set([V('1.0'), V('1.0')]))
+
+ def test_from_parts(self):
+
+ for v, s in self.versions:
+ v2 = V.from_parts(*v.parts)
+ self.assertEqual(v, v2)
+ self.assertEqual(str(v), str(v2))
+
+ def test_irrational_versions(self):
+
+ irrational = ('1', '1.2a', '1.2.3b', '1.02', '1.2a03',
+ '1.2a3.04', '1.2.dev.2', '1.2dev', '1.2.dev',
+ '1.2.dev2.post2', '1.2.post2.dev3.post4')
+
+ for s in irrational:
+ self.assertRaises(IrrationalVersionError, V, s)
+
+ def test_huge_version(self):
+
+ self.assertEqual(str(V('1980.0')), '1980.0')
+ self.assertRaises(HugeMajorVersionNumError, V, '1981.0')
+ self.assertEqual(str(V('1981.0', error_on_huge_major_num=False)),
+ '1981.0')
+
+ def test_comparison(self):
+ comparison_doctest_string = r"""
+ >>> V('1.2.0') == '1.2'
+ Traceback (most recent call last):
+ ...
+ TypeError: cannot compare NormalizedVersion and str
+
+ >>> V('1.2') < '1.3'
+ Traceback (most recent call last):
+ ...
+ TypeError: cannot compare NormalizedVersion and str
+
+ >>> V('1.2.0') == V('1.2')
+ True
+ >>> V('1.2.0') == V('1.2.3')
+ False
+ >>> V('1.2.0') != V('1.2.3')
+ True
+ >>> V('1.2.0') < V('1.2.3')
+ True
+ >>> V('1.2.0') < V('1.2.0')
+ False
+ >>> V('1.2.0') <= V('1.2.0')
+ True
+ >>> V('1.2.0') <= V('1.2.3')
+ True
+ >>> V('1.2.3') <= V('1.2.0')
+ False
+ >>> V('1.2.0') >= V('1.2.0')
+ True
+ >>> V('1.2.3') >= V('1.2.0')
+ True
+ >>> V('1.2.0') >= V('1.2.3')
+ False
+ >>> V('1.2.0rc1') >= V('1.2.0')
+ False
+ >>> V('1.0') > V('1.0b2')
+ True
+ >>> V('1.0') > V('1.0c2')
+ True
+ >>> V('1.0') > V('1.0rc2')
+ True
+ >>> V('1.0rc2') > V('1.0rc1')
+ True
+ >>> V('1.0c4') > V('1.0c1')
+ True
+ >>> (V('1.0') > V('1.0c2') > V('1.0c1') > V('1.0b2') > V('1.0b1')
+ ... > V('1.0a2') > V('1.0a1'))
+ True
+ >>> (V('1.0.0') > V('1.0.0c2') > V('1.0.0c1') > V('1.0.0b2') > V('1.0.0b1')
+ ... > V('1.0.0a2') > V('1.0.0a1'))
+ True
+
+ >>> V('1.0') < V('1.0.post456.dev623')
+ True
+
+ >>> V('1.0.post456.dev623') < V('1.0.post456') < V('1.0.post1234')
+ True
+
+ >>> (V('1.0a1')
+ ... < V('1.0a2.dev456')
+ ... < V('1.0a2')
+ ... < V('1.0a2.1.dev456') # e.g. need to do a quick post release on 1.0a2
+ ... < V('1.0a2.1')
+ ... < V('1.0b1.dev456')
+ ... < V('1.0b2')
+ ... < V('1.0c1.dev456')
+ ... < V('1.0c1')
+ ... < V('1.0.dev7')
+ ... < V('1.0.dev18')
+ ... < V('1.0.dev456')
+ ... < V('1.0.dev1234')
+ ... < V('1.0rc1')
+ ... < V('1.0rc2')
+ ... < V('1.0')
+ ... < V('1.0.post456.dev623') # development version of a post release
+ ... < V('1.0.post456'))
+ True
+ """
+ doctest.script_from_examples(comparison_doctest_string)
+
+ # the doctest above is never run, so temporarily add real unit
+ # tests until the doctest is rewritten
+ self.assertLessEqual(V('1.2.0rc1'), V('1.2.0'))
+ self.assertGreater(V('1.0'), V('1.0c2'))
+ self.assertGreater(V('1.0'), V('1.0rc2'))
+ self.assertGreater(V('1.0rc2'), V('1.0rc1'))
+ self.assertGreater(V('1.0c4'), V('1.0c1'))
+
+ def test_suggest_normalized_version(self):
+
+ self.assertEqual(suggest('1.0'), '1.0')
+ self.assertEqual(suggest('1.0-alpha1'), '1.0a1')
+ self.assertEqual(suggest('1.0c2'), '1.0c2')
+ self.assertEqual(suggest('walla walla washington'), None)
+ self.assertEqual(suggest('2.4c1'), '2.4c1')
+ self.assertEqual(suggest('v1.0'), '1.0')
+
+ # from setuptools
+ self.assertEqual(suggest('0.4a1.r10'), '0.4a1.post10')
+ self.assertEqual(suggest('0.7a1dev-r66608'), '0.7a1.dev66608')
+ self.assertEqual(suggest('0.6a9.dev-r41475'), '0.6a9.dev41475')
+ self.assertEqual(suggest('2.4preview1'), '2.4c1')
+ self.assertEqual(suggest('2.4pre1'), '2.4c1')
+ self.assertEqual(suggest('2.1-rc2'), '2.1c2')
+
+ # from pypi
+ self.assertEqual(suggest('0.1dev'), '0.1.dev0')
+ self.assertEqual(suggest('0.1.dev'), '0.1.dev0')
+
+ # we want to be able to parse Twisted
+ # development versions are like post releases in Twisted
+ self.assertEqual(suggest('9.0.0+r2363'), '9.0.0.post2363')
+
+ # pre-releases are using markers like "pre1"
+ self.assertEqual(suggest('9.0.0pre1'), '9.0.0c1')
+
+        # we want to be able to parse Tcl/Tk versions; they use
+        # "p1", "p2" for post releases
+ self.assertEqual(suggest('1.4p1'), '1.4.post1')
+
+ def test_predicate(self):
+ # VersionPredicate knows how to parse stuff like:
+ #
+ # Project (>=version, ver2)
+
+ predicates = ('zope.interface (>3.5.0)',
+ 'AnotherProject (3.4)',
+ 'OtherProject (<3.0)',
+ 'NoVersion',
+ 'Hey (>=2.5,<2.7)')
+
+ for predicate in predicates:
+ VersionPredicate(predicate)
+
+ self.assertTrue(VersionPredicate('Hey (>=2.5,<2.7)').match('2.6'))
+ self.assertTrue(VersionPredicate('Ho').match('2.6'))
+ self.assertFalse(VersionPredicate('Hey (>=2.5,!=2.6,<2.7)').match('2.6'))
+ self.assertTrue(VersionPredicate('Ho (<3.0)').match('2.6'))
+ self.assertTrue(VersionPredicate('Ho (<3.0,!=2.5)').match('2.6.0'))
+ self.assertFalse(VersionPredicate('Ho (<3.0,!=2.6)').match('2.6.0'))
+ self.assertTrue(VersionPredicate('Ho (2.5)').match('2.5.4'))
+ self.assertFalse(VersionPredicate('Ho (!=2.5)').match('2.5.2'))
+ self.assertTrue(VersionPredicate('Hey (<=2.5)').match('2.5.9'))
+ self.assertFalse(VersionPredicate('Hey (<=2.5)').match('2.6.0'))
+ self.assertTrue(VersionPredicate('Hey (>=2.5)').match('2.5.1'))
+
+ self.assertRaises(ValueError, VersionPredicate, '')
+
+ self.assertTrue(VersionPredicate('Hey 2.5').match('2.5.1'))
+
+        # XXX need to silence the micro version in this case
+ self.assertFalse(VersionPredicate('Ho (<3.0,!=2.6)').match('2.6.3'))
+
+ # Make sure a predicate that ends with a number works
+ self.assertTrue(VersionPredicate('virtualenv5 (1.0)').match('1.0'))
+ self.assertTrue(VersionPredicate('virtualenv5').match('1.0'))
+ self.assertTrue(VersionPredicate('vi5two').match('1.0'))
+ self.assertTrue(VersionPredicate('5two').match('1.0'))
+ self.assertTrue(VersionPredicate('vi5two 1.0').match('1.0'))
+ self.assertTrue(VersionPredicate('5two 1.0').match('1.0'))
+
+ # test repr
+ for predicate in predicates:
+ self.assertEqual(str(VersionPredicate(predicate)), predicate)
+
+ def test_predicate_name(self):
+ # Test that names are parsed the right way
+
+ self.assertEqual('Hey', VersionPredicate('Hey (<1.1)').name)
+ self.assertEqual('Foo-Bar', VersionPredicate('Foo-Bar (1.1)').name)
+ self.assertEqual('Foo Bar', VersionPredicate('Foo Bar (1.1)').name)
+
+ def test_is_final(self):
+        # NormalizedVersion knows whether a version is a final one or not.
+ final_versions = ('1.0', '1.0.post456')
+ other_versions = ('1.0.dev1', '1.0a2', '1.0c3')
+
+ for version in final_versions:
+ self.assertTrue(V(version).is_final)
+ for version in other_versions:
+ self.assertFalse(V(version).is_final)
+
+
+class VersionWhiteBoxTestCase(unittest.TestCase):
+
+ def test_parse_numdots(self):
+ # For code coverage completeness, as pad_zeros_length can't be set or
+ # influenced from the public interface
+ self.assertEqual(
+ V('1.0')._parse_numdots('1.0', '1.0', pad_zeros_length=3),
+ [1, 0, 0])
+
+
+def test_suite():
+ #README = os.path.join(os.path.dirname(__file__), 'README.txt')
+ #suite = [doctest.DocFileSuite(README), unittest.makeSuite(VersionTestCase)]
+ suite = [unittest.makeSuite(VersionTestCase),
+ unittest.makeSuite(VersionWhiteBoxTestCase)]
+ return unittest.TestSuite(suite)
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/util.py b/Lib/packaging/util.py
new file mode 100644
index 0000000..a1f6782
--- /dev/null
+++ b/Lib/packaging/util.py
@@ -0,0 +1,1480 @@
+"""Miscellaneous utility functions."""
+
+import os
+import re
+import csv
+import imp
+import sys
+import errno
+import codecs
+import shutil
+import string
+import hashlib
+import posixpath
+import subprocess
+import sysconfig
+from glob import iglob as std_iglob
+from fnmatch import fnmatchcase
+from inspect import getsource
+from configparser import RawConfigParser
+
+from packaging import logger
+from packaging.errors import (PackagingPlatformError, PackagingFileError,
+ PackagingExecError, InstallationException,
+ PackagingInternalError)
+
+__all__ = [
+ # file dependencies
+ 'newer', 'newer_group',
+ # helpers for commands (dry-run system)
+ 'execute', 'write_file',
+ # spawning programs
+ 'find_executable', 'spawn',
+ # path manipulation
+ 'convert_path', 'change_root',
+ # 2to3 conversion
+ 'Mixin2to3', 'run_2to3',
+ # packaging compatibility helpers
+ 'cfg_to_args', 'generate_setup_py',
+ 'egginfo_to_distinfo',
+ 'get_install_method',
+ # misc
+ 'ask', 'check_environ', 'encode_multipart', 'resolve_name',
+ # querying for information TODO move to sysconfig
+ 'get_compiler_versions', 'get_platform', 'set_platform',
+ # configuration TODO move to packaging.config
+ 'get_pypirc_path', 'read_pypirc', 'generate_pypirc',
+ 'strtobool', 'split_multiline',
+]
+
+_PLATFORM = None
+_DEFAULT_INSTALLER = 'packaging'
+
+
+def newer(source, target):
+    """Tell if 'source' is newer than 'target'.
+
+ Returns true if 'source' exists and is more recently modified than
+ 'target', or if 'source' exists and 'target' doesn't.
+
+    Returns false if both exist and 'target' is the same age or newer
+    than 'source'.  Raises PackagingFileError if 'source' does not exist.
+
+ Note that this test is not very accurate: files created in the same second
+ will have the same "age".
+ """
+ if not os.path.exists(source):
+ raise PackagingFileError("file '%s' does not exist" %
+ os.path.abspath(source))
+ if not os.path.exists(target):
+ return True
+
+ return os.stat(source).st_mtime > os.stat(target).st_mtime
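+
+# A minimal usage sketch for newer() ('spam.c' and 'spam.o' are hypothetical
+# file names): rebuild the target only when the source changed more recently.
+#
+#   if newer('spam.c', 'spam.o'):
+#       rebuild()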
+
+
+def get_platform():
+ """Return a string that identifies the current platform.
+
+ By default, will return the value returned by sysconfig.get_platform(),
+ but it can be changed by calling set_platform().
+ """
+ global _PLATFORM
+ if _PLATFORM is None:
+ _PLATFORM = sysconfig.get_platform()
+ return _PLATFORM
+
+
+def set_platform(identifier):
+ """Set the platform string identifier returned by get_platform().
+
+ Note that this change doesn't impact the value returned by
+ sysconfig.get_platform(); it is local to packaging.
+ """
+ global _PLATFORM
+ _PLATFORM = identifier
+
+
+def convert_path(pathname):
+ """Return 'pathname' as a name that will work on the native filesystem.
+
+ The path is split on '/' and put back together again using the current
+ directory separator. Needed because filenames in the setup script are
+ always supplied in Unix style, and have to be converted to the local
+ convention before we can actually use them in the filesystem. Raises
+ ValueError on non-Unix-ish systems if 'pathname' either starts or
+ ends with a slash.
+ """
+ if os.sep == '/':
+ return pathname
+ if not pathname:
+ return pathname
+ if pathname[0] == '/':
+ raise ValueError("path '%s' cannot be absolute" % pathname)
+ if pathname[-1] == '/':
+ raise ValueError("path '%s' cannot end with '/'" % pathname)
+
+ paths = pathname.split('/')
+ while os.curdir in paths:
+ paths.remove(os.curdir)
+ if not paths:
+ return os.curdir
+ return os.path.join(*paths)
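+
+# For example, convert_path('pkg/data/file.txt') gives 'pkg\\data\\file.txt'
+# on Windows, while on POSIX systems the string is returned unchanged.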
+
+
+def change_root(new_root, pathname):
+ """Return 'pathname' with 'new_root' prepended.
+
+ If 'pathname' is relative, this is equivalent to
+ os.path.join(new_root,pathname). Otherwise, it requires making 'pathname'
+ relative and then joining the two, which is tricky on DOS/Windows.
+ """
+ if os.name == 'posix':
+ if not os.path.isabs(pathname):
+ return os.path.join(new_root, pathname)
+ else:
+ return os.path.join(new_root, pathname[1:])
+
+ elif os.name == 'nt':
+ drive, path = os.path.splitdrive(pathname)
+ if path[0] == '\\':
+ path = path[1:]
+ return os.path.join(new_root, path)
+
+ elif os.name == 'os2':
+ drive, path = os.path.splitdrive(pathname)
+ if path[0] == os.sep:
+ path = path[1:]
+ return os.path.join(new_root, path)
+
+ else:
+ raise PackagingPlatformError("nothing known about "
+ "platform '%s'" % os.name)
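+
+# Example (POSIX, hypothetical paths): an absolute path is re-rooted,
+# a relative one is simply joined:
+#
+#   change_root('/tmp/stage', '/usr/bin/foo')  # -> '/tmp/stage/usr/bin/foo'
+#   change_root('/tmp/stage', 'bin/foo')       # -> '/tmp/stage/bin/foo'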
+
+_environ_checked = False
+
+
+def check_environ():
+ """Ensure that 'os.environ' has all the environment variables needed.
+
+    These are the variables that users are guaranteed to be able to use in
+    config files, command-line options, etc.  Currently this includes:
+ HOME - user's home directory (Unix only)
+ PLAT - description of the current platform, including hardware
+ and OS (see 'get_platform()')
+ """
+ global _environ_checked
+ if _environ_checked:
+ return
+
+ if os.name == 'posix' and 'HOME' not in os.environ:
+ import pwd
+ os.environ['HOME'] = pwd.getpwuid(os.getuid())[5]
+
+ if 'PLAT' not in os.environ:
+ os.environ['PLAT'] = sysconfig.get_platform()
+
+ _environ_checked = True
+
+
+# Needed by 'split_quoted()'
+_wordchars_re = _squote_re = _dquote_re = None
+
+
+def _init_regex():
+ global _wordchars_re, _squote_re, _dquote_re
+ _wordchars_re = re.compile(r'[^\\\'\"%s ]*' % string.whitespace)
+ _squote_re = re.compile(r"'(?:[^'\\]|\\.)*'")
+ _dquote_re = re.compile(r'"(?:[^"\\]|\\.)*"')
+
+
+# TODO replace with shlex.split after testing
+
+def split_quoted(s):
+ """Split a string up according to Unix shell-like rules for quotes and
+ backslashes.
+
+ In short: words are delimited by spaces, as long as those
+ spaces are not escaped by a backslash, or inside a quoted string.
+ Single and double quotes are equivalent, and the quote characters can
+ be backslash-escaped. The backslash is stripped from any two-character
+ escape sequence, leaving only the escaped character. The quote
+ characters are stripped from any quoted string. Returns a list of
+ words.
+ """
+ # This is a nice algorithm for splitting up a single string, since it
+ # doesn't require character-by-character examination. It was a little
+ # bit of a brain-bender to get it working right, though...
+ if _wordchars_re is None:
+ _init_regex()
+
+ s = s.strip()
+ words = []
+ pos = 0
+
+ while s:
+ m = _wordchars_re.match(s, pos)
+ end = m.end()
+ if end == len(s):
+ words.append(s[:end])
+ break
+
+ if s[end] in string.whitespace: # unescaped, unquoted whitespace: now
+ words.append(s[:end]) # we definitely have a word delimiter
+ s = s[end:].lstrip()
+ pos = 0
+
+ elif s[end] == '\\': # preserve whatever is being escaped;
+ # will become part of the current word
+ s = s[:end] + s[end + 1:]
+ pos = end + 1
+
+ else:
+ if s[end] == "'": # slurp singly-quoted string
+ m = _squote_re.match(s, end)
+ elif s[end] == '"': # slurp doubly-quoted string
+ m = _dquote_re.match(s, end)
+ else:
+ raise RuntimeError("this can't happen "
+ "(bad char '%c')" % s[end])
+
+ if m is None:
+ raise ValueError("bad string (mismatched %s quotes?)" % s[end])
+
+ beg, end = m.span()
+ s = s[:beg] + s[beg + 1:end - 1] + s[end:]
+ pos = m.end() - 2
+
+ if pos >= len(s):
+ words.append(s)
+ break
+
+ return words
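+
+# An illustrative example of the quoting rules (doctest-style, not executed):
+#
+#   >>> split_quoted(r'hello "big world" foo\ bar')
+#   ['hello', 'big world', 'foo bar']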
+
+
+def split_multiline(value):
+ """Split a multiline string into a list, excluding blank lines."""
+
+ return [element for element in
+ (line.strip() for line in value.split('\n'))
+ if element]
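+
+# For example, split_multiline('\n  docs\n\n  tests\n') returns
+# ['docs', 'tests']: blank lines and surrounding whitespace are dropped.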
+
+
+def execute(func, args, msg=None, dry_run=False):
+ """Perform some action that affects the outside world.
+
+ Some actions (e.g. writing to the filesystem) are special because
+    they are disabled by the 'dry_run' flag. This function takes care of all
+ that bureaucracy for you; all you have to do is supply the
+ function to call and an argument tuple for it (to embody the
+ "external action" being performed), and an optional message to
+ print.
+ """
+ if msg is None:
+ msg = "%s%r" % (func.__name__, args)
+ if msg[-2:] == ',)': # correct for singleton tuple
+ msg = msg[0:-2] + ')'
+
+ logger.info(msg)
+ if not dry_run:
+ func(*args)
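+
+# Usage sketch ('path' is a hypothetical name): with dry_run=True only the
+# message is logged; with dry_run=False, os.remove is actually called.
+#
+#   execute(os.remove, (path,), "removing %s" % path, dry_run=True)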
+
+
+def strtobool(val):
+ """Convert a string representation of truth to a boolean.
+
+ True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values
+ are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if
+ 'val' is anything else.
+ """
+ val = val.lower()
+ if val in ('y', 'yes', 't', 'true', 'on', '1'):
+ return True
+ elif val in ('n', 'no', 'f', 'false', 'off', '0'):
+ return False
+ else:
+ raise ValueError("invalid truth value %r" % (val,))
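+
+# For example, strtobool('YES') returns True, strtobool('0') returns False,
+# and strtobool('maybe') raises ValueError.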
+
+
+def byte_compile(py_files, optimize=0, force=False, prefix=None,
+ base_dir=None, dry_run=False, direct=None):
+ """Byte-compile a collection of Python source files to either .pyc
+ or .pyo files in a __pycache__ subdirectory.
+
+ 'py_files' is a list of files to compile; any files that don't end in
+ ".py" are silently skipped. 'optimize' must be one of the following:
+ 0 - don't optimize (generate .pyc)
+ 1 - normal optimization (like "python -O")
+ 2 - extra optimization (like "python -OO")
+    This function is independent of the running Python's -O or -B options;
+ it is fully controlled by the parameters passed in.
+
+ If 'force' is true, all files are recompiled regardless of
+ timestamps.
+
+    The source filename encoded in each bytecode file defaults to the
+    corresponding name listed in 'py_files'; you can modify these with
+    'prefix' and 'base_dir'.  'prefix' is a string that is stripped off each
+ source filename, and 'base_dir' is a directory name that will be
+ prepended (after 'prefix' is stripped). You can supply either or both
+ (or neither) of 'prefix' and 'base_dir', as you wish.
+
+ If 'dry_run' is true, doesn't actually do anything that would
+ affect the filesystem.
+
+ Byte-compilation is either done directly in this interpreter process
+ with the standard py_compile module, or indirectly by writing a
+ temporary script and executing it. Normally, you should let
+    'byte_compile()' figure out whether to use direct compilation (see
+ the source for details). The 'direct' flag is used by the script
+ generated in indirect mode; unless you know what you're doing, leave
+ it set to None.
+ """
+ # FIXME use compileall + remove direct/indirect shenanigans
+
+ # First, if the caller didn't force us into direct or indirect mode,
+ # figure out which mode we should be in. We take a conservative
+ # approach: choose direct mode *only* if the current interpreter is
+ # in debug mode and optimize is 0. If we're not in debug mode (-O
+ # or -OO), we don't know which level of optimization this
+ # interpreter is running with, so we can't do direct
+ # byte-compilation and be certain that it's the right thing. Thus,
+ # always compile indirectly if the current interpreter is in either
+ # optimize mode, or if either optimization level was requested by
+ # the caller.
+ if direct is None:
+ direct = (__debug__ and optimize == 0)
+
+ # "Indirect" byte-compilation: write a temporary script and then
+ # run it with the appropriate flags.
+ if not direct:
+ from tempfile import mkstemp
+ # XXX use something better than mkstemp
+ script_fd, script_name = mkstemp(".py")
+ os.close(script_fd)
+ script_fd = None
+ logger.info("writing byte-compilation script '%s'", script_name)
+ if not dry_run:
+ if script_fd is not None:
+ script = os.fdopen(script_fd, "w", encoding='utf-8')
+ else:
+ script = open(script_name, "w", encoding='utf-8')
+
+ with script:
+ script.write("""\
+from packaging.util import byte_compile
+files = [
+""")
+
+ # XXX would be nice to write absolute filenames, just for
+ # safety's sake (script should be more robust in the face of
+ # chdir'ing before running it). But this requires abspath'ing
+ # 'prefix' as well, and that breaks the hack in build_lib's
+ # 'byte_compile()' method that carefully tacks on a trailing
+ # slash (os.sep really) to make sure the prefix here is "just
+ # right". This whole prefix business is rather delicate -- the
+ # problem is that it's really a directory, but I'm treating it
+ # as a dumb string, so trailing slashes and so forth matter.
+
+ #py_files = map(os.path.abspath, py_files)
+ #if prefix:
+ # prefix = os.path.abspath(prefix)
+
+ script.write(",\n".join(map(repr, py_files)) + "]\n")
+ script.write("""
+byte_compile(files, optimize=%r, force=%r,
+ prefix=%r, base_dir=%r,
+ dry_run=False,
+ direct=True)
+""" % (optimize, force, prefix, base_dir))
+
+ cmd = [sys.executable, script_name]
+
+ env = os.environ.copy()
+            env['PYTHONPATH'] = os.pathsep.join(sys.path)
+ try:
+ spawn(cmd, env=env)
+ finally:
+ execute(os.remove, (script_name,), "removing %s" % script_name,
+ dry_run=dry_run)
+
+ # "Direct" byte-compilation: use the py_compile module to compile
+ # right here, right now. Note that the script generated in indirect
+ # mode simply calls 'byte_compile()' in direct mode, a weird sort of
+ # cross-process recursion. Hey, it works!
+ else:
+ from py_compile import compile
+
+ for file in py_files:
+ if file[-3:] != ".py":
+ # This lets us be lazy and not filter filenames in
+ # the "install_lib" command.
+ continue
+
+ # Terminology from the py_compile module:
+ # cfile - byte-compiled file
+ # dfile - purported source filename (same as 'file' by default)
+ # The second argument to cache_from_source forces the extension to
+ # be .pyc (if true) or .pyo (if false); without it, the extension
+ # would depend on the calling Python's -O option
+ cfile = imp.cache_from_source(file, not optimize)
+ dfile = file
+
+ if prefix:
+ if file[:len(prefix)] != prefix:
+ raise ValueError("invalid prefix: filename %r doesn't "
+ "start with %r" % (file, prefix))
+ dfile = dfile[len(prefix):]
+ if base_dir:
+ dfile = os.path.join(base_dir, dfile)
+
+            cfile_base = os.path.basename(cfile)
+            if force or newer(file, cfile):
+                logger.info("byte-compiling %s to %s", file, cfile_base)
+                if not dry_run:
+                    compile(file, cfile, dfile)
+            else:
+                logger.debug("skipping byte-compilation of %s to %s",
+                             file, cfile_base)
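+
+# Usage sketch (hypothetical build paths): compile two modules, recording
+# 'lib/<name>.py' in the bytecode instead of the build-area paths.
+#
+#   byte_compile(['build/lib/spam.py', 'build/lib/eggs.py'],
+#                prefix='build/lib/', base_dir='lib')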
+
+
+_RE_VERSION = re.compile(r'(\d+\.\d+(\.\d+)*)')
+_MAC_OS_X_LD_VERSION = re.compile(r'^@\(#\)PROGRAM:ld '
+                                  r'PROJECT:ld64-((\d+)(\.\d+)*)')
+
+
+def _find_ld_version():
+ """Find the ld version. The version scheme differs under Mac OS X."""
+ if sys.platform == 'darwin':
+ return _find_exe_version('ld -v', _MAC_OS_X_LD_VERSION)
+ else:
+ return _find_exe_version('ld -v')
+
+
+def _find_exe_version(cmd, pattern=_RE_VERSION):
+ """Find the version of an executable by running `cmd` in the shell.
+
+ `pattern` is a compiled regular expression. If not provided, defaults
+ to _RE_VERSION. If the command is not found, or the output does not
+    match the pattern, returns None.
+ """
+ from subprocess import Popen, PIPE
+ executable = cmd.split()[0]
+ if find_executable(executable) is None:
+ return None
+ pipe = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE)
+ try:
+ stdout, stderr = pipe.communicate()
+ finally:
+ pipe.stdout.close()
+ pipe.stderr.close()
+    # the pipes return bytes under Python 3; decode before matching,
+    # and note that some commands, like ld under Mac OS X, write their
+    # output to stderr rather than stdout
+    stdout = stdout.decode(errors='replace')
+    stderr = stderr.decode(errors='replace')
+    if stdout:
+        out_string = stdout
+    else:
+        out_string = stderr
+
+ result = pattern.search(out_string)
+ if result is None:
+ return None
+ return result.group(1)
+
+
+def get_compiler_versions():
+ """Return a tuple providing the versions of gcc, ld and dllwrap
+
+ For each command, if a command is not found, None is returned.
+ Otherwise a string with the version is returned.
+ """
+ gcc = _find_exe_version('gcc -dumpversion')
+ ld = _find_ld_version()
+ dllwrap = _find_exe_version('dllwrap --version')
+ return gcc, ld, dllwrap
+
+
+def newer_group(sources, target, missing='error'):
+ """Return true if 'target' is out-of-date with respect to any file
+ listed in 'sources'.
+
+ In other words, if 'target' exists and is newer
+ than every file in 'sources', return false; otherwise return true.
+ 'missing' controls what we do when a source file is missing; the
+ default ("error") is to blow up with an OSError from inside 'stat()';
+ if it is "ignore", we silently drop any missing source files; if it is
+ "newer", any missing source files make us assume that 'target' is
+ out-of-date (this is handy in "dry-run" mode: it'll make you pretend to
+ carry out commands that wouldn't work because inputs are missing, but
+ that doesn't matter because you're not actually going to run the
+ commands).
+ """
+ # If the target doesn't even exist, then it's definitely out-of-date.
+ if not os.path.exists(target):
+ return True
+
+ # Otherwise we have to find out the hard way: if *any* source file
+ # is more recent than 'target', then 'target' is out-of-date and
+ # we can immediately return true. If we fall through to the end
+ # of the loop, then 'target' is up-to-date and we return false.
+ target_mtime = os.stat(target).st_mtime
+
+ for source in sources:
+ if not os.path.exists(source):
+ if missing == 'error': # blow up when we stat() the file
+ pass
+ elif missing == 'ignore': # missing source dropped from
+ continue # target's dependency list
+ elif missing == 'newer': # missing source means target is
+ return True # out-of-date
+
+ if os.stat(source).st_mtime > target_mtime:
+ return True
+
+ return False
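+
+# Usage sketch (hypothetical file names): recompile when any input is more
+# recent than the output; a missing input also counts as out-of-date here.
+#
+#   if newer_group(['spam.c', 'spam.h'], 'spam.o', missing='newer'):
+#       recompile()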
+
+
+def write_file(filename, contents):
+ """Create *filename* and write *contents* to it.
+
+ *contents* is a sequence of strings without line terminators.
+
+    This function is not intended to replace the usual 'with open + write'
+    idiom in all cases, only when used with Command.execute, which runs
+    depending on the dry_run argument and also logs its arguments.
+ """
+ with open(filename, "w") as f:
+ for line in contents:
+ f.write(line + "\n")
+
+
+def _is_package(path):
+ return os.path.isdir(path) and os.path.isfile(
+ os.path.join(path, '__init__.py'))
+
+
+# Code taken from the pip project
+def _is_archive_file(name):
+ archives = ('.zip', '.tar.gz', '.tar.bz2', '.tgz', '.tar')
+ ext = splitext(name)[1].lower()
+ return ext in archives
+
+
+def _under(path, root):
+ # XXX use os.path
+ path = path.split(os.sep)
+ root = root.split(os.sep)
+ if len(root) > len(path):
+ return False
+ for pos, part in enumerate(root):
+ if path[pos] != part:
+ return False
+ return True
+
+
+def _package_name(root_path, path):
+ # Return a dotted package name, given a subpath
+ if not _under(path, root_path):
+ raise ValueError('"%s" is not a subpath of "%s"' % (path, root_path))
+ return path[len(root_path) + 1:].replace(os.sep, '.')
+
+
+def find_packages(paths=(os.curdir,), exclude=()):
+    """Return a list of all Python packages found recursively within
+    directories 'paths'.
+
+    'paths' should be supplied as a sequence of "cross-platform"
+    (i.e. URL-style) paths; they will be converted to the appropriate local
+    path syntax.
+
+ 'exclude' is a sequence of package names to exclude; '*' can be used as
+ a wildcard in the names, such that 'foo.*' will exclude all subpackages
+ of 'foo' (but not 'foo' itself).
+ """
+ packages = []
+ discarded = []
+
+ def _discarded(path):
+ for discard in discarded:
+ if _under(path, discard):
+ return True
+ return False
+
+ for path in paths:
+ path = convert_path(path)
+ for root, dirs, files in os.walk(path):
+ for dir_ in dirs:
+ fullpath = os.path.join(root, dir_)
+ if _discarded(fullpath):
+ continue
+ # we work only with Python packages
+ if not _is_package(fullpath):
+ discarded.append(fullpath)
+ continue
+ # see if it's excluded
+ excluded = False
+ package_name = _package_name(path, fullpath)
+ for pattern in exclude:
+ if fnmatchcase(package_name, pattern):
+ excluded = True
+ break
+ if excluded:
+ continue
+
+ # adding it to the list
+ packages.append(package_name)
+ return packages
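+
+# Usage sketch (hypothetical layout): with src/foo/__init__.py and
+# src/foo/bar/__init__.py on disk,
+#
+#   find_packages(['src'], exclude=['foo.bar'])
+#
+# returns ['foo']; using 'foo.*' instead would exclude every subpackage
+# of foo while keeping 'foo' itself.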
+
+
+def resolve_name(name):
+ """Resolve a name like ``module.object`` to an object and return it.
+
+    This function supports packages and attributes without depth limitation:
+ ``package.package.module.class.class.function.attr`` is valid input.
+ However, looking up builtins is not directly supported: use
+ ``builtins.name``.
+
+ Raises ImportError if importing the module fails or if one requested
+ attribute is not found.
+ """
+ if '.' not in name:
+ # shortcut
+ __import__(name)
+ return sys.modules[name]
+
+ # FIXME clean up this code!
+ parts = name.split('.')
+ cursor = len(parts)
+ module_name = parts[:cursor]
+ ret = ''
+
+ while cursor > 0:
+ try:
+ ret = __import__('.'.join(module_name))
+ break
+ except ImportError:
+ cursor -= 1
+ module_name = parts[:cursor]
+
+ if ret == '':
+ raise ImportError(parts[0])
+
+ for part in parts[1:]:
+ try:
+ ret = getattr(ret, part)
+ except AttributeError as exc:
+ raise ImportError(exc)
+
+ return ret
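+
+# Illustrative examples (standard library names):
+#
+#   resolve_name('os.path.join')    # returns the os.path.join function
+#   resolve_name('os.path')         # returns the module itself
+#   resolve_name('str')             # raises ImportError (builtin lookup)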
+
+
+def splitext(path):
+ """Like os.path.splitext, but take off .tar too"""
+ base, ext = posixpath.splitext(path)
+ if base.lower().endswith('.tar'):
+ ext = base[-4:] + ext
+ base = base[:-4]
+ return base, ext
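+
+# For example, splitext('archive.tar.gz') returns ('archive', '.tar.gz'),
+# where os.path.splitext would return ('archive.tar', '.gz').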
+
+
+if sys.platform == 'darwin':
+ _cfg_target = None
+ _cfg_target_split = None
+
+
+def spawn(cmd, search_path=True, dry_run=False, env=None):
+ """Run another program specified as a command list 'cmd' in a new process.
+
+ 'cmd' is just the argument list for the new process, ie.
+ cmd[0] is the program to run and cmd[1:] are the rest of its arguments.
+ There is no way to run a program with a name different from that of its
+ executable.
+
+ If 'search_path' is true (the default), the system's executable
+ search path will be used to find the program; otherwise, cmd[0]
+ must be the exact path to the executable. If 'dry_run' is true,
+ the command will not actually be run.
+
+    If 'env' is given, it is an environment dictionary used for the
+    execution environment.
+
+ Raise PackagingExecError if running the program fails in any way; just
+ return on success.
+ """
+ logger.debug('spawn: running %r', cmd)
+ if dry_run:
+ logger.debug('dry run, no process actually spawned')
+ return
+ if sys.platform == 'darwin':
+ global _cfg_target, _cfg_target_split
+ if _cfg_target is None:
+ _cfg_target = sysconfig.get_config_var(
+ 'MACOSX_DEPLOYMENT_TARGET') or ''
+ if _cfg_target:
+ _cfg_target_split = [int(x) for x in _cfg_target.split('.')]
+ if _cfg_target:
+ # ensure that the deployment target of build process is not less
+ # than that used when the interpreter was built. This ensures
+ # extension modules are built with correct compatibility values
+ env = env or os.environ
+ cur_target = env.get('MACOSX_DEPLOYMENT_TARGET', _cfg_target)
+ if _cfg_target_split > [int(x) for x in cur_target.split('.')]:
+ my_msg = ('$MACOSX_DEPLOYMENT_TARGET mismatch: '
+ 'now "%s" but "%s" during configure'
+ % (cur_target, _cfg_target))
+ raise PackagingPlatformError(my_msg)
+ env = dict(env, MACOSX_DEPLOYMENT_TARGET=cur_target)
+
+ exit_status = subprocess.call(cmd, env=env)
+ if exit_status != 0:
+ msg = "command %r failed with exit status %d"
+ raise PackagingExecError(msg % (cmd, exit_status))
+
+
+def find_executable(executable, path=None):
+ """Try to find 'executable' in the directories listed in 'path'.
+
+ *path* is a string listing directories separated by 'os.pathsep' and
+ defaults to os.environ['PATH']. Returns the complete filename or None
+ if not found.
+ """
+ if path is None:
+ path = os.environ['PATH']
+ paths = path.split(os.pathsep)
+ base, ext = os.path.splitext(executable)
+
+ if (sys.platform == 'win32' or os.name == 'os2') and (ext != '.exe'):
+ executable = executable + '.exe'
+
+ if not os.path.isfile(executable):
+ for p in paths:
+ f = os.path.join(p, executable)
+ if os.path.isfile(f):
+ # the file exists, we have a shot at spawn working
+ return f
+ return None
+ else:
+ return executable
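+
+# For example, find_executable('python3') returns something like
+# '/usr/bin/python3' when found on $PATH, or None otherwise; on win32/os2
+# the '.exe' suffix is appended automatically.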
+
+
+DEFAULT_REPOSITORY = 'http://pypi.python.org/pypi'
+DEFAULT_REALM = 'pypi'
+DEFAULT_PYPIRC = """\
+[distutils]
+index-servers =
+ pypi
+
+[pypi]
+username:%s
+password:%s
+"""
+
+
+def get_pypirc_path():
+ """Return path to pypirc config file."""
+ return os.path.join(os.path.expanduser('~'), '.pypirc')
+
+
+def generate_pypirc(username, password):
+ """Create a default .pypirc file."""
+ rc = get_pypirc_path()
+ with open(rc, 'w') as f:
+ f.write(DEFAULT_PYPIRC % (username, password))
+ try:
+ os.chmod(rc, 0o600)
+ except OSError:
+ # should do something better here
+ pass
+
+
+def read_pypirc(repository=DEFAULT_REPOSITORY, realm=DEFAULT_REALM):
+ """Read the .pypirc file."""
+ rc = get_pypirc_path()
+ if os.path.exists(rc):
+ config = RawConfigParser()
+ config.read(rc)
+ sections = config.sections()
+ if 'distutils' in sections:
+ # let's get the list of servers
+ index_servers = config.get('distutils', 'index-servers')
+ _servers = [server.strip() for server in
+ index_servers.split('\n')
+ if server.strip() != '']
+            if not _servers:
+ # nothing set, let's try to get the default pypi
+ if 'pypi' in sections:
+ _servers = ['pypi']
+ else:
+ # the file is not properly defined, returning
+ # an empty dict
+ return {}
+ for server in _servers:
+ current = {'server': server}
+ current['username'] = config.get(server, 'username')
+
+ # optional params
+ for key, default in (('repository', DEFAULT_REPOSITORY),
+ ('realm', DEFAULT_REALM),
+ ('password', None)):
+ if config.has_option(server, key):
+ current[key] = config.get(server, key)
+ else:
+ current[key] = default
+ if (current['server'] == repository or
+ current['repository'] == repository):
+ return current
+ elif 'server-login' in sections:
+ # old format
+ server = 'server-login'
+ if config.has_option(server, 'repository'):
+ repository = config.get(server, 'repository')
+ else:
+ repository = DEFAULT_REPOSITORY
+
+ return {'username': config.get(server, 'username'),
+ 'password': config.get(server, 'password'),
+ 'repository': repository,
+ 'server': server,
+ 'realm': DEFAULT_REALM}
+
+ return {}
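+
+# A .pypirc file in the new format looks like this (placeholder values):
+#
+#   [distutils]
+#   index-servers =
+#       pypi
+#
+#   [pypi]
+#   username: me
+#   password: secret
+#
+# For that file, read_pypirc() returns the 'pypi' server entry with the
+# default repository URL and realm filled in.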
+
+
+# utility functions for 2to3 support
+
+def run_2to3(files, doctests_only=False, fixer_names=None,
+ options=None, explicit=None):
+    """Invoke 2to3 on a list of Python files.
+
+    This is a thin wrapper around lib2to3's RefactoringTool; the conversion
+    is done in-place, so the files should all come from the build area.
+    """
+
+    # keep the lib2to3 imports local, to delay loading them until needed
+ from lib2to3.refactor import get_fixers_from_package, RefactoringTool
+ fixers = get_fixers_from_package('lib2to3.fixes')
+
+ if fixer_names:
+ for fixername in fixer_names:
+ fixers.extend(get_fixers_from_package(fixername))
+ r = RefactoringTool(fixers, options=options)
+ r.refactor(files, write=True, doctests_only=doctests_only)
+
+
+class Mixin2to3:
+    """Mixin class for commands that run 2to3.
+
+    To configure 2to3, setup scripts may either change these class
+    variables or inherit from this class and override how 2to3 is
+    invoked.
+    """
+ # list of fixers to run; defaults to all implicit from lib2to3.fixers
+ fixer_names = None
+ # dict of options
+ options = None
+ # list of extra fixers to invoke
+ explicit = None
+ # TODO need a better way to add just one fixer from a package
+ # TODO need a way to exclude individual fixers
+
+ def run_2to3(self, files, doctests_only=False):
+ """ Issues a call to util.run_2to3. """
+ return run_2to3(files, doctests_only, self.fixer_names,
+ self.options, self.explicit)
+
+ # TODO provide initialize/finalize_options
+
+
+RICH_GLOB = re.compile(r'\{([^}]*)\}')
+_CHECK_RECURSIVE_GLOB = re.compile(r'[^/\\,{]\*\*|\*\*[^/\\,}]')
+_CHECK_MISMATCH_SET = re.compile(r'^[^{]*\}|\{[^}]*$')
+
+
+def iglob(path_glob):
+ """Extended globbing function that supports ** and {opt1,opt2,opt3}."""
+ if _CHECK_RECURSIVE_GLOB.search(path_glob):
+ msg = """invalid glob %r: recursive glob "**" must be used alone"""
+ raise ValueError(msg % path_glob)
+ if _CHECK_MISMATCH_SET.search(path_glob):
+ msg = """invalid glob %r: mismatching set marker '{' or '}'"""
+ raise ValueError(msg % path_glob)
+ return _iglob(path_glob)
+
+
+def _iglob(path_glob):
+ rich_path_glob = RICH_GLOB.split(path_glob, 1)
+ if len(rich_path_glob) > 1:
+ assert len(rich_path_glob) == 3, rich_path_glob
+        prefix, choices, suffix = rich_path_glob
+        for item in choices.split(','):
+ for path in _iglob(''.join((prefix, item, suffix))):
+ yield path
+ else:
+ if '**' not in path_glob:
+ for item in std_iglob(path_glob):
+ yield item
+ else:
+ prefix, radical = path_glob.split('**', 1)
+ if prefix == '':
+ prefix = '.'
+ if radical == '':
+ radical = '*'
+ else:
+                # accept either path separator after '**'
+ radical = radical.lstrip('/')
+ radical = radical.lstrip('\\')
+        for path, dirs, files in os.walk(prefix):
+ path = os.path.normpath(path)
+ for file in _iglob(os.path.join(path, radical)):
+ yield file
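+
+# Illustrative patterns (hypothetical trees):
+#
+#   iglob('docs/**/*.txt')         # any .txt at any depth under docs/
+#   iglob('src/{foo,bar}/*.py')    # .py files in src/foo and src/bar
+#
+# '**' must stand alone between separators; e.g. 'a**' raises ValueError.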
+
+
+# HOWTO change cfg_to_args
+#
+# This function has two major constraints: It is copied by inspect.getsource
+# in generate_setup_py; it is used in generated setup.py which may be run by
+# any Python version supported by distutils2 (2.4-3.3).
+#
+# * Keep objects like D1_D2_SETUP_ARGS static, i.e. in the function body
+# instead of global.
+# * If you use a function from another module, update the imports in
+# SETUP_TEMPLATE. Use only modules, classes and functions compatible with
+# all versions: codecs.open instead of open, RawConfigParser.readfp instead
+# of read, standard exceptions instead of Packaging*Error, etc.
+# * If you use a function from this module, update the template and
+# generate_setup_py.
+#
+# test_util tests this function and the generated setup.py, but does not test
+# that it's compatible with all Python versions.
+
+def cfg_to_args(path='setup.cfg'):
+ """Compatibility helper to use setup.cfg in setup.py.
+
+    This function uses an existing setup.cfg to generate a dictionary of
+ keywords that can be used by distutils.core.setup(**kwargs). It is used
+ by generate_setup_py.
+
+    *path* is the path to the setup.cfg file.  If it doesn't exist,
+ PackagingFileError is raised.
+ """
+
+    # XXX the entries marked with ** below still need testing
+ D1_D2_SETUP_ARGS = {"name": ("metadata",),
+ "version": ("metadata",),
+ "author": ("metadata",),
+ "author_email": ("metadata",),
+ "maintainer": ("metadata",),
+ "maintainer_email": ("metadata",),
+ "url": ("metadata", "home_page"),
+ "description": ("metadata", "summary"),
+ "long_description": ("metadata", "description"),
+ "download-url": ("metadata",),
+ "classifiers": ("metadata", "classifier"),
+ "platforms": ("metadata", "platform"), # **
+ "license": ("metadata",),
+ "requires": ("metadata", "requires_dist"),
+ "provides": ("metadata", "provides_dist"), # **
+ "obsoletes": ("metadata", "obsoletes_dist"), # **
+ "package_dir": ("files", 'packages_root'),
+ "packages": ("files",),
+ "scripts": ("files",),
+ "py_modules": ("files", "modules"), # **
+ }
+
+ MULTI_FIELDS = ("classifiers",
+ "platforms",
+ "requires",
+ "provides",
+ "obsoletes",
+ "packages",
+ "scripts",
+ "py_modules")
+
+ def has_get_option(config, section, option):
+ if config.has_option(section, option):
+ return config.get(section, option)
+ elif config.has_option(section, option.replace('_', '-')):
+ return config.get(section, option.replace('_', '-'))
+ else:
+ return False
+
+ # The real code starts here
+ config = RawConfigParser()
+ f = codecs.open(path, encoding='utf-8')
+ try:
+ config.readfp(f)
+ finally:
+ f.close()
+
+ kwargs = {}
+ for arg in D1_D2_SETUP_ARGS:
+ if len(D1_D2_SETUP_ARGS[arg]) == 2:
+ # The distutils field name is different from packaging's
+ section, option = D1_D2_SETUP_ARGS[arg]
+
+ else:
+ # The distutils field name is the same as packaging's
+ section = D1_D2_SETUP_ARGS[arg][0]
+ option = arg
+
+ in_cfg_value = has_get_option(config, section, option)
+ if not in_cfg_value:
+ # There is no such option in the setup.cfg
+ if arg == 'long_description':
+ filenames = has_get_option(config, section, 'description-file')
+ if filenames:
+ filenames = split_multiline(filenames)
+ in_cfg_value = []
+ for filename in filenames:
+ fp = codecs.open(filename, encoding='utf-8')
+ try:
+ in_cfg_value.append(fp.read())
+ finally:
+ fp.close()
+ in_cfg_value = '\n\n'.join(in_cfg_value)
+ else:
+ continue
+
+ if arg == 'package_dir' and in_cfg_value:
+ in_cfg_value = {'': in_cfg_value}
+
+ if arg in MULTI_FIELDS:
+ # support multiline options
+ in_cfg_value = split_multiline(in_cfg_value)
+
+ kwargs[arg] = in_cfg_value
+
+ return kwargs
+
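A quick round-trip sketch (the project metadata is made up; key order in the result may vary):

    # Write a tiny setup.cfg, then translate it to setup() keywords.
    import codecs
    cfg = (
        "[metadata]\n"
        "name = example\n"
        "version = 0.1\n"
        "summary = An example project\n"
        "\n"
        "[files]\n"
        "modules = example\n"
    )
    with codecs.open('setup.cfg', 'w', encoding='utf-8') as f:
        f.write(cfg)
    print(cfg_to_args('setup.cfg'))
    # expected: name/version pass through, 'summary' becomes 'description',
    # and files/modules becomes py_modules == ['example']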
+
+SETUP_TEMPLATE = """\
+# This script was automatically generated by packaging
+import codecs
+from distutils.core import setup
+try:
+ from ConfigParser import RawConfigParser
+except ImportError:
+ from configparser import RawConfigParser
+
+
+%(split_multiline)s
+
+%(cfg_to_args)s
+
+setup(**cfg_to_args())
+"""
+
+
+def generate_setup_py():
+ """Generate a distutils compatible setup.py using an existing setup.cfg.
+
+ Raises a PackagingFileError when a setup.py already exists.
+ """
+ if os.path.exists("setup.py"):
+ raise PackagingFileError("a setup.py file already exists")
+
+ source = SETUP_TEMPLATE % {'split_multiline': getsource(split_multiline),
+ 'cfg_to_args': getsource(cfg_to_args)}
+ with open("setup.py", "w", encoding='utf-8') as fp:
+ fp.write(source)
+
+
+# Taken from the pip project
+# https://github.com/pypa/pip/blob/master/pip/util.py
+def ask(message, options):
+ """Prompt the user with *message*; *options* contains allowed responses."""
+ while True:
+ response = input(message)
+ response = response.strip().lower()
+ if response not in options:
+ print('invalid response:', repr(response))
+ print('choose one of', ', '.join(repr(o) for o in options))
+ else:
+ return response
+
+
+def _parse_record_file(record_file):
+ distinfo, extra_metadata, installed = ({}, [], [])
+ with open(record_file, 'r') as rfile:
+ for path in rfile:
+ path = path.strip()
+ if path.endswith('egg-info') and os.path.isfile(path):
+ distinfo_dir = path.replace('egg-info', 'dist-info')
+ metadata = path
+ egginfo = path
+ elif path.endswith('egg-info') and os.path.isdir(path):
+ distinfo_dir = path.replace('egg-info', 'dist-info')
+ egginfo = path
+ for metadata_file in os.listdir(path):
+ metadata_fpath = os.path.join(path, metadata_file)
+ if metadata_file == 'PKG-INFO':
+ metadata = metadata_fpath
+ else:
+ extra_metadata.append(metadata_fpath)
+ elif 'egg-info' in path and os.path.isfile(path):
+ # skip extra metadata files
+ continue
+ else:
+ installed.append(path)
+
+ distinfo['egginfo'] = egginfo
+ distinfo['metadata'] = metadata
+ distinfo['distinfo_dir'] = distinfo_dir
+ distinfo['installer_path'] = os.path.join(distinfo_dir, 'INSTALLER')
+ distinfo['metadata_path'] = os.path.join(distinfo_dir, 'METADATA')
+ distinfo['record_path'] = os.path.join(distinfo_dir, 'RECORD')
+ distinfo['requested_path'] = os.path.join(distinfo_dir, 'REQUESTED')
+ installed.extend([distinfo['installer_path'], distinfo['metadata_path']])
+ distinfo['installed'] = installed
+ distinfo['extra_metadata'] = extra_metadata
+ return distinfo
+
+
+def _write_record_file(record_path, installed_files):
+ with open(record_path, 'w', encoding='utf-8') as f:
+ writer = csv.writer(f, delimiter=',', lineterminator=os.linesep,
+ quotechar='"')
+
+ for fpath in installed_files:
+ if fpath.endswith('.pyc') or fpath.endswith('.pyo'):
+ # leave out size and md5 hash for bytecode files, per PEP 376
+ writer.writerow((fpath, '', ''))
+ else:
+ md5_hash = hashlib.md5()
+ with open(fpath, 'rb') as fp:
+ md5_hash.update(fp.read())
+ md5sum = md5_hash.hexdigest()
+ size = os.path.getsize(fpath)
+ writer.writerow((fpath, md5sum, size))
+
+ # add the RECORD file itself
+ writer.writerow((record_path, '', ''))
+ return record_path
+
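The resulting RECORD file is plain CSV, one row per installed file, so it can be read back directly (the dist-info path below is hypothetical):

    import csv

    # rows are (path, md5, size); .pyc/.pyo files and the RECORD file
    # itself carry empty hash and size fields, per PEP 376
    with open('example-0.1.dist-info/RECORD') as f:
        for path, md5sum, size in csv.reader(f):
            print(path, md5sum or '-', size or '-')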
+
+def egginfo_to_distinfo(record_file, installer=_DEFAULT_INSTALLER,
+ requested=False, remove_egginfo=False):
+ """Create files and directories required for PEP 376
+
+ :param record_file: path to RECORD file as produced by setup.py --record
+ :param installer: installer name
+ :param requested: True if not installed as a dependency
+ :param remove_egginfo: delete egginfo dir?
+ """
+ distinfo = _parse_record_file(record_file)
+ distinfo_dir = distinfo['distinfo_dir']
+ if os.path.isdir(distinfo_dir) and not os.path.islink(distinfo_dir):
+ shutil.rmtree(distinfo_dir)
+ elif os.path.exists(distinfo_dir):
+ os.unlink(distinfo_dir)
+
+ os.makedirs(distinfo_dir)
+
+ # copy setuptools extra metadata files
+ if distinfo['extra_metadata']:
+ for path in distinfo['extra_metadata']:
+ shutil.copy2(path, distinfo_dir)
+ new_path = path.replace('egg-info', 'dist-info')
+ distinfo['installed'].append(new_path)
+
+ metadata_path = distinfo['metadata_path']
+ logger.info('creating %s', metadata_path)
+ shutil.copy2(distinfo['metadata'], metadata_path)
+
+ installer_path = distinfo['installer_path']
+ logger.info('creating %s', installer_path)
+ with open(installer_path, 'w') as f:
+ f.write(installer)
+
+ if requested:
+ requested_path = distinfo['requested_path']
+ logger.info('creating %s', requested_path)
+ open(requested_path, 'wb').close()
+ distinfo['installed'].append(requested_path)
+
+ record_path = distinfo['record_path']
+ logger.info('creating %s', record_path)
+ _write_record_file(record_path, distinfo['installed'])
+
+ if remove_egginfo:
+ egginfo = distinfo['egginfo']
+ logger.info('removing %s', egginfo)
+ if os.path.isfile(egginfo):
+ os.remove(egginfo)
+ else:
+ shutil.rmtree(egginfo)
+
+
+def _has_egg_info(srcdir):
+ if os.path.isdir(srcdir):
+ for item in os.listdir(srcdir):
+ full_path = os.path.join(srcdir, item)
+ if item.endswith('.egg-info') and os.path.isdir(full_path):
+ logger.debug("Found egg-info directory.")
+ return True
+ logger.debug("No egg-info directory found.")
+ return False
+
+
+def _has_setuptools_text(setup_py):
+ return _has_text(setup_py, 'setuptools')
+
+
+def _has_distutils_text(setup_py):
+ return _has_text(setup_py, 'distutils')
+
+
+def _has_text(setup_py, installer):
+ installer_pattern = re.compile('import {0}|from {0}'.format(installer))
+ with open(setup_py, 'r', encoding='utf-8') as setup:
+ for line in setup:
+ if re.search(installer_pattern, line):
+ logger.debug("Found %s text in setup.py.", installer)
+ return True
+ logger.debug("No %s text found in setup.py.", installer)
+ return False
+
+
+def _has_required_metadata(setup_cfg):
+ config = RawConfigParser()
+ config.read([setup_cfg], encoding='utf8')
+ return (config.has_section('metadata') and
+ 'name' in config.options('metadata') and
+ 'version' in config.options('metadata'))
+
+
+def _has_pkg_info(srcdir):
+ pkg_info = os.path.join(srcdir, 'PKG-INFO')
+ has_pkg_info = os.path.isfile(pkg_info)
+ if has_pkg_info:
+ logger.debug("PKG-INFO file found.")
+ else:
+ logger.debug("No PKG-INFO file found.")
+ return has_pkg_info
+
+
+def _has_setup_py(srcdir):
+ setup_py = os.path.join(srcdir, 'setup.py')
+ if os.path.isfile(setup_py):
+ logger.debug('setup.py file found.')
+ return True
+ return False
+
+
+def _has_setup_cfg(srcdir):
+ setup_cfg = os.path.join(srcdir, 'setup.cfg')
+ if os.path.isfile(setup_cfg):
+ logger.debug('setup.cfg file found.')
+ return True
+ logger.debug("No setup.cfg file found.")
+ return False
+
+
+def is_setuptools(path):
+ """Check if the project is based on setuptools.
+
+ :param path: path to source directory containing a setup.py script.
+
+ Return True if the project requires setuptools to install, else False.
+ """
+ srcdir = os.path.abspath(path)
+ setup_py = os.path.join(srcdir, 'setup.py')
+
+ return _has_setup_py(srcdir) and (_has_egg_info(srcdir) or
+ _has_setuptools_text(setup_py))
+
+
+def is_distutils(path):
+ """Check if the project is based on distutils.
+
+ :param path: path to source directory containing a setup.py script.
+
+ Return True if the project requires distutils to install, else False.
+ """
+ srcdir = os.path.abspath(path)
+ setup_py = os.path.join(srcdir, 'setup.py')
+
+ return _has_setup_py(srcdir) and (_has_pkg_info(srcdir) or
+ _has_distutils_text(setup_py))
+
+
+def is_packaging(path):
+ """Check if the project is based on packaging
+
+ :param path: path to source directory containing a setup.cfg file.
+
+ Return True if the project has a valid setup.cfg, else False.
+ """
+ srcdir = os.path.abspath(path)
+ setup_cfg = os.path.join(srcdir, 'setup.cfg')
+
+ return _has_setup_cfg(srcdir) and _has_required_metadata(setup_cfg)
+
+
+def get_install_method(path):
+ """Check if the project is based on packaging, setuptools, or distutils
+
+ :param path: path to source directory containing a setup.cfg file,
+ or setup.py.
+
+ Returns a string representing the best install method to use.
+ """
+ if is_packaging(path):
+ return "packaging"
+ elif is_setuptools(path):
+ return "setuptools"
+ elif is_distutils(path):
+ return "distutils"
+ else:
+ raise InstallationException('Cannot detect install method')
+
+
+# XXX to be replaced by shutil.copytree
+def copy_tree(src, dst, preserve_mode=True, preserve_times=True,
+ preserve_symlinks=False, update=False, dry_run=False):
+ # FIXME use of this function is why we get spurious logging message on
+ # stdout when tests run; kill and replace by shutil!
+ from distutils.file_util import copy_file
+
+ if not dry_run and not os.path.isdir(src):
+ raise PackagingFileError(
+ "cannot copy tree '%s': not a directory" % src)
+ try:
+ names = os.listdir(src)
+ except os.error as e:
+ errstr = e.strerror
+ if dry_run:
+ names = []
+ else:
+ raise PackagingFileError(
+ "error listing files in '%s': %s" % (src, errstr))
+
+ if not dry_run:
+ _mkpath(dst)
+
+ outputs = []
+
+ for n in names:
+ src_name = os.path.join(src, n)
+ dst_name = os.path.join(dst, n)
+
+ if preserve_symlinks and os.path.islink(src_name):
+ link_dest = os.readlink(src_name)
+ logger.info("linking %s -> %s", dst_name, link_dest)
+ if not dry_run:
+ os.symlink(link_dest, dst_name)
+ outputs.append(dst_name)
+
+ elif os.path.isdir(src_name):
+ outputs.extend(
+ copy_tree(src_name, dst_name, preserve_mode,
+ preserve_times, preserve_symlinks, update,
+ dry_run=dry_run))
+ else:
+ copy_file(src_name, dst_name, preserve_mode,
+ preserve_times, update, dry_run=dry_run)
+ outputs.append(dst_name)
+
+ return outputs
+
+# cache of dirs created by _mkpath() -- in addition to cheapening redundant
+# calls, eliminates redundant "creating /foo/bar/baz" messages in dry-run mode
+_path_created = set()
+
+
+# I don't use os.makedirs because a) it's new to Python 1.5.2, and
+# b) it blows up if the directory already exists (I want to silently
+# succeed in that case).
+def _mkpath(name, mode=0o777, dry_run=False):
+ # Detect a common bug -- name is None
+ if not isinstance(name, str):
+ raise PackagingInternalError(
+ "mkpath: 'name' must be a string (got %r)" % (name,))
+
+ # XXX what's the better way to handle verbosity? print as we create
+ # each directory in the path (the current behaviour), or only announce
+ # the creation of the whole path? (quite easy to do the latter since
+ # we're not using a recursive algorithm)
+
+ name = os.path.normpath(name)
+ created_dirs = []
+ if os.path.isdir(name) or name == '':
+ return created_dirs
+ if os.path.abspath(name) in _path_created:
+ return created_dirs
+
+ head, tail = os.path.split(name)
+ tails = [tail] # stack of lone dirs to create
+
+ while head and tail and not os.path.isdir(head):
+ head, tail = os.path.split(head)
+ tails.insert(0, tail) # push next higher dir onto stack
+
+ # now 'head' contains the deepest directory that already exists
+ # (that is, the child of 'head' in 'name' is the highest directory
+ # that does *not* exist)
+ for d in tails:
+ head = os.path.join(head, d)
+ abs_head = os.path.abspath(head)
+
+ if abs_head in _path_created:
+ continue
+
+ logger.info("creating %s", head)
+ if not dry_run:
+ try:
+ os.mkdir(head, mode)
+ except OSError as exc:
+ if not (exc.errno == errno.EEXIST and os.path.isdir(head)):
+ raise PackagingFileError(
+ "could not create '%s': %s" % (head, exc.args[-1]))
+ created_dirs.append(head)
+
+ _path_created.add(abs_head)
+ return created_dirs
+
+
+def encode_multipart(fields, files, boundary=None):
+ """Prepare a multipart HTTP request.
+
+ *fields* is a sequence of (name: str, value: str) elements for regular
+ form fields, *files* is a sequence of (name: str, filename: str, value:
+ bytes) elements for data to be uploaded as files.
+
+ Returns (content_type: bytes, body: bytes) ready for http.client.HTTP.
+ """
+ # Taken from http://code.activestate.com/recipes/146306
+
+ if boundary is None:
+ boundary = b'--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
+ elif not isinstance(boundary, bytes):
+ raise TypeError('boundary must be bytes, not %r' % type(boundary))
+
+ parts = []
+ for key, values in fields:
+ # handle multiple entries for the same name
+ if not isinstance(values, (tuple, list)):
+ values = [values]
+
+ for value in values:
+ parts.extend((
+ b'--' + boundary,
+ ('Content-Disposition: form-data; name="%s"' %
+ key).encode('utf-8'),
+ b'',
+ value.encode('utf-8')))
+
+ for key, filename, value in files:
+ parts.extend((
+ b'--' + boundary,
+ ('Content-Disposition: form-data; name="%s"; filename="%s"' %
+ (key, filename)).encode('utf-8'),
+ b'',
+ value))
+
+ parts.append(b'--' + boundary + b'--')
+ parts.append(b'')
+
+ body = b'\r\n'.join(parts)
+ content_type = b'multipart/form-data; boundary=' + boundary
+ return content_type, body
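A sketch of feeding the result to http.client (host, fields, and payload are all hypothetical):

    import http.client

    fields = [('name', 'example'), ('version', '0.1')]
    files = [('content', 'example-0.1.tar.gz', b'...archive bytes...')]
    content_type, body = encode_multipart(fields, files)

    conn = http.client.HTTPConnection('upload.example.org')
    conn.request('POST', '/', body,
                 {'Content-Type': content_type.decode('ascii'),
                  'Content-Length': str(len(body))})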
diff --git a/Lib/packaging/version.py b/Lib/packaging/version.py
new file mode 100644
index 0000000..1970322
--- /dev/null
+++ b/Lib/packaging/version.py
@@ -0,0 +1,451 @@
+"""Implementation of the versioning scheme defined in PEP 386."""
+
+import re
+
+from packaging.errors import IrrationalVersionError, HugeMajorVersionNumError
+
+__all__ = ['NormalizedVersion', 'suggest_normalized_version',
+ 'VersionPredicate', 'is_valid_version', 'is_valid_versions',
+ 'is_valid_predicate']
+
+# A marker used in the second and third parts of the `parts` tuple, for
+# versions that don't have those segments, to sort properly. An example
+# of versions in sort order ('highest' last):
+# 1.0b1 ((1,0), ('b',1), ('z',))
+# 1.0.dev345 ((1,0), ('z',), ('dev', 345))
+# 1.0 ((1,0), ('z',), ('z',))
+# 1.0.post256.dev345 ((1,0), ('z',), ('z', 'post', 256, 'dev', 345))
+# 1.0.post345 ((1,0), ('z',), ('z', 'post', 345, 'z'))
+# ^ ^ ^
+# 'b' < 'z' ---------------------/ | |
+# | |
+# 'dev' < 'z' ----------------------------/ |
+# |
+# 'dev' < 'z' ----------------------------------------------/
+# 'f' for 'final' would be kind of nice, but due to bugs in the support of
+# 'rc' we must use 'z'
+_FINAL_MARKER = ('z',)
+
+_VERSION_RE = re.compile(r'''
+ ^
+ (?P<version>\d+\.\d+) # minimum 'N.N'
+ (?P<extraversion>(?:\.\d+)*) # any number of extra '.N' segments
+ (?:
+ (?P<prerel>[abc]|rc) # 'a'=alpha, 'b'=beta, 'c'=release candidate
+ # 'rc'= alias for release candidate
+ (?P<prerelversion>\d+(?:\.\d+)*)
+ )?
+ (?P<postdev>(\.post(?P<post>\d+))?(\.dev(?P<dev>\d+))?)?
+ $''', re.VERBOSE)
+
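For example, the groups the pattern pulls out of a fully-loaded version string (illustrative only):

    # every optional segment present at once
    m = _VERSION_RE.search('1.2.3c1.post4.dev5')
    print(m.groupdict())
    # -> version '1.2', extraversion '.3', prerel 'c', prerelversion '1',
    #    postdev '.post4.dev5', post '4', dev '5'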
+
+class NormalizedVersion:
+ """A rational version.
+
+ Good:
+ 1.2 # equivalent to "1.2.0"
+ 1.2.0
+ 1.2a1
+ 1.2.3a2
+ 1.2.3b1
+ 1.2.3c1
+ 1.2.3.4
+ TODO: fill this out
+
+ Bad:
+ 1 # minimum two numbers
+ 1.2a # release level must have a release serial
+ 1.2.3b
+ """
+ def __init__(self, s, error_on_huge_major_num=True):
+ """Create a NormalizedVersion instance from a version string.
+
+ @param s {str} The version string.
+ @param error_on_huge_major_num {bool} Whether to consider an
+ apparent use of a year or full date as the major version number
+ an error. Default True. One of the observed patterns on PyPI before
+ the introduction of `NormalizedVersion` was version numbers like
+ this:
+ 2009.01.03
+ 20040603
+ 2005.01
+ This guard is here to strongly encourage the package author to
+ use an alternate version, because a release deployed to PyPI
+ and, e.g., to downstream Linux package managers will forever remove
+ the possibility of using a version number like "1.0" (i.e.
+ where the major number is less than that huge major number).
+ """
+ self.is_final = True # by default, consider a version as final.
+ self._parse(s, error_on_huge_major_num)
+
+ @classmethod
+ def from_parts(cls, version, prerelease=_FINAL_MARKER,
+ devpost=_FINAL_MARKER):
+ return cls(cls.parts_to_str((version, prerelease, devpost)))
+
+ def _parse(self, s, error_on_huge_major_num=True):
+ """Parses a string version into parts."""
+ match = _VERSION_RE.search(s)
+ if not match:
+ raise IrrationalVersionError(s)
+
+ groups = match.groupdict()
+ parts = []
+
+ # main version
+ block = self._parse_numdots(groups['version'], s, False, 2)
+ extraversion = groups.get('extraversion')
+ if extraversion not in ('', None):
+ block += self._parse_numdots(extraversion[1:], s)
+ parts.append(tuple(block))
+
+ # prerelease
+ prerel = groups.get('prerel')
+ if prerel is not None:
+ block = [prerel]
+ block += self._parse_numdots(groups.get('prerelversion'), s,
+ pad_zeros_length=1)
+ parts.append(tuple(block))
+ self.is_final = False
+ else:
+ parts.append(_FINAL_MARKER)
+
+ # postdev
+ if groups.get('postdev'):
+ post = groups.get('post')
+ dev = groups.get('dev')
+ postdev = []
+ if post is not None:
+ postdev.extend((_FINAL_MARKER[0], 'post', int(post)))
+ if dev is None:
+ postdev.append(_FINAL_MARKER[0])
+ if dev is not None:
+ postdev.extend(('dev', int(dev)))
+ self.is_final = False
+ parts.append(tuple(postdev))
+ else:
+ parts.append(_FINAL_MARKER)
+ self.parts = tuple(parts)
+ if error_on_huge_major_num and self.parts[0][0] > 1980:
+ raise HugeMajorVersionNumError("huge major version number, %r, "
+ "which might cause future problems: %r" % (self.parts[0][0], s))
+
+ def _parse_numdots(self, s, full_ver_str, drop_trailing_zeros=True,
+ pad_zeros_length=0):
+ """Parse 'N.N.N' sequences, return a list of ints.
+
+ @param s {str} 'N.N.N...' sequence to be parsed
+ @param full_ver_str {str} The full version string from which this
+ comes. Used for error strings.
+ @param drop_trailing_zeros {bool} Whether to drop trailing zeros
+ from the returned list. Default True.
+ @param pad_zeros_length {int} The length to which to pad the
+ returned list with zeros, if necessary. Default 0.
+ """
+ nums = []
+ for n in s.split("."):
+ if len(n) > 1 and n[0] == '0':
+ raise IrrationalVersionError("cannot have leading zero in "
+ "version number segment: '%s' in %r" % (n, full_ver_str))
+ nums.append(int(n))
+ if drop_trailing_zeros:
+ while nums and nums[-1] == 0:
+ nums.pop()
+ while len(nums) < pad_zeros_length:
+ nums.append(0)
+ return nums
+
+ def __str__(self):
+ return self.parts_to_str(self.parts)
+
+ @classmethod
+ def parts_to_str(cls, parts):
+ """Transforms a version expressed in tuple into its string
+ representation."""
+ # XXX This doesn't check for invalid tuples
+ main, prerel, postdev = parts
+ s = '.'.join(str(v) for v in main)
+ if prerel is not _FINAL_MARKER:
+ s += prerel[0]
+ s += '.'.join(str(v) for v in prerel[1:])
+ # XXX clean up: postdev is always true; code is obscure
+ if postdev and postdev is not _FINAL_MARKER:
+ if postdev[0] == _FINAL_MARKER[0]:
+ postdev = postdev[1:]
+ i = 0
+ while i < len(postdev):
+ if i % 2 == 0:
+ s += '.'
+ s += str(postdev[i])
+ i += 1
+ return s
+
+ def __repr__(self):
+ return "%s('%s')" % (self.__class__.__name__, self)
+
+ def _cannot_compare(self, other):
+ raise TypeError("cannot compare %s and %s"
+ % (type(self).__name__, type(other).__name__))
+
+ def __eq__(self, other):
+ if not isinstance(other, NormalizedVersion):
+ self._cannot_compare(other)
+ return self.parts == other.parts
+
+ def __lt__(self, other):
+ if not isinstance(other, NormalizedVersion):
+ self._cannot_compare(other)
+ return self.parts < other.parts
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def __gt__(self, other):
+ return not (self.__lt__(other) or self.__eq__(other))
+
+ def __le__(self, other):
+ return self.__eq__(other) or self.__lt__(other)
+
+ def __ge__(self, other):
+ return self.__eq__(other) or self.__gt__(other)
+
+ # See http://docs.python.org/reference/datamodel#object.__hash__
+ def __hash__(self):
+ return hash(self.parts)
+
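The parts-based comparisons then order versions the way the marker table above describes, e.g.:

    # dev releases precede the final release, which precedes post releases
    assert NormalizedVersion('1.0b1') < NormalizedVersion('1.0')
    assert NormalizedVersion('1.0.dev345') < NormalizedVersion('1.0')
    assert NormalizedVersion('1.0') < NormalizedVersion('1.0.post256')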
+
+def suggest_normalized_version(s):
+ """Suggest a normalized version close to the given version string.
+
+ If you have a version string that isn't rational (i.e. NormalizedVersion
+ doesn't like it) then you might be able to get an equivalent (or close)
+ rational version from this function.
+
+ This does a number of simple normalizations to the given string, based
+ on observation of versions currently in use on PyPI. Given a dump of
+ those versions during PyCon 2009, 4287 of them:
+ - 2312 (53.93%) match NormalizedVersion without change
+ - 3474 (81.04%) match with the automatic suggestion from this method
+
+ @param s {str} An irrational version string.
+ @returns A rational version string, or None if one could not be determined.
+ """
+ try:
+ NormalizedVersion(s)
+ return s # already rational
+ except IrrationalVersionError:
+ pass
+
+ rs = s.lower()
+
+ # part of this could use maketrans
+ for orig, repl in (('-alpha', 'a'), ('-beta', 'b'), ('alpha', 'a'),
+ ('beta', 'b'), ('rc', 'c'), ('-final', ''),
+ ('-pre', 'c'),
+ ('-release', ''), ('.release', ''), ('-stable', ''),
+ ('+', '.'), ('_', '.'), (' ', ''), ('.final', ''),
+ ('final', '')):
+ rs = rs.replace(orig, repl)
+
+ # if something ends with dev or pre, we add a 0
+ rs = re.sub(r"pre$", r"pre0", rs)
+ rs = re.sub(r"dev$", r"dev0", rs)
+
+ # if we have something like "b-2" or "a.2" at the end of the
+ # version, that is probably beta, alpha, etc.
+ # let's remove the dash or dot
+ rs = re.sub(r"([abc]|rc)[\-\.](\d+)$", r"\1\2", rs)
+
+ # 1.0-dev-r371 -> 1.0.dev371
+ # 0.1-dev-r79 -> 0.1.dev79
+ rs = re.sub(r"[\-\.](dev)[\-\.]?r?(\d+)$", r".\1\2", rs)
+
+ # Clean: 2.0.a.3, 2.0.b1, 0.9.0~c1
+ rs = re.sub(r"[.~]?([abc])\.?", r"\1", rs)
+
+ # Clean: v0.3, v1.0
+ if rs.startswith('v'):
+ rs = rs[1:]
+
+ # Clean leading '0's on numbers.
+ #TODO: unintended side-effect on, e.g., "2003.05.09"
+ # PyPI stats: 77 (~2%) better
+ rs = re.sub(r"\b0+(\d+)(?!\d)", r"\1", rs)
+
+ # Clean a/b/c with no version. E.g. "1.0a" -> "1.0a0". Setuptools infers
+ # zero.
+ # PyPI stats: 245 (7.56%) better
+ rs = re.sub(r"(\d+[abc])$", r"\g<1>0", rs)
+
+ # the 'dev-rNNN' tag is a dev tag
+ rs = re.sub(r"\.?(dev-r|dev\.r)\.?(\d+)$", r".dev\2", rs)
+
+ # clean the - when used as a pre delimiter
+ rs = re.sub(r"-(a|b|c)(\d+)$", r"\1\2", rs)
+
+ # a terminal "dev" or "devel" can be changed into ".dev0"
+ rs = re.sub(r"[\.\-](dev|devel)$", r".dev0", rs)
+
+ # a terminal "dev" can be changed into ".dev0"
+ rs = re.sub(r"(?![\.\-])dev$", r".dev0", rs)
+
+ # a terminal "final" or "stable" can be removed
+ rs = re.sub(r"(final|stable)$", "", rs)
+
+ # The 'r' and the '-' tags are post release tags
+ # 0.4a1.r10 -> 0.4a1.post10
+ # 0.9.33-17222 -> 0.9.33.post17222
+ # 0.9.33-r17222 -> 0.9.33.post17222
+ rs = re.sub(r"\.?(r|-|-r)\.?(\d+)$", r".post\2", rs)
+
+ # Clean 'r' instead of 'dev' usage:
+ # 0.9.33+r17222 -> 0.9.33.dev17222
+ # 1.0dev123 -> 1.0.dev123
+ # 1.0.git123 -> 1.0.dev123
+ # 1.0.bzr123 -> 1.0.dev123
+ # 0.1a0dev.123 -> 0.1a0.dev123
+ # PyPI stats: ~150 (~4%) better
+ rs = re.sub(r"\.?(dev|git|bzr)\.?(\d+)$", r".dev\2", rs)
+
+ # Clean '.pre' (normalized from '-pre' above) instead of 'c' usage:
+ # 0.2.pre1 -> 0.2c1
+ # 0.2-c1 -> 0.2c1
+ # 1.0preview123 -> 1.0c123
+ # PyPI stats: ~21 (0.62%) better
+ rs = re.sub(r"\.?(pre|preview|-c)(\d+)$", r"c\g<2>", rs)
+
+ # Tcl/Tk uses "px" for their post release markers
+ rs = re.sub(r"p(\d+)$", r".post\1", rs)
+
+ try:
+ NormalizedVersion(rs)
+ return rs # already rational
+ except IrrationalVersionError:
+ pass
+ return None
+
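A few illustrative translations through the rules above:

    print(suggest_normalized_version('1.0-alpha1'))     # '1.0a1'
    print(suggest_normalized_version('0.9.33-r17222'))  # '0.9.33.post17222'
    print(suggest_normalized_version('v1.0'))           # '1.0'
    print(suggest_normalized_version('not a version'))  # None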
+
+# A predicate is: "ProjectName (VERSION1, VERSION2, ..)
+_PREDICATE = re.compile(r"(?i)^\s*(\w[\s\w-]*(?:\.\w*)*)(.*)")
+_VERSIONS = re.compile(r"^\s*\((?P<versions>.*)\)\s*$|^\s*"
+ "(?P<versions2>.*)\s*$")
+_PLAIN_VERSIONS = re.compile(r"^\s*(.*)\s*$")
+_SPLIT_CMP = re.compile(r"^\s*(<=|>=|<|>|!=|==)\s*([^\s,]+)\s*$")
+
+
+def _split_predicate(predicate):
+ match = _SPLIT_CMP.match(predicate)
+ if match is None:
+ # no comparison operator given; default to "=="
+ comp, version = '==', predicate
+ else:
+ comp, version = match.groups()
+ return comp, NormalizedVersion(version)
+
+
+class VersionPredicate:
+ """Defines a predicate: ProjectName (>ver1,ver2, ..)"""
+
+ _operators = {"<": lambda x, y: x < y,
+ ">": lambda x, y: x > y,
+ "<=": lambda x, y: str(x).startswith(str(y)) or x < y,
+ ">=": lambda x, y: str(x).startswith(str(y)) or x > y,
+ "==": lambda x, y: str(x).startswith(str(y)),
+ "!=": lambda x, y: not str(x).startswith(str(y)),
+ }
+
+ def __init__(self, predicate):
+ self._string = predicate
+ predicate = predicate.strip()
+ match = _PREDICATE.match(predicate)
+ if match is None:
+ raise ValueError('Bad predicate "%s"' % predicate)
+
+ name, predicates = match.groups()
+ self.name = name.strip()
+ self.predicates = []
+ if predicates is None:
+ return
+
+ predicates = _VERSIONS.match(predicates.strip())
+ if predicates is None:
+ return
+
+ predicates = predicates.groupdict()
+ if predicates['versions'] is not None:
+ versions = predicates['versions']
+ else:
+ versions = predicates.get('versions2')
+
+ if versions is not None:
+ for version in versions.split(','):
+ if version.strip() == '':
+ continue
+ self.predicates.append(_split_predicate(version))
+
+ def match(self, version):
+ """Check if the provided version matches the predicates."""
+ if isinstance(version, str):
+ version = NormalizedVersion(version)
+ for operator, predicate in self.predicates:
+ if not self._operators[operator](version, predicate):
+ return False
+ return True
+
+ def __repr__(self):
+ return self._string
+
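Usage sketch (project name and constraints are made up):

    pred = VersionPredicate('ProjectName (>=1.0, <2.0, !=1.5)')
    print(pred.name)          # 'ProjectName'
    print(pred.match('1.2'))  # True
    print(pred.match('1.5'))  # False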
+
+class _Versions(VersionPredicate):
+ def __init__(self, predicate):
+ predicate = predicate.strip()
+ match = _PLAIN_VERSIONS.match(predicate)
+ self.name = None
+ predicates = match.groups()[0]
+ self.predicates = [_split_predicate(pred.strip())
+ for pred in predicates.split(',')]
+
+
+class _Version(VersionPredicate):
+ def __init__(self, predicate):
+ predicate = predicate.strip()
+ match = _PLAIN_VERSIONS.match(predicate)
+ self.name = None
+ self.predicates = _split_predicate(match.groups()[0])
+
+
+def is_valid_predicate(predicate):
+ try:
+ VersionPredicate(predicate)
+ except (ValueError, IrrationalVersionError):
+ return False
+ else:
+ return True
+
+
+def is_valid_versions(predicate):
+ try:
+ _Versions(predicate)
+ except (ValueError, IrrationalVersionError):
+ return False
+ else:
+ return True
+
+
+def is_valid_version(predicate):
+ try:
+ _Version(predicate)
+ except (ValueError, IrrationalVersionError):
+ return False
+ else:
+ return True
+
+
+def get_version_predicate(requirements):
+ """Return a VersionPredicate object, from a string or an already
+ existing object.
+ """
+ if isinstance(requirements, str):
+ requirements = VersionPredicate(requirements)
+ return requirements
diff --git a/Lib/pdb.py b/Lib/pdb.py
index 6776a3f..3043391 100755
--- a/Lib/pdb.py
+++ b/Lib/pdb.py
@@ -73,6 +73,7 @@ import cmd
import bdb
import dis
import code
+import glob
import pprint
import signal
import inspect
@@ -155,6 +156,8 @@ class Pdb(bdb.Bdb, cmd.Cmd):
# Try to load readline if it exists
try:
import readline
+ # remove some common file name delimiters
+ readline.set_completer_delims(' \t\n`@#$%^&*()=+[{]}\\|;:\'",<>?')
except ImportError:
pass
self.allow_kbdint = False
@@ -445,6 +448,61 @@ class Pdb(bdb.Bdb, cmd.Cmd):
def error(self, msg):
print('***', msg, file=self.stdout)
+ # Generic completion functions. Individual complete_foo methods can be
+ # assigned below to one of these functions.
+
+ def _complete_location(self, text, line, begidx, endidx):
+ # Complete a file/module/function location for break/tbreak/clear.
+ if line.strip().endswith((':', ',')):
+ # Here comes a line number or a condition which we can't complete.
+ return []
+ # First, try to find matching functions (i.e. expressions).
+ try:
+ ret = self._complete_expression(text, line, begidx, endidx)
+ except Exception:
+ ret = []
+ # Then, try to complete file names as well.
+ globs = glob.glob(text + '*')
+ for fn in globs:
+ if os.path.isdir(fn):
+ ret.append(fn + '/')
+ elif os.path.isfile(fn) and fn.lower().endswith(('.py', '.pyw')):
+ ret.append(fn + ':')
+ return ret
+
+ def _complete_bpnumber(self, text, line, begidx, endidx):
+ # Complete a breakpoint number. (This would be more helpful if we could
+ # display additional info along with the completions, such as file/line
+ # of the breakpoint.)
+ return [str(i) for i, bp in enumerate(bdb.Breakpoint.bpbynumber)
+ if bp is not None and str(i).startswith(text)]
+
+ def _complete_expression(self, text, line, begidx, endidx):
+ # Complete an arbitrary expression.
+ if not self.curframe:
+ return []
+ # Collect globals and locals. It is usually not really sensible to also
+ # complete builtins, and they clutter the namespace quite heavily, so we
+ # leave them out.
+ ns = self.curframe.f_globals.copy()
+ ns.update(self.curframe_locals)
+ if '.' in text:
+ # Walk an attribute chain up to the last part, similar to what
+ # rlcompleter does. This will bail if any of the parts are not
+ # simple attribute access, which is what we want.
+ dotted = text.split('.')
+ try:
+ obj = ns[dotted[0]]
+ for part in dotted[1:-1]:
+ obj = getattr(obj, part)
+ except (KeyError, AttributeError):
+ return []
+ prefix = '.'.join(dotted[:-1]) + '.'
+ return [prefix + n for n in dir(obj) if n.startswith(dotted[-1])]
+ else:
+ # Complete a simple name.
+ return [n for n in ns.keys() if n.startswith(text)]
+
# Command definitions, called by cmdloop()
# The argument is the remaining string on the command line
# Return true to exit from the command loop
@@ -526,6 +584,8 @@ class Pdb(bdb.Bdb, cmd.Cmd):
self.commands_defining = False
self.prompt = prompt_back
+ complete_commands = _complete_bpnumber
+
def do_break(self, arg, temporary = 0):
"""b(reak) [ ([filename:]lineno | function) [, condition] ]
Without argument, list all breaks.
@@ -628,6 +688,9 @@ class Pdb(bdb.Bdb, cmd.Cmd):
do_b = do_break
+ complete_break = _complete_location
+ complete_b = _complete_location
+
def do_tbreak(self, arg):
"""tbreak [ ([filename:]lineno | function) [, condition] ]
Same arguments as break, but sets a temporary breakpoint: it
@@ -635,6 +698,8 @@ class Pdb(bdb.Bdb, cmd.Cmd):
"""
self.do_break(arg, 1)
+ complete_tbreak = _complete_location
+
def lineinfo(self, identifier):
failed = (None, None, None)
# Input is identifier, may be in single quotes
@@ -704,6 +769,8 @@ class Pdb(bdb.Bdb, cmd.Cmd):
bp.enable()
self.message('Enabled %s' % bp)
+ complete_enable = _complete_bpnumber
+
def do_disable(self, arg):
"""disable bpnumber [bpnumber ...]
Disables the breakpoints given as a space separated list of
@@ -722,6 +789,8 @@ class Pdb(bdb.Bdb, cmd.Cmd):
bp.disable()
self.message('Disabled %s' % bp)
+ complete_disable = _complete_bpnumber
+
def do_condition(self, arg):
"""condition bpnumber [condition]
Set a new condition for the breakpoint, an expression which
@@ -745,6 +814,8 @@ class Pdb(bdb.Bdb, cmd.Cmd):
else:
self.message('New condition set for breakpoint %d.' % bp.number)
+ complete_condition = _complete_bpnumber
+
def do_ignore(self, arg):
"""ignore bpnumber [count]
Set the ignore count for the given breakpoint number. If
@@ -776,6 +847,8 @@ class Pdb(bdb.Bdb, cmd.Cmd):
self.message('Will stop next time breakpoint %d is reached.'
% bp.number)
+ complete_ignore = _complete_bpnumber
+
def do_clear(self, arg):
"""cl(ear) filename:lineno\ncl(ear) [bpnumber [bpnumber...]]
With a space separated list of breakpoint numbers, clear
@@ -824,6 +897,9 @@ class Pdb(bdb.Bdb, cmd.Cmd):
self.message('Deleted %s' % bp)
do_cl = do_clear # 'c' is already an abbreviation for 'continue'
+ complete_clear = _complete_location
+ complete_cl = _complete_location
+
def do_where(self, arg):
"""w(here)
Print a stack trace, with the most recent frame at the bottom.
@@ -1007,6 +1083,8 @@ class Pdb(bdb.Bdb, cmd.Cmd):
sys.settrace(self.trace_dispatch)
self.lastcmd = p.lastcmd
+ complete_debug = _complete_expression
+
def do_quit(self, arg):
"""q(uit)\nexit
Quit from the debugger. The program being executed is aborted.
@@ -1093,6 +1171,10 @@ class Pdb(bdb.Bdb, cmd.Cmd):
except:
pass
+ complete_print = _complete_expression
+ complete_p = _complete_expression
+ complete_pp = _complete_expression
+
def do_list(self, arg):
"""l(ist) [first [,last] | .]
@@ -1173,6 +1255,8 @@ class Pdb(bdb.Bdb, cmd.Cmd):
return
self._print_lines(lines, lineno)
+ complete_source = _complete_expression
+
def _print_lines(self, lines, start, breaks=(), frame=None):
"""Print a range of lines."""
if frame:
@@ -1227,6 +1311,8 @@ class Pdb(bdb.Bdb, cmd.Cmd):
# None of the above...
self.message(type(value))
+ complete_whatis = _complete_expression
+
def do_display(self, arg):
"""display [expression]
@@ -1244,6 +1330,8 @@ class Pdb(bdb.Bdb, cmd.Cmd):
self.displaying.setdefault(self.curframe, {})[arg] = val
self.message('display %s: %r' % (arg, val))
+ complete_display = _complete_expression
+
def do_undisplay(self, arg):
"""undisplay [expression]
@@ -1259,6 +1347,10 @@ class Pdb(bdb.Bdb, cmd.Cmd):
else:
self.displaying.pop(self.curframe, None)
+ def complete_undisplay(self, text, line, begidx, endidx):
+ return [e for e in self.displaying.get(self.curframe, {})
+ if e.startswith(text)]
+
def do_interact(self, arg):
"""interact
@@ -1313,6 +1405,9 @@ class Pdb(bdb.Bdb, cmd.Cmd):
if args[0] in self.aliases:
del self.aliases[args[0]]
+ def complete_unalias(self, text, line, begidx, endidx):
+ return [a for a in self.aliases if a.startswith(text)]
+
# List of all the commands making the program resume execution.
commands_resuming = ['do_continue', 'do_step', 'do_next', 'do_return',
'do_quit', 'do_jump']
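The attribute-chain walk that _complete_expression performs above can be sketched in isolation (a standalone demo; the function name and namespace are hypothetical):

    import os

    def complete_dotted(text, ns):
        # walk every part but the last with getattr, then match the tail,
        # mirroring the pdb hunk above
        if '.' not in text:
            return [n for n in ns if n.startswith(text)]
        dotted = text.split('.')
        try:
            obj = ns[dotted[0]]
            for part in dotted[1:-1]:
                obj = getattr(obj, part)
        except (KeyError, AttributeError):
            return []
        prefix = '.'.join(dotted[:-1]) + '.'
        return [prefix + n for n in dir(obj) if n.startswith(dotted[-1])]

    print(complete_dotted('os.pa', {'os': os}))  # e.g. ['os.path', 'os.pardir', ...]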
diff --git a/Lib/pickle.py b/Lib/pickle.py
index d10ac776..9e65368 100644
--- a/Lib/pickle.py
+++ b/Lib/pickle.py
@@ -23,8 +23,6 @@ Misc variables:
"""
-__version__ = "$Revision$" # Code version
-
from types import FunctionType, BuiltinFunctionType
from copyreg import dispatch_table
from copyreg import _extension_registry, _inverted_registry, _extension_cache
@@ -299,8 +297,8 @@ class _Pickler:
f(self, obj) # Call unbound method with explicit self
return
- # Check copyreg.dispatch_table
- reduce = dispatch_table.get(t)
+ # Check private dispatch table if any, or else copyreg.dispatch_table
+ reduce = getattr(self, 'dispatch_table', dispatch_table).get(t)
if reduce:
rv = reduce(obj)
else:
@@ -377,7 +375,7 @@ class _Pickler:
# allowing protocol 0 and 1 to work normally. For this to
# work, the function returned by __reduce__ should be
# called __newobj__, and its first argument should be a
- # new-style class. The implementation for __newobj__
+ # class. The implementation for __newobj__
# should be as follows, although pickle has no way to
# verify this:
#
@@ -440,6 +438,14 @@ class _Pickler:
self.write(NONE)
dispatch[type(None)] = save_none
+ def save_ellipsis(self, obj):
+ self.save_global(Ellipsis, 'Ellipsis')
+ dispatch[type(Ellipsis)] = save_ellipsis
+
+ def save_notimplemented(self, obj):
+ self.save_global(NotImplemented, 'NotImplemented')
+ dispatch[type(NotImplemented)] = save_notimplemented
+
def save_bool(self, obj):
if self.proto >= 2:
self.write(obj and NEWTRUE or NEWFALSE)
@@ -1332,7 +1338,7 @@ def _test():
return doctest.testmod()
if __name__ == "__main__":
- import sys, argparse
+ import argparse
parser = argparse.ArgumentParser(
description='display contents of the pickle files')
parser.add_argument(
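The private-dispatch-table hunk above means a single pickler can carry its own reducers without mutating the global copyreg table; a minimal sketch, assuming the pickler in use honors the attribute as the Python implementation patched here does:

    import copyreg, io, pickle

    class Point:
        def __init__(self, x, y):
            self.x, self.y = x, y

    def reduce_point(p):
        # standard reduce value: (callable, args)
        return (Point, (p.x, p.y))

    buf = io.BytesIO()
    p = pickle.Pickler(buf)
    # per-instance table consulted instead of copyreg.dispatch_table
    p.dispatch_table = copyreg.dispatch_table.copy()
    p.dispatch_table[Point] = reduce_point
    p.dump(Point(1, 2))
    q = pickle.loads(buf.getvalue())
    print(q.x, q.y)  # 1 2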
diff --git a/Lib/pickletools.py b/Lib/pickletools.py
index ec6cc53..66f4edd 100644
--- a/Lib/pickletools.py
+++ b/Lib/pickletools.py
@@ -510,10 +510,7 @@ def read_decimalnl_short(f):
elif s == b"01":
return True
- try:
- return int(s)
- except OverflowError:
- return int(s)
+ return int(s)
def read_decimalnl_long(f):
r"""
@@ -1642,6 +1639,8 @@ opcodes = [
is pushed on the stack.
NOTE: checks for __safe_for_unpickling__ went away in Python 2.3.
+ NOTE: the distinction between old-style and new-style classes does
+ not make sense in Python 3.
"""),
I(name='OBJ',
diff --git a/Lib/pipes.py b/Lib/pipes.py
index 4297053..f1a16f6 100644
--- a/Lib/pipes.py
+++ b/Lib/pipes.py
@@ -60,7 +60,9 @@ To create a new template object initialized to a given one:
import re
import os
import tempfile
-import string
+# we import the quote function rather than the module for backward compat
+# (quote used to be an undocumented but widely used function in pipes)
+from shlex import quote
__all__ = ["Template"]
@@ -243,22 +245,3 @@ def makepipeline(infile, steps, outfile):
cmdlist = trapcmd + '\n' + cmdlist + '\n' + rmcmd
#
return cmdlist
-
-
-# Reliably quote a string as a single argument for /bin/sh
-
-# Safe unquoted
-_safechars = frozenset(string.ascii_letters + string.digits + '@%_-+=:,./')
-
-def quote(file):
- """Return a shell-escaped version of the file string."""
- for c in file:
- if c not in _safechars:
- break
- else:
- if not file:
- return "''"
- return file
- # use single quotes, and put single quotes into double quotes
- # the string $'b is then quoted as '$'"'"'b'
- return "'" + file.replace("'", "'\"'\"'") + "'"
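The replacement import keeps the old behavior; shlex.quote works the same way (a quick check):

    from shlex import quote

    # safe strings pass through untouched; anything else is single-quoted,
    # with embedded single quotes spliced in as '"'"'
    print(quote('abc.txt'))      # abc.txt
    print(quote("don't panic"))  # 'don'"'"'t panic'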
diff --git a/Lib/pkgutil.py b/Lib/pkgutil.py
index 51da0b1..ef027be 100644
--- a/Lib/pkgutil.py
+++ b/Lib/pkgutil.py
@@ -21,7 +21,7 @@ def read_code(stream):
if magic != imp.get_magic():
return None
- stream.read(4) # Skip timestamp
+ stream.read(8) # Skip timestamp and size
return marshal.load(stream)
@@ -256,7 +256,7 @@ class ImpLoader:
if self.file and self.file.closed:
mod_type = self.etc[2]
if mod_type==imp.PY_SOURCE:
- self.file = open(self.filename, 'rU')
+ self.file = open(self.filename, 'r')
elif mod_type in (imp.PY_COMPILED, imp.C_EXTENSION):
self.file = open(self.filename, 'rb')
@@ -301,7 +301,7 @@ class ImpLoader:
self.file.close()
elif mod_type==imp.PY_COMPILED:
if os.path.exists(self.filename[:-1]):
- f = open(self.filename[:-1], 'rU')
+ f = open(self.filename[:-1], 'r')
self.source = f.read()
f.close()
elif mod_type==imp.PKG_DIRECTORY:
@@ -315,9 +315,9 @@ class ImpLoader:
def get_filename(self, fullname=None):
fullname = self._fix_name(fullname)
mod_type = self.etc[2]
- if self.etc[2]==imp.PKG_DIRECTORY:
+ if mod_type==imp.PKG_DIRECTORY:
return self._get_delegate().get_filename()
- elif self.etc[2] in (imp.PY_SOURCE, imp.PY_COMPILED, imp.C_EXTENSION):
+ elif mod_type in (imp.PY_SOURCE, imp.PY_COMPILED, imp.C_EXTENSION):
return self.filename
return None
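The extra four bytes skipped in read_code above are the source-size field that bytecode files of this era carry after the timestamp; a sketch of reading the 12-byte header by hand (the file name is hypothetical):

    import imp, struct

    with open('example.cpython-33.pyc', 'rb') as f:
        magic = f.read(4)                              # bytecode magic number
        mtime, size = struct.unpack('<II', f.read(8))  # timestamp + source size
        assert magic == imp.get_magic()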
diff --git a/Lib/plat-linux2/CDROM.py b/Lib/plat-linux/CDROM.py
index 4340936..4340936 100644
--- a/Lib/plat-linux2/CDROM.py
+++ b/Lib/plat-linux/CDROM.py
diff --git a/Lib/plat-linux2/DLFCN.py b/Lib/plat-linux/DLFCN.py
index dd10ac4..dd10ac4 100644
--- a/Lib/plat-linux2/DLFCN.py
+++ b/Lib/plat-linux/DLFCN.py
diff --git a/Lib/plat-linux2/IN.py b/Lib/plat-linux/IN.py
index d7d3002..d7d3002 100644
--- a/Lib/plat-linux2/IN.py
+++ b/Lib/plat-linux/IN.py
diff --git a/Lib/plat-linux2/TYPES.py b/Lib/plat-linux/TYPES.py
index e7a324b..e7a324b 100644
--- a/Lib/plat-linux2/TYPES.py
+++ b/Lib/plat-linux/TYPES.py
diff --git a/Lib/plat-linux2/regen b/Lib/plat-linux/regen
index c76950e..c76950e 100755
--- a/Lib/plat-linux2/regen
+++ b/Lib/plat-linux/regen
diff --git a/Lib/platform.py b/Lib/platform.py
index 985a9f8..4554659 100755
--- a/Lib/platform.py
+++ b/Lib/platform.py
@@ -130,15 +130,15 @@ except AttributeError:
### Platform specific APIs
-_libc_search = re.compile(r'(__libc_init)'
- '|'
- '(GLIBC_([0-9.]+))'
- '|'
- '(libc(_\w+)?\.so(?:\.(\d[0-9.]*))?)', re.ASCII)
+_libc_search = re.compile(b'(__libc_init)'
+ b'|'
+ b'(GLIBC_([0-9.]+))'
+ b'|'
+ br'(libc(_\w+)?\.so(?:\.(\d[0-9.]*))?)', re.ASCII)
def libc_ver(executable=sys.executable,lib='',version='',
- chunksize=2048):
+ chunksize=16384):
""" Tries to determine the libc version that the file executable
(which defaults to the Python interpreter) is linked against.
@@ -159,17 +159,22 @@ def libc_ver(executable=sys.executable,lib='',version='',
# able to open symlinks for reading
executable = os.path.realpath(executable)
f = open(executable,'rb')
- binary = f.read(chunksize).decode('latin-1')
+ binary = f.read(chunksize)
pos = 0
while 1:
- m = _libc_search.search(binary,pos)
+ if b'libc' in binary or b'GLIBC' in binary:
+ m = _libc_search.search(binary,pos)
+ else:
+ m = None
if not m:
- binary = f.read(chunksize).decode('latin-1')
+ binary = f.read(chunksize)
if not binary:
break
pos = 0
continue
- libcinit,glibc,glibcversion,so,threads,soversion = m.groups()
+ libcinit,glibc,glibcversion,so,threads,soversion = [
+ s.decode('latin1') if s is not None else s
+ for s in m.groups()]
if libcinit and not lib:
lib = 'libc'
elif glibc:
@@ -357,92 +362,13 @@ def dist(distname='',version='',id='',
supported_dists=supported_dists,
full_distribution_name=0)
-class _popen:
-
- """ Fairly portable (alternative) popen implementation.
-
- This is mostly needed in case os.popen() is not available, or
- doesn't work as advertised, e.g. in Win9X GUI programs like
- PythonWin or IDLE.
-
- Writing to the pipe is currently not supported.
-
- """
- tmpfile = ''
- pipe = None
- bufsize = None
- mode = 'r'
-
- def __init__(self,cmd,mode='r',bufsize=None):
-
- if mode != 'r':
- raise ValueError('popen()-emulation only supports read mode')
- import tempfile
- self.tmpfile = tmpfile = tempfile.mktemp()
- os.system(cmd + ' > %s' % tmpfile)
- self.pipe = open(tmpfile,'rb')
- self.bufsize = bufsize
- self.mode = mode
-
- def read(self):
-
- return self.pipe.read()
-
- def readlines(self):
-
- if self.bufsize is not None:
- return self.pipe.readlines()
-
- def close(self,
-
- remove=os.unlink,error=os.error):
-
- if self.pipe:
- rc = self.pipe.close()
- else:
- rc = 255
- if self.tmpfile:
- try:
- remove(self.tmpfile)
- except error:
- pass
- return rc
-
- # Alias
- __del__ = close
-
def popen(cmd, mode='r', bufsize=-1):
""" Portable popen() interface.
"""
- # Find a working popen implementation preferring win32pipe.popen
- # over os.popen over _popen
- popen = None
- if os.environ.get('OS','') == 'Windows_NT':
- # On NT win32pipe should work; on Win9x it hangs due to bugs
- # in the MS C lib (see MS KnowledgeBase article Q150956)
- try:
- import win32pipe
- except ImportError:
- pass
- else:
- popen = win32pipe.popen
- if popen is None:
- if hasattr(os,'popen'):
- popen = os.popen
- # Check whether it works... it doesn't in GUI programs
- # on Windows platforms
- if sys.platform == 'win32': # XXX Others too ?
- try:
- popen('')
- except os.error:
- popen = _popen
- else:
- popen = _popen
- if bufsize is None:
- return popen(cmd,mode)
- else:
- return popen(cmd,mode,bufsize)
+ import warnings
+ warnings.warn('use os.popen instead', DeprecationWarning, stacklevel=2)
+ return os.popen(cmd, mode, bufsize)
def _norm_version(version, build=''):
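Since the pattern is now bytes, it can be applied straight to the raw chunk without the old latin-1 decode; e.g.:

    # the b'\x00' delimiters stop the greedy [0-9.]+ from eating further dots
    m = _libc_search.search(b'\x00GLIBC_2.15\x00')
    print(m.group(2), m.group(3))  # b'GLIBC_2.15' b'2.15'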
diff --git a/Lib/plistlib.py b/Lib/plistlib.py
index 2e7e512..21076db 100644
--- a/Lib/plistlib.py
+++ b/Lib/plistlib.py
@@ -266,13 +266,13 @@ class _InternalDict(dict):
raise AttributeError(attr)
from warnings import warn
warn("Attribute access from plist dicts is deprecated, use d[key] "
- "notation instead", PendingDeprecationWarning, 2)
+ "notation instead", DeprecationWarning, 2)
return value
def __setattr__(self, attr, value):
from warnings import warn
warn("Attribute access from plist dicts is deprecated, use d[key] "
- "notation instead", PendingDeprecationWarning, 2)
+ "notation instead", DeprecationWarning, 2)
self[attr] = value
def __delattr__(self, attr):
@@ -282,14 +282,14 @@ class _InternalDict(dict):
raise AttributeError(attr)
from warnings import warn
warn("Attribute access from plist dicts is deprecated, use d[key] "
- "notation instead", PendingDeprecationWarning, 2)
+ "notation instead", DeprecationWarning, 2)
class Dict(_InternalDict):
def __init__(self, **kwargs):
from warnings import warn
warn("The plistlib.Dict class is deprecated, use builtin dict instead",
- PendingDeprecationWarning, 2)
+ DeprecationWarning, 2)
super().__init__(**kwargs)
@@ -302,7 +302,7 @@ class Plist(_InternalDict):
def __init__(self, **kwargs):
from warnings import warn
warn("The Plist class is deprecated, use the readPlist() and "
- "writePlist() functions instead", PendingDeprecationWarning, 2)
+ "writePlist() functions instead", DeprecationWarning, 2)
super().__init__(**kwargs)
def fromFile(cls, pathOrFile):
diff --git a/Lib/poplib.py b/Lib/poplib.py
index 84ea88d..d42d9dd 100644
--- a/Lib/poplib.py
+++ b/Lib/poplib.py
@@ -250,15 +250,18 @@ class POP3:
def quit(self):
"""Signoff: commit changes on server, unlock mailbox, close connection."""
- try:
- resp = self._shortcmd('QUIT')
- except error_proto as val:
- resp = val
- self.file.close()
- self.sock.close()
- del self.file, self.sock
+ resp = self._shortcmd('QUIT')
+ self.close()
return resp
+ def close(self):
+ """Close the connection without assuming anything about it."""
+ if self.file is not None:
+ self.file.close()
+ if self.sock is not None:
+ self.sock.close()
+ self.file = self.sock = None
+
#__del__ = quit
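With close() factored out of quit(), callers can always release the socket, even when QUIT raises; a sketch (server and credentials hypothetical):

    import poplib

    pop = poplib.POP3('mail.example.org')
    try:
        pop.user('someone')
        pop.pass_('secret')
        pop.quit()       # QUIT, then close() on success
    finally:
        pop.close()      # tolerates an already-closed connection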
diff --git a/Lib/pstats.py b/Lib/pstats.py
index 3f0add2..13d944c 100644
--- a/Lib/pstats.py
+++ b/Lib/pstats.py
@@ -674,13 +674,14 @@ if __name__ == '__main__':
return stop
return None
- import sys
if len(sys.argv) > 1:
initprofile = sys.argv[1]
else:
initprofile = None
try:
browser = ProfileBrowser(initprofile)
+ for profile in sys.argv[2:]:
+ browser.do_add(profile)
print("Welcome to the profile statistics browser.", file=browser.stream)
browser.cmdloop()
print("Goodbye.", file=browser.stream)
diff --git a/Lib/py_compile.py b/Lib/py_compile.py
index 5adb70a..62d69ad 100644
--- a/Lib/py_compile.py
+++ b/Lib/py_compile.py
@@ -110,9 +110,11 @@ def compile(file, cfile=None, dfile=None, doraise=False, optimize=-1):
"""
with tokenize.open(file) as f:
try:
- timestamp = int(os.fstat(f.fileno()).st_mtime)
+ st = os.fstat(f.fileno())
except AttributeError:
- timestamp = int(os.stat(file).st_mtime)
+ st = os.stat(file)
+ timestamp = int(st.st_mtime)
+ size = st.st_size & 0xFFFFFFFF
codestring = f.read()
try:
codeobject = builtins.compile(codestring, dfile or file, 'exec',
@@ -139,6 +141,7 @@ def compile(file, cfile=None, dfile=None, doraise=False, optimize=-1):
with open(cfile, 'wb') as fc:
fc.write(b'\0\0\0\0')
wr_long(fc, timestamp)
+ wr_long(fc, size)
marshal.dump(codeobject, fc)
fc.flush()
fc.seek(0, 0)
diff --git a/Lib/pydoc.py b/Lib/pydoc.py
index f45d461..591717b 100755
--- a/Lib/pydoc.py
+++ b/Lib/pydoc.py
@@ -22,11 +22,6 @@ Run "pydoc -b" to start an HTTP server on an arbitrary unused port and
open a Web browser to interactively browse documentation. The -p option
can be used with the -b option to explicitly specify the server port.
-For platforms without a command line, "pydoc -g" starts the HTTP server
-and also pops up a little window for controlling it. This option is
-deprecated, since the server can now be controlled directly from HTTP
-clients.
-
Run "pydoc -w <name>" to write out the HTML documentation for a module
to a file named "<name>.html".
@@ -42,7 +37,6 @@ __all__ = ['help']
__author__ = "Ka-Ping Yee <ping@lfw.org>"
__date__ = "26 February 2001"
-__version__ = "$Revision$"
__credits__ = """Guido van Rossum, for an excellent programming language.
Tommy Burnette, the original creator of manpy.
Paul Prescod, for all his work on onlinehelp.
@@ -169,11 +163,11 @@ def _split_list(s, predicate):
def visiblename(name, all=None, obj=None):
"""Decide whether to show documentation on a variable."""
# Certain special names are redundant.
- _hidden_names = ('__builtins__', '__doc__', '__file__', '__path__',
+ if name in {'__builtins__', '__doc__', '__file__', '__path__',
'__module__', '__name__', '__slots__', '__package__',
'__cached__', '__author__', '__credits__', '__date__',
- '__version__')
- if name in _hidden_names: return 0
+ '__version__', '__qualname__'}:
+ return 0
# Private names are hidden, but special names are displayed.
if name.startswith('__') and name.endswith('__'): return 1
# Namedtuples have public fields and methods with a single leading underscore
@@ -963,6 +957,9 @@ class HTMLDoc(Doc):
modpkgs = []
if shadowed is None: shadowed = {}
for importer, name, ispkg in pkgutil.iter_modules([dir]):
+ if any((0xD800 <= ord(ch) <= 0xDFFF) for ch in name):
+ # ignore a module if its name contains a surrogate character
+ continue
modpkgs.append((name, '', ispkg, name in shadowed))
shadowed[name] = 1
@@ -2073,272 +2070,6 @@ def apropos(key):
warnings.filterwarnings('ignore') # ignore problems during import
ModuleScanner().run(callback, key, onerror=onerror)
-# --------------------------------------------------- Web browser interface
-
-def serve(port, callback=None, completer=None):
- import http.server, email.message, select
-
- msg = 'the pydoc.serve() function is deprecated'
- warnings.warn(msg, DeprecationWarning, stacklevel=2)
-
- class DocHandler(http.server.BaseHTTPRequestHandler):
- def send_document(self, title, contents):
- try:
- self.send_response(200)
- self.send_header('Content-Type', 'text/html; charset=UTF-8')
- self.end_headers()
- self.wfile.write(html.page(title, contents).encode('utf-8'))
- except IOError: pass
-
- def do_GET(self):
- path = self.path
- if path[-5:] == '.html': path = path[:-5]
- if path[:1] == '/': path = path[1:]
- if path and path != '.':
- try:
- obj = locate(path, forceload=1)
- except ErrorDuringImport as value:
- self.send_document(path, html.escape(str(value)))
- return
- if obj:
- self.send_document(describe(obj), html.document(obj, path))
- else:
- self.send_document(path,
-'no Python documentation found for %s' % repr(path))
- else:
- heading = html.heading(
-'<big><big><strong>Python: Index of Modules</strong></big></big>',
-'#ffffff', '#7799ee')
- def bltinlink(name):
- return '<a href="%s.html">%s</a>' % (name, name)
- names = [x for x in sys.builtin_module_names if x != '__main__']
- contents = html.multicolumn(names, bltinlink)
- indices = ['<p>' + html.bigsection(
- 'Built-in Modules', '#ffffff', '#ee77aa', contents)]
-
- seen = {}
- for dir in sys.path:
- indices.append(html.index(dir, seen))
- contents = heading + ' '.join(indices) + '''<p align=right>
-<font color="#909090" face="helvetica, arial"><strong>
-pydoc</strong> by Ka-Ping Yee &lt;ping@lfw.org&gt;</font>'''
- self.send_document('Index of Modules', contents)
-
- def log_message(self, *args): pass
-
- class DocServer(http.server.HTTPServer):
- def __init__(self, port, callback):
- host = 'localhost'
- self.address = (host, port)
- self.url = 'http://%s:%d/' % (host, port)
- self.callback = callback
- self.base.__init__(self, self.address, self.handler)
-
- def serve_until_quit(self):
- import select
- self.quit = False
- while not self.quit:
- rd, wr, ex = select.select([self.socket.fileno()], [], [], 1)
- if rd: self.handle_request()
- self.server_close()
-
- def server_activate(self):
- self.base.server_activate(self)
- if self.callback: self.callback(self)
-
- DocServer.base = http.server.HTTPServer
- DocServer.handler = DocHandler
- DocHandler.MessageClass = email.message.Message
- try:
- try:
- DocServer(port, callback).serve_until_quit()
- except (KeyboardInterrupt, select.error):
- pass
- finally:
- if completer: completer()
-
-# ----------------------------------------------------- graphical interface
-
-def gui():
- """Graphical interface (starts Web server and pops up a control window)."""
-
- msg = ('the pydoc.gui() function and "pydoc -g" option are deprecated\n',
- 'use "pydoc.browse() function and "pydoc -b" option instead.')
- warnings.warn(msg, DeprecationWarning, stacklevel=2)
-
- class GUI:
- def __init__(self, window, port=7464):
- self.window = window
- self.server = None
- self.scanner = None
-
- import tkinter
- self.server_frm = tkinter.Frame(window)
- self.title_lbl = tkinter.Label(self.server_frm,
- text='Starting server...\n ')
- self.open_btn = tkinter.Button(self.server_frm,
- text='open browser', command=self.open, state='disabled')
- self.quit_btn = tkinter.Button(self.server_frm,
- text='quit serving', command=self.quit, state='disabled')
-
- self.search_frm = tkinter.Frame(window)
- self.search_lbl = tkinter.Label(self.search_frm, text='Search for')
- self.search_ent = tkinter.Entry(self.search_frm)
- self.search_ent.bind('<Return>', self.search)
- self.stop_btn = tkinter.Button(self.search_frm,
- text='stop', pady=0, command=self.stop, state='disabled')
- if sys.platform == 'win32':
- # Trying to hide and show this button crashes under Windows.
- self.stop_btn.pack(side='right')
-
- self.window.title('pydoc')
- self.window.protocol('WM_DELETE_WINDOW', self.quit)
- self.title_lbl.pack(side='top', fill='x')
- self.open_btn.pack(side='left', fill='x', expand=1)
- self.quit_btn.pack(side='right', fill='x', expand=1)
- self.server_frm.pack(side='top', fill='x')
-
- self.search_lbl.pack(side='left')
- self.search_ent.pack(side='right', fill='x', expand=1)
- self.search_frm.pack(side='top', fill='x')
- self.search_ent.focus_set()
-
- font = ('helvetica', sys.platform == 'win32' and 8 or 10)
- self.result_lst = tkinter.Listbox(window, font=font, height=6)
- self.result_lst.bind('<Button-1>', self.select)
- self.result_lst.bind('<Double-Button-1>', self.goto)
- self.result_scr = tkinter.Scrollbar(window,
- orient='vertical', command=self.result_lst.yview)
- self.result_lst.config(yscrollcommand=self.result_scr.set)
-
- self.result_frm = tkinter.Frame(window)
- self.goto_btn = tkinter.Button(self.result_frm,
- text='go to selected', command=self.goto)
- self.hide_btn = tkinter.Button(self.result_frm,
- text='hide results', command=self.hide)
- self.goto_btn.pack(side='left', fill='x', expand=1)
- self.hide_btn.pack(side='right', fill='x', expand=1)
-
- self.window.update()
- self.minwidth = self.window.winfo_width()
- self.minheight = self.window.winfo_height()
- self.bigminheight = (self.server_frm.winfo_reqheight() +
- self.search_frm.winfo_reqheight() +
- self.result_lst.winfo_reqheight() +
- self.result_frm.winfo_reqheight())
- self.bigwidth, self.bigheight = self.minwidth, self.bigminheight
- self.expanded = 0
- self.window.wm_geometry('%dx%d' % (self.minwidth, self.minheight))
- self.window.wm_minsize(self.minwidth, self.minheight)
- self.window.tk.willdispatch()
-
- import threading
- threading.Thread(
- target=serve, args=(port, self.ready, self.quit)).start()
-
- def ready(self, server):
- self.server = server
- self.title_lbl.config(
- text='Python documentation server at\n' + server.url)
- self.open_btn.config(state='normal')
- self.quit_btn.config(state='normal')
-
- def open(self, event=None, url=None):
- url = url or self.server.url
- import webbrowser
- webbrowser.open(url)
-
- def quit(self, event=None):
- if self.server:
- self.server.quit = 1
- self.window.quit()
-
- def search(self, event=None):
- key = self.search_ent.get()
- self.stop_btn.pack(side='right')
- self.stop_btn.config(state='normal')
- self.search_lbl.config(text='Searching for "%s"...' % key)
- self.search_ent.forget()
- self.search_lbl.pack(side='left')
- self.result_lst.delete(0, 'end')
- self.goto_btn.config(state='disabled')
- self.expand()
-
- import threading
- if self.scanner:
- self.scanner.quit = 1
- self.scanner = ModuleScanner()
- threading.Thread(target=self.scanner.run,
- args=(self.update, key, self.done)).start()
-
- def update(self, path, modname, desc):
- if modname[-9:] == '.__init__':
- modname = modname[:-9] + ' (package)'
- self.result_lst.insert('end',
- modname + ' - ' + (desc or '(no description)'))
-
- def stop(self, event=None):
- if self.scanner:
- self.scanner.quit = 1
- self.scanner = None
-
- def done(self):
- self.scanner = None
- self.search_lbl.config(text='Search for')
- self.search_lbl.pack(side='left')
- self.search_ent.pack(side='right', fill='x', expand=1)
- if sys.platform != 'win32': self.stop_btn.forget()
- self.stop_btn.config(state='disabled')
-
- def select(self, event=None):
- self.goto_btn.config(state='normal')
-
- def goto(self, event=None):
- selection = self.result_lst.curselection()
- if selection:
- modname = self.result_lst.get(selection[0]).split()[0]
- self.open(url=self.server.url + modname + '.html')
-
- def collapse(self):
- if not self.expanded: return
- self.result_frm.forget()
- self.result_scr.forget()
- self.result_lst.forget()
- self.bigwidth = self.window.winfo_width()
- self.bigheight = self.window.winfo_height()
- self.window.wm_geometry('%dx%d' % (self.minwidth, self.minheight))
- self.window.wm_minsize(self.minwidth, self.minheight)
- self.expanded = 0
-
- def expand(self):
- if self.expanded: return
- self.result_frm.pack(side='bottom', fill='x')
- self.result_scr.pack(side='right', fill='y')
- self.result_lst.pack(side='top', fill='both', expand=1)
- self.window.wm_geometry('%dx%d' % (self.bigwidth, self.bigheight))
- self.window.wm_minsize(self.minwidth, self.bigminheight)
- self.expanded = 1
-
- def hide(self, event=None):
- self.stop()
- self.collapse()
-
- import tkinter
- try:
- root = tkinter.Tk()
- # Tk will crash if pythonw.exe has an XP .manifest
- # file and the root is not destroyed explicitly.
- # If the problem is ever fixed in Tk, the explicit
- # destroy can go.
- try:
- gui = GUI(root)
- root.mainloop()
- finally:
- root.destroy()
- except KeyboardInterrupt:
- pass
-
-
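With the Tkinter front end removed above, the supported replacement is the browser-based server introduced in the next section. A minimal sketch of the equivalent workflow, assuming the pydoc.browse() entry point that ships with the enhanced web interface (the keyword arguments here are assumptions and may differ between Python versions):

    import pydoc

    # Start the documentation HTTP server on an OS-assigned port and open
    # the default browser at its index page; this is the workflow that
    # replaces `pydoc -g`. The open_browser keyword is an assumption
    # based on the 3.2-era API.
    pydoc.browse(port=0, open_browser=True)
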
# --------------------------------------- enhanced Web browser interface
def _start_server(urlhandler, port):
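The _start_server() helper takes a urlhandler callback that maps a requested URL and content type to a response body. A hypothetical handler, assuming the (url, content_type) -> str contract implied by the parameter name (this excerpt does not spell the contract out):

    # Hypothetical urlhandler sketch; the (url, content_type) signature is
    # inferred from _start_server's parameter name, not from this excerpt.
    def echo_handler(url, content_type):
        return '<html><body><pre>%s (%s)</pre></body></html>' % (
            url, content_type)

    # Assumed call shape: serverthread = _start_server(echo_handler, 7464)
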
@@ -2795,15 +2526,12 @@ def cli():
sys.path.insert(0, '.')
try:
- opts, args = getopt.getopt(sys.argv[1:], 'bgk:p:w')
+ opts, args = getopt.getopt(sys.argv[1:], 'bk:p:w')
writing = False
start_server = False
open_browser = False
port = None
for opt, val in opts:
- if opt == '-g':
- gui()
- return
if opt == '-b':
start_server = True
open_browser = True
@@ -2864,9 +2592,6 @@ def cli():
to interactively browse documentation. The -p option can be used with
the -b option to explicitly specify the server port.
-{cmd} -g
- Deprecated.
-
{cmd} -w <name> ...
Write out the HTML documentation for a module to a file in the current
directory. If <name> contains a '{sep}', it is treated as a filename; if
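Taken together, the post-patch flag surface looks like this (a usage sketch assuming the standard python -m pydoc entry point; none of these lines are captured output):

    # Usage sketch; flags per the getopt string 'bk:p:w' and the help
    # text above.
    #   python -m pydoc -b          # start the server and open a browser
    #                               # (the replacement for the removed -g)
    #   python -m pydoc -b -p 8080  # same, pinned to an explicit port
    #   python -m pydoc -w json     # write HTML docs for a module to ./json.html
    #   python -m pydoc -k queue    # keyword search over module synopses
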
diff --git a/Lib/pydoc_data/topics.py b/Lib/pydoc_data/topics.py
index 08a9e7c..92e045e 100644
--- a/Lib/pydoc_data/topics.py
+++ b/Lib/pydoc_data/topics.py
@@ -1,16 +1,17 @@
-# Autogenerated by Sphinx on Thu Feb 23 18:37:54 2012
+# -*- coding: utf-8 -*-
+# Autogenerated by Sphinx on Sun Mar 4 16:11:27 2012
topics = {'assert': '\nThe ``assert`` statement\n************************\n\nAssert statements are a convenient way to insert debugging assertions\ninto a program:\n\n assert_stmt ::= "assert" expression ["," expression]\n\nThe simple form, ``assert expression``, is equivalent to\n\n if __debug__:\n if not expression: raise AssertionError\n\nThe extended form, ``assert expression1, expression2``, is equivalent\nto\n\n if __debug__:\n if not expression1: raise AssertionError(expression2)\n\nThese equivalences assume that ``__debug__`` and ``AssertionError``\nrefer to the built-in variables with those names. In the current\nimplementation, the built-in variable ``__debug__`` is ``True`` under\nnormal circumstances, ``False`` when optimization is requested\n(command line option -O). The current code generator emits no code\nfor an assert statement when optimization is requested at compile\ntime. Note that it is unnecessary to include the source code for the\nexpression that failed in the error message; it will be displayed as\npart of the stack trace.\n\nAssignments to ``__debug__`` are illegal. The value for the built-in\nvariable is determined when the interpreter starts.\n',
'assignment': '\nAssignment statements\n*********************\n\nAssignment statements are used to (re)bind names to values and to\nmodify attributes or items of mutable objects:\n\n assignment_stmt ::= (target_list "=")+ (expression_list | yield_expression)\n target_list ::= target ("," target)* [","]\n target ::= identifier\n | "(" target_list ")"\n | "[" target_list "]"\n | attributeref\n | subscription\n | slicing\n | "*" target\n\n(See section *Primaries* for the syntax definitions for the last three\nsymbols.)\n\nAn assignment statement evaluates the expression list (remember that\nthis can be a single expression or a comma-separated list, the latter\nyielding a tuple) and assigns the single resulting object to each of\nthe target lists, from left to right.\n\nAssignment is defined recursively depending on the form of the target\n(list). When a target is part of a mutable object (an attribute\nreference, subscription or slicing), the mutable object must\nultimately perform the assignment and decide about its validity, and\nmay raise an exception if the assignment is unacceptable. The rules\nobserved by various types and the exceptions raised are given with the\ndefinition of the object types (see section *The standard type\nhierarchy*).\n\nAssignment of an object to a target list, optionally enclosed in\nparentheses or square brackets, is recursively defined as follows.\n\n* If the target list is a single target: The object is assigned to\n that target.\n\n* If the target list is a comma-separated list of targets: The object\n must be an iterable with the same number of items as there are\n targets in the target list, and the items are assigned, from left to\n right, to the corresponding targets.\n\n * If the target list contains one target prefixed with an asterisk,\n called a "starred" target: The object must be a sequence with at\n least as many items as there are targets in the target list, minus\n one. The first items of the sequence are assigned, from left to\n right, to the targets before the starred target. The final items\n of the sequence are assigned to the targets after the starred\n target. A list of the remaining items in the sequence is then\n assigned to the starred target (the list can be empty).\n\n * Else: The object must be a sequence with the same number of items\n as there are targets in the target list, and the items are\n assigned, from left to right, to the corresponding targets.\n\nAssignment of an object to a single target is recursively defined as\nfollows.\n\n* If the target is an identifier (name):\n\n * If the name does not occur in a ``global`` or ``nonlocal``\n statement in the current code block: the name is bound to the\n object in the current local namespace.\n\n * Otherwise: the name is bound to the object in the global namespace\n or the outer namespace determined by ``nonlocal``, respectively.\n\n The name is rebound if it was already bound. This may cause the\n reference count for the object previously bound to the name to reach\n zero, causing the object to be deallocated and its destructor (if it\n has one) to be called.\n\n* If the target is a target list enclosed in parentheses or in square\n brackets: The object must be an iterable with the same number of\n items as there are targets in the target list, and its items are\n assigned, from left to right, to the corresponding targets.\n\n* If the target is an attribute reference: The primary expression in\n the reference is evaluated. 
It should yield an object with\n assignable attributes; if this is not the case, ``TypeError`` is\n raised. That object is then asked to assign the assigned object to\n the given attribute; if it cannot perform the assignment, it raises\n an exception (usually but not necessarily ``AttributeError``).\n\n Note: If the object is a class instance and the attribute reference\n occurs on both sides of the assignment operator, the RHS expression,\n ``a.x`` can access either an instance attribute or (if no instance\n attribute exists) a class attribute. The LHS target ``a.x`` is\n always set as an instance attribute, creating it if necessary.\n Thus, the two occurrences of ``a.x`` do not necessarily refer to the\n same attribute: if the RHS expression refers to a class attribute,\n the LHS creates a new instance attribute as the target of the\n assignment:\n\n class Cls:\n x = 3 # class variable\n inst = Cls()\n inst.x = inst.x + 1 # writes inst.x as 4 leaving Cls.x as 3\n\n This description does not necessarily apply to descriptor\n attributes, such as properties created with ``property()``.\n\n* If the target is a subscription: The primary expression in the\n reference is evaluated. It should yield either a mutable sequence\n object (such as a list) or a mapping object (such as a dictionary).\n Next, the subscript expression is evaluated.\n\n If the primary is a mutable sequence object (such as a list), the\n subscript must yield an integer. If it is negative, the sequence\'s\n length is added to it. The resulting value must be a nonnegative\n integer less than the sequence\'s length, and the sequence is asked\n to assign the assigned object to its item with that index. If the\n index is out of range, ``IndexError`` is raised (assignment to a\n subscripted sequence cannot add new items to a list).\n\n If the primary is a mapping object (such as a dictionary), the\n subscript must have a type compatible with the mapping\'s key type,\n and the mapping is then asked to create a key/datum pair which maps\n the subscript to the assigned object. This can either replace an\n existing key/value pair with the same key value, or insert a new\n key/value pair (if no key with the same value existed).\n\n For user-defined objects, the ``__setitem__()`` method is called\n with appropriate arguments.\n\n* If the target is a slicing: The primary expression in the reference\n is evaluated. It should yield a mutable sequence object (such as a\n list). The assigned object should be a sequence object of the same\n type. Next, the lower and upper bound expressions are evaluated,\n insofar they are present; defaults are zero and the sequence\'s\n length. The bounds should evaluate to integers. If either bound is\n negative, the sequence\'s length is added to it. The resulting\n bounds are clipped to lie between zero and the sequence\'s length,\n inclusive. Finally, the sequence object is asked to replace the\n slice with the items of the assigned sequence. 
The length of the\n slice may be different from the length of the assigned sequence,\n thus changing the length of the target sequence, if the object\n allows it.\n\n**CPython implementation detail:** In the current implementation, the\nsyntax for targets is taken to be the same as for expressions, and\ninvalid syntax is rejected during the code generation phase, causing\nless detailed error messages.\n\nWARNING: Although the definition of assignment implies that overlaps\nbetween the left-hand side and the right-hand side are \'safe\' (for\nexample ``a, b = b, a`` swaps two variables), overlaps *within* the\ncollection of assigned-to variables are not safe! For instance, the\nfollowing program prints ``[0, 2]``:\n\n x = [0, 1]\n i = 0\n i, x[i] = 1, 2\n print(x)\n\nSee also:\n\n **PEP 3132** - Extended Iterable Unpacking\n The specification for the ``*target`` feature.\n\n\nAugmented assignment statements\n===============================\n\nAugmented assignment is the combination, in a single statement, of a\nbinary operation and an assignment statement:\n\n augmented_assignment_stmt ::= augtarget augop (expression_list | yield_expression)\n augtarget ::= identifier | attributeref | subscription | slicing\n augop ::= "+=" | "-=" | "*=" | "/=" | "//=" | "%=" | "**="\n | ">>=" | "<<=" | "&=" | "^=" | "|="\n\n(See section *Primaries* for the syntax definitions for the last three\nsymbols.)\n\nAn augmented assignment evaluates the target (which, unlike normal\nassignment statements, cannot be an unpacking) and the expression\nlist, performs the binary operation specific to the type of assignment\non the two operands, and assigns the result to the original target.\nThe target is only evaluated once.\n\nAn augmented assignment expression like ``x += 1`` can be rewritten as\n``x = x + 1`` to achieve a similar, but not exactly equal effect. In\nthe augmented version, ``x`` is only evaluated once. Also, when\npossible, the actual operation is performed *in-place*, meaning that\nrather than creating a new object and assigning that to the target,\nthe old object is modified instead.\n\nWith the exception of assigning to tuples and multiple targets in a\nsingle statement, the assignment done by augmented assignment\nstatements is handled the same way as normal assignments. Similarly,\nwith the exception of the possible *in-place* behavior, the binary\noperation performed by augmented assignment is the same as the normal\nbinary operations.\n\nFor targets which are attribute references, the same *caveat about\nclass and instance attributes* applies as for regular assignments.\n',
'atom-identifiers': '\nIdentifiers (Names)\n*******************\n\nAn identifier occurring as an atom is a name. See section\n*Identifiers and keywords* for lexical definition and section *Naming\nand binding* for documentation of naming and binding.\n\nWhen the name is bound to an object, evaluation of the atom yields\nthat object. When a name is not bound, an attempt to evaluate it\nraises a ``NameError`` exception.\n\n**Private name mangling:** When an identifier that textually occurs in\na class definition begins with two or more underscore characters and\ndoes not end in two or more underscores, it is considered a *private\nname* of that class. Private names are transformed to a longer form\nbefore code is generated for them. The transformation inserts the\nclass name in front of the name, with leading underscores removed, and\na single underscore inserted in front of the class name. For example,\nthe identifier ``__spam`` occurring in a class named ``Ham`` will be\ntransformed to ``_Ham__spam``. This transformation is independent of\nthe syntactical context in which the identifier is used. If the\ntransformed name is extremely long (longer than 255 characters),\nimplementation defined truncation may happen. If the class name\nconsists only of underscores, no transformation is done.\n',
'atom-literals': "\nLiterals\n********\n\nPython supports string and bytes literals and various numeric\nliterals:\n\n literal ::= stringliteral | bytesliteral\n | integer | floatnumber | imagnumber\n\nEvaluation of a literal yields an object of the given type (string,\nbytes, integer, floating point number, complex number) with the given\nvalue. The value may be approximated in the case of floating point\nand imaginary (complex) literals. See section *Literals* for details.\n\nAll literals correspond to immutable data types, and hence the\nobject's identity is less important than its value. Multiple\nevaluations of literals with the same value (either the same\noccurrence in the program text or a different occurrence) may obtain\nthe same object or a different object with the same value.\n",
- 'attribute-access': '\nCustomizing attribute access\n****************************\n\nThe following methods can be defined to customize the meaning of\nattribute access (use of, assignment to, or deletion of ``x.name``)\nfor class instances.\n\nobject.__getattr__(self, name)\n\n Called when an attribute lookup has not found the attribute in the\n usual places (i.e. it is not an instance attribute nor is it found\n in the class tree for ``self``). ``name`` is the attribute name.\n This method should return the (computed) attribute value or raise\n an ``AttributeError`` exception.\n\n Note that if the attribute is found through the normal mechanism,\n ``__getattr__()`` is not called. (This is an intentional asymmetry\n between ``__getattr__()`` and ``__setattr__()``.) This is done both\n for efficiency reasons and because otherwise ``__getattr__()``\n would have no way to access other attributes of the instance. Note\n that at least for instance variables, you can fake total control by\n not inserting any values in the instance attribute dictionary (but\n instead inserting them in another object). See the\n ``__getattribute__()`` method below for a way to actually get total\n control over attribute access.\n\nobject.__getattribute__(self, name)\n\n Called unconditionally to implement attribute accesses for\n instances of the class. If the class also defines\n ``__getattr__()``, the latter will not be called unless\n ``__getattribute__()`` either calls it explicitly or raises an\n ``AttributeError``. This method should return the (computed)\n attribute value or raise an ``AttributeError`` exception. In order\n to avoid infinite recursion in this method, its implementation\n should always call the base class method with the same name to\n access any attributes it needs, for example,\n ``object.__getattribute__(self, name)``.\n\n Note: This method may still be bypassed when looking up special methods\n as the result of implicit invocation via language syntax or\n built-in functions. See *Special method lookup*.\n\nobject.__setattr__(self, name, value)\n\n Called when an attribute assignment is attempted. This is called\n instead of the normal mechanism (i.e. store the value in the\n instance dictionary). *name* is the attribute name, *value* is the\n value to be assigned to it.\n\n If ``__setattr__()`` wants to assign to an instance attribute, it\n should call the base class method with the same name, for example,\n ``object.__setattr__(self, name, value)``.\n\nobject.__delattr__(self, name)\n\n Like ``__setattr__()`` but for attribute deletion instead of\n assignment. This should only be implemented if ``del obj.name`` is\n meaningful for the object.\n\nobject.__dir__(self)\n\n Called when ``dir()`` is called on the object. A list must be\n returned.\n\n\nImplementing Descriptors\n========================\n\nThe following methods only apply when an instance of the class\ncontaining the method (a so-called *descriptor* class) appears in an\n*owner* class (the descriptor must be in either the owner\'s class\ndictionary or in the class dictionary for one of its parents). In the\nexamples below, "the attribute" refers to the attribute whose name is\nthe key of the property in the owner class\' ``__dict__``.\n\nobject.__get__(self, instance, owner)\n\n Called to get the attribute of the owner class (class attribute\n access) or of an instance of that class (instance attribute\n access). 
*owner* is always the owner class, while *instance* is the\n instance that the attribute was accessed through, or ``None`` when\n the attribute is accessed through the *owner*. This method should\n return the (computed) attribute value or raise an\n ``AttributeError`` exception.\n\nobject.__set__(self, instance, value)\n\n Called to set the attribute on an instance *instance* of the owner\n class to a new value, *value*.\n\nobject.__delete__(self, instance)\n\n Called to delete the attribute on an instance *instance* of the\n owner class.\n\n\nInvoking Descriptors\n====================\n\nIn general, a descriptor is an object attribute with "binding\nbehavior", one whose attribute access has been overridden by methods\nin the descriptor protocol: ``__get__()``, ``__set__()``, and\n``__delete__()``. If any of those methods are defined for an object,\nit is said to be a descriptor.\n\nThe default behavior for attribute access is to get, set, or delete\nthe attribute from an object\'s dictionary. For instance, ``a.x`` has a\nlookup chain starting with ``a.__dict__[\'x\']``, then\n``type(a).__dict__[\'x\']``, and continuing through the base classes of\n``type(a)`` excluding metaclasses.\n\nHowever, if the looked-up value is an object defining one of the\ndescriptor methods, then Python may override the default behavior and\ninvoke the descriptor method instead. Where this occurs in the\nprecedence chain depends on which descriptor methods were defined and\nhow they were called.\n\nThe starting point for descriptor invocation is a binding, ``a.x``.\nHow the arguments are assembled depends on ``a``:\n\nDirect Call\n The simplest and least common call is when user code directly\n invokes a descriptor method: ``x.__get__(a)``.\n\nInstance Binding\n If binding to an object instance, ``a.x`` is transformed into the\n call: ``type(a).__dict__[\'x\'].__get__(a, type(a))``.\n\nClass Binding\n If binding to a class, ``A.x`` is transformed into the call:\n ``A.__dict__[\'x\'].__get__(None, A)``.\n\nSuper Binding\n If ``a`` is an instance of ``super``, then the binding ``super(B,\n obj).m()`` searches ``obj.__class__.__mro__`` for the base class\n ``A`` immediately preceding ``B`` and then invokes the descriptor\n with the call: ``A.__dict__[\'m\'].__get__(obj, obj.__class__)``.\n\nFor instance bindings, the precedence of descriptor invocation depends\non the which descriptor methods are defined. A descriptor can define\nany combination of ``__get__()``, ``__set__()`` and ``__delete__()``.\nIf it does not define ``__get__()``, then accessing the attribute will\nreturn the descriptor object itself unless there is a value in the\nobject\'s instance dictionary. If the descriptor defines ``__set__()``\nand/or ``__delete__()``, it is a data descriptor; if it defines\nneither, it is a non-data descriptor. Normally, data descriptors\ndefine both ``__get__()`` and ``__set__()``, while non-data\ndescriptors have just the ``__get__()`` method. Data descriptors with\n``__set__()`` and ``__get__()`` defined always override a redefinition\nin an instance dictionary. In contrast, non-data descriptors can be\noverridden by instances.\n\nPython methods (including ``staticmethod()`` and ``classmethod()``)\nare implemented as non-data descriptors. Accordingly, instances can\nredefine and override methods. 
This allows individual instances to\nacquire behaviors that differ from other instances of the same class.\n\nThe ``property()`` function is implemented as a data descriptor.\nAccordingly, instances cannot override the behavior of a property.\n\n\n__slots__\n=========\n\nBy default, instances of classes have a dictionary for attribute\nstorage. This wastes space for objects having very few instance\nvariables. The space consumption can become acute when creating large\nnumbers of instances.\n\nThe default can be overridden by defining *__slots__* in a class\ndefinition. The *__slots__* declaration takes a sequence of instance\nvariables and reserves just enough space in each instance to hold a\nvalue for each variable. Space is saved because *__dict__* is not\ncreated for each instance.\n\nobject.__slots__\n\n This class variable can be assigned a string, iterable, or sequence\n of strings with variable names used by instances. If defined in a\n class, *__slots__* reserves space for the declared variables and\n prevents the automatic creation of *__dict__* and *__weakref__* for\n each instance.\n\n\nNotes on using *__slots__*\n--------------------------\n\n* When inheriting from a class without *__slots__*, the *__dict__*\n attribute of that class will always be accessible, so a *__slots__*\n definition in the subclass is meaningless.\n\n* Without a *__dict__* variable, instances cannot be assigned new\n variables not listed in the *__slots__* definition. Attempts to\n assign to an unlisted variable name raises ``AttributeError``. If\n dynamic assignment of new variables is desired, then add\n ``\'__dict__\'`` to the sequence of strings in the *__slots__*\n declaration.\n\n* Without a *__weakref__* variable for each instance, classes defining\n *__slots__* do not support weak references to its instances. If weak\n reference support is needed, then add ``\'__weakref__\'`` to the\n sequence of strings in the *__slots__* declaration.\n\n* *__slots__* are implemented at the class level by creating\n descriptors (*Implementing Descriptors*) for each variable name. As\n a result, class attributes cannot be used to set default values for\n instance variables defined by *__slots__*; otherwise, the class\n attribute would overwrite the descriptor assignment.\n\n* The action of a *__slots__* declaration is limited to the class\n where it is defined. As a result, subclasses will have a *__dict__*\n unless they also define *__slots__* (which must only contain names\n of any *additional* slots).\n\n* If a class defines a slot also defined in a base class, the instance\n variable defined by the base class slot is inaccessible (except by\n retrieving its descriptor directly from the base class). This\n renders the meaning of the program undefined. In the future, a\n check may be added to prevent this.\n\n* Nonempty *__slots__* does not work for classes derived from\n "variable-length" built-in types such as ``int``, ``str`` and\n ``tuple``.\n\n* Any non-string iterable may be assigned to *__slots__*. Mappings may\n also be used; however, in the future, special meaning may be\n assigned to the values corresponding to each key.\n\n* *__class__* assignment works only if both classes have the same\n *__slots__*.\n',
+ 'attribute-access': '\nCustomizing attribute access\n****************************\n\nThe following methods can be defined to customize the meaning of\nattribute access (use of, assignment to, or deletion of ``x.name``)\nfor class instances.\n\nobject.__getattr__(self, name)\n\n Called when an attribute lookup has not found the attribute in the\n usual places (i.e. it is not an instance attribute nor is it found\n in the class tree for ``self``). ``name`` is the attribute name.\n This method should return the (computed) attribute value or raise\n an ``AttributeError`` exception.\n\n Note that if the attribute is found through the normal mechanism,\n ``__getattr__()`` is not called. (This is an intentional asymmetry\n between ``__getattr__()`` and ``__setattr__()``.) This is done both\n for efficiency reasons and because otherwise ``__getattr__()``\n would have no way to access other attributes of the instance. Note\n that at least for instance variables, you can fake total control by\n not inserting any values in the instance attribute dictionary (but\n instead inserting them in another object). See the\n ``__getattribute__()`` method below for a way to actually get total\n control over attribute access.\n\nobject.__getattribute__(self, name)\n\n Called unconditionally to implement attribute accesses for\n instances of the class. If the class also defines\n ``__getattr__()``, the latter will not be called unless\n ``__getattribute__()`` either calls it explicitly or raises an\n ``AttributeError``. This method should return the (computed)\n attribute value or raise an ``AttributeError`` exception. In order\n to avoid infinite recursion in this method, its implementation\n should always call the base class method with the same name to\n access any attributes it needs, for example,\n ``object.__getattribute__(self, name)``.\n\n Note: This method may still be bypassed when looking up special methods\n as the result of implicit invocation via language syntax or\n built-in functions. See *Special method lookup*.\n\nobject.__setattr__(self, name, value)\n\n Called when an attribute assignment is attempted. This is called\n instead of the normal mechanism (i.e. store the value in the\n instance dictionary). *name* is the attribute name, *value* is the\n value to be assigned to it.\n\n If ``__setattr__()`` wants to assign to an instance attribute, it\n should call the base class method with the same name, for example,\n ``object.__setattr__(self, name, value)``.\n\nobject.__delattr__(self, name)\n\n Like ``__setattr__()`` but for attribute deletion instead of\n assignment. This should only be implemented if ``del obj.name`` is\n meaningful for the object.\n\nobject.__dir__(self)\n\n Called when ``dir()`` is called on the object. A sequence must be\n returned. ``dir()`` converts the returned sequence to a list and\n sorts it.\n\n\nImplementing Descriptors\n========================\n\nThe following methods only apply when an instance of the class\ncontaining the method (a so-called *descriptor* class) appears in an\n*owner* class (the descriptor must be in either the owner\'s class\ndictionary or in the class dictionary for one of its parents). In the\nexamples below, "the attribute" refers to the attribute whose name is\nthe key of the property in the owner class\' ``__dict__``.\n\nobject.__get__(self, instance, owner)\n\n Called to get the attribute of the owner class (class attribute\n access) or of an instance of that class (instance attribute\n access). 
*owner* is always the owner class, while *instance* is the\n instance that the attribute was accessed through, or ``None`` when\n the attribute is accessed through the *owner*. This method should\n return the (computed) attribute value or raise an\n ``AttributeError`` exception.\n\nobject.__set__(self, instance, value)\n\n Called to set the attribute on an instance *instance* of the owner\n class to a new value, *value*.\n\nobject.__delete__(self, instance)\n\n Called to delete the attribute on an instance *instance* of the\n owner class.\n\n\nInvoking Descriptors\n====================\n\nIn general, a descriptor is an object attribute with "binding\nbehavior", one whose attribute access has been overridden by methods\nin the descriptor protocol: ``__get__()``, ``__set__()``, and\n``__delete__()``. If any of those methods are defined for an object,\nit is said to be a descriptor.\n\nThe default behavior for attribute access is to get, set, or delete\nthe attribute from an object\'s dictionary. For instance, ``a.x`` has a\nlookup chain starting with ``a.__dict__[\'x\']``, then\n``type(a).__dict__[\'x\']``, and continuing through the base classes of\n``type(a)`` excluding metaclasses.\n\nHowever, if the looked-up value is an object defining one of the\ndescriptor methods, then Python may override the default behavior and\ninvoke the descriptor method instead. Where this occurs in the\nprecedence chain depends on which descriptor methods were defined and\nhow they were called.\n\nThe starting point for descriptor invocation is a binding, ``a.x``.\nHow the arguments are assembled depends on ``a``:\n\nDirect Call\n The simplest and least common call is when user code directly\n invokes a descriptor method: ``x.__get__(a)``.\n\nInstance Binding\n If binding to an object instance, ``a.x`` is transformed into the\n call: ``type(a).__dict__[\'x\'].__get__(a, type(a))``.\n\nClass Binding\n If binding to a class, ``A.x`` is transformed into the call:\n ``A.__dict__[\'x\'].__get__(None, A)``.\n\nSuper Binding\n If ``a`` is an instance of ``super``, then the binding ``super(B,\n obj).m()`` searches ``obj.__class__.__mro__`` for the base class\n ``A`` immediately preceding ``B`` and then invokes the descriptor\n with the call: ``A.__dict__[\'m\'].__get__(obj, obj.__class__)``.\n\nFor instance bindings, the precedence of descriptor invocation depends\non the which descriptor methods are defined. A descriptor can define\nany combination of ``__get__()``, ``__set__()`` and ``__delete__()``.\nIf it does not define ``__get__()``, then accessing the attribute will\nreturn the descriptor object itself unless there is a value in the\nobject\'s instance dictionary. If the descriptor defines ``__set__()``\nand/or ``__delete__()``, it is a data descriptor; if it defines\nneither, it is a non-data descriptor. Normally, data descriptors\ndefine both ``__get__()`` and ``__set__()``, while non-data\ndescriptors have just the ``__get__()`` method. Data descriptors with\n``__set__()`` and ``__get__()`` defined always override a redefinition\nin an instance dictionary. In contrast, non-data descriptors can be\noverridden by instances.\n\nPython methods (including ``staticmethod()`` and ``classmethod()``)\nare implemented as non-data descriptors. Accordingly, instances can\nredefine and override methods. 
This allows individual instances to\nacquire behaviors that differ from other instances of the same class.\n\nThe ``property()`` function is implemented as a data descriptor.\nAccordingly, instances cannot override the behavior of a property.\n\n\n__slots__\n=========\n\nBy default, instances of classes have a dictionary for attribute\nstorage. This wastes space for objects having very few instance\nvariables. The space consumption can become acute when creating large\nnumbers of instances.\n\nThe default can be overridden by defining *__slots__* in a class\ndefinition. The *__slots__* declaration takes a sequence of instance\nvariables and reserves just enough space in each instance to hold a\nvalue for each variable. Space is saved because *__dict__* is not\ncreated for each instance.\n\nobject.__slots__\n\n This class variable can be assigned a string, iterable, or sequence\n of strings with variable names used by instances. If defined in a\n class, *__slots__* reserves space for the declared variables and\n prevents the automatic creation of *__dict__* and *__weakref__* for\n each instance.\n\n\nNotes on using *__slots__*\n--------------------------\n\n* When inheriting from a class without *__slots__*, the *__dict__*\n attribute of that class will always be accessible, so a *__slots__*\n definition in the subclass is meaningless.\n\n* Without a *__dict__* variable, instances cannot be assigned new\n variables not listed in the *__slots__* definition. Attempts to\n assign to an unlisted variable name raises ``AttributeError``. If\n dynamic assignment of new variables is desired, then add\n ``\'__dict__\'`` to the sequence of strings in the *__slots__*\n declaration.\n\n* Without a *__weakref__* variable for each instance, classes defining\n *__slots__* do not support weak references to its instances. If weak\n reference support is needed, then add ``\'__weakref__\'`` to the\n sequence of strings in the *__slots__* declaration.\n\n* *__slots__* are implemented at the class level by creating\n descriptors (*Implementing Descriptors*) for each variable name. As\n a result, class attributes cannot be used to set default values for\n instance variables defined by *__slots__*; otherwise, the class\n attribute would overwrite the descriptor assignment.\n\n* The action of a *__slots__* declaration is limited to the class\n where it is defined. As a result, subclasses will have a *__dict__*\n unless they also define *__slots__* (which must only contain names\n of any *additional* slots).\n\n* If a class defines a slot also defined in a base class, the instance\n variable defined by the base class slot is inaccessible (except by\n retrieving its descriptor directly from the base class). This\n renders the meaning of the program undefined. In the future, a\n check may be added to prevent this.\n\n* Nonempty *__slots__* does not work for classes derived from\n "variable-length" built-in types such as ``int``, ``str`` and\n ``tuple``.\n\n* Any non-string iterable may be assigned to *__slots__*. Mappings may\n also be used; however, in the future, special meaning may be\n assigned to the values corresponding to each key.\n\n* *__class__* assignment works only if both classes have the same\n *__slots__*.\n',
'attribute-references': '\nAttribute references\n********************\n\nAn attribute reference is a primary followed by a period and a name:\n\n attributeref ::= primary "." identifier\n\nThe primary must evaluate to an object of a type that supports\nattribute references, which most objects do. This object is then\nasked to produce the attribute whose name is the identifier (which can\nbe customized by overriding the ``__getattr__()`` method). If this\nattribute is not available, the exception ``AttributeError`` is\nraised. Otherwise, the type and value of the object produced is\ndetermined by the object. Multiple evaluations of the same attribute\nreference may yield different objects.\n',
'augassign': '\nAugmented assignment statements\n*******************************\n\nAugmented assignment is the combination, in a single statement, of a\nbinary operation and an assignment statement:\n\n augmented_assignment_stmt ::= augtarget augop (expression_list | yield_expression)\n augtarget ::= identifier | attributeref | subscription | slicing\n augop ::= "+=" | "-=" | "*=" | "/=" | "//=" | "%=" | "**="\n | ">>=" | "<<=" | "&=" | "^=" | "|="\n\n(See section *Primaries* for the syntax definitions for the last three\nsymbols.)\n\nAn augmented assignment evaluates the target (which, unlike normal\nassignment statements, cannot be an unpacking) and the expression\nlist, performs the binary operation specific to the type of assignment\non the two operands, and assigns the result to the original target.\nThe target is only evaluated once.\n\nAn augmented assignment expression like ``x += 1`` can be rewritten as\n``x = x + 1`` to achieve a similar, but not exactly equal effect. In\nthe augmented version, ``x`` is only evaluated once. Also, when\npossible, the actual operation is performed *in-place*, meaning that\nrather than creating a new object and assigning that to the target,\nthe old object is modified instead.\n\nWith the exception of assigning to tuples and multiple targets in a\nsingle statement, the assignment done by augmented assignment\nstatements is handled the same way as normal assignments. Similarly,\nwith the exception of the possible *in-place* behavior, the binary\noperation performed by augmented assignment is the same as the normal\nbinary operations.\n\nFor targets which are attribute references, the same *caveat about\nclass and instance attributes* applies as for regular assignments.\n',
'binary': '\nBinary arithmetic operations\n****************************\n\nThe binary arithmetic operations have the conventional priority\nlevels. Note that some of these operations also apply to certain non-\nnumeric types. Apart from the power operator, there are only two\nlevels, one for multiplicative operators and one for additive\noperators:\n\n m_expr ::= u_expr | m_expr "*" u_expr | m_expr "//" u_expr | m_expr "/" u_expr\n | m_expr "%" u_expr\n a_expr ::= m_expr | a_expr "+" m_expr | a_expr "-" m_expr\n\nThe ``*`` (multiplication) operator yields the product of its\narguments. The arguments must either both be numbers, or one argument\nmust be an integer and the other must be a sequence. In the former\ncase, the numbers are converted to a common type and then multiplied\ntogether. In the latter case, sequence repetition is performed; a\nnegative repetition factor yields an empty sequence.\n\nThe ``/`` (division) and ``//`` (floor division) operators yield the\nquotient of their arguments. The numeric arguments are first\nconverted to a common type. Integer division yields a float, while\nfloor division of integers results in an integer; the result is that\nof mathematical division with the \'floor\' function applied to the\nresult. Division by zero raises the ``ZeroDivisionError`` exception.\n\nThe ``%`` (modulo) operator yields the remainder from the division of\nthe first argument by the second. The numeric arguments are first\nconverted to a common type. A zero right argument raises the\n``ZeroDivisionError`` exception. The arguments may be floating point\nnumbers, e.g., ``3.14%0.7`` equals ``0.34`` (since ``3.14`` equals\n``4*0.7 + 0.34``.) The modulo operator always yields a result with\nthe same sign as its second operand (or zero); the absolute value of\nthe result is strictly smaller than the absolute value of the second\noperand [1].\n\nThe floor division and modulo operators are connected by the following\nidentity: ``x == (x//y)*y + (x%y)``. Floor division and modulo are\nalso connected with the built-in function ``divmod()``: ``divmod(x, y)\n== (x//y, x%y)``. [2].\n\nIn addition to performing the modulo operation on numbers, the ``%``\noperator is also overloaded by string objects to perform old-style\nstring formatting (also known as interpolation). The syntax for\nstring formatting is described in the Python Library Reference,\nsection *Old String Formatting Operations*.\n\nThe floor division operator, the modulo operator, and the ``divmod()``\nfunction are not defined for complex numbers. Instead, convert to a\nfloating point number using the ``abs()`` function if appropriate.\n\nThe ``+`` (addition) operator yields the sum of its arguments. The\narguments must either both be numbers or both sequences of the same\ntype. In the former case, the numbers are converted to a common type\nand then added together. In the latter case, the sequences are\nconcatenated.\n\nThe ``-`` (subtraction) operator yields the difference of its\narguments. The numeric arguments are first converted to a common\ntype.\n',
'bitwise': '\nBinary bitwise operations\n*************************\n\nEach of the three bitwise operations has a different priority level:\n\n and_expr ::= shift_expr | and_expr "&" shift_expr\n xor_expr ::= and_expr | xor_expr "^" and_expr\n or_expr ::= xor_expr | or_expr "|" xor_expr\n\nThe ``&`` operator yields the bitwise AND of its arguments, which must\nbe integers.\n\nThe ``^`` operator yields the bitwise XOR (exclusive OR) of its\narguments, which must be integers.\n\nThe ``|`` operator yields the bitwise (inclusive) OR of its arguments,\nwhich must be integers.\n',
'bltin-code-objects': '\nCode Objects\n************\n\nCode objects are used by the implementation to represent "pseudo-\ncompiled" executable Python code such as a function body. They differ\nfrom function objects because they don\'t contain a reference to their\nglobal execution environment. Code objects are returned by the built-\nin ``compile()`` function and can be extracted from function objects\nthrough their ``__code__`` attribute. See also the ``code`` module.\n\nA code object can be executed or evaluated by passing it (instead of a\nsource string) to the ``exec()`` or ``eval()`` built-in functions.\n\nSee *The standard type hierarchy* for more information.\n',
- 'bltin-ellipsis-object': '\nThe Ellipsis Object\n*******************\n\nThis object is commonly used by slicing (see *Slicings*). It supports\nno special operations. There is exactly one ellipsis object, named\n``Ellipsis`` (a built-in name).\n\nIt is written as ``Ellipsis`` or ``...``.\n',
- 'bltin-null-object': "\nThe Null Object\n***************\n\nThis object is returned by functions that don't explicitly return a\nvalue. It supports no special operations. There is exactly one null\nobject, named ``None`` (a built-in name).\n\nIt is written as ``None``.\n",
+ 'bltin-ellipsis-object': '\nThe Ellipsis Object\n*******************\n\nThis object is commonly used by slicing (see *Slicings*), but may also\nbe used in other situations where a sentinel value other than ``None``\nis needed. It supports no special operations. There is exactly one\nellipsis object, named ``Ellipsis`` (a built-in name).\n``type(Ellipsis)()`` produces the ``Ellipsis`` singleton.\n\nIt is written as ``Ellipsis`` or ``...``.\n',
+ 'bltin-null-object': "\nThe Null Object\n***************\n\nThis object is returned by functions that don't explicitly return a\nvalue. It supports no special operations. There is exactly one null\nobject, named ``None`` (a built-in name). ``type(None)()`` produces\nthe same singleton.\n\nIt is written as ``None``.\n",
'bltin-type-objects': "\nType Objects\n************\n\nType objects represent the various object types. An object's type is\naccessed by the built-in function ``type()``. There are no special\noperations on types. The standard module ``types`` defines names for\nall standard built-in types.\n\nTypes are written like this: ``<class 'int'>``.\n",
'booleans': '\nBoolean operations\n******************\n\n or_test ::= and_test | or_test "or" and_test\n and_test ::= not_test | and_test "and" not_test\n not_test ::= comparison | "not" not_test\n\nIn the context of Boolean operations, and also when expressions are\nused by control flow statements, the following values are interpreted\nas false: ``False``, ``None``, numeric zero of all types, and empty\nstrings and containers (including strings, tuples, lists,\ndictionaries, sets and frozensets). All other values are interpreted\nas true. User-defined objects can customize their truth value by\nproviding a ``__bool__()`` method.\n\nThe operator ``not`` yields ``True`` if its argument is false,\n``False`` otherwise.\n\nThe expression ``x and y`` first evaluates *x*; if *x* is false, its\nvalue is returned; otherwise, *y* is evaluated and the resulting value\nis returned.\n\nThe expression ``x or y`` first evaluates *x*; if *x* is true, its\nvalue is returned; otherwise, *y* is evaluated and the resulting value\nis returned.\n\n(Note that neither ``and`` nor ``or`` restrict the value and type they\nreturn to ``False`` and ``True``, but rather return the last evaluated\nargument. This is sometimes useful, e.g., if ``s`` is a string that\nshould be replaced by a default value if it is empty, the expression\n``s or \'foo\'`` yields the desired value. Because ``not`` has to\ninvent a value anyway, it does not bother to return a value of the\nsame type as its argument, so e.g., ``not \'foo\'`` yields ``False``,\nnot ``\'\'``.)\n',
'break': '\nThe ``break`` statement\n***********************\n\n break_stmt ::= "break"\n\n``break`` may only occur syntactically nested in a ``for`` or\n``while`` loop, but not nested in a function or class definition\nwithin that loop.\n\nIt terminates the nearest enclosing loop, skipping the optional\n``else`` clause if the loop has one.\n\nIf a ``for`` loop is terminated by ``break``, the loop control target\nkeeps its current value.\n\nWhen ``break`` passes control out of a ``try`` statement with a\n``finally`` clause, that ``finally`` clause is executed before really\nleaving the loop.\n',
@@ -22,7 +23,7 @@ topics = {'assert': '\nThe ``assert`` statement\n************************\n\nAss
'context-managers': '\nWith Statement Context Managers\n*******************************\n\nA *context manager* is an object that defines the runtime context to\nbe established when executing a ``with`` statement. The context\nmanager handles the entry into, and the exit from, the desired runtime\ncontext for the execution of the block of code. Context managers are\nnormally invoked using the ``with`` statement (described in section\n*The with statement*), but can also be used by directly invoking their\nmethods.\n\nTypical uses of context managers include saving and restoring various\nkinds of global state, locking and unlocking resources, closing opened\nfiles, etc.\n\nFor more information on context managers, see *Context Manager Types*.\n\nobject.__enter__(self)\n\n Enter the runtime context related to this object. The ``with``\n statement will bind this method\'s return value to the target(s)\n specified in the ``as`` clause of the statement, if any.\n\nobject.__exit__(self, exc_type, exc_value, traceback)\n\n Exit the runtime context related to this object. The parameters\n describe the exception that caused the context to be exited. If the\n context was exited without an exception, all three arguments will\n be ``None``.\n\n If an exception is supplied, and the method wishes to suppress the\n exception (i.e., prevent it from being propagated), it should\n return a true value. Otherwise, the exception will be processed\n normally upon exit from this method.\n\n Note that ``__exit__()`` methods should not reraise the passed-in\n exception; this is the caller\'s responsibility.\n\nSee also:\n\n **PEP 0343** - The "with" statement\n The specification, background, and examples for the Python\n ``with`` statement.\n',
'continue': '\nThe ``continue`` statement\n**************************\n\n continue_stmt ::= "continue"\n\n``continue`` may only occur syntactically nested in a ``for`` or\n``while`` loop, but not nested in a function or class definition or\n``finally`` clause within that loop. It continues with the next cycle\nof the nearest enclosing loop.\n\nWhen ``continue`` passes control out of a ``try`` statement with a\n``finally`` clause, that ``finally`` clause is executed before really\nstarting the next loop cycle.\n',
'conversions': '\nArithmetic conversions\n**********************\n\nWhen a description of an arithmetic operator below uses the phrase\n"the numeric arguments are converted to a common type," this means\nthat the operator implementation for built-in types works that way:\n\n* If either argument is a complex number, the other is converted to\n complex;\n\n* otherwise, if either argument is a floating point number, the other\n is converted to floating point;\n\n* otherwise, both must be integers and no conversion is necessary.\n\nSome additional rules apply for certain operators (e.g., a string left\nargument to the \'%\' operator). Extensions must define their own\nconversion behavior.\n',
- 'customization': '\nBasic customization\n*******************\n\nobject.__new__(cls[, ...])\n\n Called to create a new instance of class *cls*. ``__new__()`` is a\n static method (special-cased so you need not declare it as such)\n that takes the class of which an instance was requested as its\n first argument. The remaining arguments are those passed to the\n object constructor expression (the call to the class). The return\n value of ``__new__()`` should be the new object instance (usually\n an instance of *cls*).\n\n Typical implementations create a new instance of the class by\n invoking the superclass\'s ``__new__()`` method using\n ``super(currentclass, cls).__new__(cls[, ...])`` with appropriate\n arguments and then modifying the newly-created instance as\n necessary before returning it.\n\n If ``__new__()`` returns an instance of *cls*, then the new\n instance\'s ``__init__()`` method will be invoked like\n ``__init__(self[, ...])``, where *self* is the new instance and the\n remaining arguments are the same as were passed to ``__new__()``.\n\n If ``__new__()`` does not return an instance of *cls*, then the new\n instance\'s ``__init__()`` method will not be invoked.\n\n ``__new__()`` is intended mainly to allow subclasses of immutable\n types (like int, str, or tuple) to customize instance creation. It\n is also commonly overridden in custom metaclasses in order to\n customize class creation.\n\nobject.__init__(self[, ...])\n\n Called when the instance is created. The arguments are those\n passed to the class constructor expression. If a base class has an\n ``__init__()`` method, the derived class\'s ``__init__()`` method,\n if any, must explicitly call it to ensure proper initialization of\n the base class part of the instance; for example:\n ``BaseClass.__init__(self, [args...])``. As a special constraint\n on constructors, no value may be returned; doing so will cause a\n ``TypeError`` to be raised at runtime.\n\nobject.__del__(self)\n\n Called when the instance is about to be destroyed. This is also\n called a destructor. If a base class has a ``__del__()`` method,\n the derived class\'s ``__del__()`` method, if any, must explicitly\n call it to ensure proper deletion of the base class part of the\n instance. Note that it is possible (though not recommended!) for\n the ``__del__()`` method to postpone destruction of the instance by\n creating a new reference to it. It may then be called at a later\n time when this new reference is deleted. It is not guaranteed that\n ``__del__()`` methods are called for objects that still exist when\n the interpreter exits.\n\n Note: ``del x`` doesn\'t directly call ``x.__del__()`` --- the former\n decrements the reference count for ``x`` by one, and the latter\n is only called when ``x``\'s reference count reaches zero. Some\n common situations that may prevent the reference count of an\n object from going to zero include: circular references between\n objects (e.g., a doubly-linked list or a tree data structure with\n parent and child pointers); a reference to the object on the\n stack frame of a function that caught an exception (the traceback\n stored in ``sys.exc_info()[2]`` keeps the stack frame alive); or\n a reference to the object on the stack frame that raised an\n unhandled exception in interactive mode (the traceback stored in\n ``sys.last_traceback`` keeps the stack frame alive). 
The first\n situation can only be remedied by explicitly breaking the cycles;\n the latter two situations can be resolved by storing ``None`` in\n ``sys.last_traceback``. Circular references which are garbage are\n detected when the option cycle detector is enabled (it\'s on by\n default), but can only be cleaned up if there are no Python-\n level ``__del__()`` methods involved. Refer to the documentation\n for the ``gc`` module for more information about how\n ``__del__()`` methods are handled by the cycle detector,\n particularly the description of the ``garbage`` value.\n\n Warning: Due to the precarious circumstances under which ``__del__()``\n methods are invoked, exceptions that occur during their execution\n are ignored, and a warning is printed to ``sys.stderr`` instead.\n Also, when ``__del__()`` is invoked in response to a module being\n deleted (e.g., when execution of the program is done), other\n globals referenced by the ``__del__()`` method may already have\n been deleted or in the process of being torn down (e.g. the\n import machinery shutting down). For this reason, ``__del__()``\n methods should do the absolute minimum needed to maintain\n external invariants. Starting with version 1.5, Python\n guarantees that globals whose name begins with a single\n underscore are deleted from their module before other globals are\n deleted; if no other references to such globals exist, this may\n help in assuring that imported modules are still available at the\n time when the ``__del__()`` method is called.\n\nobject.__repr__(self)\n\n Called by the ``repr()`` built-in function to compute the\n "official" string representation of an object. If at all possible,\n this should look like a valid Python expression that could be used\n to recreate an object with the same value (given an appropriate\n environment). If this is not possible, a string of the form\n ``<...some useful description...>`` should be returned. The return\n value must be a string object. If a class defines ``__repr__()``\n but not ``__str__()``, then ``__repr__()`` is also used when an\n "informal" string representation of instances of that class is\n required.\n\n This is typically used for debugging, so it is important that the\n representation is information-rich and unambiguous.\n\nobject.__str__(self)\n\n Called by the ``str()`` built-in function and by the ``print()``\n function to compute the "informal" string representation of an\n object. This differs from ``__repr__()`` in that it does not have\n to be a valid Python expression: a more convenient or concise\n representation may be used instead. The return value must be a\n string object.\n\nobject.__bytes__(self)\n\n Called by ``bytes()`` to compute a byte-string representation of an\n object. This should return a ``bytes`` object.\n\nobject.__format__(self, format_spec)\n\n Called by the ``format()`` built-in function (and by extension, the\n ``format()`` method of class ``str``) to produce a "formatted"\n string representation of an object. The ``format_spec`` argument is\n a string that contains a description of the formatting options\n desired. 
The interpretation of the ``format_spec`` argument is up\n to the type implementing ``__format__()``, however most classes\n will either delegate formatting to one of the built-in types, or\n use a similar formatting option syntax.\n\n See *Format Specification Mini-Language* for a description of the\n standard formatting syntax.\n\n The return value must be a string object.\n\nobject.__lt__(self, other)\nobject.__le__(self, other)\nobject.__eq__(self, other)\nobject.__ne__(self, other)\nobject.__gt__(self, other)\nobject.__ge__(self, other)\n\n These are the so-called "rich comparison" methods. The\n correspondence between operator symbols and method names is as\n follows: ``x<y`` calls ``x.__lt__(y)``, ``x<=y`` calls\n ``x.__le__(y)``, ``x==y`` calls ``x.__eq__(y)``, ``x!=y`` calls\n ``x.__ne__(y)``, ``x>y`` calls ``x.__gt__(y)``, and ``x>=y`` calls\n ``x.__ge__(y)``.\n\n A rich comparison method may return the singleton\n ``NotImplemented`` if it does not implement the operation for a\n given pair of arguments. By convention, ``False`` and ``True`` are\n returned for a successful comparison. However, these methods can\n return any value, so if the comparison operator is used in a\n Boolean context (e.g., in the condition of an ``if`` statement),\n Python will call ``bool()`` on the value to determine if the result\n is true or false.\n\n There are no implied relationships among the comparison operators.\n The truth of ``x==y`` does not imply that ``x!=y`` is false.\n Accordingly, when defining ``__eq__()``, one should also define\n ``__ne__()`` so that the operators will behave as expected. See\n the paragraph on ``__hash__()`` for some important notes on\n creating *hashable* objects which support custom comparison\n operations and are usable as dictionary keys.\n\n There are no swapped-argument versions of these methods (to be used\n when the left argument does not support the operation but the right\n argument does); rather, ``__lt__()`` and ``__gt__()`` are each\n other\'s reflection, ``__le__()`` and ``__ge__()`` are each other\'s\n reflection, and ``__eq__()`` and ``__ne__()`` are their own\n reflection.\n\n Arguments to rich comparison methods are never coerced.\n\n To automatically generate ordering operations from a single root\n operation, see ``functools.total_ordering()``.\n\nobject.__hash__(self)\n\n Called by built-in function ``hash()`` and for operations on\n members of hashed collections including ``set``, ``frozenset``, and\n ``dict``. ``__hash__()`` should return an integer. The only\n required property is that objects which compare equal have the same\n hash value; it is advised to somehow mix together (e.g. using\n exclusive or) the hash values for the components of the object that\n also play a part in comparison of objects.\n\n If a class does not define an ``__eq__()`` method it should not\n define a ``__hash__()`` operation either; if it defines\n ``__eq__()`` but not ``__hash__()``, its instances will not be\n usable as items in hashable collections. 
If a class defines\n mutable objects and implements an ``__eq__()`` method, it should\n not implement ``__hash__()``, since the implementation of hashable\n collections requires that a key\'s hash value is immutable (if the\n object\'s hash value changes, it will be in the wrong hash bucket).\n\n User-defined classes have ``__eq__()`` and ``__hash__()`` methods\n by default; with them, all objects compare unequal (except with\n themselves) and ``x.__hash__()`` returns ``id(x)``.\n\n Classes which inherit a ``__hash__()`` method from a parent class\n but change the meaning of ``__eq__()`` such that the hash value\n returned is no longer appropriate (e.g. by switching to a value-\n based concept of equality instead of the default identity based\n equality) can explicitly flag themselves as being unhashable by\n setting ``__hash__ = None`` in the class definition. Doing so means\n that not only will instances of the class raise an appropriate\n ``TypeError`` when a program attempts to retrieve their hash value,\n but they will also be correctly identified as unhashable when\n checking ``isinstance(obj, collections.Hashable)`` (unlike classes\n which define their own ``__hash__()`` to explicitly raise\n ``TypeError``).\n\n If a class that overrides ``__eq__()`` needs to retain the\n implementation of ``__hash__()`` from a parent class, the\n interpreter must be told this explicitly by setting ``__hash__ =\n <ParentClass>.__hash__``. Otherwise the inheritance of\n ``__hash__()`` will be blocked, just as if ``__hash__`` had been\n explicitly set to ``None``.\n\n See also the *-R* command-line option.\n\nobject.__bool__(self)\n\n Called to implement truth value testing and the built-in operation\n ``bool()``; should return ``False`` or ``True``. When this method\n is not defined, ``__len__()`` is called, if it is defined, and the\n object is considered true if its result is nonzero. If a class\n defines neither ``__len__()`` nor ``__bool__()``, all its instances\n are considered true.\n',
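As an illustration of the comparison and hashing rules above, here is a minimal sketch (the ``Version`` class and its fields are invented for the example, not taken from the source): a value-based ``__eq__()`` paired with a ``__hash__()`` that mixes the same components, plus ``functools.total_ordering()`` to generate the remaining operators from ``__lt__()``:

    import functools

    @functools.total_ordering
    class Version:
        def __init__(self, major, minor):
            self.major = major
            self.minor = minor

        def __eq__(self, other):
            if not isinstance(other, Version):
                return NotImplemented   # let the other operand try its own method
            return (self.major, self.minor) == (other.major, other.minor)

        def __lt__(self, other):
            if not isinstance(other, Version):
                return NotImplemented
            return (self.major, self.minor) < (other.major, other.minor)

        def __hash__(self):
            # mix exactly the components that take part in __eq__()
            return hash((self.major, self.minor))

    assert Version(1, 2) == Version(1, 2)
    assert Version(1, 2) <= Version(1, 3)            # generated by total_ordering
    assert len({Version(1, 2), Version(1, 2)}) == 1  # usable in hashed collections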
+ 'customization': '\nBasic customization\n*******************\n\nobject.__new__(cls[, ...])\n\n Called to create a new instance of class *cls*. ``__new__()`` is a\n static method (special-cased so you need not declare it as such)\n that takes the class of which an instance was requested as its\n first argument. The remaining arguments are those passed to the\n object constructor expression (the call to the class). The return\n value of ``__new__()`` should be the new object instance (usually\n an instance of *cls*).\n\n Typical implementations create a new instance of the class by\n invoking the superclass\'s ``__new__()`` method using\n ``super(currentclass, cls).__new__(cls[, ...])`` with appropriate\n arguments and then modifying the newly-created instance as\n necessary before returning it.\n\n If ``__new__()`` returns an instance of *cls*, then the new\n instance\'s ``__init__()`` method will be invoked like\n ``__init__(self[, ...])``, where *self* is the new instance and the\n remaining arguments are the same as were passed to ``__new__()``.\n\n If ``__new__()`` does not return an instance of *cls*, then the new\n instance\'s ``__init__()`` method will not be invoked.\n\n ``__new__()`` is intended mainly to allow subclasses of immutable\n types (like int, str, or tuple) to customize instance creation. It\n is also commonly overridden in custom metaclasses in order to\n customize class creation.\n\nobject.__init__(self[, ...])\n\n Called when the instance is created. The arguments are those\n passed to the class constructor expression. If a base class has an\n ``__init__()`` method, the derived class\'s ``__init__()`` method,\n if any, must explicitly call it to ensure proper initialization of\n the base class part of the instance; for example:\n ``BaseClass.__init__(self, [args...])``. As a special constraint\n on constructors, no value may be returned; doing so will cause a\n ``TypeError`` to be raised at runtime.\n\nobject.__del__(self)\n\n Called when the instance is about to be destroyed. This is also\n called a destructor. If a base class has a ``__del__()`` method,\n the derived class\'s ``__del__()`` method, if any, must explicitly\n call it to ensure proper deletion of the base class part of the\n instance. Note that it is possible (though not recommended!) for\n the ``__del__()`` method to postpone destruction of the instance by\n creating a new reference to it. It may then be called at a later\n time when this new reference is deleted. It is not guaranteed that\n ``__del__()`` methods are called for objects that still exist when\n the interpreter exits.\n\n Note: ``del x`` doesn\'t directly call ``x.__del__()`` --- the former\n decrements the reference count for ``x`` by one, and the latter\n is only called when ``x``\'s reference count reaches zero. Some\n common situations that may prevent the reference count of an\n object from going to zero include: circular references between\n objects (e.g., a doubly-linked list or a tree data structure with\n parent and child pointers); a reference to the object on the\n stack frame of a function that caught an exception (the traceback\n stored in ``sys.exc_info()[2]`` keeps the stack frame alive); or\n a reference to the object on the stack frame that raised an\n unhandled exception in interactive mode (the traceback stored in\n ``sys.last_traceback`` keeps the stack frame alive). 
The first\n situation can only be remedied by explicitly breaking the cycles;\n the latter two situations can be resolved by storing ``None`` in\n ``sys.last_traceback``. Circular references which are garbage are\n detected when the option cycle detector is enabled (it\'s on by\n default), but can only be cleaned up if there are no Python-\n level ``__del__()`` methods involved. Refer to the documentation\n for the ``gc`` module for more information about how\n ``__del__()`` methods are handled by the cycle detector,\n particularly the description of the ``garbage`` value.\n\n Warning: Due to the precarious circumstances under which ``__del__()``\n methods are invoked, exceptions that occur during their execution\n are ignored, and a warning is printed to ``sys.stderr`` instead.\n Also, when ``__del__()`` is invoked in response to a module being\n deleted (e.g., when execution of the program is done), other\n globals referenced by the ``__del__()`` method may already have\n been deleted or in the process of being torn down (e.g. the\n import machinery shutting down). For this reason, ``__del__()``\n methods should do the absolute minimum needed to maintain\n external invariants. Starting with version 1.5, Python\n guarantees that globals whose name begins with a single\n underscore are deleted from their module before other globals are\n deleted; if no other references to such globals exist, this may\n help in assuring that imported modules are still available at the\n time when the ``__del__()`` method is called.\n\nobject.__repr__(self)\n\n Called by the ``repr()`` built-in function to compute the\n "official" string representation of an object. If at all possible,\n this should look like a valid Python expression that could be used\n to recreate an object with the same value (given an appropriate\n environment). If this is not possible, a string of the form\n ``<...some useful description...>`` should be returned. The return\n value must be a string object. If a class defines ``__repr__()``\n but not ``__str__()``, then ``__repr__()`` is also used when an\n "informal" string representation of instances of that class is\n required.\n\n This is typically used for debugging, so it is important that the\n representation is information-rich and unambiguous.\n\nobject.__str__(self)\n\n Called by the ``str()`` built-in function and by the ``print()``\n function to compute the "informal" string representation of an\n object. This differs from ``__repr__()`` in that it does not have\n to be a valid Python expression: a more convenient or concise\n representation may be used instead. The return value must be a\n string object.\n\nobject.__bytes__(self)\n\n Called by ``bytes()`` to compute a byte-string representation of an\n object. This should return a ``bytes`` object.\n\nobject.__format__(self, format_spec)\n\n Called by the ``format()`` built-in function (and by extension, the\n ``format()`` method of class ``str``) to produce a "formatted"\n string representation of an object. The ``format_spec`` argument is\n a string that contains a description of the formatting options\n desired. 
The interpretation of the ``format_spec`` argument is up\n to the type implementing ``__format__()``, however most classes\n will either delegate formatting to one of the built-in types, or\n use a similar formatting option syntax.\n\n See *Format Specification Mini-Language* for a description of the\n standard formatting syntax.\n\n The return value must be a string object.\n\nobject.__lt__(self, other)\nobject.__le__(self, other)\nobject.__eq__(self, other)\nobject.__ne__(self, other)\nobject.__gt__(self, other)\nobject.__ge__(self, other)\n\n These are the so-called "rich comparison" methods. The\n correspondence between operator symbols and method names is as\n follows: ``x<y`` calls ``x.__lt__(y)``, ``x<=y`` calls\n ``x.__le__(y)``, ``x==y`` calls ``x.__eq__(y)``, ``x!=y`` calls\n ``x.__ne__(y)``, ``x>y`` calls ``x.__gt__(y)``, and ``x>=y`` calls\n ``x.__ge__(y)``.\n\n A rich comparison method may return the singleton\n ``NotImplemented`` if it does not implement the operation for a\n given pair of arguments. By convention, ``False`` and ``True`` are\n returned for a successful comparison. However, these methods can\n return any value, so if the comparison operator is used in a\n Boolean context (e.g., in the condition of an ``if`` statement),\n Python will call ``bool()`` on the value to determine if the result\n is true or false.\n\n There are no implied relationships among the comparison operators.\n The truth of ``x==y`` does not imply that ``x!=y`` is false.\n Accordingly, when defining ``__eq__()``, one should also define\n ``__ne__()`` so that the operators will behave as expected. See\n the paragraph on ``__hash__()`` for some important notes on\n creating *hashable* objects which support custom comparison\n operations and are usable as dictionary keys.\n\n There are no swapped-argument versions of these methods (to be used\n when the left argument does not support the operation but the right\n argument does); rather, ``__lt__()`` and ``__gt__()`` are each\n other\'s reflection, ``__le__()`` and ``__ge__()`` are each other\'s\n reflection, and ``__eq__()`` and ``__ne__()`` are their own\n reflection.\n\n Arguments to rich comparison methods are never coerced.\n\n To automatically generate ordering operations from a single root\n operation, see ``functools.total_ordering()``.\n\nobject.__hash__(self)\n\n Called by built-in function ``hash()`` and for operations on\n members of hashed collections including ``set``, ``frozenset``, and\n ``dict``. ``__hash__()`` should return an integer. The only\n required property is that objects which compare equal have the same\n hash value; it is advised to somehow mix together (e.g. using\n exclusive or) the hash values for the components of the object that\n also play a part in comparison of objects.\n\n If a class does not define an ``__eq__()`` method it should not\n define a ``__hash__()`` operation either; if it defines\n ``__eq__()`` but not ``__hash__()``, its instances will not be\n usable as items in hashable collections. 
If a class defines\n mutable objects and implements an ``__eq__()`` method, it should\n not implement ``__hash__()``, since the implementation of hashable\n collections requires that a key\'s hash value is immutable (if the\n object\'s hash value changes, it will be in the wrong hash bucket).\n\n User-defined classes have ``__eq__()`` and ``__hash__()`` methods\n by default; with them, all objects compare unequal (except with\n themselves) and ``x.__hash__()`` returns ``id(x)``.\n\n Classes which inherit a ``__hash__()`` method from a parent class\n but change the meaning of ``__eq__()`` such that the hash value\n returned is no longer appropriate (e.g. by switching to a value-\n based concept of equality instead of the default identity based\n equality) can explicitly flag themselves as being unhashable by\n setting ``__hash__ = None`` in the class definition. Doing so means\n that not only will instances of the class raise an appropriate\n ``TypeError`` when a program attempts to retrieve their hash value,\n but they will also be correctly identified as unhashable when\n checking ``isinstance(obj, collections.Hashable)`` (unlike classes\n which define their own ``__hash__()`` to explicitly raise\n ``TypeError``).\n\n If a class that overrides ``__eq__()`` needs to retain the\n implementation of ``__hash__()`` from a parent class, the\n interpreter must be told this explicitly by setting ``__hash__ =\n <ParentClass>.__hash__``. Otherwise the inheritance of\n ``__hash__()`` will be blocked, just as if ``__hash__`` had been\n explicitly set to ``None``.\n\n Note: By default the ``__hash__()`` values of str, bytes and\n datetime objects are "salted" with an unpredictable random value.\n Although they remain constant within an individual Python\n process, they are not predictable between repeated invocations of\n Python. This is intended to provide protection against a denial-\n of-service caused by carefully-chosen inputs that exploit the\n worst case performance of a dict insertion, O(n^2) complexity.\n See http://www.ocert.org/advisories/ocert-2011-003.html for\n details. Changing hash values affects the order in which keys are\n retrieved from a dict. Note that Python has never made guarantees\n about this ordering (and it typically varies between 32-bit and\n 64-bit builds). See also ``PYTHONHASHSEED``.\n\n Changed in version 3.3: Hash randomization is enabled by default.\n\nobject.__bool__(self)\n\n Called to implement truth value testing and the built-in operation\n ``bool()``; should return ``False`` or ``True``. When this method\n is not defined, ``__len__()`` is called, if it is defined, and the\n object is considered true if its result is nonzero. If a class\n defines neither ``__len__()`` nor ``__bool__()``, all its instances\n are considered true.\n',
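The interplay of ``__new__()``, ``__repr__()`` and ``__str__()`` described above can be sketched with a small immutable subclass (the ``Celsius`` class is illustrative only, not from the source):

    class Celsius(float):
        # immutable type, so customization happens in __new__(), not __init__()
        def __new__(cls, degrees):
            if degrees < -273.15:
                raise ValueError("below absolute zero")
            return super().__new__(cls, degrees)

        def __repr__(self):
            # "official" representation: looks like an expression that
            # could recreate the object
            return "Celsius({!r})".format(float(self))

        def __str__(self):
            # "informal" representation used by str() and print()
            return "{} degrees Celsius".format(float(self))

    t = Celsius(21.5)
    assert repr(t) == "Celsius(21.5)"
    assert str(t) == "21.5 degrees Celsius"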
'debugger': '\n``pdb`` --- The Python Debugger\n*******************************\n\nThe module ``pdb`` defines an interactive source code debugger for\nPython programs. It supports setting (conditional) breakpoints and\nsingle stepping at the source line level, inspection of stack frames,\nsource code listing, and evaluation of arbitrary Python code in the\ncontext of any stack frame. It also supports post-mortem debugging\nand can be called under program control.\n\nThe debugger is extensible -- it is actually defined as the class\n``Pdb``. This is currently undocumented but easily understood by\nreading the source. The extension interface uses the modules ``bdb``\nand ``cmd``.\n\nThe debugger\'s prompt is ``(Pdb)``. Typical usage to run a program\nunder control of the debugger is:\n\n >>> import pdb\n >>> import mymodule\n >>> pdb.run(\'mymodule.test()\')\n > <string>(0)?()\n (Pdb) continue\n > <string>(1)?()\n (Pdb) continue\n NameError: \'spam\'\n > <string>(1)?()\n (Pdb)\n\n``pdb.py`` can also be invoked as a script to debug other scripts.\nFor example:\n\n python3 -m pdb myscript.py\n\nWhen invoked as a script, pdb will automatically enter post-mortem\ndebugging if the program being debugged exits abnormally. After post-\nmortem debugging (or after normal exit of the program), pdb will\nrestart the program. Automatic restarting preserves pdb\'s state (such\nas breakpoints) and in most cases is more useful than quitting the\ndebugger upon program\'s exit.\n\nNew in version 3.2: ``pdb.py`` now accepts a ``-c`` option that\nexecutes commands as if given in a ``.pdbrc`` file, see *Debugger\nCommands*.\n\nThe typical usage to break into the debugger from a running program is\nto insert\n\n import pdb; pdb.set_trace()\n\nat the location you want to break into the debugger. You can then\nstep through the code following this statement, and continue running\nwithout the debugger using the ``continue`` command.\n\nThe typical usage to inspect a crashed program is:\n\n >>> import pdb\n >>> import mymodule\n >>> mymodule.test()\n Traceback (most recent call last):\n File "<stdin>", line 1, in ?\n File "./mymodule.py", line 4, in test\n test2()\n File "./mymodule.py", line 3, in test2\n print(spam)\n NameError: spam\n >>> pdb.pm()\n > ./mymodule.py(3)test2()\n -> print(spam)\n (Pdb)\n\nThe module defines the following functions; each enters the debugger\nin a slightly different way:\n\npdb.run(statement, globals=None, locals=None)\n\n Execute the *statement* (given as a string or a code object) under\n debugger control. The debugger prompt appears before any code is\n executed; you can set breakpoints and type ``continue``, or you can\n step through the statement using ``step`` or ``next`` (all these\n commands are explained below). The optional *globals* and *locals*\n arguments specify the environment in which the code is executed; by\n default the dictionary of the module ``__main__`` is used. (See\n the explanation of the built-in ``exec()`` or ``eval()``\n functions.)\n\npdb.runeval(expression, globals=None, locals=None)\n\n Evaluate the *expression* (given as a string or a code object)\n under debugger control. When ``runeval()`` returns, it returns the\n value of the expression. Otherwise this function is similar to\n ``run()``.\n\npdb.runcall(function, *args, **kwds)\n\n Call the *function* (a function or method object, not a string)\n with the given arguments. When ``runcall()`` returns, it returns\n whatever the function call returned. 
The debugger prompt appears\n as soon as the function is entered.\n\npdb.set_trace()\n\n Enter the debugger at the calling stack frame. This is useful to\n hard-code a breakpoint at a given point in a program, even if the\n code is not otherwise being debugged (e.g. when an assertion\n fails).\n\npdb.post_mortem(traceback=None)\n\n Enter post-mortem debugging of the given *traceback* object. If no\n *traceback* is given, it uses the one of the exception that is\n currently being handled (an exception must be being handled if the\n default is to be used).\n\npdb.pm()\n\n Enter post-mortem debugging of the traceback found in\n ``sys.last_traceback``.\n\nThe ``run*`` functions and ``set_trace()`` are aliases for\ninstantiating the ``Pdb`` class and calling the method of the same\nname. If you want to access further features, you have to do this\nyourself:\n\nclass pdb.Pdb(completekey=\'tab\', stdin=None, stdout=None, skip=None, nosigint=False)\n\n ``Pdb`` is the debugger class.\n\n The *completekey*, *stdin* and *stdout* arguments are passed to the\n underlying ``cmd.Cmd`` class; see the description there.\n\n The *skip* argument, if given, must be an iterable of glob-style\n module name patterns. The debugger will not step into frames that\n originate in a module that matches one of these patterns. [1]\n\n By default, Pdb sets a handler for the SIGINT signal (which is sent\n when the user presses Ctrl-C on the console) when you give a\n ``continue`` command. This allows you to break into the debugger\n again by pressing Ctrl-C. If you want Pdb not to touch the SIGINT\n handler, set *nosigint* to true.\n\n Example call to enable tracing with *skip*:\n\n import pdb; pdb.Pdb(skip=[\'django.*\']).set_trace()\n\n New in version 3.1: The *skip* argument.\n\n New in version 3.2: The *nosigint* argument. Previously, a SIGINT\n handler was never set by Pdb.\n\n run(statement, globals=None, locals=None)\n runeval(expression, globals=None, locals=None)\n runcall(function, *args, **kwds)\n set_trace()\n\n See the documentation for the functions explained above.\n\n\nDebugger Commands\n=================\n\nThe commands recognized by the debugger are listed below. Most\ncommands can be abbreviated to one or two letters as indicated; e.g.\n``h(elp)`` means that either ``h`` or ``help`` can be used to enter\nthe help command (but not ``he`` or ``hel``, nor ``H`` or ``Help`` or\n``HELP``). Arguments to commands must be separated by whitespace\n(spaces or tabs). Optional arguments are enclosed in square brackets\n(``[]``) in the command syntax; the square brackets must not be typed.\nAlternatives in the command syntax are separated by a vertical bar\n(``|``).\n\nEntering a blank line repeats the last command entered. Exception: if\nthe last command was a ``list`` command, the next 11 lines are listed.\n\nCommands that the debugger doesn\'t recognize are assumed to be Python\nstatements and are executed in the context of the program being\ndebugged. Python statements can also be prefixed with an exclamation\npoint (``!``). This is a powerful way to inspect the program being\ndebugged; it is even possible to change a variable or call a function.\nWhen an exception occurs in such a statement, the exception name is\nprinted but the debugger\'s state is not changed.\n\nThe debugger supports *aliases*. Aliases can have parameters which\nallow one a certain level of adaptability to the context under\nexamination.\n\nMultiple commands may be entered on a single line, separated by\n``;;``. 
(A single ``;`` is not used as it is the separator for\nmultiple commands in a line that is passed to the Python parser.) No\nintelligence is applied to separating the commands; the input is split\nat the first ``;;`` pair, even if it is in the middle of a quoted\nstring.\n\nIf a file ``.pdbrc`` exists in the user\'s home directory or in the\ncurrent directory, it is read in and executed as if it had been typed\nat the debugger prompt. This is particularly useful for aliases. If\nboth files exist, the one in the home directory is read first and\naliases defined there can be overridden by the local file.\n\nChanged in version 3.2: ``.pdbrc`` can now contain commands that\ncontinue debugging, such as ``continue`` or ``next``. Previously,\nthese commands had no effect.\n\nh(elp) [command]\n\n Without argument, print the list of available commands. With a\n *command* as argument, print help about that command. ``help pdb``\n displays the full documentation (the docstring of the ``pdb``\n module). Since the *command* argument must be an identifier,\n ``help exec`` must be entered to get help on the ``!`` command.\n\nw(here)\n\n Print a stack trace, with the most recent frame at the bottom. An\n arrow indicates the current frame, which determines the context of\n most commands.\n\nd(own) [count]\n\n Move the current frame *count* (default one) levels down in the\n stack trace (to a newer frame).\n\nu(p) [count]\n\n Move the current frame *count* (default one) levels up in the stack\n trace (to an older frame).\n\nb(reak) [([filename:]lineno | function) [, condition]]\n\n With a *lineno* argument, set a break there in the current file.\n With a *function* argument, set a break at the first executable\n statement within that function. The line number may be prefixed\n with a filename and a colon, to specify a breakpoint in another\n file (probably one that hasn\'t been loaded yet). The file is\n searched on ``sys.path``. Note that each breakpoint is assigned a\n number to which all the other breakpoint commands refer.\n\n If a second argument is present, it is an expression which must\n evaluate to true before the breakpoint is honored.\n\n Without argument, list all breaks, including for each breakpoint,\n the number of times that breakpoint has been hit, the current\n ignore count, and the associated condition if any.\n\ntbreak [([filename:]lineno | function) [, condition]]\n\n Temporary breakpoint, which is removed automatically when it is\n first hit. The arguments are the same as for ``break``.\n\ncl(ear) [filename:lineno | bpnumber [bpnumber ...]]\n\n With a *filename:lineno* argument, clear all the breakpoints at\n this line. With a space separated list of breakpoint numbers, clear\n those breakpoints. Without argument, clear all breaks (but first\n ask confirmation).\n\ndisable [bpnumber [bpnumber ...]]\n\n Disable the breakpoints given as a space separated list of\n breakpoint numbers. Disabling a breakpoint means it cannot cause\n the program to stop execution, but unlike clearing a breakpoint, it\n remains in the list of breakpoints and can be (re-)enabled.\n\nenable [bpnumber [bpnumber ...]]\n\n Enable the breakpoints specified.\n\nignore bpnumber [count]\n\n Set the ignore count for the given breakpoint number. If count is\n omitted, the ignore count is set to 0. A breakpoint becomes active\n when the ignore count is zero. 
When non-zero, the count is\n decremented each time the breakpoint is reached and the breakpoint\n is not disabled and any associated condition evaluates to true.\n\ncondition bpnumber [condition]\n\n Set a new *condition* for the breakpoint, an expression which must\n evaluate to true before the breakpoint is honored. If *condition*\n is absent, any existing condition is removed; i.e., the breakpoint\n is made unconditional.\n\ncommands [bpnumber]\n\n Specify a list of commands for breakpoint number *bpnumber*. The\n commands themselves appear on the following lines. Type a line\n containing just ``end`` to terminate the commands. An example:\n\n (Pdb) commands 1\n (com) print some_variable\n (com) end\n (Pdb)\n\n To remove all commands from a breakpoint, type commands and follow\n it immediately with ``end``; that is, give no commands.\n\n With no *bpnumber* argument, commands refers to the last breakpoint\n set.\n\n You can use breakpoint commands to start your program up again.\n Simply use the continue command, or step, or any other command that\n resumes execution.\n\n Specifying any command resuming execution (currently continue,\n step, next, return, jump, quit and their abbreviations) terminates\n the command list (as if that command was immediately followed by\n end). This is because any time you resume execution (even with a\n simple next or step), you may encounter another breakpoint--which\n could have its own command list, leading to ambiguities about which\n list to execute.\n\n If you use the \'silent\' command in the command list, the usual\n message about stopping at a breakpoint is not printed. This may be\n desirable for breakpoints that are to print a specific message and\n then continue. If none of the other commands print anything, you\n see no sign that the breakpoint was reached.\n\ns(tep)\n\n Execute the current line, stop at the first possible occasion\n (either in a function that is called or on the next line in the\n current function).\n\nn(ext)\n\n Continue execution until the next line in the current function is\n reached or it returns. (The difference between ``next`` and\n ``step`` is that ``step`` stops inside a called function, while\n ``next`` executes called functions at (nearly) full speed, only\n stopping at the next line in the current function.)\n\nunt(il) [lineno]\n\n Without argument, continue execution until the line with a number\n greater than the current one is reached.\n\n With a line number, continue execution until a line with a number\n greater or equal to that is reached. In both cases, also stop when\n the current frame returns.\n\n Changed in version 3.2: Allow giving an explicit line number.\n\nr(eturn)\n\n Continue execution until the current function returns.\n\nc(ont(inue))\n\n Continue execution, only stop when a breakpoint is encountered.\n\nj(ump) lineno\n\n Set the next line that will be executed. Only available in the\n bottom-most frame. This lets you jump back and execute code again,\n or jump forward to skip code that you don\'t want to run.\n\n It should be noted that not all jumps are allowed -- for instance\n it is not possible to jump into the middle of a ``for`` loop or out\n of a ``finally`` clause.\n\nl(ist) [first[, last]]\n\n List source code for the current file. Without arguments, list 11\n lines around the current line or continue the previous listing.\n With ``.`` as argument, list 11 lines around the current line.\n With one argument, list 11 lines around that line. 
With two\n arguments, list the given range; if the second argument is less\n than the first, it is interpreted as a count.\n\n The current line in the current frame is indicated by ``->``. If\n an exception is being debugged, the line where the exception was\n originally raised or propagated is indicated by ``>>``, if it\n differs from the current line.\n\n New in version 3.2: The ``>>`` marker.\n\nll | longlist\n\n List all source code for the current function or frame.\n Interesting lines are marked as for ``list``.\n\n New in version 3.2.\n\na(rgs)\n\n Print the argument list of the current function.\n\np(rint) expression\n\n Evaluate the *expression* in the current context and print its\n value.\n\npp expression\n\n Like the ``print`` command, except the value of the expression is\n pretty-printed using the ``pprint`` module.\n\nwhatis expression\n\n Print the type of the *expression*.\n\nsource expression\n\n Try to get source code for the given object and display it.\n\n New in version 3.2.\n\ndisplay [expression]\n\n Display the value of the expression if it changed, each time\n execution stops in the current frame.\n\n Without expression, list all display expressions for the current\n frame.\n\n New in version 3.2.\n\nundisplay [expression]\n\n Do not display the expression any more in the current frame.\n Without expression, clear all display expressions for the current\n frame.\n\n New in version 3.2.\n\ninteract\n\n Start an interactive interpreter (using the ``code`` module) whose\n global namespace contains all the (global and local) names found in\n the current scope.\n\n New in version 3.2.\n\nalias [name [command]]\n\n Create an alias called *name* that executes *command*. The command\n must *not* be enclosed in quotes. Replaceable parameters can be\n indicated by ``%1``, ``%2``, and so on, while ``%*`` is replaced by\n all the parameters. If no command is given, the current alias for\n *name* is shown. If no arguments are given, all aliases are listed.\n\n Aliases may be nested and can contain anything that can be legally\n typed at the pdb prompt. Note that internal pdb commands *can* be\n overridden by aliases. Such a command is then hidden until the\n alias is removed. Aliasing is recursively applied to the first\n word of the command line; all other words in the line are left\n alone.\n\n As an example, here are two useful aliases (especially when placed\n in the ``.pdbrc`` file):\n\n # Print instance variables (usage "pi classInst")\n alias pi for k in %1.__dict__.keys(): print("%1.",k,"=",%1.__dict__[k])\n # Print instance variables in self\n alias ps pi self\n\nunalias name\n\n Delete the specified alias.\n\n! statement\n\n Execute the (one-line) *statement* in the context of the current\n stack frame. The exclamation point can be omitted unless the first\n word of the statement resembles a debugger command. To set a\n global variable, you can prefix the assignment command with a\n ``global`` statement on the same line, e.g.:\n\n (Pdb) global list_options; list_options = [\'-l\']\n (Pdb)\n\nrun [args ...]\nrestart [args ...]\n\n Restart the debugged Python program. If an argument is supplied,\n it is split with ``shlex`` and the result is used as the new\n ``sys.argv``. History, breakpoints, actions and debugger options\n are preserved. ``restart`` is an alias for ``run``.\n\nq(uit)\n\n Quit from the debugger. 
The program being executed is aborted.\n\n-[ Footnotes ]-\n\n[1] Whether a frame is considered to originate in a certain module is\n determined by the ``__name__`` in the frame globals.\n',
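For orientation, here is a compact sketch of the entry points described above (the ``divide()`` function is invented for the example; the debugger calls are commented out so the snippet can be run without blocking):

    import pdb

    def divide(a, b):
        return a / b

    # Run a call under debugger control; the (Pdb) prompt appears as soon
    # as divide() is entered:
    # pdb.runcall(divide, 6, 2)

    # Hard-code a breakpoint at a specific spot in your own code:
    # import pdb; pdb.set_trace()

    # After an uncaught exception at the interactive prompt, inspect it:
    # pdb.pm()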
'del': '\nThe ``del`` statement\n*********************\n\n del_stmt ::= "del" target_list\n\nDeletion is recursively defined very similarly to the way assignment is\ndefined. Rather than spelling it out in full detail, here are some\nhints.\n\nDeletion of a target list recursively deletes each target, from left\nto right.\n\nDeletion of a name removes the binding of that name from the local or\nglobal namespace, depending on whether the name occurs in a ``global``\nstatement in the same code block. If the name is unbound, a\n``NameError`` exception will be raised.\n\nDeletion of attribute references, subscriptions and slicings is passed\nto the primary object involved; deletion of a slicing is in general\nequivalent to assignment of an empty slice of the right type (but even\nthis is determined by the sliced object).\n\nChanged in version 3.2.\n',
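A short sketch of the three kinds of deletion targets discussed above:

    x = 10
    del x                    # removes the name binding; x is now unbound

    a = [1, 2, 3, 4]
    del a[1:3]               # slicing deletion is passed to the list object
    assert a == [1, 4]

    class C:
        pass

    obj = C()
    obj.attr = 1
    del obj.attr             # attribute deletion is passed to the object
    assert not hasattr(obj, "attr")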
'dict': '\nDictionary displays\n*******************\n\nA dictionary display is a possibly empty series of key/datum pairs\nenclosed in curly braces:\n\n dict_display ::= "{" [key_datum_list | dict_comprehension] "}"\n key_datum_list ::= key_datum ("," key_datum)* [","]\n key_datum ::= expression ":" expression\n dict_comprehension ::= expression ":" expression comp_for\n\nA dictionary display yields a new dictionary object.\n\nIf a comma-separated sequence of key/datum pairs is given, they are\nevaluated from left to right to define the entries of the dictionary:\neach key object is used as a key into the dictionary to store the\ncorresponding datum. This means that you can specify the same key\nmultiple times in the key/datum list, and the final dictionary\'s value\nfor that key will be the last one given.\n\nA dict comprehension, in contrast to list and set comprehensions,\nneeds two expressions separated with a colon followed by the usual\n"for" and "if" clauses. When the comprehension is run, the resulting\nkey and value elements are inserted in the new dictionary in the order\nthey are produced.\n\nRestrictions on the types of the key values are listed earlier in\nsection *The standard type hierarchy*. (To summarize, the key type\nshould be *hashable*, which excludes all mutable objects.) Clashes\nbetween duplicate keys are not detected; the last datum (textually\nrightmost in the display) stored for a given key value prevails.\n',
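Two quick illustrations of the rules above, duplicate keys and a dict comprehension:

    # the last (textually rightmost) datum for a duplicated key prevails
    assert {"a": 1, "a": 2} == {"a": 2}

    # a dict comprehension needs key and value expressions separated by a colon
    squares = {n: n * n for n in range(4)}
    assert squares == {0: 0, 1: 1, 2: 4, 3: 9}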
@@ -54,25 +55,25 @@ topics = {'assert': '\nThe ``assert`` statement\n************************\n\nAss
'pass': '\nThe ``pass`` statement\n**********************\n\n pass_stmt ::= "pass"\n\n``pass`` is a null operation --- when it is executed, nothing happens.\nIt is useful as a placeholder when a statement is required\nsyntactically, but no code needs to be executed, for example:\n\n def f(arg): pass # a function that does nothing (yet)\n\n class C: pass # a class with no methods (yet)\n',
'power': '\nThe power operator\n******************\n\nThe power operator binds more tightly than unary operators on its\nleft; it binds less tightly than unary operators on its right. The\nsyntax is:\n\n power ::= primary ["**" u_expr]\n\nThus, in an unparenthesized sequence of power and unary operators, the\noperators are evaluated from right to left (this does not constrain\nthe evaluation order for the operands): ``-1**2`` results in ``-1``.\n\nThe power operator has the same semantics as the built-in ``pow()``\nfunction, when called with two arguments: it yields its left argument\nraised to the power of its right argument. The numeric arguments are\nfirst converted to a common type, and the result is of that type.\n\nFor int operands, the result has the same type as the operands unless\nthe second argument is negative; in that case, all arguments are\nconverted to float and a float result is delivered. For example,\n``10**2`` returns ``100``, but ``10**-2`` returns ``0.01``.\n\nRaising ``0.0`` to a negative power results in a\n``ZeroDivisionError``. Raising a negative number to a fractional power\nresults in a ``complex`` number. (In earlier versions it raised a\n``ValueError``.)\n',
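The binding and conversion rules above, in executable form:

    assert -1**2 == -1       # parsed as -(1**2): ** binds tighter than unary minus on its left
    assert 2**-1 == 0.5      # parsed as 2**(-1): a negative int exponent gives a float
    assert 2**3**2 == 512    # right-to-left: parsed as 2**(3**2)
    assert isinstance((-8)**0.5, complex)  # negative base, fractional power -> complex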
'raise': '\nThe ``raise`` statement\n***********************\n\n raise_stmt ::= "raise" [expression ["from" expression]]\n\nIf no expressions are present, ``raise`` re-raises the last exception\nthat was active in the current scope. If no exception is active in\nthe current scope, a ``RuntimeError`` exception is raised indicating\nthat this is an error.\n\nOtherwise, ``raise`` evaluates the first expression as the exception\nobject. It must be either a subclass or an instance of\n``BaseException``. If it is a class, the exception instance will be\nobtained when needed by instantiating the class with no arguments.\n\nThe *type* of the exception is the exception instance\'s class, the\n*value* is the instance itself.\n\nA traceback object is normally created automatically when an exception\nis raised and attached to it as the ``__traceback__`` attribute, which\nis writable. You can create an exception and set your own traceback in\none step using the ``with_traceback()`` exception method (which\nreturns the same exception instance, with its traceback set to its\nargument), like so:\n\n raise Exception("foo occurred").with_traceback(tracebackobj)\n\nThe ``from`` clause is used for exception chaining: if given, the\nsecond *expression* must be another exception class or instance, which\nwill then be attached to the raised exception as the ``__cause__``\nattribute (which is writable). If the raised exception is not\nhandled, both exceptions will be printed:\n\n >>> try:\n ... print(1 / 0)\n ... except Exception as exc:\n ... raise RuntimeError("Something bad happened") from exc\n ...\n Traceback (most recent call last):\n File "<stdin>", line 2, in <module>\n ZeroDivisionError: int division or modulo by zero\n\n The above exception was the direct cause of the following exception:\n\n Traceback (most recent call last):\n File "<stdin>", line 4, in <module>\n RuntimeError: Something bad happened\n\nA similar mechanism works implicitly if an exception is raised inside\nan exception handler: the previous exception is then attached as the\nnew exception\'s ``__context__`` attribute:\n\n >>> try:\n ... print(1 / 0)\n ... except:\n ... raise RuntimeError("Something bad happened")\n ...\n Traceback (most recent call last):\n File "<stdin>", line 2, in <module>\n ZeroDivisionError: int division or modulo by zero\n\n During handling of the above exception, another exception occurred:\n\n Traceback (most recent call last):\n File "<stdin>", line 4, in <module>\n RuntimeError: Something bad happened\n\nAdditional information on exceptions can be found in section\n*Exceptions*, and information about handling exceptions is in section\n*The try statement*.\n',
- 'return': '\nThe ``return`` statement\n************************\n\n return_stmt ::= "return" [expression_list]\n\n``return`` may only occur syntactically nested in a function\ndefinition, not within a nested class definition.\n\nIf an expression list is present, it is evaluated, else ``None`` is\nsubstituted.\n\n``return`` leaves the current function call with the expression list\n(or ``None``) as return value.\n\nWhen ``return`` passes control out of a ``try`` statement with a\n``finally`` clause, that ``finally`` clause is executed before really\nleaving the function.\n\nIn a generator function, the ``return`` statement is not allowed to\ninclude an ``expression_list``. In that context, a bare ``return``\nindicates that the generator is done and will cause ``StopIteration``\nto be raised.\n',
+ 'return': '\nThe ``return`` statement\n************************\n\n return_stmt ::= "return" [expression_list]\n\n``return`` may only occur syntactically nested in a function\ndefinition, not within a nested class definition.\n\nIf an expression list is present, it is evaluated, else ``None`` is\nsubstituted.\n\n``return`` leaves the current function call with the expression list\n(or ``None``) as return value.\n\nWhen ``return`` passes control out of a ``try`` statement with a\n``finally`` clause, that ``finally`` clause is executed before really\nleaving the function.\n\nIn a generator function, the ``return`` statement indicates that the\ngenerator is done and will cause ``StopIteration`` to be raised. The\nreturned value (if any) is used as an argument to construct\n``StopIteration`` and becomes the ``StopIteration.value`` attribute.\n',
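A minimal sketch of the changed generator behavior described in the updated entry: the value returned from a generator becomes the ``StopIteration.value`` attribute (Python 3.3 and later):

    def gen():
        yield 1
        return "done"               # now allowed inside a generator

    g = gen()
    assert next(g) == 1
    try:
        next(g)
    except StopIteration as exc:
        assert exc.value == "done"  # the returned value rides on the exception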
'sequence-types': "\nEmulating container types\n*************************\n\nThe following methods can be defined to implement container objects.\nContainers usually are sequences (such as lists or tuples) or mappings\n(like dictionaries), but can represent other containers as well. The\nfirst set of methods is used either to emulate a sequence or to\nemulate a mapping; the difference is that for a sequence, the\nallowable keys should be the integers *k* for which ``0 <= k < N``\nwhere *N* is the length of the sequence, or slice objects, which\ndefine a range of items. It is also recommended that mappings provide\nthe methods ``keys()``, ``values()``, ``items()``, ``get()``,\n``clear()``, ``setdefault()``, ``pop()``, ``popitem()``, ``copy()``,\nand ``update()`` behaving similarly to those for Python's standard\ndictionary objects. The ``collections`` module provides a\n``MutableMapping`` abstract base class to help create those methods\nfrom a base set of ``__getitem__()``, ``__setitem__()``,\n``__delitem__()``, and ``keys()``. Mutable sequences should provide\nmethods ``append()``, ``count()``, ``index()``, ``extend()``,\n``insert()``, ``pop()``, ``remove()``, ``reverse()`` and ``sort()``,\nlike Python's standard list objects. Finally, sequence types should\nimplement addition (meaning concatenation) and multiplication (meaning\nrepetition) by defining the methods ``__add__()``, ``__radd__()``,\n``__iadd__()``, ``__mul__()``, ``__rmul__()`` and ``__imul__()``\ndescribed below; they should not define other numerical operators. It\nis recommended that both mappings and sequences implement the\n``__contains__()`` method to allow efficient use of the ``in``\noperator; for mappings, ``in`` should search the mapping's keys; for\nsequences, it should search through the values. It is further\nrecommended that both mappings and sequences implement the\n``__iter__()`` method to allow efficient iteration through the\ncontainer; for mappings, ``__iter__()`` should be the same as\n``keys()``; for sequences, it should iterate through the values.\n\nobject.__len__(self)\n\n Called to implement the built-in function ``len()``. Should return\n the length of the object, an integer ``>=`` 0. Also, an object\n that doesn't define a ``__bool__()`` method and whose ``__len__()``\n method returns zero is considered to be false in a Boolean context.\n\nNote: Slicing is done exclusively with the following three methods. A\n call like\n\n a[1:2] = b\n\n is translated to\n\n a[slice(1, 2, None)] = b\n\n and so forth. Missing slice items are always filled in with\n ``None``.\n\nobject.__getitem__(self, key)\n\n Called to implement evaluation of ``self[key]``. For sequence\n types, the accepted keys should be integers and slice objects.\n Note that the special interpretation of negative indexes (if the\n class wishes to emulate a sequence type) is up to the\n ``__getitem__()`` method. If *key* is of an inappropriate type,\n ``TypeError`` may be raised; if of a value outside the set of\n indexes for the sequence (after any special interpretation of\n negative values), ``IndexError`` should be raised. For mapping\n types, if *key* is missing (not in the container), ``KeyError``\n should be raised.\n\n Note: ``for`` loops expect that an ``IndexError`` will be raised for\n illegal indexes to allow proper detection of the end of the\n sequence.\n\nobject.__setitem__(self, key, value)\n\n Called to implement assignment to ``self[key]``. Same note as for\n ``__getitem__()``. 
This should only be implemented for mappings if\n the objects support changes to the values for keys, or if new keys\n can be added, or for sequences if elements can be replaced. The\n same exceptions should be raised for improper *key* values as for\n the ``__getitem__()`` method.\n\nobject.__delitem__(self, key)\n\n Called to implement deletion of ``self[key]``. Same note as for\n ``__getitem__()``. This should only be implemented for mappings if\n the objects support removal of keys, or for sequences if elements\n can be removed from the sequence. The same exceptions should be\n raised for improper *key* values as for the ``__getitem__()``\n method.\n\nobject.__iter__(self)\n\n This method is called when an iterator is required for a container.\n This method should return a new iterator object that can iterate\n over all the objects in the container. For mappings, it should\n iterate over the keys of the container, and should also be made\n available as the method ``keys()``.\n\n Iterator objects also need to implement this method; they are\n required to return themselves. For more information on iterator\n objects, see *Iterator Types*.\n\nobject.__reversed__(self)\n\n Called (if present) by the ``reversed()`` built-in to implement\n reverse iteration. It should return a new iterator object that\n iterates over all the objects in the container in reverse order.\n\n If the ``__reversed__()`` method is not provided, the\n ``reversed()`` built-in will fall back to using the sequence\n protocol (``__len__()`` and ``__getitem__()``). Objects that\n support the sequence protocol should only provide\n ``__reversed__()`` if they can provide an implementation that is\n more efficient than the one provided by ``reversed()``.\n\nThe membership test operators (``in`` and ``not in``) are normally\nimplemented as an iteration through a sequence. However, container\nobjects can supply the following special method with a more efficient\nimplementation, which also does not require the object be a sequence.\n\nobject.__contains__(self, item)\n\n Called to implement membership test operators. Should return true\n if *item* is in *self*, false otherwise. For mapping objects, this\n should consider the keys of the mapping rather than the values or\n the key-item pairs.\n\n For objects that don't define ``__contains__()``, the membership\n test first tries iteration via ``__iter__()``, then the old\n sequence iteration protocol via ``__getitem__()``, see *this\n section in the language reference*.\n",
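A compact sketch of sequence emulation under the rules above (the ``Deck`` class is illustrative): ``__len__()`` and ``__getitem__()`` suffice for indexing, slicing, iteration and membership, while ``__contains__()`` is added only as the more efficient override the text mentions:

    class Deck:
        def __init__(self, cards):
            self._cards = list(cards)

        def __len__(self):
            return len(self._cards)

        def __getitem__(self, key):
            # integers, negative indexes and slice objects are all
            # handled by delegating to the underlying list
            return self._cards[key]

        def __contains__(self, item):
            # optional fast path for the ``in`` operator
            return item in self._cards

    d = Deck(["2H", "3H", "4H"])
    assert len(d) == 3
    assert d[-1] == "4H" and d[0:2] == ["2H", "3H"]
    assert "3H" in d
    assert list(d) == ["2H", "3H", "4H"]   # iteration falls back to __getitem__()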
'shifting': '\nShifting operations\n*******************\n\nThe shifting operations have lower priority than the arithmetic\noperations:\n\n shift_expr ::= a_expr | shift_expr ( "<<" | ">>" ) a_expr\n\nThese operators accept integers as arguments. They shift the first\nargument to the left or right by the number of bits given by the\nsecond argument.\n\nA right shift by *n* bits is defined as division by ``pow(2,n)``. A\nleft shift by *n* bits is defined as multiplication with ``pow(2,n)``.\n\nNote: In the current implementation, the right-hand operand is required to\n be at most ``sys.maxsize``. If the right-hand operand is larger\n than ``sys.maxsize`` an ``OverflowError`` exception is raised.\n',
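The equivalences stated above, checked directly:

    x, n = 5, 2
    assert x << n == x * pow(2, n) == 20   # left shift: multiplication by 2**n
    assert x >> n == x // pow(2, n) == 1   # right shift: (floor) division by 2**n
    assert -5 >> 1 == -3                   # division rounds toward negative infinity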
'slicings': '\nSlicings\n********\n\nA slicing selects a range of items in a sequence object (e.g., a\nstring, tuple or list). Slicings may be used as expressions or as\ntargets in assignment or ``del`` statements. The syntax for a\nslicing:\n\n slicing ::= primary "[" slice_list "]"\n slice_list ::= slice_item ("," slice_item)* [","]\n slice_item ::= expression | proper_slice\n proper_slice ::= [lower_bound] ":" [upper_bound] [ ":" [stride] ]\n lower_bound ::= expression\n upper_bound ::= expression\n stride ::= expression\n\nThere is ambiguity in the formal syntax here: anything that looks like\nan expression list also looks like a slice list, so any subscription\ncan be interpreted as a slicing. Rather than further complicating the\nsyntax, this is disambiguated by defining that in this case the\ninterpretation as a subscription takes priority over the\ninterpretation as a slicing (this is the case if the slice list\ncontains no proper slice).\n\nThe semantics for a slicing are as follows. The primary must evaluate\nto a mapping object, and it is indexed (using the same\n``__getitem__()`` method as normal subscription) with a key that is\nconstructed from the slice list, as follows. If the slice list\ncontains at least one comma, the key is a tuple containing the\nconversion of the slice items; otherwise, the conversion of the lone\nslice item is the key. The conversion of a slice item that is an\nexpression is that expression. The conversion of a proper slice is a\nslice object (see section *The standard type hierarchy*) whose\n``start``, ``stop`` and ``step`` attributes are the values of the\nexpressions given as lower bound, upper bound and stride,\nrespectively, substituting ``None`` for missing expressions.\n',
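How the key is constructed for ``__getitem__()`` can be observed with a tiny helper (the ``ShowKey`` class is invented for the example):

    class ShowKey:
        def __getitem__(self, key):
            return key   # echo the key the slicing machinery built

    s = ShowKey()
    assert s[1:10:2] == slice(1, 10, 2)         # a proper slice becomes a slice object
    assert s[:] == slice(None, None, None)      # missing expressions become None
    assert s[1:2, 3] == (slice(1, 2, None), 3)  # a comma makes the key a tuple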
- 'specialattrs': '\nSpecial Attributes\n******************\n\nThe implementation adds a few special read-only attributes to several\nobject types, where they are relevant. Some of these are not reported\nby the ``dir()`` built-in function.\n\nobject.__dict__\n\n A dictionary or other mapping object used to store an object\'s\n (writable) attributes.\n\ninstance.__class__\n\n The class to which a class instance belongs.\n\nclass.__bases__\n\n The tuple of base classes of a class object.\n\nclass.__name__\n\n The name of the class or type.\n\nclass.__mro__\n\n This attribute is a tuple of classes that are considered when\n looking for base classes during method resolution.\n\nclass.mro()\n\n This method can be overridden by a metaclass to customize the\n method resolution order for its instances. It is called at class\n instantiation, and its result is stored in ``__mro__``.\n\nclass.__subclasses__()\n\n Each class keeps a list of weak references to its immediate\n subclasses. This method returns a list of all those references\n still alive. Example:\n\n >>> int.__subclasses__()\n [<class \'bool\'>]\n\n-[ Footnotes ]-\n\n[1] Additional information on these special methods may be found in\n the Python Reference Manual (*Basic customization*).\n\n[2] As a consequence, the list ``[1, 2]`` is considered equal to\n ``[1.0, 2.0]``, and similarly for tuples.\n\n[3] They must have since the parser can\'t tell the type of the\n operands.\n\n[4] Cased characters are those with general category property being\n one of "Lu" (Letter, uppercase), "Ll" (Letter, lowercase), or "Lt"\n (Letter, titlecase).\n\n[5] To format only a tuple you should therefore provide a singleton\n tuple whose only element is the tuple to be formatted.\n',
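The special attributes listed in that entry, exercised on a two-class hierarchy:

    class A:
        pass

    class B(A):
        pass

    b = B()
    b.x = 1
    assert b.__dict__ == {"x": 1}        # writable instance attributes
    assert b.__class__ is B
    assert B.__bases__ == (A,)
    assert B.__name__ == "B"
    assert B.__mro__ == (B, A, object)   # classes considered during method resolution
    assert B in A.__subclasses__()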
- 'specialnames': '\nSpecial method names\n********************\n\nA class can implement certain operations that are invoked by special\nsyntax (such as arithmetic operations or subscripting and slicing) by\ndefining methods with special names. This is Python\'s approach to\n*operator overloading*, allowing classes to define their own behavior\nwith respect to language operators. For instance, if a class defines\na method named ``__getitem__()``, and ``x`` is an instance of this\nclass, then ``x[i]`` is roughly equivalent to ``type(x).__getitem__(x,\ni)``. Except where mentioned, attempts to execute an operation raise\nan exception when no appropriate method is defined (typically\n``AttributeError`` or ``TypeError``).\n\nWhen implementing a class that emulates any built-in type, it is\nimportant that the emulation only be implemented to the degree that it\nmakes sense for the object being modelled. For example, some\nsequences may work well with retrieval of individual elements, but\nextracting a slice may not make sense. (One example of this is the\n``NodeList`` interface in the W3C\'s Document Object Model.)\n\n\nBasic customization\n===================\n\nobject.__new__(cls[, ...])\n\n Called to create a new instance of class *cls*. ``__new__()`` is a\n static method (special-cased so you need not declare it as such)\n that takes the class of which an instance was requested as its\n first argument. The remaining arguments are those passed to the\n object constructor expression (the call to the class). The return\n value of ``__new__()`` should be the new object instance (usually\n an instance of *cls*).\n\n Typical implementations create a new instance of the class by\n invoking the superclass\'s ``__new__()`` method using\n ``super(currentclass, cls).__new__(cls[, ...])`` with appropriate\n arguments and then modifying the newly-created instance as\n necessary before returning it.\n\n If ``__new__()`` returns an instance of *cls*, then the new\n instance\'s ``__init__()`` method will be invoked like\n ``__init__(self[, ...])``, where *self* is the new instance and the\n remaining arguments are the same as were passed to ``__new__()``.\n\n If ``__new__()`` does not return an instance of *cls*, then the new\n instance\'s ``__init__()`` method will not be invoked.\n\n ``__new__()`` is intended mainly to allow subclasses of immutable\n types (like int, str, or tuple) to customize instance creation. It\n is also commonly overridden in custom metaclasses in order to\n customize class creation.\n\nobject.__init__(self[, ...])\n\n Called when the instance is created. The arguments are those\n passed to the class constructor expression. If a base class has an\n ``__init__()`` method, the derived class\'s ``__init__()`` method,\n if any, must explicitly call it to ensure proper initialization of\n the base class part of the instance; for example:\n ``BaseClass.__init__(self, [args...])``. As a special constraint\n on constructors, no value may be returned; doing so will cause a\n ``TypeError`` to be raised at runtime.\n\nobject.__del__(self)\n\n Called when the instance is about to be destroyed. This is also\n called a destructor. If a base class has a ``__del__()`` method,\n the derived class\'s ``__del__()`` method, if any, must explicitly\n call it to ensure proper deletion of the base class part of the\n instance. Note that it is possible (though not recommended!) for\n the ``__del__()`` method to postpone destruction of the instance by\n creating a new reference to it. 
It may then be called at a later\n time when this new reference is deleted. It is not guaranteed that\n ``__del__()`` methods are called for objects that still exist when\n the interpreter exits.\n\n Note: ``del x`` doesn\'t directly call ``x.__del__()`` --- the former\n decrements the reference count for ``x`` by one, and the latter\n is only called when ``x``\'s reference count reaches zero. Some\n common situations that may prevent the reference count of an\n object from going to zero include: circular references between\n objects (e.g., a doubly-linked list or a tree data structure with\n parent and child pointers); a reference to the object on the\n stack frame of a function that caught an exception (the traceback\n stored in ``sys.exc_info()[2]`` keeps the stack frame alive); or\n a reference to the object on the stack frame that raised an\n unhandled exception in interactive mode (the traceback stored in\n ``sys.last_traceback`` keeps the stack frame alive). The first\n situation can only be remedied by explicitly breaking the cycles;\n the latter two situations can be resolved by storing ``None`` in\n ``sys.last_traceback``. Circular references which are garbage are\n detected when the option cycle detector is enabled (it\'s on by\n default), but can only be cleaned up if there are no Python-\n level ``__del__()`` methods involved. Refer to the documentation\n for the ``gc`` module for more information about how\n ``__del__()`` methods are handled by the cycle detector,\n particularly the description of the ``garbage`` value.\n\n Warning: Due to the precarious circumstances under which ``__del__()``\n methods are invoked, exceptions that occur during their execution\n are ignored, and a warning is printed to ``sys.stderr`` instead.\n Also, when ``__del__()`` is invoked in response to a module being\n deleted (e.g., when execution of the program is done), other\n globals referenced by the ``__del__()`` method may already have\n been deleted or in the process of being torn down (e.g. the\n import machinery shutting down). For this reason, ``__del__()``\n methods should do the absolute minimum needed to maintain\n external invariants. Starting with version 1.5, Python\n guarantees that globals whose name begins with a single\n underscore are deleted from their module before other globals are\n deleted; if no other references to such globals exist, this may\n help in assuring that imported modules are still available at the\n time when the ``__del__()`` method is called.\n\nobject.__repr__(self)\n\n Called by the ``repr()`` built-in function to compute the\n "official" string representation of an object. If at all possible,\n this should look like a valid Python expression that could be used\n to recreate an object with the same value (given an appropriate\n environment). If this is not possible, a string of the form\n ``<...some useful description...>`` should be returned. The return\n value must be a string object. If a class defines ``__repr__()``\n but not ``__str__()``, then ``__repr__()`` is also used when an\n "informal" string representation of instances of that class is\n required.\n\n This is typically used for debugging, so it is important that the\n representation is information-rich and unambiguous.\n\nobject.__str__(self)\n\n Called by the ``str()`` built-in function and by the ``print()``\n function to compute the "informal" string representation of an\n object. 
This differs from ``__repr__()`` in that it does not have\n to be a valid Python expression: a more convenient or concise\n representation may be used instead. The return value must be a\n string object.\n\nobject.__bytes__(self)\n\n Called by ``bytes()`` to compute a byte-string representation of an\n object. This should return a ``bytes`` object.\n\nobject.__format__(self, format_spec)\n\n Called by the ``format()`` built-in function (and by extension, the\n ``format()`` method of class ``str``) to produce a "formatted"\n string representation of an object. The ``format_spec`` argument is\n a string that contains a description of the formatting options\n desired. The interpretation of the ``format_spec`` argument is up\n to the type implementing ``__format__()``, however most classes\n will either delegate formatting to one of the built-in types, or\n use a similar formatting option syntax.\n\n See *Format Specification Mini-Language* for a description of the\n standard formatting syntax.\n\n The return value must be a string object.\n\nobject.__lt__(self, other)\nobject.__le__(self, other)\nobject.__eq__(self, other)\nobject.__ne__(self, other)\nobject.__gt__(self, other)\nobject.__ge__(self, other)\n\n These are the so-called "rich comparison" methods. The\n correspondence between operator symbols and method names is as\n follows: ``x<y`` calls ``x.__lt__(y)``, ``x<=y`` calls\n ``x.__le__(y)``, ``x==y`` calls ``x.__eq__(y)``, ``x!=y`` calls\n ``x.__ne__(y)``, ``x>y`` calls ``x.__gt__(y)``, and ``x>=y`` calls\n ``x.__ge__(y)``.\n\n A rich comparison method may return the singleton\n ``NotImplemented`` if it does not implement the operation for a\n given pair of arguments. By convention, ``False`` and ``True`` are\n returned for a successful comparison. However, these methods can\n return any value, so if the comparison operator is used in a\n Boolean context (e.g., in the condition of an ``if`` statement),\n Python will call ``bool()`` on the value to determine if the result\n is true or false.\n\n There are no implied relationships among the comparison operators.\n The truth of ``x==y`` does not imply that ``x!=y`` is false.\n Accordingly, when defining ``__eq__()``, one should also define\n ``__ne__()`` so that the operators will behave as expected. See\n the paragraph on ``__hash__()`` for some important notes on\n creating *hashable* objects which support custom comparison\n operations and are usable as dictionary keys.\n\n There are no swapped-argument versions of these methods (to be used\n when the left argument does not support the operation but the right\n argument does); rather, ``__lt__()`` and ``__gt__()`` are each\n other\'s reflection, ``__le__()`` and ``__ge__()`` are each other\'s\n reflection, and ``__eq__()`` and ``__ne__()`` are their own\n reflection.\n\n Arguments to rich comparison methods are never coerced.\n\n To automatically generate ordering operations from a single root\n operation, see ``functools.total_ordering()``.\n\nobject.__hash__(self)\n\n Called by built-in function ``hash()`` and for operations on\n members of hashed collections including ``set``, ``frozenset``, and\n ``dict``. ``__hash__()`` should return an integer. The only\n required property is that objects which compare equal have the same\n hash value; it is advised to somehow mix together (e.g. 
using\n exclusive or) the hash values for the components of the object that\n also play a part in comparison of objects.\n\n If a class does not define an ``__eq__()`` method it should not\n define a ``__hash__()`` operation either; if it defines\n ``__eq__()`` but not ``__hash__()``, its instances will not be\n usable as items in hashable collections. If a class defines\n mutable objects and implements an ``__eq__()`` method, it should\n not implement ``__hash__()``, since the implementation of hashable\n collections requires that a key\'s hash value is immutable (if the\n object\'s hash value changes, it will be in the wrong hash bucket).\n\n User-defined classes have ``__eq__()`` and ``__hash__()`` methods\n by default; with them, all objects compare unequal (except with\n themselves) and ``x.__hash__()`` returns ``id(x)``.\n\n Classes which inherit a ``__hash__()`` method from a parent class\n but change the meaning of ``__eq__()`` such that the hash value\n returned is no longer appropriate (e.g. by switching to a value-\n based concept of equality instead of the default identity based\n equality) can explicitly flag themselves as being unhashable by\n setting ``__hash__ = None`` in the class definition. Doing so means\n that not only will instances of the class raise an appropriate\n ``TypeError`` when a program attempts to retrieve their hash value,\n but they will also be correctly identified as unhashable when\n checking ``isinstance(obj, collections.Hashable)`` (unlike classes\n which define their own ``__hash__()`` to explicitly raise\n ``TypeError``).\n\n If a class that overrides ``__eq__()`` needs to retain the\n implementation of ``__hash__()`` from a parent class, the\n interpreter must be told this explicitly by setting ``__hash__ =\n <ParentClass>.__hash__``. Otherwise the inheritance of\n ``__hash__()`` will be blocked, just as if ``__hash__`` had been\n explicitly set to ``None``.\n\n See also the *-R* command-line option.\n\nobject.__bool__(self)\n\n Called to implement truth value testing and the built-in operation\n ``bool()``; should return ``False`` or ``True``. When this method\n is not defined, ``__len__()`` is called, if it is defined, and the\n object is considered true if its result is nonzero. If a class\n defines neither ``__len__()`` nor ``__bool__()``, all its instances\n are considered true.\n\n\nCustomizing attribute access\n============================\n\nThe following methods can be defined to customize the meaning of\nattribute access (use of, assignment to, or deletion of ``x.name``)\nfor class instances.\n\nobject.__getattr__(self, name)\n\n Called when an attribute lookup has not found the attribute in the\n usual places (i.e. it is not an instance attribute nor is it found\n in the class tree for ``self``). ``name`` is the attribute name.\n This method should return the (computed) attribute value or raise\n an ``AttributeError`` exception.\n\n Note that if the attribute is found through the normal mechanism,\n ``__getattr__()`` is not called. (This is an intentional asymmetry\n between ``__getattr__()`` and ``__setattr__()``.) This is done both\n for efficiency reasons and because otherwise ``__getattr__()``\n would have no way to access other attributes of the instance. Note\n that at least for instance variables, you can fake total control by\n not inserting any values in the instance attribute dictionary (but\n instead inserting them in another object). 
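As a minimal sketch of the delegation technique just described (the ``Proxy`` class and its ``_target`` attribute are invented names for this illustration, not part of any documented API):

   class Proxy:
       # Keep the instance dictionary empty except for '_target', so
       # __getattr__() sees every other attribute lookup.
       def __init__(self, target):
           object.__setattr__(self, '_target', target)  # avoid recursion
       def __getattr__(self, name):
           # Called only when normal lookup fails, i.e. for every
           # name other than '_target'.
           return getattr(self._target, name)
       def __setattr__(self, name, value):
           setattr(self._target, name, value)

Attributes assigned through a ``Proxy`` end up on the wrapped object, while reads fall through to it via ``__getattr__()``.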
See the\n ``__getattribute__()`` method below for a way to actually get total\n control over attribute access.\n\nobject.__getattribute__(self, name)\n\n Called unconditionally to implement attribute accesses for\n instances of the class. If the class also defines\n ``__getattr__()``, the latter will not be called unless\n ``__getattribute__()`` either calls it explicitly or raises an\n ``AttributeError``. This method should return the (computed)\n attribute value or raise an ``AttributeError`` exception. In order\n to avoid infinite recursion in this method, its implementation\n should always call the base class method with the same name to\n access any attributes it needs, for example,\n ``object.__getattribute__(self, name)``.\n\n Note: This method may still be bypassed when looking up special methods\n as the result of implicit invocation via language syntax or\n built-in functions. See *Special method lookup*.\n\nobject.__setattr__(self, name, value)\n\n Called when an attribute assignment is attempted. This is called\n instead of the normal mechanism (i.e. store the value in the\n instance dictionary). *name* is the attribute name, *value* is the\n value to be assigned to it.\n\n If ``__setattr__()`` wants to assign to an instance attribute, it\n should call the base class method with the same name, for example,\n ``object.__setattr__(self, name, value)``.\n\nobject.__delattr__(self, name)\n\n Like ``__setattr__()`` but for attribute deletion instead of\n assignment. This should only be implemented if ``del obj.name`` is\n meaningful for the object.\n\nobject.__dir__(self)\n\n Called when ``dir()`` is called on the object. A list must be\n returned.\n\n\nImplementing Descriptors\n------------------------\n\nThe following methods only apply when an instance of the class\ncontaining the method (a so-called *descriptor* class) appears in an\n*owner* class (the descriptor must be in either the owner\'s class\ndictionary or in the class dictionary for one of its parents). In the\nexamples below, "the attribute" refers to the attribute whose name is\nthe key of the property in the owner class\' ``__dict__``.\n\nobject.__get__(self, instance, owner)\n\n Called to get the attribute of the owner class (class attribute\n access) or of an instance of that class (instance attribute\n access). *owner* is always the owner class, while *instance* is the\n instance that the attribute was accessed through, or ``None`` when\n the attribute is accessed through the *owner*. This method should\n return the (computed) attribute value or raise an\n ``AttributeError`` exception.\n\nobject.__set__(self, instance, value)\n\n Called to set the attribute on an instance *instance* of the owner\n class to a new value, *value*.\n\nobject.__delete__(self, instance)\n\n Called to delete the attribute on an instance *instance* of the\n owner class.\n\n\nInvoking Descriptors\n--------------------\n\nIn general, a descriptor is an object attribute with "binding\nbehavior", one whose attribute access has been overridden by methods\nin the descriptor protocol: ``__get__()``, ``__set__()``, and\n``__delete__()``. If any of those methods are defined for an object,\nit is said to be a descriptor.\n\nThe default behavior for attribute access is to get, set, or delete\nthe attribute from an object\'s dictionary. 
For instance, ``a.x`` has a\nlookup chain starting with ``a.__dict__[\'x\']``, then\n``type(a).__dict__[\'x\']``, and continuing through the base classes of\n``type(a)`` excluding metaclasses.\n\nHowever, if the looked-up value is an object defining one of the\ndescriptor methods, then Python may override the default behavior and\ninvoke the descriptor method instead. Where this occurs in the\nprecedence chain depends on which descriptor methods were defined and\nhow they were called.\n\nThe starting point for descriptor invocation is a binding, ``a.x``.\nHow the arguments are assembled depends on ``a``:\n\nDirect Call\n The simplest and least common call is when user code directly\n invokes a descriptor method: ``x.__get__(a)``.\n\nInstance Binding\n If binding to an object instance, ``a.x`` is transformed into the\n call: ``type(a).__dict__[\'x\'].__get__(a, type(a))``.\n\nClass Binding\n If binding to a class, ``A.x`` is transformed into the call:\n ``A.__dict__[\'x\'].__get__(None, A)``.\n\nSuper Binding\n If ``a`` is an instance of ``super``, then the binding ``super(B,\n obj).m()`` searches ``obj.__class__.__mro__`` for the base class\n ``A`` immediately preceding ``B`` and then invokes the descriptor\n with the call: ``A.__dict__[\'m\'].__get__(obj, obj.__class__)``.\n\nFor instance bindings, the precedence of descriptor invocation depends\non which descriptor methods are defined. A descriptor can define\nany combination of ``__get__()``, ``__set__()`` and ``__delete__()``.\nIf it does not define ``__get__()``, then accessing the attribute will\nreturn the descriptor object itself unless there is a value in the\nobject\'s instance dictionary. If the descriptor defines ``__set__()``\nand/or ``__delete__()``, it is a data descriptor; if it defines\nneither, it is a non-data descriptor. Normally, data descriptors\ndefine both ``__get__()`` and ``__set__()``, while non-data\ndescriptors have just the ``__get__()`` method. Data descriptors with\n``__set__()`` and ``__get__()`` defined always override a redefinition\nin an instance dictionary. In contrast, non-data descriptors can be\noverridden by instances.\n\nPython methods (including ``staticmethod()`` and ``classmethod()``)\nare implemented as non-data descriptors. Accordingly, instances can\nredefine and override methods. This allows individual instances to\nacquire behaviors that differ from other instances of the same class.\n\nThe ``property()`` function is implemented as a data descriptor.\nAccordingly, instances cannot override the behavior of a property.\n\n\n__slots__\n---------\n\nBy default, instances of classes have a dictionary for attribute\nstorage. This wastes space for objects having very few instance\nvariables. The space consumption can become acute when creating large\nnumbers of instances.\n\nThe default can be overridden by defining *__slots__* in a class\ndefinition. The *__slots__* declaration takes a sequence of instance\nvariables and reserves just enough space in each instance to hold a\nvalue for each variable. Space is saved because *__dict__* is not\ncreated for each instance.\n\nobject.__slots__\n\n This class variable can be assigned a string, iterable, or sequence\n of strings with variable names used by instances. 
If defined in a\n class, *__slots__* reserves space for the declared variables and\n prevents the automatic creation of *__dict__* and *__weakref__* for\n each instance.\n\n\nNotes on using *__slots__*\n~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n* When inheriting from a class without *__slots__*, the *__dict__*\n attribute of that class will always be accessible, so a *__slots__*\n definition in the subclass is meaningless.\n\n* Without a *__dict__* variable, instances cannot be assigned new\n variables not listed in the *__slots__* definition. Attempting to\n assign to an unlisted variable name raises ``AttributeError``. If\n dynamic assignment of new variables is desired, then add\n ``\'__dict__\'`` to the sequence of strings in the *__slots__*\n declaration.\n\n* Without a *__weakref__* variable for each instance, classes defining\n *__slots__* do not support weak references to their instances. If weak\n reference support is needed, then add ``\'__weakref__\'`` to the\n sequence of strings in the *__slots__* declaration.\n\n* *__slots__* are implemented at the class level by creating\n descriptors (*Implementing Descriptors*) for each variable name. As\n a result, class attributes cannot be used to set default values for\n instance variables defined by *__slots__*; otherwise, the class\n attribute would overwrite the descriptor assignment.\n\n* The action of a *__slots__* declaration is limited to the class\n where it is defined. As a result, subclasses will have a *__dict__*\n unless they also define *__slots__* (which must only contain names\n of any *additional* slots).\n\n* If a class defines a slot also defined in a base class, the instance\n variable defined by the base class slot is inaccessible (except by\n retrieving its descriptor directly from the base class). This\n renders the meaning of the program undefined. In the future, a\n check may be added to prevent this.\n\n* Nonempty *__slots__* does not work for classes derived from\n "variable-length" built-in types such as ``int``, ``str`` and\n ``tuple``.\n\n* Any non-string iterable may be assigned to *__slots__*. Mappings may\n also be used; however, in the future, special meaning may be\n assigned to the values corresponding to each key.\n\n* *__class__* assignment works only if both classes have the same\n *__slots__*.\n\n\nCustomizing class creation\n==========================\n\nBy default, classes are constructed using ``type()``. A class\ndefinition is read into a separate namespace and the value of class\nname is bound to the result of ``type(name, bases, dict)``.\n\nWhen the class definition is read, if a callable ``metaclass`` keyword\nargument is passed after the bases in the class definition, the\ncallable given will be called instead of ``type()``. If other keyword\narguments are passed, they will also be passed to the metaclass. This\nallows classes or functions to be written which monitor or alter the\nclass creation process:\n\n* Modifying the class dictionary prior to the class being created.\n\n* Returning an instance of another class -- essentially performing the\n role of a factory function.\n\nThese steps will have to be performed in the metaclass\'s ``__new__()``\nmethod -- ``type.__new__()`` can then be called from this method to\ncreate a class with different properties. 
This example adds a new\nelement to the class dictionary before creating the class:\n\n class metacls(type):\n def __new__(mcs, name, bases, dict):\n dict[\'foo\'] = \'metacls was here\'\n return type.__new__(mcs, name, bases, dict)\n\nYou can of course also override other class methods (or add new\nmethods); for example defining a custom ``__call__()`` method in the\nmetaclass allows custom behavior when the class is called, e.g. not\nalways creating a new instance.\n\nIf the metaclass has a ``__prepare__()`` attribute (usually\nimplemented as a class or static method), it is called before the\nclass body is evaluated with the name of the class and a tuple of its\nbases for arguments. It should return an object that supports the\nmapping interface that will be used to store the namespace of the\nclass. The default is a plain dictionary. This could be used, for\nexample, to keep track of the order that class attributes are declared\nin by returning an ordered dictionary.\n\nThe appropriate metaclass is determined by the following precedence\nrules:\n\n* If the ``metaclass`` keyword argument is passed with the bases, it\n is used.\n\n* Otherwise, if there is at least one base class, its metaclass is\n used.\n\n* Otherwise, the default metaclass (``type``) is used.\n\nThe potential uses for metaclasses are boundless. Some ideas that have\nbeen explored include logging, interface checking, automatic\ndelegation, automatic property creation, proxies, frameworks, and\nautomatic resource locking/synchronization.\n\nHere is an example of a metaclass that uses a\n``collections.OrderedDict`` to remember the order that class members\nwere defined:\n\n class OrderedClass(type):\n\n @classmethod\n def __prepare__(metacls, name, bases, **kwds):\n return collections.OrderedDict()\n\n def __new__(cls, name, bases, classdict):\n result = type.__new__(cls, name, bases, dict(classdict))\n result.members = tuple(classdict)\n return result\n\n class A(metaclass=OrderedClass):\n def one(self): pass\n def two(self): pass\n def three(self): pass\n def four(self): pass\n\n >>> A.members\n (\'__module__\', \'one\', \'two\', \'three\', \'four\')\n\nWhen the class definition for *A* gets executed, the process begins\nwith calling the metaclass\'s ``__prepare__()`` method which returns an\nempty ``collections.OrderedDict``. That mapping records the methods\nand attributes of *A* as they are defined within the body of the class\nstatement. Once those definitions are executed, the ordered dictionary\nis fully populated and the metaclass\'s ``__new__()`` method gets\ninvoked. That method builds the new type and it saves the ordered\ndictionary keys in an attribute called ``members``.\n\n\nCustomizing instance and subclass checks\n========================================\n\nThe following methods are used to override the default behavior of the\n``isinstance()`` and ``issubclass()`` built-in functions.\n\nIn particular, the metaclass ``abc.ABCMeta`` implements these methods\nin order to allow the addition of Abstract Base Classes (ABCs) as\n"virtual base classes" to any class or type (including built-in\ntypes), including other ABCs.\n\nclass.__instancecheck__(self, instance)\n\n Return true if *instance* should be considered a (direct or\n indirect) instance of *class*. If defined, called to implement\n ``isinstance(instance, class)``.\n\nclass.__subclasscheck__(self, subclass)\n\n Return true if *subclass* should be considered a (direct or\n indirect) subclass of *class*. 
If defined, called to implement\n ``issubclass(subclass, class)``.\n\nNote that these methods are looked up on the type (metaclass) of a\nclass. They cannot be defined as class methods in the actual class.\nThis is consistent with the lookup of special methods that are called\non instances, only in this case the instance is itself a class.\n\nSee also:\n\n **PEP 3119** - Introducing Abstract Base Classes\n Includes the specification for customizing ``isinstance()`` and\n ``issubclass()`` behavior through ``__instancecheck__()`` and\n ``__subclasscheck__()``, with motivation for this functionality\n in the context of adding Abstract Base Classes (see the ``abc``\n module) to the language.\n\n\nEmulating callable objects\n==========================\n\nobject.__call__(self[, args...])\n\n Called when the instance is "called" as a function; if this method\n is defined, ``x(arg1, arg2, ...)`` is a shorthand for\n ``x.__call__(arg1, arg2, ...)``.\n\n\nEmulating container types\n=========================\n\nThe following methods can be defined to implement container objects.\nContainers usually are sequences (such as lists or tuples) or mappings\n(like dictionaries), but can represent other containers as well. The\nfirst set of methods is used either to emulate a sequence or to\nemulate a mapping; the difference is that for a sequence, the\nallowable keys should be the integers *k* for which ``0 <= k < N``\nwhere *N* is the length of the sequence, or slice objects, which\ndefine a range of items. It is also recommended that mappings provide\nthe methods ``keys()``, ``values()``, ``items()``, ``get()``,\n``clear()``, ``setdefault()``, ``pop()``, ``popitem()``, ``copy()``,\nand ``update()`` behaving similar to those for Python\'s standard\ndictionary objects. The ``collections`` module provides a\n``MutableMapping`` abstract base class to help create those methods\nfrom a base set of ``__getitem__()``, ``__setitem__()``,\n``__delitem__()``, and ``keys()``. Mutable sequences should provide\nmethods ``append()``, ``count()``, ``index()``, ``extend()``,\n``insert()``, ``pop()``, ``remove()``, ``reverse()`` and ``sort()``,\nlike Python standard list objects. Finally, sequence types should\nimplement addition (meaning concatenation) and multiplication (meaning\nrepetition) by defining the methods ``__add__()``, ``__radd__()``,\n``__iadd__()``, ``__mul__()``, ``__rmul__()`` and ``__imul__()``\ndescribed below; they should not define other numerical operators. It\nis recommended that both mappings and sequences implement the\n``__contains__()`` method to allow efficient use of the ``in``\noperator; for mappings, ``in`` should search the mapping\'s keys; for\nsequences, it should search through the values. It is further\nrecommended that both mappings and sequences implement the\n``__iter__()`` method to allow efficient iteration through the\ncontainer; for mappings, ``__iter__()`` should be the same as\n``keys()``; for sequences, it should iterate through the values.\n\nobject.__len__(self)\n\n Called to implement the built-in function ``len()``. Should return\n the length of the object, an integer ``>=`` 0. Also, an object\n that doesn\'t define a ``__bool__()`` method and whose ``__len__()``\n method returns zero is considered to be false in a Boolean context.\n\nNote: Slicing is done exclusively with the following three methods. A\n call like\n\n a[1:2] = b\n\n is translated to\n\n a[slice(1, 2, None)] = b\n\n and so forth. 
Missing slice items are always filled in with\n ``None``.\n\nobject.__getitem__(self, key)\n\n Called to implement evaluation of ``self[key]``. For sequence\n types, the accepted keys should be integers and slice objects.\n Note that the special interpretation of negative indexes (if the\n class wishes to emulate a sequence type) is up to the\n ``__getitem__()`` method. If *key* is of an inappropriate type,\n ``TypeError`` may be raised; if of a value outside the set of\n indexes for the sequence (after any special interpretation of\n negative values), ``IndexError`` should be raised. For mapping\n types, if *key* is missing (not in the container), ``KeyError``\n should be raised.\n\n Note: ``for`` loops expect that an ``IndexError`` will be raised for\n illegal indexes to allow proper detection of the end of the\n sequence.\n\nobject.__setitem__(self, key, value)\n\n Called to implement assignment to ``self[key]``. Same note as for\n ``__getitem__()``. This should only be implemented for mappings if\n the objects support changes to the values for keys, or if new keys\n can be added, or for sequences if elements can be replaced. The\n same exceptions should be raised for improper *key* values as for\n the ``__getitem__()`` method.\n\nobject.__delitem__(self, key)\n\n Called to implement deletion of ``self[key]``. Same note as for\n ``__getitem__()``. This should only be implemented for mappings if\n the objects support removal of keys, or for sequences if elements\n can be removed from the sequence. The same exceptions should be\n raised for improper *key* values as for the ``__getitem__()``\n method.\n\nobject.__iter__(self)\n\n This method is called when an iterator is required for a container.\n This method should return a new iterator object that can iterate\n over all the objects in the container. For mappings, it should\n iterate over the keys of the container, and should also be made\n available as the method ``keys()``.\n\n Iterator objects also need to implement this method; they are\n required to return themselves. For more information on iterator\n objects, see *Iterator Types*.\n\nobject.__reversed__(self)\n\n Called (if present) by the ``reversed()`` built-in to implement\n reverse iteration. It should return a new iterator object that\n iterates over all the objects in the container in reverse order.\n\n If the ``__reversed__()`` method is not provided, the\n ``reversed()`` built-in will fall back to using the sequence\n protocol (``__len__()`` and ``__getitem__()``). Objects that\n support the sequence protocol should only provide\n ``__reversed__()`` if they can provide an implementation that is\n more efficient than the one provided by ``reversed()``.\n\nThe membership test operators (``in`` and ``not in``) are normally\nimplemented as an iteration through a sequence. However, container\nobjects can supply the following special method with a more efficient\nimplementation, which also does not require the object be a sequence.\n\nobject.__contains__(self, item)\n\n Called to implement membership test operators. Should return true\n if *item* is in *self*, false otherwise. 
For mapping objects, this\n should consider the keys of the mapping rather than the values or\n the key-item pairs.\n\n For objects that don\'t define ``__contains__()``, the membership\n test first tries iteration via ``__iter__()``, then the old\n sequence iteration protocol via ``__getitem__()``, see *this\n section in the language reference*.\n\n\nEmulating numeric types\n=======================\n\nThe following methods can be defined to emulate numeric objects.\nMethods corresponding to operations that are not supported by the\nparticular kind of number implemented (e.g., bitwise operations for\nnon-integral numbers) should be left undefined.\n\nobject.__add__(self, other)\nobject.__sub__(self, other)\nobject.__mul__(self, other)\nobject.__truediv__(self, other)\nobject.__floordiv__(self, other)\nobject.__mod__(self, other)\nobject.__divmod__(self, other)\nobject.__pow__(self, other[, modulo])\nobject.__lshift__(self, other)\nobject.__rshift__(self, other)\nobject.__and__(self, other)\nobject.__xor__(self, other)\nobject.__or__(self, other)\n\n These methods are called to implement the binary arithmetic\n operations (``+``, ``-``, ``*``, ``/``, ``//``, ``%``,\n ``divmod()``, ``pow()``, ``**``, ``<<``, ``>>``, ``&``, ``^``,\n ``|``). For instance, to evaluate the expression ``x + y``, where\n *x* is an instance of a class that has an ``__add__()`` method,\n ``x.__add__(y)`` is called. The ``__divmod__()`` method should be\n the equivalent to using ``__floordiv__()`` and ``__mod__()``; it\n should not be related to ``__truediv__()``. Note that\n ``__pow__()`` should be defined to accept an optional third\n argument if the ternary version of the built-in ``pow()`` function\n is to be supported.\n\n If one of those methods does not support the operation with the\n supplied arguments, it should return ``NotImplemented``.\n\nobject.__radd__(self, other)\nobject.__rsub__(self, other)\nobject.__rmul__(self, other)\nobject.__rtruediv__(self, other)\nobject.__rfloordiv__(self, other)\nobject.__rmod__(self, other)\nobject.__rdivmod__(self, other)\nobject.__rpow__(self, other)\nobject.__rlshift__(self, other)\nobject.__rrshift__(self, other)\nobject.__rand__(self, other)\nobject.__rxor__(self, other)\nobject.__ror__(self, other)\n\n These methods are called to implement the binary arithmetic\n operations (``+``, ``-``, ``*``, ``/``, ``//``, ``%``,\n ``divmod()``, ``pow()``, ``**``, ``<<``, ``>>``, ``&``, ``^``,\n ``|``) with reflected (swapped) operands. These functions are only\n called if the left operand does not support the corresponding\n operation and the operands are of different types. [2] For\n instance, to evaluate the expression ``x - y``, where *y* is an\n instance of a class that has an ``__rsub__()`` method,\n ``y.__rsub__(x)`` is called if ``x.__sub__(y)`` returns\n *NotImplemented*.\n\n Note that ternary ``pow()`` will not try calling ``__rpow__()``\n (the coercion rules would become too complicated).\n\n Note: If the right operand\'s type is a subclass of the left operand\'s\n type and that subclass provides the reflected method for the\n operation, this method will be called before the left operand\'s\n non-reflected method. 
This behavior allows subclasses to\n override their ancestors\' operations.\n\nobject.__iadd__(self, other)\nobject.__isub__(self, other)\nobject.__imul__(self, other)\nobject.__itruediv__(self, other)\nobject.__ifloordiv__(self, other)\nobject.__imod__(self, other)\nobject.__ipow__(self, other[, modulo])\nobject.__ilshift__(self, other)\nobject.__irshift__(self, other)\nobject.__iand__(self, other)\nobject.__ixor__(self, other)\nobject.__ior__(self, other)\n\n These methods are called to implement the augmented arithmetic\n assignments (``+=``, ``-=``, ``*=``, ``/=``, ``//=``, ``%=``,\n ``**=``, ``<<=``, ``>>=``, ``&=``, ``^=``, ``|=``). These methods\n should attempt to do the operation in-place (modifying *self*) and\n return the result (which could be, but does not have to be,\n *self*). If a specific method is not defined, the augmented\n assignment falls back to the normal methods. For instance, to\n execute the statement ``x += y``, where *x* is an instance of a\n class that has an ``__iadd__()`` method, ``x.__iadd__(y)`` is\n called. If *x* is an instance of a class that does not define a\n ``__iadd__()`` method, ``x.__add__(y)`` and ``y.__radd__(x)`` are\n considered, as with the evaluation of ``x + y``.\n\nobject.__neg__(self)\nobject.__pos__(self)\nobject.__abs__(self)\nobject.__invert__(self)\n\n Called to implement the unary arithmetic operations (``-``, ``+``,\n ``abs()`` and ``~``).\n\nobject.__complex__(self)\nobject.__int__(self)\nobject.__float__(self)\nobject.__round__(self[, n])\n\n Called to implement the built-in functions ``complex()``,\n ``int()``, ``float()`` and ``round()``. Should return a value of\n the appropriate type.\n\nobject.__index__(self)\n\n Called to implement ``operator.index()``. Also called whenever\n Python needs an integer object (such as in slicing, or in the\n built-in ``bin()``, ``hex()`` and ``oct()`` functions). Must return\n an integer.\n\n\nWith Statement Context Managers\n===============================\n\nA *context manager* is an object that defines the runtime context to\nbe established when executing a ``with`` statement. The context\nmanager handles the entry into, and the exit from, the desired runtime\ncontext for the execution of the block of code. Context managers are\nnormally invoked using the ``with`` statement (described in section\n*The with statement*), but can also be used by directly invoking their\nmethods.\n\nTypical uses of context managers include saving and restoring various\nkinds of global state, locking and unlocking resources, closing opened\nfiles, etc.\n\nFor more information on context managers, see *Context Manager Types*.\n\nobject.__enter__(self)\n\n Enter the runtime context related to this object. The ``with``\n statement will bind this method\'s return value to the target(s)\n specified in the ``as`` clause of the statement, if any.\n\nobject.__exit__(self, exc_type, exc_value, traceback)\n\n Exit the runtime context related to this object. The parameters\n describe the exception that caused the context to be exited. If the\n context was exited without an exception, all three arguments will\n be ``None``.\n\n If an exception is supplied, and the method wishes to suppress the\n exception (i.e., prevent it from being propagated), it should\n return a true value. 
Otherwise, the exception will be processed\n normally upon exit from this method.\n\n Note that ``__exit__()`` methods should not reraise the passed-in\n exception; this is the caller\'s responsibility.\n\nSee also:\n\n **PEP 0343** - The "with" statement\n The specification, background, and examples for the Python\n ``with`` statement.\n\n\nSpecial method lookup\n=====================\n\nFor custom classes, implicit invocations of special methods are only\nguaranteed to work correctly if defined on an object\'s type, not in\nthe object\'s instance dictionary. That behaviour is the reason why\nthe following code raises an exception:\n\n >>> class C:\n ... pass\n ...\n >>> c = C()\n >>> c.__len__ = lambda: 5\n >>> len(c)\n Traceback (most recent call last):\n File "<stdin>", line 1, in <module>\n TypeError: object of type \'C\' has no len()\n\nThe rationale behind this behaviour lies with a number of special\nmethods such as ``__hash__()`` and ``__repr__()`` that are implemented\nby all objects, including type objects. If the implicit lookup of\nthese methods used the conventional lookup process, they would fail\nwhen invoked on the type object itself:\n\n >>> 1 .__hash__() == hash(1)\n True\n >>> int.__hash__() == hash(int)\n Traceback (most recent call last):\n File "<stdin>", line 1, in <module>\n TypeError: descriptor \'__hash__\' of \'int\' object needs an argument\n\nIncorrectly attempting to invoke an unbound method of a class in this\nway is sometimes referred to as \'metaclass confusion\', and is avoided\nby bypassing the instance when looking up special methods:\n\n >>> type(1).__hash__(1) == hash(1)\n True\n >>> type(int).__hash__(int) == hash(int)\n True\n\nIn addition to bypassing any instance attributes in the interest of\ncorrectness, implicit special method lookup generally also bypasses\nthe ``__getattribute__()`` method even of the object\'s metaclass:\n\n >>> class Meta(type):\n ... def __getattribute__(*args):\n ... print("Metaclass getattribute invoked")\n ... return type.__getattribute__(*args)\n ...\n >>> class C(object, metaclass=Meta):\n ... def __len__(self):\n ... return 10\n ... def __getattribute__(*args):\n ... print("Class getattribute invoked")\n ... return object.__getattribute__(*args)\n ...\n >>> c = C()\n >>> c.__len__() # Explicit lookup via instance\n Class getattribute invoked\n 10\n >>> type(c).__len__(c) # Explicit lookup via type\n Metaclass getattribute invoked\n 10\n >>> len(c) # Implicit lookup\n 10\n\nBypassing the ``__getattribute__()`` machinery in this fashion\nprovides significant scope for speed optimisations within the\ninterpreter, at the cost of some flexibility in the handling of\nspecial methods (the special method *must* be set on the class object\nitself in order to be consistently invoked by the interpreter).\n\n-[ Footnotes ]-\n\n[1] It *is* possible in some cases to change an object\'s type, under\n certain controlled conditions. It generally isn\'t a good idea\n though, since it can lead to some very strange behaviour if it is\n handled incorrectly.\n\n[2] For operands of the same type, it is assumed that if the non-\n reflected method (such as ``__add__()``) fails the operation is\n not supported, which is why the reflected method is not called.\n',
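The descriptor protocol described in the entry above is compact enough to show end to end. A minimal sketch, not taken from the documentation (the ``Typed`` and ``Point`` names are invented for the example):

   class Typed:
       # A data descriptor: it defines both __get__() and __set__(),
       # so it takes precedence over the instance dictionary.
       def __init__(self, name, kind):
           self.name = name          # key used in the instance __dict__
           self.kind = kind          # required type for the attribute
       def __get__(self, instance, owner):
           if instance is None:      # attribute accessed on the class
               return self
           return instance.__dict__[self.name]
       def __set__(self, instance, value):
           if not isinstance(value, self.kind):
               raise TypeError('expected %s' % self.kind.__name__)
           instance.__dict__[self.name] = value

   class Point:
       x = Typed('x', int)
       y = Typed('y', int)

   >>> p = Point()
   >>> p.x = 3          # routed through Typed.__set__()
   >>> p.x              # Typed.__get__() wins over p.__dict__
   3
   >>> p.y = 'oops'
   Traceback (most recent call last):
     ...
   TypeError: expected int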
- 'string-methods': '\nString Methods\n**************\n\nString objects support the methods listed below.\n\nIn addition, Python\'s strings support the sequence type methods\ndescribed in the *Sequence Types --- str, bytes, bytearray, list,\ntuple, range* section. To output formatted strings, see the *String\nFormatting* section. Also, see the ``re`` module for string functions\nbased on regular expressions.\n\nstr.capitalize()\n\n Return a copy of the string with its first character capitalized\n and the rest lowercased.\n\nstr.center(width[, fillchar])\n\n Return the string centered in a string of length *width*. Padding\n is done using the specified *fillchar* (default is a space).\n\nstr.count(sub[, start[, end]])\n\n Return the number of non-overlapping occurrences of substring *sub*\n in the range [*start*, *end*]. Optional arguments *start* and\n *end* are interpreted as in slice notation.\n\nstr.encode(encoding="utf-8", errors="strict")\n\n Return an encoded version of the string as a bytes object. Default\n encoding is ``\'utf-8\'``. *errors* may be given to set a different\n error handling scheme. The default for *errors* is ``\'strict\'``,\n meaning that encoding errors raise a ``UnicodeError``. Other\n possible values are ``\'ignore\'``, ``\'replace\'``,\n ``\'xmlcharrefreplace\'``, ``\'backslashreplace\'`` and any other name\n registered via ``codecs.register_error()``, see section *Codec Base\n Classes*. For a list of possible encodings, see section *Standard\n Encodings*.\n\n Changed in version 3.1: Support for keyword arguments added.\n\nstr.endswith(suffix[, start[, end]])\n\n Return ``True`` if the string ends with the specified *suffix*,\n otherwise return ``False``. *suffix* can also be a tuple of\n suffixes to look for. With optional *start*, test beginning at\n that position. With optional *end*, stop comparing at that\n position.\n\nstr.expandtabs([tabsize])\n\n Return a copy of the string where all tab characters are replaced\n by zero or more spaces, depending on the current column and the\n given tab size. The column number is reset to zero after each\n newline occurring in the string. If *tabsize* is not given, a tab\n size of ``8`` characters is assumed. This doesn\'t understand other\n non-printing characters or escape sequences.\n\nstr.find(sub[, start[, end]])\n\n Return the lowest index in the string where substring *sub* is\n found, such that *sub* is contained in the slice ``s[start:end]``.\n Optional arguments *start* and *end* are interpreted as in slice\n notation. Return ``-1`` if *sub* is not found.\n\n Note: The ``find()`` method should be used only if you need to know the\n position of *sub*. To check if *sub* is a substring or not, use\n the ``in`` operator:\n\n >>> \'Py\' in \'Python\'\n True\n\nstr.format(*args, **kwargs)\n\n Perform a string formatting operation. The string on which this\n method is called can contain literal text or replacement fields\n delimited by braces ``{}``. Each replacement field contains either\n the numeric index of a positional argument, or the name of a\n keyword argument. 
Returns a copy of the string where each\n replacement field is replaced with the string value of the\n corresponding argument.\n\n >>> "The sum of 1 + 2 is {0}".format(1+2)\n \'The sum of 1 + 2 is 3\'\n\n See *Format String Syntax* for a description of the various\n formatting options that can be specified in format strings.\n\nstr.format_map(mapping)\n\n Similar to ``str.format(**mapping)``, except that ``mapping`` is\n used directly and not copied to a ``dict`` . This is useful if for\n example ``mapping`` is a dict subclass:\n\n >>> class Default(dict):\n ... def __missing__(self, key):\n ... return key\n ...\n >>> \'{name} was born in {country}\'.format_map(Default(name=\'Guido\'))\n \'Guido was born in country\'\n\n New in version 3.2.\n\nstr.index(sub[, start[, end]])\n\n Like ``find()``, but raise ``ValueError`` when the substring is not\n found.\n\nstr.isalnum()\n\n Return true if all characters in the string are alphanumeric and\n there is at least one character, false otherwise. A character\n ``c`` is alphanumeric if one of the following returns ``True``:\n ``c.isalpha()``, ``c.isdecimal()``, ``c.isdigit()``, or\n ``c.isnumeric()``.\n\nstr.isalpha()\n\n Return true if all characters in the string are alphabetic and\n there is at least one character, false otherwise. Alphabetic\n characters are those characters defined in the Unicode character\n database as "Letter", i.e., those with general category property\n being one of "Lm", "Lt", "Lu", "Ll", or "Lo". Note that this is\n different from the "Alphabetic" property defined in the Unicode\n Standard.\n\nstr.isdecimal()\n\n Return true if all characters in the string are decimal characters\n and there is at least one character, false otherwise. Decimal\n characters are those from general category "Nd". This category\n includes digit characters, and all characters that can be used to\n form decimal-radix numbers, e.g. U+0660, ARABIC-INDIC DIGIT ZERO.\n\nstr.isdigit()\n\n Return true if all characters in the string are digits and there is\n at least one character, false otherwise. Digits include decimal\n characters and digits that need special handling, such as the\n compatibility superscript digits. Formally, a digit is a character\n that has the property value Numeric_Type=Digit or\n Numeric_Type=Decimal.\n\nstr.isidentifier()\n\n Return true if the string is a valid identifier according to the\n language definition, section *Identifiers and keywords*.\n\nstr.islower()\n\n Return true if all cased characters [4] in the string are lowercase\n and there is at least one cased character, false otherwise.\n\nstr.isnumeric()\n\n Return true if all characters in the string are numeric characters,\n and there is at least one character, false otherwise. Numeric\n characters include digit characters, and all characters that have\n the Unicode numeric value property, e.g. U+2155, VULGAR FRACTION\n ONE FIFTH. Formally, numeric characters are those with the\n property value Numeric_Type=Digit, Numeric_Type=Decimal or\n Numeric_Type=Numeric.\n\nstr.isprintable()\n\n Return true if all characters in the string are printable or the\n string is empty, false otherwise. Nonprintable characters are\n those characters defined in the Unicode character database as\n "Other" or "Separator", excepting the ASCII space (0x20) which is\n considered printable. (Note that printable characters in this\n context are those which should not be escaped when ``repr()`` is\n invoked on a string. 
It has no bearing on the handling of strings\n written to ``sys.stdout`` or ``sys.stderr``.)\n\nstr.isspace()\n\n Return true if there are only whitespace characters in the string\n and there is at least one character, false otherwise. Whitespace\n characters are those characters defined in the Unicode character\n database as "Other" or "Separator" and those with bidirectional\n property being one of "WS", "B", or "S".\n\nstr.istitle()\n\n Return true if the string is a titlecased string and there is at\n least one character, for example uppercase characters may only\n follow uncased characters and lowercase characters only cased ones.\n Return false otherwise.\n\nstr.isupper()\n\n Return true if all cased characters [4] in the string are uppercase\n and there is at least one cased character, false otherwise.\n\nstr.join(iterable)\n\n Return a string which is the concatenation of the strings in the\n *iterable* *iterable*. A ``TypeError`` will be raised if there are\n any non-string values in *iterable*, including ``bytes`` objects.\n The separator between elements is the string providing this method.\n\nstr.ljust(width[, fillchar])\n\n Return the string left justified in a string of length *width*.\n Padding is done using the specified *fillchar* (default is a\n space). The original string is returned if *width* is less than or\n equal to ``len(s)``.\n\nstr.lower()\n\n Return a copy of the string with all the cased characters [4]\n converted to lowercase.\n\nstr.lstrip([chars])\n\n Return a copy of the string with leading characters removed. The\n *chars* argument is a string specifying the set of characters to be\n removed. If omitted or ``None``, the *chars* argument defaults to\n removing whitespace. The *chars* argument is not a prefix; rather,\n all combinations of its values are stripped:\n\n >>> \' spacious \'.lstrip()\n \'spacious \'\n >>> \'www.example.com\'.lstrip(\'cmowz.\')\n \'example.com\'\n\nstatic str.maketrans(x[, y[, z]])\n\n This static method returns a translation table usable for\n ``str.translate()``.\n\n If there is only one argument, it must be a dictionary mapping\n Unicode ordinals (integers) or characters (strings of length 1) to\n Unicode ordinals, strings (of arbitrary lengths) or None.\n Character keys will then be converted to ordinals.\n\n If there are two arguments, they must be strings of equal length,\n and in the resulting dictionary, each character in x will be mapped\n to the character at the same position in y. If there is a third\n argument, it must be a string, whose characters will be mapped to\n None in the result.\n\nstr.partition(sep)\n\n Split the string at the first occurrence of *sep*, and return a\n 3-tuple containing the part before the separator, the separator\n itself, and the part after the separator. If the separator is not\n found, return a 3-tuple containing the string itself, followed by\n two empty strings.\n\nstr.replace(old, new[, count])\n\n Return a copy of the string with all occurrences of substring *old*\n replaced by *new*. If the optional argument *count* is given, only\n the first *count* occurrences are replaced.\n\nstr.rfind(sub[, start[, end]])\n\n Return the highest index in the string where substring *sub* is\n found, such that *sub* is contained within ``s[start:end]``.\n Optional arguments *start* and *end* are interpreted as in slice\n notation. 
Return ``-1`` on failure.\n\nstr.rindex(sub[, start[, end]])\n\n Like ``rfind()`` but raises ``ValueError`` when the substring *sub*\n is not found.\n\nstr.rjust(width[, fillchar])\n\n Return the string right justified in a string of length *width*.\n Padding is done using the specified *fillchar* (default is a\n space). The original string is returned if *width* is less than or\n equal to ``len(s)``.\n\nstr.rpartition(sep)\n\n Split the string at the last occurrence of *sep*, and return a\n 3-tuple containing the part before the separator, the separator\n itself, and the part after the separator. If the separator is not\n found, return a 3-tuple containing two empty strings, followed by\n the string itself.\n\nstr.rsplit([sep[, maxsplit]])\n\n Return a list of the words in the string, using *sep* as the\n delimiter string. If *maxsplit* is given, at most *maxsplit* splits\n are done, the *rightmost* ones. If *sep* is not specified or\n ``None``, any whitespace string is a separator. Except for\n splitting from the right, ``rsplit()`` behaves like ``split()``\n which is described in detail below.\n\nstr.rstrip([chars])\n\n Return a copy of the string with trailing characters removed. The\n *chars* argument is a string specifying the set of characters to be\n removed. If omitted or ``None``, the *chars* argument defaults to\n removing whitespace. The *chars* argument is not a suffix; rather,\n all combinations of its values are stripped:\n\n >>> \' spacious \'.rstrip()\n \' spacious\'\n >>> \'mississippi\'.rstrip(\'ipz\')\n \'mississ\'\n\nstr.split([sep[, maxsplit]])\n\n Return a list of the words in the string, using *sep* as the\n delimiter string. If *maxsplit* is given, at most *maxsplit*\n splits are done (thus, the list will have at most ``maxsplit+1``\n elements). If *maxsplit* is not specified, then there is no limit\n on the number of splits (all possible splits are made).\n\n If *sep* is given, consecutive delimiters are not grouped together\n and are deemed to delimit empty strings (for example,\n ``\'1,,2\'.split(\',\')`` returns ``[\'1\', \'\', \'2\']``). The *sep*\n argument may consist of multiple characters (for example,\n ``\'1<>2<>3\'.split(\'<>\')`` returns ``[\'1\', \'2\', \'3\']``). Splitting\n an empty string with a specified separator returns ``[\'\']``.\n\n If *sep* is not specified or is ``None``, a different splitting\n algorithm is applied: runs of consecutive whitespace are regarded\n as a single separator, and the result will contain no empty strings\n at the start or end if the string has leading or trailing\n whitespace. Consequently, splitting an empty string or a string\n consisting of just whitespace with a ``None`` separator returns\n ``[]``.\n\n For example, ``\' 1 2 3 \'.split()`` returns ``[\'1\', \'2\', \'3\']``,\n and ``\' 1 2 3 \'.split(None, 1)`` returns ``[\'1\', \'2 3 \']``.\n\nstr.splitlines([keepends])\n\n Return a list of the lines in the string, breaking at line\n boundaries. Line breaks are not included in the resulting list\n unless *keepends* is given and true.\n\nstr.startswith(prefix[, start[, end]])\n\n Return ``True`` if string starts with the *prefix*, otherwise\n return ``False``. *prefix* can also be a tuple of prefixes to look\n for. With optional *start*, test string beginning at that\n position. With optional *end*, stop comparing string at that\n position.\n\nstr.strip([chars])\n\n Return a copy of the string with the leading and trailing\n characters removed. 
The *chars* argument is a string specifying the\n set of characters to be removed. If omitted or ``None``, the\n *chars* argument defaults to removing whitespace. The *chars*\n argument is not a prefix or suffix; rather, all combinations of its\n values are stripped:\n\n >>> \' spacious \'.strip()\n \'spacious\'\n >>> \'www.example.com\'.strip(\'cmowz.\')\n \'example\'\n\nstr.swapcase()\n\n Return a copy of the string with uppercase characters converted to\n lowercase and vice versa.\n\nstr.title()\n\n Return a titlecased version of the string where words start with an\n uppercase character and the remaining characters are lowercase.\n\n The algorithm uses a simple language-independent definition of a\n word as groups of consecutive letters. The definition works in\n many contexts but it means that apostrophes in contractions and\n possessives form word boundaries, which may not be the desired\n result:\n\n >>> "they\'re bill\'s friends from the UK".title()\n "They\'Re Bill\'S Friends From The Uk"\n\n A workaround for apostrophes can be constructed using regular\n expressions:\n\n >>> import re\n >>> def titlecase(s):\n return re.sub(r"[A-Za-z]+(\'[A-Za-z]+)?",\n lambda mo: mo.group(0)[0].upper() +\n mo.group(0)[1:].lower(),\n s)\n\n >>> titlecase("they\'re bill\'s friends.")\n "They\'re Bill\'s Friends."\n\nstr.translate(map)\n\n Return a copy of the string where all characters have been mapped\n through *map*, which must be a dictionary of Unicode ordinals\n (integers) to Unicode ordinals, strings or ``None``. Unmapped\n characters are left untouched. Characters mapped to ``None`` are\n deleted.\n\n You can use ``str.maketrans()`` to create a translation map from\n character-to-character mappings in different formats.\n\n Note: An even more flexible approach is to create a custom character\n mapping codec using the ``codecs`` module (see\n ``encodings.cp1251`` for an example).\n\nstr.upper()\n\n Return a copy of the string with all the cased characters [4]\n converted to uppercase. Note that ``str.upper().isupper()`` might\n be ``False`` if ``s`` contains uncased characters or if the Unicode\n category of the resulting character(s) is not "Lu" (Letter,\n uppercase), but e.g. "Lt" (Letter, titlecase).\n\nstr.zfill(width)\n\n Return the numeric string left filled with zeros in a string of\n length *width*. A sign prefix is handled correctly. The original\n string is returned if *width* is less than or equal to ``len(s)``.\n',
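The ``maketrans()``/``translate()`` pair described in the entry above is easiest to see interactively; a short sketch (the particular strings are arbitrary):

   >>> table = str.maketrans('abc', 'xyz', '!')   # two equal-length strings, plus deletions
   >>> 'a!b!c'.translate(table)
   'xyz'
   >>> 'aAbB'.translate(str.maketrans({'a': None, 'b': '**'}))
   'A**B'

The one-argument form takes a mapping directly: keys mapped to ``None`` are deleted, and values may be strings of any length.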
- 'strings': '\nString and Bytes literals\n*************************\n\nString literals are described by the following lexical definitions:\n\n stringliteral ::= [stringprefix](shortstring | longstring)\n stringprefix ::= "r" | "R"\n shortstring ::= "\'" shortstringitem* "\'" | \'"\' shortstringitem* \'"\'\n longstring ::= "\'\'\'" longstringitem* "\'\'\'" | \'"""\' longstringitem* \'"""\'\n shortstringitem ::= shortstringchar | stringescapeseq\n longstringitem ::= longstringchar | stringescapeseq\n shortstringchar ::= <any source character except "\\" or newline or the quote>\n longstringchar ::= <any source character except "\\">\n stringescapeseq ::= "\\" <any source character>\n\n bytesliteral ::= bytesprefix(shortbytes | longbytes)\n bytesprefix ::= "b" | "B" | "br" | "Br" | "bR" | "BR"\n shortbytes ::= "\'" shortbytesitem* "\'" | \'"\' shortbytesitem* \'"\'\n longbytes ::= "\'\'\'" longbytesitem* "\'\'\'" | \'"""\' longbytesitem* \'"""\'\n shortbytesitem ::= shortbyteschar | bytesescapeseq\n longbytesitem ::= longbyteschar | bytesescapeseq\n shortbyteschar ::= <any ASCII character except "\\" or newline or the quote>\n longbyteschar ::= <any ASCII character except "\\">\n bytesescapeseq ::= "\\" <any ASCII character>\n\nOne syntactic restriction not indicated by these productions is that\nwhitespace is not allowed between the ``stringprefix`` or\n``bytesprefix`` and the rest of the literal. The source character set\nis defined by the encoding declaration; it is UTF-8 if no encoding\ndeclaration is given in the source file; see section *Encoding\ndeclarations*.\n\nIn plain English: Both types of literals can be enclosed in matching\nsingle quotes (``\'``) or double quotes (``"``). They can also be\nenclosed in matching groups of three single or double quotes (these\nare generally referred to as *triple-quoted strings*). The backslash\n(``\\``) character is used to escape characters that otherwise have a\nspecial meaning, such as newline, backslash itself, or the quote\ncharacter.\n\nBytes literals are always prefixed with ``\'b\'`` or ``\'B\'``; they\nproduce an instance of the ``bytes`` type instead of the ``str`` type.\nThey may only contain ASCII characters; bytes with a numeric value of\n128 or greater must be expressed with escapes.\n\nBoth string and bytes literals may optionally be prefixed with a\nletter ``\'r\'`` or ``\'R\'``; such strings are called *raw strings* and\ntreat backslashes as literal characters. As a result, in string\nliterals, ``\'\\U\'`` and ``\'\\u\'`` escapes in raw strings are not treated\nspecially.\n\nIn triple-quoted strings, unescaped newlines and quotes are allowed\n(and are retained), except that three unescaped quotes in a row\nterminate the string. (A "quote" is the character used to open the\nstring, i.e. either ``\'`` or ``"``.)\n\nUnless an ``\'r\'`` or ``\'R\'`` prefix is present, escape sequences in\nstrings are interpreted according to rules similar to those used by\nStandard C. 
The recognized escape sequences are:\n\n+-------------------+-----------------------------------+---------+\n| Escape Sequence | Meaning | Notes |\n+===================+===================================+=========+\n| ``\\newline`` | Backslash and newline ignored | |\n+-------------------+-----------------------------------+---------+\n| ``\\\\`` | Backslash (``\\``) | |\n+-------------------+-----------------------------------+---------+\n| ``\\\'`` | Single quote (``\'``) | |\n+-------------------+-----------------------------------+---------+\n| ``\\"`` | Double quote (``"``) | |\n+-------------------+-----------------------------------+---------+\n| ``\\a`` | ASCII Bell (BEL) | |\n+-------------------+-----------------------------------+---------+\n| ``\\b`` | ASCII Backspace (BS) | |\n+-------------------+-----------------------------------+---------+\n| ``\\f`` | ASCII Formfeed (FF) | |\n+-------------------+-----------------------------------+---------+\n| ``\\n`` | ASCII Linefeed (LF) | |\n+-------------------+-----------------------------------+---------+\n| ``\\r`` | ASCII Carriage Return (CR) | |\n+-------------------+-----------------------------------+---------+\n| ``\\t`` | ASCII Horizontal Tab (TAB) | |\n+-------------------+-----------------------------------+---------+\n| ``\\v`` | ASCII Vertical Tab (VT) | |\n+-------------------+-----------------------------------+---------+\n| ``\\ooo`` | Character with octal value *ooo* | (1,3) |\n+-------------------+-----------------------------------+---------+\n| ``\\xhh`` | Character with hex value *hh* | (2,3) |\n+-------------------+-----------------------------------+---------+\n\nEscape sequences only recognized in string literals are:\n\n+-------------------+-----------------------------------+---------+\n| Escape Sequence | Meaning | Notes |\n+===================+===================================+=========+\n| ``\\N{name}`` | Character named *name* in the | |\n| | Unicode database | |\n+-------------------+-----------------------------------+---------+\n| ``\\uxxxx`` | Character with 16-bit hex value | (4) |\n| | *xxxx* | |\n+-------------------+-----------------------------------+---------+\n| ``\\Uxxxxxxxx`` | Character with 32-bit hex value | (5) |\n| | *xxxxxxxx* | |\n+-------------------+-----------------------------------+---------+\n\nNotes:\n\n1. As in Standard C, up to three octal digits are accepted.\n\n2. Unlike in Standard C, exactly two hex digits are required.\n\n3. In a bytes literal, hexadecimal and octal escapes denote the byte\n with the given value. In a string literal, these escapes denote a\n Unicode character with the given value.\n\n4. Individual code units which form parts of a surrogate pair can be\n encoded using this escape sequence. Exactly four hex digits are\n required.\n\n5. Any Unicode character can be encoded this way, but characters\n outside the Basic Multilingual Plane (BMP) will be encoded using a\n surrogate pair if Python is compiled to use 16-bit code units (the\n default). Exactly eight hex digits are required.\n\nUnlike Standard C, all unrecognized escape sequences are left in the\nstring unchanged, i.e., *the backslash is left in the string*. (This\nbehavior is useful when debugging: if an escape sequence is mistyped,\nthe resulting output is more easily recognized as broken.) 
It is also\nimportant to note that the escape sequences only recognized in string\nliterals fall into the category of unrecognized escapes for bytes\nliterals.\n\nEven in a raw string, string quotes can be escaped with a backslash,\nbut the backslash remains in the string; for example, ``r"\\""`` is a\nvalid string literal consisting of two characters: a backslash and a\ndouble quote; ``r"\\"`` is not a valid string literal (even a raw\nstring cannot end in an odd number of backslashes). Specifically, *a\nraw string cannot end in a single backslash* (since the backslash\nwould escape the following quote character). Note also that a single\nbackslash followed by a newline is interpreted as those two characters\nas part of the string, *not* as a line continuation.\n',
+ 'specialattrs': '\nSpecial Attributes\n******************\n\nThe implementation adds a few special read-only attributes to several\nobject types, where they are relevant. Some of these are not reported\nby the ``dir()`` built-in function.\n\nobject.__dict__\n\n A dictionary or other mapping object used to store an object\'s\n (writable) attributes.\n\ninstance.__class__\n\n The class to which a class instance belongs.\n\nclass.__bases__\n\n The tuple of base classes of a class object.\n\nclass.__name__\n\n The name of the class or type.\n\nclass.__qualname__\n\n The *qualified name* of the class or type.\n\n New in version 3.3.\n\nclass.__mro__\n\n This attribute is a tuple of classes that are considered when\n looking for base classes during method resolution.\n\nclass.mro()\n\n This method can be overridden by a metaclass to customize the\n method resolution order for its instances. It is called at class\n instantiation, and its result is stored in ``__mro__``.\n\nclass.__subclasses__()\n\n Each class keeps a list of weak references to its immediate\n subclasses. This method returns a list of all those references\n still alive. Example:\n\n >>> int.__subclasses__()\n [<class \'bool\'>]\n\n-[ Footnotes ]-\n\n[1] Additional information on these special methods may be found in\n the Python Reference Manual (*Basic customization*).\n\n[2] As a consequence, the list ``[1, 2]`` is considered equal to\n ``[1.0, 2.0]``, and similarly for tuples.\n\n[3] They must have since the parser can\'t tell the type of the\n operands.\n\n[4] Cased characters are those with general category property being\n one of "Lu" (Letter, uppercase), "Ll" (Letter, lowercase), or "Lt"\n (Letter, titlecase).\n\n[5] To format only a tuple you should therefore provide a singleton\n tuple whose only element is the tuple to be formatted.\n',
+ 'specialnames': '\nSpecial method names\n********************\n\nA class can implement certain operations that are invoked by special\nsyntax (such as arithmetic operations or subscripting and slicing) by\ndefining methods with special names. This is Python\'s approach to\n*operator overloading*, allowing classes to define their own behavior\nwith respect to language operators. For instance, if a class defines\na method named ``__getitem__()``, and ``x`` is an instance of this\nclass, then ``x[i]`` is roughly equivalent to ``type(x).__getitem__(x,\ni)``. Except where mentioned, attempts to execute an operation raise\nan exception when no appropriate method is defined (typically\n``AttributeError`` or ``TypeError``).\n\nWhen implementing a class that emulates any built-in type, it is\nimportant that the emulation only be implemented to the degree that it\nmakes sense for the object being modelled. For example, some\nsequences may work well with retrieval of individual elements, but\nextracting a slice may not make sense. (One example of this is the\n``NodeList`` interface in the W3C\'s Document Object Model.)\n\n\nBasic customization\n===================\n\nobject.__new__(cls[, ...])\n\n Called to create a new instance of class *cls*. ``__new__()`` is a\n static method (special-cased so you need not declare it as such)\n that takes the class of which an instance was requested as its\n first argument. The remaining arguments are those passed to the\n object constructor expression (the call to the class). The return\n value of ``__new__()`` should be the new object instance (usually\n an instance of *cls*).\n\n Typical implementations create a new instance of the class by\n invoking the superclass\'s ``__new__()`` method using\n ``super(currentclass, cls).__new__(cls[, ...])`` with appropriate\n arguments and then modifying the newly-created instance as\n necessary before returning it.\n\n If ``__new__()`` returns an instance of *cls*, then the new\n instance\'s ``__init__()`` method will be invoked like\n ``__init__(self[, ...])``, where *self* is the new instance and the\n remaining arguments are the same as were passed to ``__new__()``.\n\n If ``__new__()`` does not return an instance of *cls*, then the new\n instance\'s ``__init__()`` method will not be invoked.\n\n ``__new__()`` is intended mainly to allow subclasses of immutable\n types (like int, str, or tuple) to customize instance creation. It\n is also commonly overridden in custom metaclasses in order to\n customize class creation.\n\nobject.__init__(self[, ...])\n\n Called when the instance is created. The arguments are those\n passed to the class constructor expression. If a base class has an\n ``__init__()`` method, the derived class\'s ``__init__()`` method,\n if any, must explicitly call it to ensure proper initialization of\n the base class part of the instance; for example:\n ``BaseClass.__init__(self, [args...])``. As a special constraint\n on constructors, no value may be returned; doing so will cause a\n ``TypeError`` to be raised at runtime.\n\nobject.__del__(self)\n\n Called when the instance is about to be destroyed. This is also\n called a destructor. If a base class has a ``__del__()`` method,\n the derived class\'s ``__del__()`` method, if any, must explicitly\n call it to ensure proper deletion of the base class part of the\n instance. Note that it is possible (though not recommended!) for\n the ``__del__()`` method to postpone destruction of the instance by\n creating a new reference to it. 
It may then be called at a later\n time when this new reference is deleted. It is not guaranteed that\n ``__del__()`` methods are called for objects that still exist when\n the interpreter exits.\n\n Note: ``del x`` doesn\'t directly call ``x.__del__()`` --- the former\n decrements the reference count for ``x`` by one, and the latter\n is only called when ``x``\'s reference count reaches zero. Some\n common situations that may prevent the reference count of an\n object from going to zero include: circular references between\n objects (e.g., a doubly-linked list or a tree data structure with\n parent and child pointers); a reference to the object on the\n stack frame of a function that caught an exception (the traceback\n stored in ``sys.exc_info()[2]`` keeps the stack frame alive); or\n a reference to the object on the stack frame that raised an\n unhandled exception in interactive mode (the traceback stored in\n ``sys.last_traceback`` keeps the stack frame alive). The first\n situation can only be remedied by explicitly breaking the cycles;\n the latter two situations can be resolved by storing ``None`` in\n ``sys.last_traceback``. Circular references which are garbage are\n detected when the optional cycle detector is enabled (it\'s on by\n default), but can only be cleaned up if there are no Python-\n level ``__del__()`` methods involved. Refer to the documentation\n for the ``gc`` module for more information about how\n ``__del__()`` methods are handled by the cycle detector,\n particularly the description of the ``garbage`` value.\n\n Warning: Due to the precarious circumstances under which ``__del__()``\n methods are invoked, exceptions that occur during their execution\n are ignored, and a warning is printed to ``sys.stderr`` instead.\n Also, when ``__del__()`` is invoked in response to a module being\n deleted (e.g., when execution of the program is done), other\n globals referenced by the ``__del__()`` method may already have\n been deleted or be in the process of being torn down (e.g. the\n import machinery shutting down). For this reason, ``__del__()``\n methods should do the absolute minimum needed to maintain\n external invariants. Starting with version 1.5, Python\n guarantees that globals whose name begins with a single\n underscore are deleted from their module before other globals are\n deleted; if no other references to such globals exist, this may\n help in assuring that imported modules are still available at the\n time when the ``__del__()`` method is called.\n\nobject.__repr__(self)\n\n Called by the ``repr()`` built-in function to compute the\n "official" string representation of an object. If at all possible,\n this should look like a valid Python expression that could be used\n to recreate an object with the same value (given an appropriate\n environment). If this is not possible, a string of the form\n ``<...some useful description...>`` should be returned. The return\n value must be a string object. If a class defines ``__repr__()``\n but not ``__str__()``, then ``__repr__()`` is also used when an\n "informal" string representation of instances of that class is\n required.\n\n This is typically used for debugging, so it is important that the\n representation is information-rich and unambiguous.\n\nobject.__str__(self)\n\n Called by the ``str()`` built-in function and by the ``print()``\n function to compute the "informal" string representation of an\n object. 
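As a minimal illustration (the class and its\n output format are arbitrary):\n\n >>> class Point:\n ... def __init__(self, x, y):\n ... self.x, self.y = x, y\n ... def __repr__(self):\n ... return \'Point(%r, %r)\' % (self.x, self.y)\n ... def __str__(self):\n ... return \'(%s, %s)\' % (self.x, self.y)\n ...\n >>> p = Point(1, 2)\n >>> repr(p)\n \'Point(1, 2)\'\n >>> str(p)\n \'(1, 2)\'\n\n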
This differs from ``__repr__()`` in that it does not have\n to be a valid Python expression: a more convenient or concise\n representation may be used instead. The return value must be a\n string object.\n\nobject.__bytes__(self)\n\n Called by ``bytes()`` to compute a byte-string representation of an\n object. This should return a ``bytes`` object.\n\nobject.__format__(self, format_spec)\n\n Called by the ``format()`` built-in function (and by extension, the\n ``format()`` method of class ``str``) to produce a "formatted"\n string representation of an object. The ``format_spec`` argument is\n a string that contains a description of the formatting options\n desired. The interpretation of the ``format_spec`` argument is up\n to the type implementing ``__format__()``, however most classes\n will either delegate formatting to one of the built-in types, or\n use a similar formatting option syntax.\n\n See *Format Specification Mini-Language* for a description of the\n standard formatting syntax.\n\n The return value must be a string object.\n\nobject.__lt__(self, other)\nobject.__le__(self, other)\nobject.__eq__(self, other)\nobject.__ne__(self, other)\nobject.__gt__(self, other)\nobject.__ge__(self, other)\n\n These are the so-called "rich comparison" methods. The\n correspondence between operator symbols and method names is as\n follows: ``x<y`` calls ``x.__lt__(y)``, ``x<=y`` calls\n ``x.__le__(y)``, ``x==y`` calls ``x.__eq__(y)``, ``x!=y`` calls\n ``x.__ne__(y)``, ``x>y`` calls ``x.__gt__(y)``, and ``x>=y`` calls\n ``x.__ge__(y)``.\n\n A rich comparison method may return the singleton\n ``NotImplemented`` if it does not implement the operation for a\n given pair of arguments. By convention, ``False`` and ``True`` are\n returned for a successful comparison. However, these methods can\n return any value, so if the comparison operator is used in a\n Boolean context (e.g., in the condition of an ``if`` statement),\n Python will call ``bool()`` on the value to determine if the result\n is true or false.\n\n There are no implied relationships among the comparison operators.\n The truth of ``x==y`` does not imply that ``x!=y`` is false.\n Accordingly, when defining ``__eq__()``, one should also define\n ``__ne__()`` so that the operators will behave as expected. See\n the paragraph on ``__hash__()`` for some important notes on\n creating *hashable* objects which support custom comparison\n operations and are usable as dictionary keys.\n\n There are no swapped-argument versions of these methods (to be used\n when the left argument does not support the operation but the right\n argument does); rather, ``__lt__()`` and ``__gt__()`` are each\n other\'s reflection, ``__le__()`` and ``__ge__()`` are each other\'s\n reflection, and ``__eq__()`` and ``__ne__()`` are their own\n reflection.\n\n Arguments to rich comparison methods are never coerced.\n\n To automatically generate ordering operations from a single root\n operation, see ``functools.total_ordering()``.\n\nobject.__hash__(self)\n\n Called by built-in function ``hash()`` and for operations on\n members of hashed collections including ``set``, ``frozenset``, and\n ``dict``. ``__hash__()`` should return an integer. The only\n required property is that objects which compare equal have the same\n hash value; it is advised to somehow mix together (e.g. 
using\n exclusive or) the hash values for the components of the object that\n also play a part in comparison of objects.\n\n If a class does not define an ``__eq__()`` method it should not\n define a ``__hash__()`` operation either; if it defines\n ``__eq__()`` but not ``__hash__()``, its instances will not be\n usable as items in hashable collections. If a class defines\n mutable objects and implements an ``__eq__()`` method, it should\n not implement ``__hash__()``, since the implementation of hashable\n collections requires that a key\'s hash value is immutable (if the\n object\'s hash value changes, it will be in the wrong hash bucket).\n\n User-defined classes have ``__eq__()`` and ``__hash__()`` methods\n by default; with them, all objects compare unequal (except with\n themselves) and ``x.__hash__()`` returns a value derived from\n ``id(x)``.\n\n Classes which inherit a ``__hash__()`` method from a parent class\n but change the meaning of ``__eq__()`` such that the hash value\n returned is no longer appropriate (e.g. by switching to a value-\n based concept of equality instead of the default identity based\n equality) can explicitly flag themselves as being unhashable by\n setting ``__hash__ = None`` in the class definition. Doing so means\n that not only will instances of the class raise an appropriate\n ``TypeError`` when a program attempts to retrieve their hash value,\n but they will also be correctly identified as unhashable when\n checking ``isinstance(obj, collections.Hashable)`` (unlike classes\n which define their own ``__hash__()`` to explicitly raise\n ``TypeError``).\n\n If a class that overrides ``__eq__()`` needs to retain the\n implementation of ``__hash__()`` from a parent class, the\n interpreter must be told this explicitly by setting ``__hash__ =\n <ParentClass>.__hash__``. Otherwise the inheritance of\n ``__hash__()`` will be blocked, just as if ``__hash__`` had been\n explicitly set to ``None``.\n\n Note: By default, the ``__hash__()`` values of str, bytes and\n datetime objects are "salted" with an unpredictable random value.\n Although they remain constant within an individual Python\n process, they are not predictable between repeated invocations of\n Python. This is intended to provide protection against a denial-\n of-service caused by carefully-chosen inputs that exploit the\n worst case performance of a dict insertion, O(n^2) complexity.\n See http://www.ocert.org/advisories/ocert-2011-003.html for\n details. Changing hash values affects the order in which keys are\n retrieved from a dict. Note that Python has never made guarantees\n about this ordering (and it typically varies between 32-bit and\n 64-bit builds). See also ``PYTHONHASHSEED``.\n\n Changed in version 3.3: Hash randomization is enabled by default.\n\nobject.__bool__(self)\n\n Called to implement truth value testing and the built-in operation\n ``bool()``; should return ``False`` or ``True``. When this method\n is not defined, ``__len__()`` is called, if it is defined, and the\n object is considered true if its result is nonzero. If a class\n defines neither ``__len__()`` nor ``__bool__()``, all its instances\n are considered true.\n\n\nCustomizing attribute access\n============================\n\nThe following methods can be defined to customize the meaning of\nattribute access (use of, assignment to, or deletion of ``x.name``)\nfor class instances.\n\nobject.__getattr__(self, name)\n\n Called when an attribute lookup has not found the attribute in the\n usual places (i.e. 
it is not an instance attribute nor is it found\n in the class tree for ``self``). ``name`` is the attribute name.\n This method should return the (computed) attribute value or raise\n an ``AttributeError`` exception.\n\n Note that if the attribute is found through the normal mechanism,\n ``__getattr__()`` is not called. (This is an intentional asymmetry\n between ``__getattr__()`` and ``__setattr__()``.) This is done both\n for efficiency reasons and because otherwise ``__getattr__()``\n would have no way to access other attributes of the instance. Note\n that at least for instance variables, you can fake total control by\n not inserting any values in the instance attribute dictionary (but\n instead inserting them in another object). See the\n ``__getattribute__()`` method below for a way to actually get total\n control over attribute access.\n\nobject.__getattribute__(self, name)\n\n Called unconditionally to implement attribute accesses for\n instances of the class. If the class also defines\n ``__getattr__()``, the latter will not be called unless\n ``__getattribute__()`` either calls it explicitly or raises an\n ``AttributeError``. This method should return the (computed)\n attribute value or raise an ``AttributeError`` exception. In order\n to avoid infinite recursion in this method, its implementation\n should always call the base class method with the same name to\n access any attributes it needs, for example,\n ``object.__getattribute__(self, name)``.\n\n Note: This method may still be bypassed when looking up special methods\n as the result of implicit invocation via language syntax or\n built-in functions. See *Special method lookup*.\n\nobject.__setattr__(self, name, value)\n\n Called when an attribute assignment is attempted. This is called\n instead of the normal mechanism (i.e. store the value in the\n instance dictionary). *name* is the attribute name, *value* is the\n value to be assigned to it.\n\n If ``__setattr__()`` wants to assign to an instance attribute, it\n should call the base class method with the same name, for example,\n ``object.__setattr__(self, name, value)``.\n\nobject.__delattr__(self, name)\n\n Like ``__setattr__()`` but for attribute deletion instead of\n assignment. This should only be implemented if ``del obj.name`` is\n meaningful for the object.\n\nobject.__dir__(self)\n\n Called when ``dir()`` is called on the object. A sequence must be\n returned. ``dir()`` converts the returned sequence to a list and\n sorts it.\n\n\nImplementing Descriptors\n------------------------\n\nThe following methods only apply when an instance of the class\ncontaining the method (a so-called *descriptor* class) appears in an\n*owner* class (the descriptor must be in either the owner\'s class\ndictionary or in the class dictionary for one of its parents). In the\nexamples below, "the attribute" refers to the attribute whose name is\nthe key of the property in the owner class\' ``__dict__``.\n\nobject.__get__(self, instance, owner)\n\n Called to get the attribute of the owner class (class attribute\n access) or of an instance of that class (instance attribute\n access). *owner* is always the owner class, while *instance* is the\n instance that the attribute was accessed through, or ``None`` when\n the attribute is accessed through the *owner*. 
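For example, a minimal\n (illustrative) descriptor whose value is computed on access:\n\n >>> class Ten:\n ... def __get__(self, instance, owner):\n ... return 10\n ...\n >>> class A:\n ... x = Ten() # descriptor stored in the owner class\n ...\n >>> A.x # accessed through the owner: instance is None\n 10\n >>> A().x # accessed through an instance\n 10\n\n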
This method should\n return the (computed) attribute value or raise an\n ``AttributeError`` exception.\n\nobject.__set__(self, instance, value)\n\n Called to set the attribute on an instance *instance* of the owner\n class to a new value, *value*.\n\nobject.__delete__(self, instance)\n\n Called to delete the attribute on an instance *instance* of the\n owner class.\n\n\nInvoking Descriptors\n--------------------\n\nIn general, a descriptor is an object attribute with "binding\nbehavior", one whose attribute access has been overridden by methods\nin the descriptor protocol: ``__get__()``, ``__set__()``, and\n``__delete__()``. If any of those methods are defined for an object,\nit is said to be a descriptor.\n\nThe default behavior for attribute access is to get, set, or delete\nthe attribute from an object\'s dictionary. For instance, ``a.x`` has a\nlookup chain starting with ``a.__dict__[\'x\']``, then\n``type(a).__dict__[\'x\']``, and continuing through the base classes of\n``type(a)`` excluding metaclasses.\n\nHowever, if the looked-up value is an object defining one of the\ndescriptor methods, then Python may override the default behavior and\ninvoke the descriptor method instead. Where this occurs in the\nprecedence chain depends on which descriptor methods were defined and\nhow they were called.\n\nThe starting point for descriptor invocation is a binding, ``a.x``.\nHow the arguments are assembled depends on ``a``:\n\nDirect Call\n The simplest and least common call is when user code directly\n invokes a descriptor method: ``x.__get__(a)``.\n\nInstance Binding\n If binding to an object instance, ``a.x`` is transformed into the\n call: ``type(a).__dict__[\'x\'].__get__(a, type(a))``.\n\nClass Binding\n If binding to a class, ``A.x`` is transformed into the call:\n ``A.__dict__[\'x\'].__get__(None, A)``.\n\nSuper Binding\n If ``a`` is an instance of ``super``, then the binding ``super(B,\n obj).m()`` searches ``obj.__class__.__mro__`` for the base class\n ``A`` immediately preceding ``B`` and then invokes the descriptor\n with the call: ``A.__dict__[\'m\'].__get__(obj, obj.__class__)``.\n\nFor instance bindings, the precedence of descriptor invocation depends\non which descriptor methods are defined. A descriptor can define\nany combination of ``__get__()``, ``__set__()`` and ``__delete__()``.\nIf it does not define ``__get__()``, then accessing the attribute will\nreturn the descriptor object itself unless there is a value in the\nobject\'s instance dictionary. If the descriptor defines ``__set__()``\nand/or ``__delete__()``, it is a data descriptor; if it defines\nneither, it is a non-data descriptor. Normally, data descriptors\ndefine both ``__get__()`` and ``__set__()``, while non-data\ndescriptors have just the ``__get__()`` method. Data descriptors with\n``__set__()`` and ``__get__()`` defined always override a redefinition\nin an instance dictionary. In contrast, non-data descriptors can be\noverridden by instances.\n\nPython methods (including ``staticmethod()`` and ``classmethod()``)\nare implemented as non-data descriptors. Accordingly, instances can\nredefine and override methods. This allows individual instances to\nacquire behaviors that differ from other instances of the same class.\n\nThe ``property()`` function is implemented as a data descriptor.\nAccordingly, instances cannot override the behavior of a property.\n\n\n__slots__\n---------\n\nBy default, instances of classes have a dictionary for attribute\nstorage. 
This wastes space for objects having very few instance\nvariables. The space consumption can become acute when creating large\nnumbers of instances.\n\nThe default can be overridden by defining *__slots__* in a class\ndefinition. The *__slots__* declaration takes a sequence of instance\nvariables and reserves just enough space in each instance to hold a\nvalue for each variable. Space is saved because *__dict__* is not\ncreated for each instance.\n\nobject.__slots__\n\n This class variable can be assigned a string, iterable, or sequence\n of strings with variable names used by instances. If defined in a\n class, *__slots__* reserves space for the declared variables and\n prevents the automatic creation of *__dict__* and *__weakref__* for\n each instance.\n\n\nNotes on using *__slots__*\n~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n* When inheriting from a class without *__slots__*, the *__dict__*\n attribute of that class will always be accessible, so a *__slots__*\n definition in the subclass is meaningless.\n\n* Without a *__dict__* variable, instances cannot be assigned new\n variables not listed in the *__slots__* definition. Attempts to\n assign to an unlisted variable name raise ``AttributeError``. If\n dynamic assignment of new variables is desired, then add\n ``\'__dict__\'`` to the sequence of strings in the *__slots__*\n declaration.\n\n* Without a *__weakref__* variable for each instance, classes defining\n *__slots__* do not support weak references to their instances. If weak\n reference support is needed, then add ``\'__weakref__\'`` to the\n sequence of strings in the *__slots__* declaration.\n\n* *__slots__* are implemented at the class level by creating\n descriptors (*Implementing Descriptors*) for each variable name. As\n a result, class attributes cannot be used to set default values for\n instance variables defined by *__slots__*; otherwise, the class\n attribute would overwrite the descriptor assignment.\n\n* The action of a *__slots__* declaration is limited to the class\n where it is defined. As a result, subclasses will have a *__dict__*\n unless they also define *__slots__* (which must only contain names\n of any *additional* slots).\n\n* If a class defines a slot also defined in a base class, the instance\n variable defined by the base class slot is inaccessible (except by\n retrieving its descriptor directly from the base class). This\n renders the meaning of the program undefined. In the future, a\n check may be added to prevent this.\n\n* Nonempty *__slots__* does not work for classes derived from\n "variable-length" built-in types such as ``int``, ``str`` and\n ``tuple``.\n\n* Any non-string iterable may be assigned to *__slots__*. Mappings may\n also be used; however, in the future, special meaning may be\n assigned to the values corresponding to each key.\n\n* *__class__* assignment works only if both classes have the same\n *__slots__*.\n\n\nCustomizing class creation\n==========================\n\nBy default, classes are constructed using ``type()``. A class\ndefinition is read into a separate namespace and the value of the class\nname is bound to the result of ``type(name, bases, dict)``.\n\nWhen the class definition is read, if a callable ``metaclass`` keyword\nargument is passed after the bases in the class definition, the\ncallable given will be called instead of ``type()``. If other keyword\narguments are passed, they will also be passed to the metaclass. 
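A rough sketch of a metaclass that consumes such an extra keyword\n(all names here are illustrative):\n\n class MyMeta(type):\n def __new__(mcls, name, bases, namespace, **kwds):\n cls = type.__new__(mcls, name, bases, namespace)\n cls.debug = kwds.get(\'debug\', False)\n return cls\n def __init__(cls, name, bases, namespace, **kwds):\n type.__init__(cls, name, bases, namespace)\n\n class Widget(metaclass=MyMeta, debug=True):\n pass\n\n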
This\nallows classes or functions to be written which monitor or alter the\nclass creation process:\n\n* Modifying the class dictionary prior to the class being created.\n\n* Returning an instance of another class -- essentially performing the\n role of a factory function.\n\nThese steps will have to be performed in the metaclass\'s ``__new__()``\nmethod -- ``type.__new__()`` can then be called from this method to\ncreate a class with different properties. This example adds a new\nelement to the class dictionary before creating the class:\n\n class metacls(type):\n def __new__(mcs, name, bases, dict):\n dict[\'foo\'] = \'metacls was here\'\n return type.__new__(mcs, name, bases, dict)\n\nYou can of course also override other class methods (or add new\nmethods); for example, defining a custom ``__call__()`` method in the\nmetaclass allows custom behavior when the class is called, e.g. not\nalways creating a new instance.\n\nIf the metaclass has a ``__prepare__()`` attribute (usually\nimplemented as a class or static method), it is called before the\nclass body is evaluated with the name of the class and a tuple of its\nbases for arguments. It should return an object that supports the\nmapping interface that will be used to store the namespace of the\nclass. The default is a plain dictionary. This could be used, for\nexample, to keep track of the order that class attributes are declared\nin by returning an ordered dictionary.\n\nThe appropriate metaclass is determined by the following precedence\nrules:\n\n* If the ``metaclass`` keyword argument is passed with the bases, it\n is used.\n\n* Otherwise, if there is at least one base class, its metaclass is\n used.\n\n* Otherwise, the default metaclass (``type``) is used.\n\nThe potential uses for metaclasses are boundless. Some ideas that have\nbeen explored include logging, interface checking, automatic\ndelegation, automatic property creation, proxies, frameworks, and\nautomatic resource locking/synchronization.\n\nHere is an example of a metaclass that uses a\n``collections.OrderedDict`` to remember the order that class members\nwere defined:\n\n class OrderedClass(type):\n\n @classmethod\n def __prepare__(metacls, name, bases, **kwds):\n return collections.OrderedDict()\n\n def __new__(cls, name, bases, classdict):\n result = type.__new__(cls, name, bases, dict(classdict))\n result.members = tuple(classdict)\n return result\n\n class A(metaclass=OrderedClass):\n def one(self): pass\n def two(self): pass\n def three(self): pass\n def four(self): pass\n\n >>> A.members\n (\'__module__\', \'one\', \'two\', \'three\', \'four\')\n\nWhen the class definition for *A* gets executed, the process begins\nwith calling the metaclass\'s ``__prepare__()`` method which returns an\nempty ``collections.OrderedDict``. That mapping records the methods\nand attributes of *A* as they are defined within the body of the class\nstatement. Once those definitions are executed, the ordered dictionary\nis fully populated and the metaclass\'s ``__new__()`` method gets\ninvoked. 
That method builds the new type and it saves the ordered\ndictionary keys in an attribute called ``members``.\n\n\nCustomizing instance and subclass checks\n========================================\n\nThe following methods are used to override the default behavior of the\n``isinstance()`` and ``issubclass()`` built-in functions.\n\nIn particular, the metaclass ``abc.ABCMeta`` implements these methods\nin order to allow the addition of Abstract Base Classes (ABCs) as\n"virtual base classes" to any class or type (including built-in\ntypes), including other ABCs.\n\nclass.__instancecheck__(self, instance)\n\n Return true if *instance* should be considered a (direct or\n indirect) instance of *class*. If defined, called to implement\n ``isinstance(instance, class)``.\n\nclass.__subclasscheck__(self, subclass)\n\n Return true if *subclass* should be considered a (direct or\n indirect) subclass of *class*. If defined, called to implement\n ``issubclass(subclass, class)``.\n\nNote that these methods are looked up on the type (metaclass) of a\nclass. They cannot be defined as class methods in the actual class.\nThis is consistent with the lookup of special methods that are called\non instances, only in this case the instance is itself a class.\n\nSee also:\n\n **PEP 3119** - Introducing Abstract Base Classes\n Includes the specification for customizing ``isinstance()`` and\n ``issubclass()`` behavior through ``__instancecheck__()`` and\n ``__subclasscheck__()``, with motivation for this functionality\n in the context of adding Abstract Base Classes (see the ``abc``\n module) to the language.\n\n\nEmulating callable objects\n==========================\n\nobject.__call__(self[, args...])\n\n Called when the instance is "called" as a function; if this method\n is defined, ``x(arg1, arg2, ...)`` is a shorthand for\n ``x.__call__(arg1, arg2, ...)``.\n\n\nEmulating container types\n=========================\n\nThe following methods can be defined to implement container objects.\nContainers usually are sequences (such as lists or tuples) or mappings\n(like dictionaries), but can represent other containers as well. The\nfirst set of methods is used either to emulate a sequence or to\nemulate a mapping; the difference is that for a sequence, the\nallowable keys should be the integers *k* for which ``0 <= k < N``\nwhere *N* is the length of the sequence, or slice objects, which\ndefine a range of items. It is also recommended that mappings provide\nthe methods ``keys()``, ``values()``, ``items()``, ``get()``,\n``clear()``, ``setdefault()``, ``pop()``, ``popitem()``, ``copy()``,\nand ``update()`` behaving similar to those for Python\'s standard\ndictionary objects. The ``collections`` module provides a\n``MutableMapping`` abstract base class to help create those methods\nfrom a base set of ``__getitem__()``, ``__setitem__()``,\n``__delitem__()``, and ``keys()``. Mutable sequences should provide\nmethods ``append()``, ``count()``, ``index()``, ``extend()``,\n``insert()``, ``pop()``, ``remove()``, ``reverse()`` and ``sort()``,\nlike Python standard list objects. Finally, sequence types should\nimplement addition (meaning concatenation) and multiplication (meaning\nrepetition) by defining the methods ``__add__()``, ``__radd__()``,\n``__iadd__()``, ``__mul__()``, ``__rmul__()`` and ``__imul__()``\ndescribed below; they should not define other numerical operators. 
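As an illustrative\nsketch, a minimal sequence type needs little more than ``__len__()``\nand ``__getitem__()``:\n\n >>> class Squares:\n ... def __len__(self):\n ... return 10\n ... def __getitem__(self, index):\n ... if not 0 <= index < len(self):\n ... raise IndexError(index)\n ... return index * index\n ...\n >>> s = Squares()\n >>> len(s), s[3]\n (10, 9)\n >>> 9 in s # membership falls back to iteration via __getitem__\n True\n\n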
It\nis recommended that both mappings and sequences implement the\n``__contains__()`` method to allow efficient use of the ``in``\noperator; for mappings, ``in`` should search the mapping\'s keys; for\nsequences, it should search through the values. It is further\nrecommended that both mappings and sequences implement the\n``__iter__()`` method to allow efficient iteration through the\ncontainer; for mappings, ``__iter__()`` should be the same as\n``keys()``; for sequences, it should iterate through the values.\n\nobject.__len__(self)\n\n Called to implement the built-in function ``len()``. Should return\n the length of the object, an integer ``>=`` 0. Also, an object\n that doesn\'t define a ``__bool__()`` method and whose ``__len__()``\n method returns zero is considered to be false in a Boolean context.\n\nNote: Slicing is done exclusively with the following three methods. A\n call like\n\n a[1:2] = b\n\n is translated to\n\n a[slice(1, 2, None)] = b\n\n and so forth. Missing slice items are always filled in with\n ``None``.\n\nobject.__getitem__(self, key)\n\n Called to implement evaluation of ``self[key]``. For sequence\n types, the accepted keys should be integers and slice objects.\n Note that the special interpretation of negative indexes (if the\n class wishes to emulate a sequence type) is up to the\n ``__getitem__()`` method. If *key* is of an inappropriate type,\n ``TypeError`` may be raised; if of a value outside the set of\n indexes for the sequence (after any special interpretation of\n negative values), ``IndexError`` should be raised. For mapping\n types, if *key* is missing (not in the container), ``KeyError``\n should be raised.\n\n Note: ``for`` loops expect that an ``IndexError`` will be raised for\n illegal indexes to allow proper detection of the end of the\n sequence.\n\nobject.__setitem__(self, key, value)\n\n Called to implement assignment to ``self[key]``. Same note as for\n ``__getitem__()``. This should only be implemented for mappings if\n the objects support changes to the values for keys, or if new keys\n can be added, or for sequences if elements can be replaced. The\n same exceptions should be raised for improper *key* values as for\n the ``__getitem__()`` method.\n\nobject.__delitem__(self, key)\n\n Called to implement deletion of ``self[key]``. Same note as for\n ``__getitem__()``. This should only be implemented for mappings if\n the objects support removal of keys, or for sequences if elements\n can be removed from the sequence. The same exceptions should be\n raised for improper *key* values as for the ``__getitem__()``\n method.\n\nobject.__iter__(self)\n\n This method is called when an iterator is required for a container.\n This method should return a new iterator object that can iterate\n over all the objects in the container. For mappings, it should\n iterate over the keys of the container, and should also be made\n available as the method ``keys()``.\n\n Iterator objects also need to implement this method; they are\n required to return themselves. For more information on iterator\n objects, see *Iterator Types*.\n\nobject.__reversed__(self)\n\n Called (if present) by the ``reversed()`` built-in to implement\n reverse iteration. It should return a new iterator object that\n iterates over all the objects in the container in reverse order.\n\n If the ``__reversed__()`` method is not provided, the\n ``reversed()`` built-in will fall back to using the sequence\n protocol (``__len__()`` and ``__getitem__()``). 
Objects that\n support the sequence protocol should only provide\n ``__reversed__()`` if they can provide an implementation that is\n more efficient than the one provided by ``reversed()``.\n\nThe membership test operators (``in`` and ``not in``) are normally\nimplemented as an iteration through a sequence. However, container\nobjects can supply the following special method with a more efficient\nimplementation, which also does not require the object be a sequence.\n\nobject.__contains__(self, item)\n\n Called to implement membership test operators. Should return true\n if *item* is in *self*, false otherwise. For mapping objects, this\n should consider the keys of the mapping rather than the values or\n the key-item pairs.\n\n For objects that don\'t define ``__contains__()``, the membership\n test first tries iteration via ``__iter__()``, then the old\n sequence iteration protocol via ``__getitem__()``, see *this\n section in the language reference*.\n\n\nEmulating numeric types\n=======================\n\nThe following methods can be defined to emulate numeric objects.\nMethods corresponding to operations that are not supported by the\nparticular kind of number implemented (e.g., bitwise operations for\nnon-integral numbers) should be left undefined.\n\nobject.__add__(self, other)\nobject.__sub__(self, other)\nobject.__mul__(self, other)\nobject.__truediv__(self, other)\nobject.__floordiv__(self, other)\nobject.__mod__(self, other)\nobject.__divmod__(self, other)\nobject.__pow__(self, other[, modulo])\nobject.__lshift__(self, other)\nobject.__rshift__(self, other)\nobject.__and__(self, other)\nobject.__xor__(self, other)\nobject.__or__(self, other)\n\n These methods are called to implement the binary arithmetic\n operations (``+``, ``-``, ``*``, ``/``, ``//``, ``%``,\n ``divmod()``, ``pow()``, ``**``, ``<<``, ``>>``, ``&``, ``^``,\n ``|``). For instance, to evaluate the expression ``x + y``, where\n *x* is an instance of a class that has an ``__add__()`` method,\n ``x.__add__(y)`` is called. The ``__divmod__()`` method should be\n the equivalent to using ``__floordiv__()`` and ``__mod__()``; it\n should not be related to ``__truediv__()``. Note that\n ``__pow__()`` should be defined to accept an optional third\n argument if the ternary version of the built-in ``pow()`` function\n is to be supported.\n\n If one of those methods does not support the operation with the\n supplied arguments, it should return ``NotImplemented``.\n\nobject.__radd__(self, other)\nobject.__rsub__(self, other)\nobject.__rmul__(self, other)\nobject.__rtruediv__(self, other)\nobject.__rfloordiv__(self, other)\nobject.__rmod__(self, other)\nobject.__rdivmod__(self, other)\nobject.__rpow__(self, other)\nobject.__rlshift__(self, other)\nobject.__rrshift__(self, other)\nobject.__rand__(self, other)\nobject.__rxor__(self, other)\nobject.__ror__(self, other)\n\n These methods are called to implement the binary arithmetic\n operations (``+``, ``-``, ``*``, ``/``, ``//``, ``%``,\n ``divmod()``, ``pow()``, ``**``, ``<<``, ``>>``, ``&``, ``^``,\n ``|``) with reflected (swapped) operands. These functions are only\n called if the left operand does not support the corresponding\n operation and the operands are of different types. 
[2] For\n instance, to evaluate the expression ``x - y``, where *y* is an\n instance of a class that has an ``__rsub__()`` method,\n ``y.__rsub__(x)`` is called if ``x.__sub__(y)`` returns\n *NotImplemented*.\n\n Note that ternary ``pow()`` will not try calling ``__rpow__()``\n (the coercion rules would become too complicated).\n\n Note: If the right operand\'s type is a subclass of the left operand\'s\n type and that subclass provides the reflected method for the\n operation, this method will be called before the left operand\'s\n non-reflected method. This behavior allows subclasses to\n override their ancestors\' operations.\n\nobject.__iadd__(self, other)\nobject.__isub__(self, other)\nobject.__imul__(self, other)\nobject.__itruediv__(self, other)\nobject.__ifloordiv__(self, other)\nobject.__imod__(self, other)\nobject.__ipow__(self, other[, modulo])\nobject.__ilshift__(self, other)\nobject.__irshift__(self, other)\nobject.__iand__(self, other)\nobject.__ixor__(self, other)\nobject.__ior__(self, other)\n\n These methods are called to implement the augmented arithmetic\n assignments (``+=``, ``-=``, ``*=``, ``/=``, ``//=``, ``%=``,\n ``**=``, ``<<=``, ``>>=``, ``&=``, ``^=``, ``|=``). These methods\n should attempt to do the operation in-place (modifying *self*) and\n return the result (which could be, but does not have to be,\n *self*). If a specific method is not defined, the augmented\n assignment falls back to the normal methods. For instance, to\n execute the statement ``x += y``, where *x* is an instance of a\n class that has an ``__iadd__()`` method, ``x.__iadd__(y)`` is\n called. If *x* is an instance of a class that does not define a\n ``__iadd__()`` method, ``x.__add__(y)`` and ``y.__radd__(x)`` are\n considered, as with the evaluation of ``x + y``.\n\nobject.__neg__(self)\nobject.__pos__(self)\nobject.__abs__(self)\nobject.__invert__(self)\n\n Called to implement the unary arithmetic operations (``-``, ``+``,\n ``abs()`` and ``~``).\n\nobject.__complex__(self)\nobject.__int__(self)\nobject.__float__(self)\nobject.__round__(self[, n])\n\n Called to implement the built-in functions ``complex()``,\n ``int()``, ``float()`` and ``round()``. Should return a value of\n the appropriate type.\n\nobject.__index__(self)\n\n Called to implement ``operator.index()``. Also called whenever\n Python needs an integer object (such as in slicing, or in the\n built-in ``bin()``, ``hex()`` and ``oct()`` functions). Must return\n an integer.\n\n\nWith Statement Context Managers\n===============================\n\nA *context manager* is an object that defines the runtime context to\nbe established when executing a ``with`` statement. The context\nmanager handles the entry into, and the exit from, the desired runtime\ncontext for the execution of the block of code. Context managers are\nnormally invoked using the ``with`` statement (described in section\n*The with statement*), but can also be used by directly invoking their\nmethods.\n\nTypical uses of context managers include saving and restoring various\nkinds of global state, locking and unlocking resources, closing opened\nfiles, etc.\n\nFor more information on context managers, see *Context Manager Types*.\n\nobject.__enter__(self)\n\n Enter the runtime context related to this object. The ``with``\n statement will bind this method\'s return value to the target(s)\n specified in the ``as`` clause of the statement, if any.\n\nobject.__exit__(self, exc_type, exc_value, traceback)\n\n Exit the runtime context related to this object. 
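A minimal sketch of\n the full protocol (the class name is illustrative):\n\n >>> class Managed:\n ... def __enter__(self):\n ... print(\'entering\')\n ... return self\n ... def __exit__(self, exc_type, exc_value, traceback):\n ... print(\'exiting\')\n ... return False # do not suppress exceptions\n ...\n >>> with Managed():\n ... pass\n ...\n entering\n exiting\n\n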
The parameters\n describe the exception that caused the context to be exited. If the\n context was exited without an exception, all three arguments will\n be ``None``.\n\n If an exception is supplied, and the method wishes to suppress the\n exception (i.e., prevent it from being propagated), it should\n return a true value. Otherwise, the exception will be processed\n normally upon exit from this method.\n\n Note that ``__exit__()`` methods should not reraise the passed-in\n exception; this is the caller\'s responsibility.\n\nSee also:\n\n **PEP 0343** - The "with" statement\n The specification, background, and examples for the Python\n ``with`` statement.\n\n\nSpecial method lookup\n=====================\n\nFor custom classes, implicit invocations of special methods are only\nguaranteed to work correctly if defined on an object\'s type, not in\nthe object\'s instance dictionary. That behaviour is the reason why\nthe following code raises an exception:\n\n >>> class C:\n ... pass\n ...\n >>> c = C()\n >>> c.__len__ = lambda: 5\n >>> len(c)\n Traceback (most recent call last):\n File "<stdin>", line 1, in <module>\n TypeError: object of type \'C\' has no len()\n\nThe rationale behind this behaviour lies with a number of special\nmethods such as ``__hash__()`` and ``__repr__()`` that are implemented\nby all objects, including type objects. If the implicit lookup of\nthese methods used the conventional lookup process, they would fail\nwhen invoked on the type object itself:\n\n >>> 1 .__hash__() == hash(1)\n True\n >>> int.__hash__() == hash(int)\n Traceback (most recent call last):\n File "<stdin>", line 1, in <module>\n TypeError: descriptor \'__hash__\' of \'int\' object needs an argument\n\nIncorrectly attempting to invoke an unbound method of a class in this\nway is sometimes referred to as \'metaclass confusion\', and is avoided\nby bypassing the instance when looking up special methods:\n\n >>> type(1).__hash__(1) == hash(1)\n True\n >>> type(int).__hash__(int) == hash(int)\n True\n\nIn addition to bypassing any instance attributes in the interest of\ncorrectness, implicit special method lookup generally also bypasses\nthe ``__getattribute__()`` method even of the object\'s metaclass:\n\n >>> class Meta(type):\n ... def __getattribute__(*args):\n ... print("Metaclass getattribute invoked")\n ... return type.__getattribute__(*args)\n ...\n >>> class C(object, metaclass=Meta):\n ... def __len__(self):\n ... return 10\n ... def __getattribute__(*args):\n ... print("Class getattribute invoked")\n ... return object.__getattribute__(*args)\n ...\n >>> c = C()\n >>> c.__len__() # Explicit lookup via instance\n Class getattribute invoked\n 10\n >>> type(c).__len__(c) # Explicit lookup via type\n Metaclass getattribute invoked\n 10\n >>> len(c) # Implicit lookup\n 10\n\nBypassing the ``__getattribute__()`` machinery in this fashion\nprovides significant scope for speed optimisations within the\ninterpreter, at the cost of some flexibility in the handling of\nspecial methods (the special method *must* be set on the class object\nitself in order to be consistently invoked by the interpreter).\n\n-[ Footnotes ]-\n\n[1] It *is* possible in some cases to change an object\'s type, under\n certain controlled conditions. 
It generally isn\'t a good idea\n though, since it can lead to some very strange behaviour if it is\n handled incorrectly.\n\n[2] For operands of the same type, it is assumed that if the non-\n reflected method (such as ``__add__()``) fails the operation is\n not supported, which is why the reflected method is not called.\n',
+ 'string-methods': '\nString Methods\n**************\n\nString objects support the methods listed below.\n\nIn addition, Python\'s strings support the sequence type methods\ndescribed in the *Sequence Types --- str, bytes, bytearray, list,\ntuple, range* section. To output formatted strings, see the *String\nFormatting* section. Also, see the ``re`` module for string functions\nbased on regular expressions.\n\nstr.capitalize()\n\n Return a copy of the string with its first character capitalized\n and the rest lowercased.\n\nstr.casefold()\n\n Return a casefolded copy of the string. Casefolded strings may be\n used for caseless matching.\n\n Casefolding is similar to lowercasing but more aggressive because\n it is intended to remove all case distinctions in a string. For\n example, the German lowercase letter ``\'\xc3\x9f\'`` is equivalent to\n ``"ss"``. Since it is already lowercase, ``lower()`` would do\n nothing to ``\'\xc3\x9f\'``; ``casefold()`` converts it to ``"ss"``.\n\n The casefolding algorithm is described in section 3.13 of the\n Unicode Standard.\n\n New in version 3.3.\n\nstr.center(width[, fillchar])\n\n Return the string centered in a string of length *width*. Padding\n is done using the specified *fillchar* (default is a space).\n\nstr.count(sub[, start[, end]])\n\n Return the number of non-overlapping occurrences of substring *sub*\n in the range [*start*, *end*]. Optional arguments *start* and\n *end* are interpreted as in slice notation.\n\nstr.encode(encoding="utf-8", errors="strict")\n\n Return an encoded version of the string as a bytes object. Default\n encoding is ``\'utf-8\'``. *errors* may be given to set a different\n error handling scheme. The default for *errors* is ``\'strict\'``,\n meaning that encoding errors raise a ``UnicodeError``. Other\n possible values are ``\'ignore\'``, ``\'replace\'``,\n ``\'xmlcharrefreplace\'``, ``\'backslashreplace\'`` and any other name\n registered via ``codecs.register_error()``, see section *Codec Base\n Classes*. For a list of possible encodings, see section *Standard\n Encodings*.\n\n Changed in version 3.1: Support for keyword arguments added.\n\nstr.endswith(suffix[, start[, end]])\n\n Return ``True`` if the string ends with the specified *suffix*,\n otherwise return ``False``. *suffix* can also be a tuple of\n suffixes to look for. With optional *start*, test beginning at\n that position. With optional *end*, stop comparing at that\n position.\n\nstr.expandtabs([tabsize])\n\n Return a copy of the string where all tab characters are replaced\n by zero or more spaces, depending on the current column and the\n given tab size. The column number is reset to zero after each\n newline occurring in the string. If *tabsize* is not given, a tab\n size of ``8`` characters is assumed. This doesn\'t understand other\n non-printing characters or escape sequences.\n\nstr.find(sub[, start[, end]])\n\n Return the lowest index in the string where substring *sub* is\n found, such that *sub* is contained in the slice ``s[start:end]``.\n Optional arguments *start* and *end* are interpreted as in slice\n notation. Return ``-1`` if *sub* is not found.\n\n Note: The ``find()`` method should be used only if you need to know the\n position of *sub*. To check if *sub* is a substring or not, use\n the ``in`` operator:\n\n >>> \'Py\' in \'Python\'\n True\n\nstr.format(*args, **kwargs)\n\n Perform a string formatting operation. The string on which this\n method is called can contain literal text or replacement fields\n delimited by braces ``{}``. 
Each replacement field contains either\n the numeric index of a positional argument, or the name of a\n keyword argument. Returns a copy of the string where each\n replacement field is replaced with the string value of the\n corresponding argument.\n\n >>> "The sum of 1 + 2 is {0}".format(1+2)\n \'The sum of 1 + 2 is 3\'\n\n See *Format String Syntax* for a description of the various\n formatting options that can be specified in format strings.\n\nstr.format_map(mapping)\n\n Similar to ``str.format(**mapping)``, except that ``mapping`` is\n used directly and not copied to a ``dict``. This is useful if, for\n example, ``mapping`` is a dict subclass:\n\n >>> class Default(dict):\n ... def __missing__(self, key):\n ... return key\n ...\n >>> \'{name} was born in {country}\'.format_map(Default(name=\'Guido\'))\n \'Guido was born in country\'\n\n New in version 3.2.\n\nstr.index(sub[, start[, end]])\n\n Like ``find()``, but raise ``ValueError`` when the substring is not\n found.\n\nstr.isalnum()\n\n Return true if all characters in the string are alphanumeric and\n there is at least one character, false otherwise. A character\n ``c`` is alphanumeric if one of the following returns ``True``:\n ``c.isalpha()``, ``c.isdecimal()``, ``c.isdigit()``, or\n ``c.isnumeric()``.\n\nstr.isalpha()\n\n Return true if all characters in the string are alphabetic and\n there is at least one character, false otherwise. Alphabetic\n characters are those characters defined in the Unicode character\n database as "Letter", i.e., those with general category property\n being one of "Lm", "Lt", "Lu", "Ll", or "Lo". Note that this is\n different from the "Alphabetic" property defined in the Unicode\n Standard.\n\nstr.isdecimal()\n\n Return true if all characters in the string are decimal characters\n and there is at least one character, false otherwise. Decimal\n characters are those from general category "Nd". This category\n includes digit characters, and all characters that can be used to\n form decimal-radix numbers, e.g. U+0660, ARABIC-INDIC DIGIT ZERO.\n\nstr.isdigit()\n\n Return true if all characters in the string are digits and there is\n at least one character, false otherwise. Digits include decimal\n characters and digits that need special handling, such as the\n compatibility superscript digits. Formally, a digit is a character\n that has the property value Numeric_Type=Digit or\n Numeric_Type=Decimal.\n\nstr.isidentifier()\n\n Return true if the string is a valid identifier according to the\n language definition, section *Identifiers and keywords*.\n\nstr.islower()\n\n Return true if all cased characters [4] in the string are lowercase\n and there is at least one cased character, false otherwise.\n\nstr.isnumeric()\n\n Return true if all characters in the string are numeric characters,\n and there is at least one character, false otherwise. Numeric\n characters include digit characters, and all characters that have\n the Unicode numeric value property, e.g. U+2155, VULGAR FRACTION\n ONE FIFTH. Formally, numeric characters are those with the\n property value Numeric_Type=Digit, Numeric_Type=Decimal or\n Numeric_Type=Numeric.\n\nstr.isprintable()\n\n Return true if all characters in the string are printable or the\n string is empty, false otherwise. Nonprintable characters are\n those characters defined in the Unicode character database as\n "Other" or "Separator", excepting the ASCII space (0x20) which is\n considered printable. 
(Note that printable characters in this\n context are those which should not be escaped when ``repr()`` is\n invoked on a string. It has no bearing on the handling of strings\n written to ``sys.stdout`` or ``sys.stderr``.)\n\nstr.isspace()\n\n Return true if there are only whitespace characters in the string\n and there is at least one character, false otherwise. Whitespace\n characters are those characters defined in the Unicode character\n database as "Other" or "Separator" and those with bidirectional\n property being one of "WS", "B", or "S".\n\nstr.istitle()\n\n Return true if the string is a titlecased string and there is at\n least one character: uppercase characters may follow only uncased\n characters, and lowercase characters may follow only cased ones.\n Return false otherwise.\n\nstr.isupper()\n\n Return true if all cased characters [4] in the string are uppercase\n and there is at least one cased character, false otherwise.\n\nstr.join(iterable)\n\n Return a string which is the concatenation of the strings in\n *iterable*. A ``TypeError`` will be raised if there are\n any non-string values in *iterable*, including ``bytes`` objects.\n The separator between elements is the string providing this method.\n\nstr.ljust(width[, fillchar])\n\n Return the string left justified in a string of length *width*.\n Padding is done using the specified *fillchar* (default is a\n space). The original string is returned if *width* is less than or\n equal to ``len(s)``.\n\nstr.lower()\n\n Return a copy of the string with all the cased characters [4]\n converted to lowercase.\n\n The lowercasing algorithm used is described in section 3.13 of the\n Unicode Standard.\n\nstr.lstrip([chars])\n\n Return a copy of the string with leading characters removed. The\n *chars* argument is a string specifying the set of characters to be\n removed. If omitted or ``None``, the *chars* argument defaults to\n removing whitespace. The *chars* argument is not a prefix; rather,\n all combinations of its values are stripped:\n\n >>> \' spacious \'.lstrip()\n \'spacious \'\n >>> \'www.example.com\'.lstrip(\'cmowz.\')\n \'example.com\'\n\nstatic str.maketrans(x[, y[, z]])\n\n This static method returns a translation table usable for\n ``str.translate()``.\n\n If there is only one argument, it must be a dictionary mapping\n Unicode ordinals (integers) or characters (strings of length 1) to\n Unicode ordinals, strings (of arbitrary lengths) or None.\n Character keys will then be converted to ordinals.\n\n If there are two arguments, they must be strings of equal length,\n and in the resulting dictionary, each character in x will be mapped\n to the character at the same position in y. If there is a third\n argument, it must be a string, whose characters will be mapped to\n None in the result.\n\nstr.partition(sep)\n\n Split the string at the first occurrence of *sep*, and return a\n 3-tuple containing the part before the separator, the separator\n itself, and the part after the separator. If the separator is not\n found, return a 3-tuple containing the string itself, followed by\n two empty strings.\n\nstr.replace(old, new[, count])\n\n Return a copy of the string with all occurrences of substring *old*\n replaced by *new*. 
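For\n example:\n\n >>> \'one one was a race horse\'.replace(\'one\', \'two\')\n \'two two was a race horse\'\n\n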
If the optional argument *count* is given, only\n the first *count* occurrences are replaced.\n\nstr.rfind(sub[, start[, end]])\n\n Return the highest index in the string where substring *sub* is\n found, such that *sub* is contained within ``s[start:end]``.\n Optional arguments *start* and *end* are interpreted as in slice\n notation. Return ``-1`` on failure.\n\nstr.rindex(sub[, start[, end]])\n\n Like ``rfind()`` but raises ``ValueError`` when the substring *sub*\n is not found.\n\nstr.rjust(width[, fillchar])\n\n Return the string right justified in a string of length *width*.\n Padding is done using the specified *fillchar* (default is a\n space). The original string is returned if *width* is less than or\n equal to ``len(s)``.\n\nstr.rpartition(sep)\n\n Split the string at the last occurrence of *sep*, and return a\n 3-tuple containing the part before the separator, the separator\n itself, and the part after the separator. If the separator is not\n found, return a 3-tuple containing two empty strings, followed by\n the string itself.\n\nstr.rsplit(sep=None, maxsplit=-1)\n\n Return a list of the words in the string, using *sep* as the\n delimiter string. If *maxsplit* is given, at most *maxsplit* splits\n are done, the *rightmost* ones. If *sep* is not specified or\n ``None``, any whitespace string is a separator. Except for\n splitting from the right, ``rsplit()`` behaves like ``split()``\n which is described in detail below.\n\nstr.rstrip([chars])\n\n Return a copy of the string with trailing characters removed. The\n *chars* argument is a string specifying the set of characters to be\n removed. If omitted or ``None``, the *chars* argument defaults to\n removing whitespace. The *chars* argument is not a suffix; rather,\n all combinations of its values are stripped:\n\n >>> \' spacious \'.rstrip()\n \' spacious\'\n >>> \'mississippi\'.rstrip(\'ipz\')\n \'mississ\'\n\nstr.split(sep=None, maxsplit=-1)\n\n Return a list of the words in the string, using *sep* as the\n delimiter string. If *maxsplit* is given, at most *maxsplit*\n splits are done (thus, the list will have at most ``maxsplit+1``\n elements). If *maxsplit* is not specified, then there is no limit\n on the number of splits (all possible splits are made).\n\n If *sep* is given, consecutive delimiters are not grouped together\n and are deemed to delimit empty strings (for example,\n ``\'1,,2\'.split(\',\')`` returns ``[\'1\', \'\', \'2\']``). The *sep*\n argument may consist of multiple characters (for example,\n ``\'1<>2<>3\'.split(\'<>\')`` returns ``[\'1\', \'2\', \'3\']``). Splitting\n an empty string with a specified separator returns ``[\'\']``.\n\n If *sep* is not specified or is ``None``, a different splitting\n algorithm is applied: runs of consecutive whitespace are regarded\n as a single separator, and the result will contain no empty strings\n at the start or end if the string has leading or trailing\n whitespace. Consequently, splitting an empty string or a string\n consisting of just whitespace with a ``None`` separator returns\n ``[]``.\n\n For example, ``\' 1 2 3 \'.split()`` returns ``[\'1\', \'2\', \'3\']``,\n and ``\' 1 2 3 \'.split(None, 1)`` returns ``[\'1\', \'2 3 \']``.\n\nstr.splitlines([keepends])\n\n Return a list of the lines in the string, breaking at line\n boundaries. Line breaks are not included in the resulting list\n unless *keepends* is given and true.\n\nstr.startswith(prefix[, start[, end]])\n\n Return ``True`` if string starts with the *prefix*, otherwise\n return ``False``. 
*prefix* can also be a tuple of prefixes to look\n for. With optional *start*, test string beginning at that\n position. With optional *end*, stop comparing string at that\n position.\n\nstr.strip([chars])\n\n Return a copy of the string with the leading and trailing\n characters removed. The *chars* argument is a string specifying the\n set of characters to be removed. If omitted or ``None``, the\n *chars* argument defaults to removing whitespace. The *chars*\n argument is not a prefix or suffix; rather, all combinations of its\n values are stripped:\n\n >>> \' spacious \'.strip()\n \'spacious\'\n >>> \'www.example.com\'.strip(\'cmowz.\')\n \'example\'\n\nstr.swapcase()\n\n Return a copy of the string with uppercase characters converted to\n lowercase and vice versa. Note that it is not necessarily true that\n ``s.swapcase().swapcase() == s``.\n\nstr.title()\n\n Return a titlecased version of the string where words start with an\n uppercase character and the remaining characters are lowercase.\n\n The algorithm uses a simple language-independent definition of a\n word as groups of consecutive letters. The definition works in\n many contexts but it means that apostrophes in contractions and\n possessives form word boundaries, which may not be the desired\n result:\n\n >>> "they\'re bill\'s friends from the UK".title()\n "They\'Re Bill\'S Friends From The Uk"\n\n A workaround for apostrophes can be constructed using regular\n expressions:\n\n >>> import re\n >>> def titlecase(s):\n ... return re.sub(r"[A-Za-z]+(\'[A-Za-z]+)?",\n ... lambda mo: mo.group(0)[0].upper() +\n ... mo.group(0)[1:].lower(),\n ... s)\n ...\n >>> titlecase("they\'re bill\'s friends.")\n "They\'re Bill\'s Friends."\n\nstr.translate(map)\n\n Return a copy of the string where all characters have been mapped\n through *map*, which must be a dictionary of Unicode ordinals\n (integers) to Unicode ordinals, strings or ``None``. Unmapped\n characters are left untouched. Characters mapped to ``None`` are\n deleted.\n\n You can use ``str.maketrans()`` to create a translation map from\n character-to-character mappings in different formats.\n\n Note: An even more flexible approach is to create a custom character\n mapping codec using the ``codecs`` module (see\n ``encodings.cp1251`` for an example).\n\nstr.upper()\n\n Return a copy of the string with all the cased characters [4]\n converted to uppercase. Note that ``s.upper().isupper()`` might\n be ``False`` if ``s`` contains uncased characters or if the Unicode\n category of the resulting character(s) is not "Lu" (Letter,\n uppercase), but e.g. "Lt" (Letter, titlecase).\n\n The uppercasing algorithm used is described in section 3.13 of the\n Unicode Standard.\n\nstr.zfill(width)\n\n Return the numeric string left filled with zeros in a string of\n length *width*. A sign prefix is handled correctly. The original\n string is returned if *width* is less than or equal to ``len(s)``.\n',
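A minimal doctest-style sketch of the ``maketrans()``/``translate()`` pairing described above (the variable name ``table`` is illustrative only):

   >>> table = str.maketrans('aeiou', '12345')   # two-argument form: equal-length strings
   >>> 'quick brown fox'.translate(table)
   'q53ck br4wn f4x'
   >>> 'banana'.translate(str.maketrans({'a': None}))   # one-argument form: mapping to None deletes
   'bnn'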
+ 'strings': '\nString and Bytes literals\n*************************\n\nString literals are described by the following lexical definitions:\n\n stringliteral ::= [stringprefix](shortstring | longstring)\n stringprefix ::= "r" | "u" | "ur" | "R" | "U" | "UR" | "Ur" | "uR"\n shortstring ::= "\'" shortstringitem* "\'" | \'"\' shortstringitem* \'"\'\n longstring ::= "\'\'\'" longstringitem* "\'\'\'" | \'"""\' longstringitem* \'"""\'\n shortstringitem ::= shortstringchar | stringescapeseq\n longstringitem ::= longstringchar | stringescapeseq\n shortstringchar ::= <any source character except "\\" or newline or the quote>\n longstringchar ::= <any source character except "\\">\n stringescapeseq ::= "\\" <any source character>\n\n bytesliteral ::= bytesprefix(shortbytes | longbytes)\n bytesprefix ::= "b" | "B" | "br" | "Br" | "bR" | "BR" | "rb" | "rB" | "Rb" | "RB"\n shortbytes ::= "\'" shortbytesitem* "\'" | \'"\' shortbytesitem* \'"\'\n longbytes ::= "\'\'\'" longbytesitem* "\'\'\'" | \'"""\' longbytesitem* \'"""\'\n shortbytesitem ::= shortbyteschar | bytesescapeseq\n longbytesitem ::= longbyteschar | bytesescapeseq\n shortbyteschar ::= <any ASCII character except "\\" or newline or the quote>\n longbyteschar ::= <any ASCII character except "\\">\n bytesescapeseq ::= "\\" <any ASCII character>\n\nOne syntactic restriction not indicated by these productions is that\nwhitespace is not allowed between the ``stringprefix`` or\n``bytesprefix`` and the rest of the literal. The source character set\nis defined by the encoding declaration; it is UTF-8 if no encoding\ndeclaration is given in the source file; see section *Encoding\ndeclarations*.\n\nIn plain English: Both types of literals can be enclosed in matching\nsingle quotes (``\'``) or double quotes (``"``). They can also be\nenclosed in matching groups of three single or double quotes (these\nare generally referred to as *triple-quoted strings*). The backslash\n(``\\``) character is used to escape characters that otherwise have a\nspecial meaning, such as newline, backslash itself, or the quote\ncharacter.\n\nBytes literals are always prefixed with ``\'b\'`` or ``\'B\'``; they\nproduce an instance of the ``bytes`` type instead of the ``str`` type.\nThey may only contain ASCII characters; bytes with a numeric value of\n128 or greater must be expressed with escapes.\n\nAs of Python 3.3 it is possible again to prefix unicode strings with a\n``u`` prefix to simplify maintenance of dual 2.x and 3.x codebases.\n\nBoth string and bytes literals may optionally be prefixed with a\nletter ``\'r\'`` or ``\'R\'``; such strings are called *raw strings* and\ntreat backslashes as literal characters. As a result, in string\nliterals, ``\'\\U\'`` and ``\'\\u\'`` escapes in raw strings are not treated\nspecially.\n\n New in version 3.3: The ``\'rb\'`` prefix of raw bytes literals has\n been added as a synonym of ``\'br\'``.\n\n New in version 3.3: Support for the unicode legacy literal\n (``u\'value\'``) and other versions were reintroduced to simplify the\n maintenance of dual Python 2.x and 3.x codebases. See **PEP 414**\n for more information.\n\nIn triple-quoted strings, unescaped newlines and quotes are allowed\n(and are retained), except that three unescaped quotes in a row\nterminate the string. (A "quote" is the character used to open the\nstring, i.e. either ``\'`` or ``"``.)\n\nUnless an ``\'r\'`` or ``\'R\'`` prefix is present, escape sequences in\nstrings are interpreted according to rules similar to those used by\nStandard C. 
The recognized escape sequences are:\n\n+-------------------+-----------------------------------+---------+\n| Escape Sequence | Meaning | Notes |\n+===================+===================================+=========+\n| ``\\newline`` | Backslash and newline ignored | |\n+-------------------+-----------------------------------+---------+\n| ``\\\\`` | Backslash (``\\``) | |\n+-------------------+-----------------------------------+---------+\n| ``\\\'`` | Single quote (``\'``) | |\n+-------------------+-----------------------------------+---------+\n| ``\\"`` | Double quote (``"``) | |\n+-------------------+-----------------------------------+---------+\n| ``\\a`` | ASCII Bell (BEL) | |\n+-------------------+-----------------------------------+---------+\n| ``\\b`` | ASCII Backspace (BS) | |\n+-------------------+-----------------------------------+---------+\n| ``\\f`` | ASCII Formfeed (FF) | |\n+-------------------+-----------------------------------+---------+\n| ``\\n`` | ASCII Linefeed (LF) | |\n+-------------------+-----------------------------------+---------+\n| ``\\r`` | ASCII Carriage Return (CR) | |\n+-------------------+-----------------------------------+---------+\n| ``\\t`` | ASCII Horizontal Tab (TAB) | |\n+-------------------+-----------------------------------+---------+\n| ``\\v`` | ASCII Vertical Tab (VT) | |\n+-------------------+-----------------------------------+---------+\n| ``\\ooo`` | Character with octal value *ooo* | (1,3) |\n+-------------------+-----------------------------------+---------+\n| ``\\xhh`` | Character with hex value *hh* | (2,3) |\n+-------------------+-----------------------------------+---------+\n\nEscape sequences only recognized in string literals are:\n\n+-------------------+-----------------------------------+---------+\n| Escape Sequence | Meaning | Notes |\n+===================+===================================+=========+\n| ``\\N{name}`` | Character named *name* in the | (4) |\n| | Unicode database | |\n+-------------------+-----------------------------------+---------+\n| ``\\uxxxx`` | Character with 16-bit hex value | (5) |\n| | *xxxx* | |\n+-------------------+-----------------------------------+---------+\n| ``\\Uxxxxxxxx`` | Character with 32-bit hex value | (6) |\n| | *xxxxxxxx* | |\n+-------------------+-----------------------------------+---------+\n\nNotes:\n\n1. As in Standard C, up to three octal digits are accepted.\n\n2. Unlike in Standard C, exactly two hex digits are required.\n\n3. In a bytes literal, hexadecimal and octal escapes denote the byte\n with the given value. In a string literal, these escapes denote a\n Unicode character with the given value.\n\n4. Changed in version 3.3: Support for name aliases [1] has been\n added.\n\n5. Individual code units which form parts of a surrogate pair can be\n encoded using this escape sequence. Exactly four hex digits are\n required.\n\n6. Any Unicode character can be encoded this way, but characters\n outside the Basic Multilingual Plane (BMP) will be encoded using a\n surrogate pair if Python is compiled to use 16-bit code units (the\n default). Exactly eight hex digits are required.\n\nUnlike Standard C, all unrecognized escape sequences are left in the\nstring unchanged, i.e., *the backslash is left in the string*. (This\nbehavior is useful when debugging: if an escape sequence is mistyped,\nthe resulting output is more easily recognized as broken.) 
It is also\nimportant to note that the escape sequences only recognized in string\nliterals fall into the category of unrecognized escapes for bytes\nliterals.\n\nEven in a raw string, string quotes can be escaped with a backslash,\nbut the backslash remains in the string; for example, ``r"\\""`` is a\nvalid string literal consisting of two characters: a backslash and a\ndouble quote; ``r"\\"`` is not a valid string literal (even a raw\nstring cannot end in an odd number of backslashes). Specifically, *a\nraw string cannot end in a single backslash* (since the backslash\nwould escape the following quote character). Note also that a single\nbackslash followed by a newline is interpreted as those two characters\nas part of the string, *not* as a line continuation.\n',
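A short doctest-style sketch of the escape rules above: ``\xhh`` and ``\uxxxx`` escapes denote characters in string literals, a raw prefix keeps the backslash, and in a bytes literal ``\x41`` denotes the byte with that value:

   >>> '\x41\u0042'              # hex and 16-bit escapes denote characters
   'AB'
   >>> len('\n'), len(r'\n')     # the raw prefix keeps the backslash literally
   (1, 2)
   >>> b'\x41'                   # in a bytes literal, \x41 denotes a byte value
   b'A'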
'subscriptions': '\nSubscriptions\n*************\n\nA subscription selects an item of a sequence (string, tuple or list)\nor mapping (dictionary) object:\n\n subscription ::= primary "[" expression_list "]"\n\nThe primary must evaluate to an object that supports subscription,\ne.g. a list or dictionary. User-defined objects can support\nsubscription by defining a ``__getitem__()`` method.\n\nFor built-in objects, there are two types of objects that support\nsubscription:\n\nIf the primary is a mapping, the expression list must evaluate to an\nobject whose value is one of the keys of the mapping, and the\nsubscription selects the value in the mapping that corresponds to that\nkey. (The expression list is a tuple except if it has exactly one\nitem.)\n\nIf the primary is a sequence, the expression (list) must evaluate to\nan integer or a slice (as discussed in the following section).\n\nThe formal syntax makes no special provision for negative indices in\nsequences; however, built-in sequences all provide a ``__getitem__()``\nmethod that interprets negative indices by adding the length of the\nsequence to the index (so that ``x[-1]`` selects the last item of\n``x``). The resulting value must be a nonnegative integer less than\nthe number of items in the sequence, and the subscription selects the\nitem whose index is that value (counting from zero). Since the support\nfor negative indices and slicing occurs in the object\'s\n``__getitem__()`` method, subclasses overriding this method will need\nto explicitly add that support.\n\nA string\'s items are characters. A character is not a separate data\ntype but a string of exactly one character.\n',
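As a sketch of the point about negative indices, the hypothetical class below supports subscription by defining ``__getitem__()`` and, like the built-in sequences, adds the length to a negative index itself:

   >>> class Squares:
   ...     def __init__(self, n):
   ...         self.n = n
   ...     def __getitem__(self, i):
   ...         if i < 0:                  # emulate built-in sequences
   ...             i += self.n
   ...         if not 0 <= i < self.n:
   ...             raise IndexError(i)
   ...         return i * i
   ...
   >>> s = Squares(5)
   >>> s[2], s[-1]
   (4, 16)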
'truth': "\nTruth Value Testing\n*******************\n\nAny object can be tested for truth value, for use in an ``if`` or\n``while`` condition or as operand of the Boolean operations below. The\nfollowing values are considered false:\n\n* ``None``\n\n* ``False``\n\n* zero of any numeric type, for example, ``0``, ``0.0``, ``0j``.\n\n* any empty sequence, for example, ``''``, ``()``, ``[]``.\n\n* any empty mapping, for example, ``{}``.\n\n* instances of user-defined classes, if the class defines a\n ``__bool__()`` or ``__len__()`` method, when that method returns the\n integer zero or ``bool`` value ``False``. [1]\n\nAll other values are considered true --- so objects of many types are\nalways true.\n\nOperations and built-in functions that have a Boolean result always\nreturn ``0`` or ``False`` for false and ``1`` or ``True`` for true,\nunless otherwise stated. (Important exception: the Boolean operations\n``or`` and ``and`` always return one of their operands.)\n",
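A minimal sketch of the user-defined-class rule above (the class name is illustrative): with no ``__bool__()`` defined, truth value testing falls back to ``__len__()``:

   >>> class Box:
   ...     def __init__(self, items):
   ...         self.items = items
   ...     def __len__(self):            # zero length makes the instance false
   ...         return len(self.items)
   ...
   >>> bool(Box([])), bool(Box([1, 2]))
   (False, True)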
'try': '\nThe ``try`` statement\n*********************\n\nThe ``try`` statement specifies exception handlers and/or cleanup code\nfor a group of statements:\n\n try_stmt ::= try1_stmt | try2_stmt\n try1_stmt ::= "try" ":" suite\n ("except" [expression ["as" target]] ":" suite)+\n ["else" ":" suite]\n ["finally" ":" suite]\n try2_stmt ::= "try" ":" suite\n "finally" ":" suite\n\nThe ``except`` clause(s) specify one or more exception handlers. When\nno exception occurs in the ``try`` clause, no exception handler is\nexecuted. When an exception occurs in the ``try`` suite, a search for\nan exception handler is started. This search inspects the except\nclauses in turn until one is found that matches the exception. An\nexpression-less except clause, if present, must be last; it matches\nany exception. For an except clause with an expression, that\nexpression is evaluated, and the clause matches the exception if the\nresulting object is "compatible" with the exception. An object is\ncompatible with an exception if it is the class or a base class of the\nexception object or a tuple containing an item compatible with the\nexception.\n\nIf no except clause matches the exception, the search for an exception\nhandler continues in the surrounding code and on the invocation stack.\n[1]\n\nIf the evaluation of an expression in the header of an except clause\nraises an exception, the original search for a handler is canceled and\na search starts for the new exception in the surrounding code and on\nthe call stack (it is treated as if the entire ``try`` statement\nraised the exception).\n\nWhen a matching except clause is found, the exception is assigned to\nthe target specified after the ``as`` keyword in that except clause,\nif present, and the except clause\'s suite is executed. All except\nclauses must have an executable block. When the end of this block is\nreached, execution continues normally after the entire try statement.\n(This means that if two nested handlers exist for the same exception,\nand the exception occurs in the try clause of the inner handler, the\nouter handler will not handle the exception.)\n\nWhen an exception has been assigned using ``as target``, it is cleared\nat the end of the except clause. This is as if\n\n except E as N:\n foo\n\nwas translated to\n\n except E as N:\n try:\n foo\n finally:\n del N\n\nThis means the exception must be assigned to a different name to be\nable to refer to it after the except clause. Exceptions are cleared\nbecause with the traceback attached to them, they form a reference\ncycle with the stack frame, keeping all locals in that frame alive\nuntil the next garbage collection occurs.\n\nBefore an except clause\'s suite is executed, details about the\nexception are stored in the ``sys`` module and can be accessed via\n``sys.exc_info()``. ``sys.exc_info()`` returns a 3-tuple consisting of\nthe exception class, the exception instance and a traceback object\n(see section *The standard type hierarchy*) identifying the point in\nthe program where the exception occurred. ``sys.exc_info()`` values\nare restored to their previous values (before the call) when returning\nfrom a function that handled an exception.\n\nThe optional ``else`` clause is executed if and when control flows off\nthe end of the ``try`` clause. [2] Exceptions in the ``else`` clause\nare not handled by the preceding ``except`` clauses.\n\nIf ``finally`` is present, it specifies a \'cleanup\' handler. The\n``try`` clause is executed, including any ``except`` and ``else``\nclauses. 
If an exception occurs in any of the clauses and is not\nhandled, the exception is temporarily saved. The ``finally`` clause is\nexecuted. If there is a saved exception, it is re-raised at the end\nof the ``finally`` clause. If the ``finally`` clause raises another\nexception or executes a ``return`` or ``break`` statement, the saved\nexception is set as the context of the new exception. The exception\ninformation is not available to the program during execution of the\n``finally`` clause.\n\nWhen a ``return``, ``break`` or ``continue`` statement is executed in\nthe ``try`` suite of a ``try``...``finally`` statement, the\n``finally`` clause is also executed \'on the way out.\' A ``continue``\nstatement is illegal in the ``finally`` clause. (The reason is a\nproblem with the current implementation --- this restriction may be\nlifted in the future).\n\nAdditional information on exceptions can be found in section\n*Exceptions*, and information on using the ``raise`` statement to\ngenerate exceptions may be found in section *The raise statement*.\n',
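A doctest-style sketch of the clearing of the ``as`` target described above (the name ``saved`` is illustrative); the exception survives only under the new name:

   >>> try:
   ...     1 / 0
   ... except ZeroDivisionError as exc:
   ...     saved = exc                   # rebind: 'exc' itself is deleted afterwards
   ...
   >>> print(saved)
   division by zero
   >>> exc
   Traceback (most recent call last):
     ...
   NameError: name 'exc' is not defined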
- 'types': '\nThe standard type hierarchy\n***************************\n\nBelow is a list of the types that are built into Python. Extension\nmodules (written in C, Java, or other languages, depending on the\nimplementation) can define additional types. Future versions of\nPython may add types to the type hierarchy (e.g., rational numbers,\nefficiently stored arrays of integers, etc.), although such additions\nwill often be provided via the standard library instead.\n\nSome of the type descriptions below contain a paragraph listing\n\'special attributes.\' These are attributes that provide access to the\nimplementation and are not intended for general use. Their definition\nmay change in the future.\n\nNone\n This type has a single value. There is a single object with this\n value. This object is accessed through the built-in name ``None``.\n It is used to signify the absence of a value in many situations,\n e.g., it is returned from functions that don\'t explicitly return\n anything. Its truth value is false.\n\nNotImplemented\n This type has a single value. There is a single object with this\n value. This object is accessed through the built-in name\n ``NotImplemented``. Numeric methods and rich comparison methods may\n return this value if they do not implement the operation for the\n operands provided. (The interpreter will then try the reflected\n operation, or some other fallback, depending on the operator.) Its\n truth value is true.\n\nEllipsis\n This type has a single value. There is a single object with this\n value. This object is accessed through the literal ``...`` or the\n built-in name ``Ellipsis``. Its truth value is true.\n\n``numbers.Number``\n These are created by numeric literals and returned as results by\n arithmetic operators and arithmetic built-in functions. Numeric\n objects are immutable; once created their value never changes.\n Python numbers are of course strongly related to mathematical\n numbers, but subject to the limitations of numerical representation\n in computers.\n\n Python distinguishes between integers, floating point numbers, and\n complex numbers:\n\n ``numbers.Integral``\n These represent elements from the mathematical set of integers\n (positive and negative).\n\n There are two types of integers:\n\n Integers (``int``)\n\n These represent numbers in an unlimited range, subject to\n available (virtual) memory only. For the purpose of shift\n and mask operations, a binary representation is assumed, and\n negative numbers are represented in a variant of 2\'s\n complement which gives the illusion of an infinite string of\n sign bits extending to the left.\n\n Booleans (``bool``)\n These represent the truth values False and True. The two\n objects representing the values False and True are the only\n Boolean objects. The Boolean type is a subtype of the integer\n type, and Boolean values behave like the values 0 and 1,\n respectively, in almost all contexts, the exception being\n that when converted to a string, the strings ``"False"`` or\n ``"True"`` are returned, respectively.\n\n The rules for integer representation are intended to give the\n most meaningful interpretation of shift and mask operations\n involving negative integers.\n\n ``numbers.Real`` (``float``)\n These represent machine-level double precision floating point\n numbers. You are at the mercy of the underlying machine\n architecture (and C or Java implementation) for the accepted\n range and handling of overflow. 
Python does not support single-\n precision floating point numbers; the savings in processor and\n memory usage that are usually the reason for using these is\n dwarfed by the overhead of using objects in Python, so there is\n no reason to complicate the language with two kinds of floating\n point numbers.\n\n ``numbers.Complex`` (``complex``)\n These represent complex numbers as a pair of machine-level\n double precision floating point numbers. The same caveats apply\n as for floating point numbers. The real and imaginary parts of a\n complex number ``z`` can be retrieved through the read-only\n attributes ``z.real`` and ``z.imag``.\n\nSequences\n These represent finite ordered sets indexed by non-negative\n numbers. The built-in function ``len()`` returns the number of\n items of a sequence. When the length of a sequence is *n*, the\n index set contains the numbers 0, 1, ..., *n*-1. Item *i* of\n sequence *a* is selected by ``a[i]``.\n\n Sequences also support slicing: ``a[i:j]`` selects all items with\n index *k* such that *i* ``<=`` *k* ``<`` *j*. When used as an\n expression, a slice is a sequence of the same type. This implies\n that the index set is renumbered so that it starts at 0.\n\n Some sequences also support "extended slicing" with a third "step"\n parameter: ``a[i:j:k]`` selects all items of *a* with index *x*\n where ``x = i + n*k``, *n* ``>=`` ``0`` and *i* ``<=`` *x* ``<``\n *j*.\n\n Sequences are distinguished according to their mutability:\n\n Immutable sequences\n An object of an immutable sequence type cannot change once it is\n created. (If the object contains references to other objects,\n these other objects may be mutable and may be changed; however,\n the collection of objects directly referenced by an immutable\n object cannot change.)\n\n The following types are immutable sequences:\n\n Strings\n The items of a string object are Unicode code units. A\n Unicode code unit is represented by a string object of one\n item and can hold either a 16-bit or 32-bit value\n representing a Unicode ordinal (the maximum value for the\n ordinal is given in ``sys.maxunicode``, and depends on how\n Python is configured at compile time). Surrogate pairs may\n be present in the Unicode object, and will be reported as two\n separate items. The built-in functions ``chr()`` and\n ``ord()`` convert between code units and nonnegative integers\n representing the Unicode ordinals as defined in the Unicode\n Standard 3.0. Conversion from and to other encodings are\n possible through the string method ``encode()``.\n\n Tuples\n The items of a tuple are arbitrary Python objects. Tuples of\n two or more items are formed by comma-separated lists of\n expressions. A tuple of one item (a \'singleton\') can be\n formed by affixing a comma to an expression (an expression by\n itself does not create a tuple, since parentheses must be\n usable for grouping of expressions). An empty tuple can be\n formed by an empty pair of parentheses.\n\n Bytes\n A bytes object is an immutable array. The items are 8-bit\n bytes, represented by integers in the range 0 <= x < 256.\n Bytes literals (like ``b\'abc\'`` and the built-in function\n ``bytes()`` can be used to construct bytes objects. Also,\n bytes objects can be decoded to strings via the ``decode()``\n method.\n\n Mutable sequences\n Mutable sequences can be changed after they are created. 
The\n subscription and slicing notations can be used as the target of\n assignment and ``del`` (delete) statements.\n\n There are currently two intrinsic mutable sequence types:\n\n Lists\n The items of a list are arbitrary Python objects. Lists are\n formed by placing a comma-separated list of expressions in\n square brackets. (Note that there are no special cases needed\n to form lists of length 0 or 1.)\n\n Byte Arrays\n A bytearray object is a mutable array. They are created by\n the built-in ``bytearray()`` constructor. Aside from being\n mutable (and hence unhashable), byte arrays otherwise provide\n the same interface and functionality as immutable bytes\n objects.\n\n The extension module ``array`` provides an additional example of\n a mutable sequence type, as does the ``collections`` module.\n\nSet types\n These represent unordered, finite sets of unique, immutable\n objects. As such, they cannot be indexed by any subscript. However,\n they can be iterated over, and the built-in function ``len()``\n returns the number of items in a set. Common uses for sets are fast\n membership testing, removing duplicates from a sequence, and\n computing mathematical operations such as intersection, union,\n difference, and symmetric difference.\n\n For set elements, the same immutability rules apply as for\n dictionary keys. Note that numeric types obey the normal rules for\n numeric comparison: if two numbers compare equal (e.g., ``1`` and\n ``1.0``), only one of them can be contained in a set.\n\n There are currently two intrinsic set types:\n\n Sets\n These represent a mutable set. They are created by the built-in\n ``set()`` constructor and can be modified afterwards by several\n methods, such as ``add()``.\n\n Frozen sets\n These represent an immutable set. They are created by the\n built-in ``frozenset()`` constructor. As a frozenset is\n immutable and *hashable*, it can be used again as an element of\n another set, or as a dictionary key.\n\nMappings\n These represent finite sets of objects indexed by arbitrary index\n sets. The subscript notation ``a[k]`` selects the item indexed by\n ``k`` from the mapping ``a``; this can be used in expressions and\n as the target of assignments or ``del`` statements. The built-in\n function ``len()`` returns the number of items in a mapping.\n\n There is currently a single intrinsic mapping type:\n\n Dictionaries\n These represent finite sets of objects indexed by nearly\n arbitrary values. The only types of values not acceptable as\n keys are values containing lists or dictionaries or other\n mutable types that are compared by value rather than by object\n identity, the reason being that the efficient implementation of\n dictionaries requires a key\'s hash value to remain constant.\n Numeric types used for keys obey the normal rules for numeric\n comparison: if two numbers compare equal (e.g., ``1`` and\n ``1.0``) then they can be used interchangeably to index the same\n dictionary entry.\n\n Dictionaries are mutable; they can be created by the ``{...}``\n notation (see section *Dictionary displays*).\n\n The extension modules ``dbm.ndbm`` and ``dbm.gnu`` provide\n additional examples of mapping types, as does the\n ``collections`` module.\n\nCallable types\n These are the types to which the function call operation (see\n section *Calls*) can be applied:\n\n User-defined functions\n A user-defined function object is created by a function\n definition (see section *Function definitions*). 
It should be\n called with an argument list containing the same number of items\n as the function\'s formal parameter list.\n\n Special attributes:\n\n +---------------------------+---------------------------------+-------------+\n | Attribute | Meaning | |\n +===========================+=================================+=============+\n | ``__doc__`` | The function\'s documentation | Writable |\n | | string, or ``None`` if | |\n | | unavailable | |\n +---------------------------+---------------------------------+-------------+\n | ``__name__`` | The function\'s name | Writable |\n +---------------------------+---------------------------------+-------------+\n | ``__module__`` | The name of the module the | Writable |\n | | function was defined in, or | |\n | | ``None`` if unavailable. | |\n +---------------------------+---------------------------------+-------------+\n | ``__defaults__`` | A tuple containing default | Writable |\n | | argument values for those | |\n | | arguments that have defaults, | |\n | | or ``None`` if no arguments | |\n | | have a default value | |\n +---------------------------+---------------------------------+-------------+\n | ``__code__`` | The code object representing | Writable |\n | | the compiled function body. | |\n +---------------------------+---------------------------------+-------------+\n | ``__globals__`` | A reference to the dictionary | Read-only |\n | | that holds the function\'s | |\n | | global variables --- the global | |\n | | namespace of the module in | |\n | | which the function was defined. | |\n +---------------------------+---------------------------------+-------------+\n | ``__dict__`` | The namespace supporting | Writable |\n | | arbitrary function attributes. | |\n +---------------------------+---------------------------------+-------------+\n | ``__closure__`` | ``None`` or a tuple of cells | Read-only |\n | | that contain bindings for the | |\n | | function\'s free variables. | |\n +---------------------------+---------------------------------+-------------+\n | ``__annotations__`` | A dict containing annotations | Writable |\n | | of parameters. The keys of the | |\n | | dict are the parameter names, | |\n | | or ``\'return\'`` for the return | |\n | | annotation, if provided. | |\n +---------------------------+---------------------------------+-------------+\n | ``__kwdefaults__`` | A dict containing defaults for | Writable |\n | | keyword-only parameters. | |\n +---------------------------+---------------------------------+-------------+\n\n Most of the attributes labelled "Writable" check the type of the\n assigned value.\n\n Function objects also support getting and setting arbitrary\n attributes, which can be used, for example, to attach metadata\n to functions. Regular attribute dot-notation is used to get and\n set such attributes. *Note that the current implementation only\n supports function attributes on user-defined functions. 
Function\n attributes on built-in functions may be supported in the\n future.*\n\n Additional information about a function\'s definition can be\n retrieved from its code object; see the description of internal\n types below.\n\n Instance methods\n An instance method object combines a class, a class instance and\n any callable object (normally a user-defined function).\n\n Special read-only attributes: ``__self__`` is the class instance\n object, ``__func__`` is the function object; ``__doc__`` is the\n method\'s documentation (same as ``__func__.__doc__``);\n ``__name__`` is the method name (same as ``__func__.__name__``);\n ``__module__`` is the name of the module the method was defined\n in, or ``None`` if unavailable.\n\n Methods also support accessing (but not setting) the arbitrary\n function attributes on the underlying function object.\n\n User-defined method objects may be created when getting an\n attribute of a class (perhaps via an instance of that class), if\n that attribute is a user-defined function object or a class\n method object.\n\n When an instance method object is created by retrieving a user-\n defined function object from a class via one of its instances,\n its ``__self__`` attribute is the instance, and the method\n object is said to be bound. The new method\'s ``__func__``\n attribute is the original function object.\n\n When a user-defined method object is created by retrieving\n another method object from a class or instance, the behaviour is\n the same as for a function object, except that the ``__func__``\n attribute of the new instance is not the original method object\n but its ``__func__`` attribute.\n\n When an instance method object is created by retrieving a class\n method object from a class or instance, its ``__self__``\n attribute is the class itself, and its ``__func__`` attribute is\n the function object underlying the class method.\n\n When an instance method object is called, the underlying\n function (``__func__``) is called, inserting the class instance\n (``__self__``) in front of the argument list. For instance,\n when ``C`` is a class which contains a definition for a function\n ``f()``, and ``x`` is an instance of ``C``, calling ``x.f(1)``\n is equivalent to calling ``C.f(x, 1)``.\n\n When an instance method object is derived from a class method\n object, the "class instance" stored in ``__self__`` will\n actually be the class itself, so that calling either ``x.f(1)``\n or ``C.f(1)`` is equivalent to calling ``f(C,1)`` where ``f`` is\n the underlying function.\n\n Note that the transformation from function object to instance\n method object happens each time the attribute is retrieved from\n the instance. In some cases, a fruitful optimization is to\n assign the attribute to a local variable and call that local\n variable. Also notice that this transformation only happens for\n user-defined functions; other callable objects (and all non-\n callable objects) are retrieved without transformation. 
It is\n also important to note that user-defined functions which are\n attributes of a class instance are not converted to bound\n methods; this *only* happens when the function is an attribute\n of the class.\n\n Generator functions\n A function or method which uses the ``yield`` statement (see\n section *The yield statement*) is called a *generator function*.\n Such a function, when called, always returns an iterator object\n which can be used to execute the body of the function: calling\n the iterator\'s ``__next__()`` method will cause the function to\n execute until it provides a value using the ``yield`` statement.\n When the function executes a ``return`` statement or falls off\n the end, a ``StopIteration`` exception is raised and the\n iterator will have reached the end of the set of values to be\n returned.\n\n Built-in functions\n A built-in function object is a wrapper around a C function.\n Examples of built-in functions are ``len()`` and ``math.sin()``\n (``math`` is a standard built-in module). The number and type of\n the arguments are determined by the C function. Special read-\n only attributes: ``__doc__`` is the function\'s documentation\n string, or ``None`` if unavailable; ``__name__`` is the\n function\'s name; ``__self__`` is set to ``None`` (but see the\n next item); ``__module__`` is the name of the module the\n function was defined in or ``None`` if unavailable.\n\n Built-in methods\n This is really a different disguise of a built-in function, this\n time containing an object passed to the C function as an\n implicit extra argument. An example of a built-in method is\n ``alist.append()``, assuming *alist* is a list object. In this\n case, the special read-only attribute ``__self__`` is set to the\n object denoted by *alist*.\n\n Classes\n Classes are callable. These objects normally act as factories\n for new instances of themselves, but variations are possible for\n class types that override ``__new__()``. The arguments of the\n call are passed to ``__new__()`` and, in the typical case, to\n ``__init__()`` to initialize the new instance.\n\n Class Instances\n Instances of arbitrary classes can be made callable by defining\n a ``__call__()`` method in their class.\n\nModules\n Modules are imported by the ``import`` statement (see section *The\n import statement*). A module object has a namespace implemented by\n a dictionary object (this is the dictionary referenced by the\n __globals__ attribute of functions defined in the module).\n Attribute references are translated to lookups in this dictionary,\n e.g., ``m.x`` is equivalent to ``m.__dict__["x"]``. A module object\n does not contain the code object used to initialize the module\n (since it isn\'t needed once the initialization is done).\n\n Attribute assignment updates the module\'s namespace dictionary,\n e.g., ``m.x = 1`` is equivalent to ``m.__dict__["x"] = 1``.\n\n Special read-only attribute: ``__dict__`` is the module\'s namespace\n as a dictionary object.\n\n **CPython implementation detail:** Because of the way CPython\n clears module dictionaries, the module dictionary will be cleared\n when the module falls out of scope even if the dictionary still has\n live references. 
To avoid this, copy the dictionary or keep the\n module around while using its dictionary directly.\n\n Predefined (writable) attributes: ``__name__`` is the module\'s\n name; ``__doc__`` is the module\'s documentation string, or ``None``\n if unavailable; ``__file__`` is the pathname of the file from which\n the module was loaded, if it was loaded from a file. The\n ``__file__`` attribute is not present for C modules that are\n statically linked into the interpreter; for extension modules\n loaded dynamically from a shared library, it is the pathname of the\n shared library file.\n\nCustom classes\n Custom class types are typically created by class definitions (see\n section *Class definitions*). A class has a namespace implemented\n by a dictionary object. Class attribute references are translated\n to lookups in this dictionary, e.g., ``C.x`` is translated to\n ``C.__dict__["x"]`` (although there are a number of hooks which\n allow for other means of locating attributes). When the attribute\n name is not found there, the attribute search continues in the base\n classes. This search of the base classes uses the C3 method\n resolution order which behaves correctly even in the presence of\n \'diamond\' inheritance structures where there are multiple\n inheritance paths leading back to a common ancestor. Additional\n details on the C3 MRO used by Python can be found in the\n documentation accompanying the 2.3 release at\n http://www.python.org/download/releases/2.3/mro/.\n\n When a class attribute reference (for class ``C``, say) would yield\n a class method object, it is transformed into an instance method\n object whose ``__self__`` attributes is ``C``. When it would yield\n a static method object, it is transformed into the object wrapped\n by the static method object. See section *Implementing Descriptors*\n for another way in which attributes retrieved from a class may\n differ from those actually contained in its ``__dict__``.\n\n Class attribute assignments update the class\'s dictionary, never\n the dictionary of a base class.\n\n A class object can be called (see above) to yield a class instance\n (see below).\n\n Special attributes: ``__name__`` is the class name; ``__module__``\n is the module name in which the class was defined; ``__dict__`` is\n the dictionary containing the class\'s namespace; ``__bases__`` is a\n tuple (possibly empty or a singleton) containing the base classes,\n in the order of their occurrence in the base class list;\n ``__doc__`` is the class\'s documentation string, or None if\n undefined.\n\nClass instances\n A class instance is created by calling a class object (see above).\n A class instance has a namespace implemented as a dictionary which\n is the first place in which attribute references are searched.\n When an attribute is not found there, and the instance\'s class has\n an attribute by that name, the search continues with the class\n attributes. If a class attribute is found that is a user-defined\n function object, it is transformed into an instance method object\n whose ``__self__`` attribute is the instance. Static method and\n class method objects are also transformed; see above under\n "Classes". See section *Implementing Descriptors* for another way\n in which attributes of a class retrieved via its instances may\n differ from the objects actually stored in the class\'s\n ``__dict__``. 
If no class attribute is found, and the object\'s\n class has a ``__getattr__()`` method, that is called to satisfy the\n lookup.\n\n Attribute assignments and deletions update the instance\'s\n dictionary, never a class\'s dictionary. If the class has a\n ``__setattr__()`` or ``__delattr__()`` method, this is called\n instead of updating the instance dictionary directly.\n\n Class instances can pretend to be numbers, sequences, or mappings\n if they have methods with certain special names. See section\n *Special method names*.\n\n Special attributes: ``__dict__`` is the attribute dictionary;\n ``__class__`` is the instance\'s class.\n\nI/O objects (also known as file objects)\n A *file object* represents an open file. Various shortcuts are\n available to create file objects: the ``open()`` built-in function,\n and also ``os.popen()``, ``os.fdopen()``, and the ``makefile()``\n method of socket objects (and perhaps by other functions or methods\n provided by extension modules).\n\n The objects ``sys.stdin``, ``sys.stdout`` and ``sys.stderr`` are\n initialized to file objects corresponding to the interpreter\'s\n standard input, output and error streams; they are all open in text\n mode and therefore follow the interface defined by the\n ``io.TextIOBase`` abstract class.\n\nInternal types\n A few types used internally by the interpreter are exposed to the\n user. Their definitions may change with future versions of the\n interpreter, but they are mentioned here for completeness.\n\n Code objects\n Code objects represent *byte-compiled* executable Python code,\n or *bytecode*. The difference between a code object and a\n function object is that the function object contains an explicit\n reference to the function\'s globals (the module in which it was\n defined), while a code object contains no context; also the\n default argument values are stored in the function object, not\n in the code object (because they represent values calculated at\n run-time). 
Unlike function objects, code objects are immutable\n and contain no references (directly or indirectly) to mutable\n objects.\n\n Special read-only attributes: ``co_name`` gives the function\n name; ``co_argcount`` is the number of positional arguments\n (including arguments with default values); ``co_nlocals`` is the\n number of local variables used by the function (including\n arguments); ``co_varnames`` is a tuple containing the names of\n the local variables (starting with the argument names);\n ``co_cellvars`` is a tuple containing the names of local\n variables that are referenced by nested functions;\n ``co_freevars`` is a tuple containing the names of free\n variables; ``co_code`` is a string representing the sequence of\n bytecode instructions; ``co_consts`` is a tuple containing the\n literals used by the bytecode; ``co_names`` is a tuple\n containing the names used by the bytecode; ``co_filename`` is\n the filename from which the code was compiled;\n ``co_firstlineno`` is the first line number of the function;\n ``co_lnotab`` is a string encoding the mapping from bytecode\n offsets to line numbers (for details see the source code of the\n interpreter); ``co_stacksize`` is the required stack size\n (including local variables); ``co_flags`` is an integer encoding\n a number of flags for the interpreter.\n\n The following flag bits are defined for ``co_flags``: bit\n ``0x04`` is set if the function uses the ``*arguments`` syntax\n to accept an arbitrary number of positional arguments; bit\n ``0x08`` is set if the function uses the ``**keywords`` syntax\n to accept arbitrary keyword arguments; bit ``0x20`` is set if\n the function is a generator.\n\n Future feature declarations (``from __future__ import\n division``) also use bits in ``co_flags`` to indicate whether a\n code object was compiled with a particular feature enabled: bit\n ``0x2000`` is set if the function was compiled with future\n division enabled; bits ``0x10`` and ``0x1000`` were used in\n earlier versions of Python.\n\n Other bits in ``co_flags`` are reserved for internal use.\n\n If a code object represents a function, the first item in\n ``co_consts`` is the documentation string of the function, or\n ``None`` if undefined.\n\n Frame objects\n Frame objects represent execution frames. They may occur in\n traceback objects (see below).\n\n Special read-only attributes: ``f_back`` is to the previous\n stack frame (towards the caller), or ``None`` if this is the\n bottom stack frame; ``f_code`` is the code object being executed\n in this frame; ``f_locals`` is the dictionary used to look up\n local variables; ``f_globals`` is used for global variables;\n ``f_builtins`` is used for built-in (intrinsic) names;\n ``f_lasti`` gives the precise instruction (this is an index into\n the bytecode string of the code object).\n\n Special writable attributes: ``f_trace``, if not ``None``, is a\n function called at the start of each source code line (this is\n used by the debugger); ``f_lineno`` is the current line number\n of the frame --- writing to this from within a trace function\n jumps to the given line (only for the bottom-most frame). A\n debugger can implement a Jump command (aka Set Next Statement)\n by writing to f_lineno.\n\n Traceback objects\n Traceback objects represent a stack trace of an exception. A\n traceback object is created when an exception occurs. 
When the\n search for an exception handler unwinds the execution stack, at\n each unwound level a traceback object is inserted in front of\n the current traceback. When an exception handler is entered,\n the stack trace is made available to the program. (See section\n *The try statement*.) It is accessible as the third item of the\n tuple returned by ``sys.exc_info()``. When the program contains\n no suitable handler, the stack trace is written (nicely\n formatted) to the standard error stream; if the interpreter is\n interactive, it is also made available to the user as\n ``sys.last_traceback``.\n\n Special read-only attributes: ``tb_next`` is the next level in\n the stack trace (towards the frame where the exception\n occurred), or ``None`` if there is no next level; ``tb_frame``\n points to the execution frame of the current level;\n ``tb_lineno`` gives the line number where the exception\n occurred; ``tb_lasti`` indicates the precise instruction. The\n line number and last instruction in the traceback may differ\n from the line number of its frame object if the exception\n occurred in a ``try`` statement with no matching except clause\n or with a finally clause.\n\n Slice objects\n Slice objects are used to represent slices for ``__getitem__()``\n methods. They are also created by the built-in ``slice()``\n function.\n\n Special read-only attributes: ``start`` is the lower bound;\n ``stop`` is the upper bound; ``step`` is the step value; each is\n ``None`` if omitted. These attributes can have any type.\n\n Slice objects support one method:\n\n slice.indices(self, length)\n\n This method takes a single integer argument *length* and\n computes information about the slice that the slice object\n would describe if applied to a sequence of *length* items.\n It returns a tuple of three integers; respectively these are\n the *start* and *stop* indices and the *step* or stride\n length of the slice. Missing or out-of-bounds indices are\n handled in a manner consistent with regular slices.\n\n Static method objects\n Static method objects provide a way of defeating the\n transformation of function objects to method objects described\n above. A static method object is a wrapper around any other\n object, usually a user-defined method object. When a static\n method object is retrieved from a class or a class instance, the\n object actually returned is the wrapped object, which is not\n subject to any further transformation. Static method objects are\n not themselves callable, although the objects they wrap usually\n are. Static method objects are created by the built-in\n ``staticmethod()`` constructor.\n\n Class method objects\n A class method object, like a static method object, is a wrapper\n around another object that alters the way in which that object\n is retrieved from classes and class instances. The behaviour of\n class method objects upon such retrieval is described above,\n under "User-defined methods". Class method objects are created\n by the built-in ``classmethod()`` constructor.\n',
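As a sketch of the instance-method binding rules described above (class and method names are illustrative), retrieving a function through an instance yields a bound method whose ``__self__`` and ``__func__`` behave as stated:

   >>> class C:
   ...     def f(self, arg):
   ...         return arg * 2
   ...
   >>> x = C()
   >>> x.f.__self__ is x, x.f.__func__ is C.f
   (True, True)
   >>> x.f(1) == C.f(x, 1)      # the instance is inserted in front of the argument list
   True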
+ 'types': '\nThe standard type hierarchy\n***************************\n\nBelow is a list of the types that are built into Python. Extension\nmodules (written in C, Java, or other languages, depending on the\nimplementation) can define additional types. Future versions of\nPython may add types to the type hierarchy (e.g., rational numbers,\nefficiently stored arrays of integers, etc.), although such additions\nwill often be provided via the standard library instead.\n\nSome of the type descriptions below contain a paragraph listing\n\'special attributes.\' These are attributes that provide access to the\nimplementation and are not intended for general use. Their definition\nmay change in the future.\n\nNone\n This type has a single value. There is a single object with this\n value. This object is accessed through the built-in name ``None``.\n It is used to signify the absence of a value in many situations,\n e.g., it is returned from functions that don\'t explicitly return\n anything. Its truth value is false.\n\nNotImplemented\n This type has a single value. There is a single object with this\n value. This object is accessed through the built-in name\n ``NotImplemented``. Numeric methods and rich comparison methods may\n return this value if they do not implement the operation for the\n operands provided. (The interpreter will then try the reflected\n operation, or some other fallback, depending on the operator.) Its\n truth value is true.\n\nEllipsis\n This type has a single value. There is a single object with this\n value. This object is accessed through the literal ``...`` or the\n built-in name ``Ellipsis``. Its truth value is true.\n\n``numbers.Number``\n These are created by numeric literals and returned as results by\n arithmetic operators and arithmetic built-in functions. Numeric\n objects are immutable; once created their value never changes.\n Python numbers are of course strongly related to mathematical\n numbers, but subject to the limitations of numerical representation\n in computers.\n\n Python distinguishes between integers, floating point numbers, and\n complex numbers:\n\n ``numbers.Integral``\n These represent elements from the mathematical set of integers\n (positive and negative).\n\n There are two types of integers:\n\n Integers (``int``)\n\n These represent numbers in an unlimited range, subject to\n available (virtual) memory only. For the purpose of shift\n and mask operations, a binary representation is assumed, and\n negative numbers are represented in a variant of 2\'s\n complement which gives the illusion of an infinite string of\n sign bits extending to the left.\n\n Booleans (``bool``)\n These represent the truth values False and True. The two\n objects representing the values False and True are the only\n Boolean objects. The Boolean type is a subtype of the integer\n type, and Boolean values behave like the values 0 and 1,\n respectively, in almost all contexts, the exception being\n that when converted to a string, the strings ``"False"`` or\n ``"True"`` are returned, respectively.\n\n The rules for integer representation are intended to give the\n most meaningful interpretation of shift and mask operations\n involving negative integers.\n\n ``numbers.Real`` (``float``)\n These represent machine-level double precision floating point\n numbers. You are at the mercy of the underlying machine\n architecture (and C or Java implementation) for the accepted\n range and handling of overflow. 
Python does not support single-\n precision floating point numbers; the savings in processor and\n memory usage that are usually the reason for using these is\n dwarfed by the overhead of using objects in Python, so there is\n no reason to complicate the language with two kinds of floating\n point numbers.\n\n ``numbers.Complex`` (``complex``)\n These represent complex numbers as a pair of machine-level\n double precision floating point numbers. The same caveats apply\n as for floating point numbers. The real and imaginary parts of a\n complex number ``z`` can be retrieved through the read-only\n attributes ``z.real`` and ``z.imag``.\n\nSequences\n These represent finite ordered sets indexed by non-negative\n numbers. The built-in function ``len()`` returns the number of\n items of a sequence. When the length of a sequence is *n*, the\n index set contains the numbers 0, 1, ..., *n*-1. Item *i* of\n sequence *a* is selected by ``a[i]``.\n\n Sequences also support slicing: ``a[i:j]`` selects all items with\n index *k* such that *i* ``<=`` *k* ``<`` *j*. When used as an\n expression, a slice is a sequence of the same type. This implies\n that the index set is renumbered so that it starts at 0.\n\n Some sequences also support "extended slicing" with a third "step"\n parameter: ``a[i:j:k]`` selects all items of *a* with index *x*\n where ``x = i + n*k``, *n* ``>=`` ``0`` and *i* ``<=`` *x* ``<``\n *j*.\n\n Sequences are distinguished according to their mutability:\n\n Immutable sequences\n An object of an immutable sequence type cannot change once it is\n created. (If the object contains references to other objects,\n these other objects may be mutable and may be changed; however,\n the collection of objects directly referenced by an immutable\n object cannot change.)\n\n The following types are immutable sequences:\n\n Strings\n A string is a sequence of values that represent Unicode\n codepoints. All the codepoints in the range ``U+0000 -\n U+10FFFF`` can be represented in a string. Python doesn\'t have\n a ``char`` type, and every character in the string is\n represented as a string object with length ``1``. The built-\n in function ``ord()`` converts a character to its codepoint\n (as an integer); ``chr()`` converts an integer in the range\n ``0 - 0x10FFFF`` to the corresponding character. ``str.encode()`` can\n be used to convert a ``str`` to ``bytes`` using the given\n encoding, and ``bytes.decode()`` can be used to achieve the\n opposite.\n\n Tuples\n The items of a tuple are arbitrary Python objects. Tuples of\n two or more items are formed by comma-separated lists of\n expressions. A tuple of one item (a \'singleton\') can be\n formed by affixing a comma to an expression (an expression by\n itself does not create a tuple, since parentheses must be\n usable for grouping of expressions). An empty tuple can be\n formed by an empty pair of parentheses.\n\n Bytes\n A bytes object is an immutable array. The items are 8-bit\n bytes, represented by integers in the range 0 <= x < 256.\n Bytes literals (like ``b\'abc\'``) and the built-in function\n ``bytes()`` can be used to construct bytes objects. Also,\n bytes objects can be decoded to strings via the ``decode()``\n method.\n\n Mutable sequences\n Mutable sequences can be changed after they are created. The\n subscription and slicing notations can be used as the target of\n assignment and ``del`` (delete) statements.\n\n There are currently two intrinsic mutable sequence types:\n\n Lists\n The items of a list are arbitrary Python objects. 
Lists are\n formed by placing a comma-separated list of expressions in\n square brackets. (Note that there are no special cases needed\n to form lists of length 0 or 1.)\n\n Byte Arrays\n A bytearray object is a mutable array. They are created by\n the built-in ``bytearray()`` constructor. Aside from being\n mutable (and hence unhashable), byte arrays otherwise provide\n the same interface and functionality as immutable bytes\n objects.\n\n The extension module ``array`` provides an additional example of\n a mutable sequence type, as does the ``collections`` module.\n\nSet types\n These represent unordered, finite sets of unique, immutable\n objects. As such, they cannot be indexed by any subscript. However,\n they can be iterated over, and the built-in function ``len()``\n returns the number of items in a set. Common uses for sets are fast\n membership testing, removing duplicates from a sequence, and\n computing mathematical operations such as intersection, union,\n difference, and symmetric difference.\n\n For set elements, the same immutability rules apply as for\n dictionary keys. Note that numeric types obey the normal rules for\n numeric comparison: if two numbers compare equal (e.g., ``1`` and\n ``1.0``), only one of them can be contained in a set.\n\n There are currently two intrinsic set types:\n\n Sets\n These represent a mutable set. They are created by the built-in\n ``set()`` constructor and can be modified afterwards by several\n methods, such as ``add()``.\n\n Frozen sets\n These represent an immutable set. They are created by the\n built-in ``frozenset()`` constructor. As a frozenset is\n immutable and *hashable*, it can be used again as an element of\n another set, or as a dictionary key.\n\nMappings\n These represent finite sets of objects indexed by arbitrary index\n sets. The subscript notation ``a[k]`` selects the item indexed by\n ``k`` from the mapping ``a``; this can be used in expressions and\n as the target of assignments or ``del`` statements. The built-in\n function ``len()`` returns the number of items in a mapping.\n\n There is currently a single intrinsic mapping type:\n\n Dictionaries\n These represent finite sets of objects indexed by nearly\n arbitrary values. The only types of values not acceptable as\n keys are values containing lists or dictionaries or other\n mutable types that are compared by value rather than by object\n identity, the reason being that the efficient implementation of\n dictionaries requires a key\'s hash value to remain constant.\n Numeric types used for keys obey the normal rules for numeric\n comparison: if two numbers compare equal (e.g., ``1`` and\n ``1.0``) then they can be used interchangeably to index the same\n dictionary entry.\n\n Dictionaries are mutable; they can be created by the ``{...}``\n notation (see section *Dictionary displays*).\n\n The extension modules ``dbm.ndbm`` and ``dbm.gnu`` provide\n additional examples of mapping types, as does the\n ``collections`` module.\n\nCallable types\n These are the types to which the function call operation (see\n section *Calls*) can be applied:\n\n User-defined functions\n A user-defined function object is created by a function\n definition (see section *Function definitions*). 
It should be\n called with an argument list containing the same number of items\n as the function\'s formal parameter list.\n\n Special attributes:\n\n +---------------------------+---------------------------------+-------------+\n | Attribute | Meaning | |\n +===========================+=================================+=============+\n | ``__doc__`` | The function\'s documentation | Writable |\n | | string, or ``None`` if | |\n | | unavailable | |\n +---------------------------+---------------------------------+-------------+\n | ``__name__`` | The function\'s name | Writable |\n +---------------------------+---------------------------------+-------------+\n | ``__qualname__`` | The function\'s *qualified name* | Writable |\n | | New in version 3.3. | |\n +---------------------------+---------------------------------+-------------+\n | ``__module__`` | The name of the module the | Writable |\n | | function was defined in, or | |\n | | ``None`` if unavailable. | |\n +---------------------------+---------------------------------+-------------+\n | ``__defaults__`` | A tuple containing default | Writable |\n | | argument values for those | |\n | | arguments that have defaults, | |\n | | or ``None`` if no arguments | |\n | | have a default value | |\n +---------------------------+---------------------------------+-------------+\n | ``__code__`` | The code object representing | Writable |\n | | the compiled function body. | |\n +---------------------------+---------------------------------+-------------+\n | ``__globals__`` | A reference to the dictionary | Read-only |\n | | that holds the function\'s | |\n | | global variables --- the global | |\n | | namespace of the module in | |\n | | which the function was defined. | |\n +---------------------------+---------------------------------+-------------+\n | ``__dict__`` | The namespace supporting | Writable |\n | | arbitrary function attributes. | |\n +---------------------------+---------------------------------+-------------+\n | ``__closure__`` | ``None`` or a tuple of cells | Read-only |\n | | that contain bindings for the | |\n | | function\'s free variables. | |\n +---------------------------+---------------------------------+-------------+\n | ``__annotations__`` | A dict containing annotations | Writable |\n | | of parameters. The keys of the | |\n | | dict are the parameter names, | |\n | | or ``\'return\'`` for the return | |\n | | annotation, if provided. | |\n +---------------------------+---------------------------------+-------------+\n | ``__kwdefaults__`` | A dict containing defaults for | Writable |\n | | keyword-only parameters. | |\n +---------------------------+---------------------------------+-------------+\n\n Most of the attributes labelled "Writable" check the type of the\n assigned value.\n\n Function objects also support getting and setting arbitrary\n attributes, which can be used, for example, to attach metadata\n to functions. Regular attribute dot-notation is used to get and\n set such attributes. *Note that the current implementation only\n supports function attributes on user-defined functions. 
Function\n attributes on built-in functions may be supported in the\n future.*\n\n Additional information about a function\'s definition can be\n retrieved from its code object; see the description of internal\n types below.\n\n Instance methods\n An instance method object combines a class, a class instance and\n any callable object (normally a user-defined function).\n\n Special read-only attributes: ``__self__`` is the class instance\n object, ``__func__`` is the function object; ``__doc__`` is the\n method\'s documentation (same as ``__func__.__doc__``);\n ``__name__`` is the method name (same as ``__func__.__name__``);\n ``__module__`` is the name of the module the method was defined\n in, or ``None`` if unavailable.\n\n Methods also support accessing (but not setting) the arbitrary\n function attributes on the underlying function object.\n\n User-defined method objects may be created when getting an\n attribute of a class (perhaps via an instance of that class), if\n that attribute is a user-defined function object or a class\n method object.\n\n When an instance method object is created by retrieving a user-\n defined function object from a class via one of its instances,\n its ``__self__`` attribute is the instance, and the method\n object is said to be bound. The new method\'s ``__func__``\n attribute is the original function object.\n\n When a user-defined method object is created by retrieving\n another method object from a class or instance, the behaviour is\n the same as for a function object, except that the ``__func__``\n attribute of the new instance is not the original method object\n but its ``__func__`` attribute.\n\n When an instance method object is created by retrieving a class\n method object from a class or instance, its ``__self__``\n attribute is the class itself, and its ``__func__`` attribute is\n the function object underlying the class method.\n\n When an instance method object is called, the underlying\n function (``__func__``) is called, inserting the class instance\n (``__self__``) in front of the argument list. For instance,\n when ``C`` is a class which contains a definition for a function\n ``f()``, and ``x`` is an instance of ``C``, calling ``x.f(1)``\n is equivalent to calling ``C.f(x, 1)``.\n\n When an instance method object is derived from a class method\n object, the "class instance" stored in ``__self__`` will\n actually be the class itself, so that calling either ``x.f(1)``\n or ``C.f(1)`` is equivalent to calling ``f(C,1)`` where ``f`` is\n the underlying function.\n\n Note that the transformation from function object to instance\n method object happens each time the attribute is retrieved from\n the instance. In some cases, a fruitful optimization is to\n assign the attribute to a local variable and call that local\n variable. Also notice that this transformation only happens for\n user-defined functions; other callable objects (and all non-\n callable objects) are retrieved without transformation. 
It is\n also important to note that user-defined functions which are\n attributes of a class instance are not converted to bound\n methods; this *only* happens when the function is an attribute\n of the class.\n\n Generator functions\n A function or method which uses the ``yield`` statement (see\n section *The yield statement*) is called a *generator function*.\n Such a function, when called, always returns an iterator object\n which can be used to execute the body of the function: calling\n the iterator\'s ``__next__()`` method will cause the function to\n execute until it provides a value using the ``yield`` statement.\n When the function executes a ``return`` statement or falls off\n the end, a ``StopIteration`` exception is raised and the\n iterator will have reached the end of the set of values to be\n returned.\n\n Built-in functions\n A built-in function object is a wrapper around a C function.\n Examples of built-in functions are ``len()`` and ``math.sin()``\n (``math`` is a standard built-in module). The number and type of\n the arguments are determined by the C function. Special read-\n only attributes: ``__doc__`` is the function\'s documentation\n string, or ``None`` if unavailable; ``__name__`` is the\n function\'s name; ``__self__`` is set to ``None`` (but see the\n next item); ``__module__`` is the name of the module the\n function was defined in or ``None`` if unavailable.\n\n Built-in methods\n This is really a different disguise of a built-in function, this\n time containing an object passed to the C function as an\n implicit extra argument. An example of a built-in method is\n ``alist.append()``, assuming *alist* is a list object. In this\n case, the special read-only attribute ``__self__`` is set to the\n object denoted by *alist*.\n\n Classes\n Classes are callable. These objects normally act as factories\n for new instances of themselves, but variations are possible for\n class types that override ``__new__()``. The arguments of the\n call are passed to ``__new__()`` and, in the typical case, to\n ``__init__()`` to initialize the new instance.\n\n Class Instances\n Instances of arbitrary classes can be made callable by defining\n a ``__call__()`` method in their class.\n\nModules\n Modules are imported by the ``import`` statement (see section *The\n import statement*). A module object has a namespace implemented by\n a dictionary object (this is the dictionary referenced by the\n __globals__ attribute of functions defined in the module).\n Attribute references are translated to lookups in this dictionary,\n e.g., ``m.x`` is equivalent to ``m.__dict__["x"]``. A module object\n does not contain the code object used to initialize the module\n (since it isn\'t needed once the initialization is done).\n\n Attribute assignment updates the module\'s namespace dictionary,\n e.g., ``m.x = 1`` is equivalent to ``m.__dict__["x"] = 1``.\n\n Special read-only attribute: ``__dict__`` is the module\'s namespace\n as a dictionary object.\n\n **CPython implementation detail:** Because of the way CPython\n clears module dictionaries, the module dictionary will be cleared\n when the module falls out of scope even if the dictionary still has\n live references. 
To avoid this, copy the dictionary or keep the\n module around while using its dictionary directly.\n\n Predefined (writable) attributes: ``__name__`` is the module\'s\n name; ``__doc__`` is the module\'s documentation string, or ``None``\n if unavailable; ``__file__`` is the pathname of the file from which\n the module was loaded, if it was loaded from a file. The\n ``__file__`` attribute is not present for C modules that are\n statically linked into the interpreter; for extension modules\n loaded dynamically from a shared library, it is the pathname of the\n shared library file.\n\nCustom classes\n Custom class types are typically created by class definitions (see\n section *Class definitions*). A class has a namespace implemented\n by a dictionary object. Class attribute references are translated\n to lookups in this dictionary, e.g., ``C.x`` is translated to\n ``C.__dict__["x"]`` (although there are a number of hooks which\n allow for other means of locating attributes). When the attribute\n name is not found there, the attribute search continues in the base\n classes. This search of the base classes uses the C3 method\n resolution order which behaves correctly even in the presence of\n \'diamond\' inheritance structures where there are multiple\n inheritance paths leading back to a common ancestor. Additional\n details on the C3 MRO used by Python can be found in the\n documentation accompanying the 2.3 release at\n http://www.python.org/download/releases/2.3/mro/.\n\n When a class attribute reference (for class ``C``, say) would yield\n a class method object, it is transformed into an instance method\n object whose ``__self__`` attribute is ``C``. When it would yield\n a static method object, it is transformed into the object wrapped\n by the static method object. See section *Implementing Descriptors*\n for another way in which attributes retrieved from a class may\n differ from those actually contained in its ``__dict__``.\n\n Class attribute assignments update the class\'s dictionary, never\n the dictionary of a base class.\n\n A class object can be called (see above) to yield a class instance\n (see below).\n\n Special attributes: ``__name__`` is the class name; ``__module__``\n is the module name in which the class was defined; ``__dict__`` is\n the dictionary containing the class\'s namespace; ``__bases__`` is a\n tuple (possibly empty or a singleton) containing the base classes,\n in the order of their occurrence in the base class list;\n ``__doc__`` is the class\'s documentation string, or None if\n undefined.\n\nClass instances\n A class instance is created by calling a class object (see above).\n A class instance has a namespace implemented as a dictionary which\n is the first place in which attribute references are searched.\n When an attribute is not found there, and the instance\'s class has\n an attribute by that name, the search continues with the class\n attributes. If a class attribute is found that is a user-defined\n function object, it is transformed into an instance method object\n whose ``__self__`` attribute is the instance. Static method and\n class method objects are also transformed; see above under\n "Classes". See section *Implementing Descriptors* for another way\n in which attributes of a class retrieved via its instances may\n differ from the objects actually stored in the class\'s\n ``__dict__``. 
If no class attribute is found, and the object\'s\n class has a ``__getattr__()`` method, that is called to satisfy the\n lookup.\n\n Attribute assignments and deletions update the instance\'s\n dictionary, never a class\'s dictionary. If the class has a\n ``__setattr__()`` or ``__delattr__()`` method, this is called\n instead of updating the instance dictionary directly.\n\n Class instances can pretend to be numbers, sequences, or mappings\n if they have methods with certain special names. See section\n *Special method names*.\n\n Special attributes: ``__dict__`` is the attribute dictionary;\n ``__class__`` is the instance\'s class.\n\nI/O objects (also known as file objects)\n A *file object* represents an open file. Various shortcuts are\n available to create file objects: the ``open()`` built-in function,\n and also ``os.popen()``, ``os.fdopen()``, and the ``makefile()``\n method of socket objects (and perhaps by other functions or methods\n provided by extension modules).\n\n The objects ``sys.stdin``, ``sys.stdout`` and ``sys.stderr`` are\n initialized to file objects corresponding to the interpreter\'s\n standard input, output and error streams; they are all open in text\n mode and therefore follow the interface defined by the\n ``io.TextIOBase`` abstract class.\n\nInternal types\n A few types used internally by the interpreter are exposed to the\n user. Their definitions may change with future versions of the\n interpreter, but they are mentioned here for completeness.\n\n Code objects\n Code objects represent *byte-compiled* executable Python code,\n or *bytecode*. The difference between a code object and a\n function object is that the function object contains an explicit\n reference to the function\'s globals (the module in which it was\n defined), while a code object contains no context; also the\n default argument values are stored in the function object, not\n in the code object (because they represent values calculated at\n run-time). 
Unlike function objects, code objects are immutable\n and contain no references (directly or indirectly) to mutable\n objects.\n\n Special read-only attributes: ``co_name`` gives the function\n name; ``co_argcount`` is the number of positional arguments\n (including arguments with default values); ``co_nlocals`` is the\n number of local variables used by the function (including\n arguments); ``co_varnames`` is a tuple containing the names of\n the local variables (starting with the argument names);\n ``co_cellvars`` is a tuple containing the names of local\n variables that are referenced by nested functions;\n ``co_freevars`` is a tuple containing the names of free\n variables; ``co_code`` is a string representing the sequence of\n bytecode instructions; ``co_consts`` is a tuple containing the\n literals used by the bytecode; ``co_names`` is a tuple\n containing the names used by the bytecode; ``co_filename`` is\n the filename from which the code was compiled;\n ``co_firstlineno`` is the first line number of the function;\n ``co_lnotab`` is a string encoding the mapping from bytecode\n offsets to line numbers (for details see the source code of the\n interpreter); ``co_stacksize`` is the required stack size\n (including local variables); ``co_flags`` is an integer encoding\n a number of flags for the interpreter.\n\n The following flag bits are defined for ``co_flags``: bit\n ``0x04`` is set if the function uses the ``*arguments`` syntax\n to accept an arbitrary number of positional arguments; bit\n ``0x08`` is set if the function uses the ``**keywords`` syntax\n to accept arbitrary keyword arguments; bit ``0x20`` is set if\n the function is a generator.\n\n Future feature declarations (``from __future__ import\n division``) also use bits in ``co_flags`` to indicate whether a\n code object was compiled with a particular feature enabled: bit\n ``0x2000`` is set if the function was compiled with future\n division enabled; bits ``0x10`` and ``0x1000`` were used in\n earlier versions of Python.\n\n Other bits in ``co_flags`` are reserved for internal use.\n\n If a code object represents a function, the first item in\n ``co_consts`` is the documentation string of the function, or\n ``None`` if undefined.\n\n Frame objects\n Frame objects represent execution frames. They may occur in\n traceback objects (see below).\n\n Special read-only attributes: ``f_back`` points to the previous\n stack frame (towards the caller), or ``None`` if this is the\n bottom stack frame; ``f_code`` is the code object being executed\n in this frame; ``f_locals`` is the dictionary used to look up\n local variables; ``f_globals`` is used for global variables;\n ``f_builtins`` is used for built-in (intrinsic) names;\n ``f_lasti`` gives the precise instruction (this is an index into\n the bytecode string of the code object).\n\n Special writable attributes: ``f_trace``, if not ``None``, is a\n function called at the start of each source code line (this is\n used by the debugger); ``f_lineno`` is the current line number\n of the frame --- writing to this from within a trace function\n jumps to the given line (only for the bottom-most frame). A\n debugger can implement a Jump command (aka Set Next Statement)\n by writing to ``f_lineno``.\n\n Traceback objects\n Traceback objects represent a stack trace of an exception. A\n traceback object is created when an exception occurs. 
When the\n search for an exception handler unwinds the execution stack, at\n each unwound level a traceback object is inserted in front of\n the current traceback. When an exception handler is entered,\n the stack trace is made available to the program. (See section\n *The try statement*.) It is accessible as the third item of the\n tuple returned by ``sys.exc_info()``. When the program contains\n no suitable handler, the stack trace is written (nicely\n formatted) to the standard error stream; if the interpreter is\n interactive, it is also made available to the user as\n ``sys.last_traceback``.\n\n Special read-only attributes: ``tb_next`` is the next level in\n the stack trace (towards the frame where the exception\n occurred), or ``None`` if there is no next level; ``tb_frame``\n points to the execution frame of the current level;\n ``tb_lineno`` gives the line number where the exception\n occurred; ``tb_lasti`` indicates the precise instruction. The\n line number and last instruction in the traceback may differ\n from the line number of its frame object if the exception\n occurred in a ``try`` statement with no matching except clause\n or with a finally clause.\n\n Slice objects\n Slice objects are used to represent slices for ``__getitem__()``\n methods. They are also created by the built-in ``slice()``\n function.\n\n Special read-only attributes: ``start`` is the lower bound;\n ``stop`` is the upper bound; ``step`` is the step value; each is\n ``None`` if omitted. These attributes can have any type.\n\n Slice objects support one method:\n\n slice.indices(self, length)\n\n This method takes a single integer argument *length* and\n computes information about the slice that the slice object\n would describe if applied to a sequence of *length* items.\n It returns a tuple of three integers; respectively these are\n the *start* and *stop* indices and the *step* or stride\n length of the slice. Missing or out-of-bounds indices are\n handled in a manner consistent with regular slices.\n\n Static method objects\n Static method objects provide a way of defeating the\n transformation of function objects to method objects described\n above. A static method object is a wrapper around any other\n object, usually a user-defined method object. When a static\n method object is retrieved from a class or a class instance, the\n object actually returned is the wrapped object, which is not\n subject to any further transformation. Static method objects are\n not themselves callable, although the objects they wrap usually\n are. Static method objects are created by the built-in\n ``staticmethod()`` constructor.\n\n Class method objects\n A class method object, like a static method object, is a wrapper\n around another object that alters the way in which that object\n is retrieved from classes and class instances. The behaviour of\n class method objects upon such retrieval is described above,\n under "Instance methods". Class method objects are created\n by the built-in ``classmethod()`` constructor.\n',
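As a brief illustration of the retrieval behaviour described above for static and class method objects (the class ``C`` and its methods are invented for this sketch):

   >>> class C:
   ...     @staticmethod
   ...     def s():
   ...         return 'static'
   ...     @classmethod
   ...     def c(cls):
   ...         return cls.__name__
   >>> C.s()    # the wrapped function is returned and called directly
   'static'
   >>> C.c()    # __self__ of the resulting bound method is the class itself
   'C'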
'typesfunctions': '\nFunctions\n*********\n\nFunction objects are created by function definitions. The only\noperation on a function object is to call it: ``func(argument-list)``.\n\nThere are really two flavors of function objects: built-in functions\nand user-defined functions. Both support the same operation (to call\nthe function), but the implementation is different, hence the\ndifferent object types.\n\nSee *Function definitions* for more information.\n',
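A short sketch (the function name ``square`` is invented for the example): both flavours are invoked with the same call syntax even though their object types differ:

   >>> import math
   >>> def square(x):
   ...     return x * x
   >>> square(4), math.sqrt(16)
   (16, 4.0)
   >>> type(square).__name__, type(math.sqrt).__name__
   ('function', 'builtin_function_or_method')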
'typesmapping': '\nMapping Types --- ``dict``\n**************************\n\nA *mapping* object maps *hashable* values to arbitrary objects.\nMappings are mutable objects. There is currently only one standard\nmapping type, the *dictionary*. (For other containers see the built-in\n``list``, ``set``, and ``tuple`` classes, and the ``collections``\nmodule.)\n\nA dictionary\'s keys are *almost* arbitrary values. Values that are\nnot *hashable*, that is, values containing lists, dictionaries or\nother mutable types (that are compared by value rather than by object\nidentity) may not be used as keys. Numeric types used for keys obey\nthe normal rules for numeric comparison: if two numbers compare equal\n(such as ``1`` and ``1.0``) then they can be used interchangeably to\nindex the same dictionary entry. (Note however, that since computers\nstore floating-point numbers as approximations it is usually unwise to\nuse them as dictionary keys.)\n\nDictionaries can be created by placing a comma-separated list of\n``key: value`` pairs within braces, for example: ``{\'jack\': 4098,\n\'sjoerd\': 4127}`` or ``{4098: \'jack\', 4127: \'sjoerd\'}``, or by the\n``dict`` constructor.\n\nclass dict([arg])\n\n Return a new dictionary initialized from an optional positional\n argument or from a set of keyword arguments. If no arguments are\n given, return a new empty dictionary. If the positional argument\n *arg* is a mapping object, return a dictionary mapping the same\n keys to the same values as does the mapping object. Otherwise the\n positional argument must be a sequence, a container that supports\n iteration, or an iterator object. The elements of the argument\n must each also be of one of those kinds, and each must in turn\n contain exactly two objects. The first is used as a key in the new\n dictionary, and the second as the key\'s value. If a given key is\n seen more than once, the last value associated with it is retained\n in the new dictionary.\n\n If keyword arguments are given, the keywords themselves with their\n associated values are added as items to the dictionary. If a key\n is specified both in the positional argument and as a keyword\n argument, the value associated with the keyword is retained in the\n dictionary. For example, these all return a dictionary equal to\n ``{"one": 1, "two": 2}``:\n\n * ``dict(one=1, two=2)``\n\n * ``dict({\'one\': 1, \'two\': 2})``\n\n * ``dict(zip((\'one\', \'two\'), (1, 2)))``\n\n * ``dict([[\'two\', 2], [\'one\', 1]])``\n\n The first example only works for keys that are valid Python\n identifiers; the others work with any valid keys.\n\n These are the operations that dictionaries support (and therefore,\n custom mapping types should support too):\n\n len(d)\n\n Return the number of items in the dictionary *d*.\n\n d[key]\n\n Return the item of *d* with key *key*. Raises a ``KeyError`` if\n *key* is not in the map.\n\n If a subclass of dict defines a method ``__missing__()`` and the\n key *key* is not present, the ``d[key]`` operation calls that\n method with the key *key* as argument. The ``d[key]`` operation\n then returns or raises whatever is returned or raised by the\n ``__missing__(key)`` call if the key is not present. No other\n operations or methods invoke ``__missing__()``. If\n ``__missing__()`` is not defined, ``KeyError`` is raised.\n ``__missing__()`` must be a method; it cannot be an instance\n variable:\n\n >>> class Counter(dict):\n ... def __missing__(self, key):\n ... 
return 0\n >>> c = Counter()\n >>> c[\'red\']\n 0\n >>> c[\'red\'] += 1\n >>> c[\'red\']\n 1\n\n See ``collections.Counter`` for a complete implementation\n including other methods helpful for accumulating and managing\n tallies.\n\n d[key] = value\n\n Set ``d[key]`` to *value*.\n\n del d[key]\n\n Remove ``d[key]`` from *d*. Raises a ``KeyError`` if *key* is\n not in the map.\n\n key in d\n\n Return ``True`` if *d* has a key *key*, else ``False``.\n\n key not in d\n\n Equivalent to ``not key in d``.\n\n iter(d)\n\n Return an iterator over the keys of the dictionary. This is a\n shortcut for ``iter(d.keys())``.\n\n clear()\n\n Remove all items from the dictionary.\n\n copy()\n\n Return a shallow copy of the dictionary.\n\n classmethod fromkeys(seq[, value])\n\n Create a new dictionary with keys from *seq* and values set to\n *value*.\n\n ``fromkeys()`` is a class method that returns a new dictionary.\n *value* defaults to ``None``.\n\n get(key[, default])\n\n Return the value for *key* if *key* is in the dictionary, else\n *default*. If *default* is not given, it defaults to ``None``,\n so that this method never raises a ``KeyError``.\n\n items()\n\n Return a new view of the dictionary\'s items (``(key, value)``\n pairs). See below for documentation of view objects.\n\n keys()\n\n Return a new view of the dictionary\'s keys. See below for\n documentation of view objects.\n\n pop(key[, default])\n\n If *key* is in the dictionary, remove it and return its value,\n else return *default*. If *default* is not given and *key* is\n not in the dictionary, a ``KeyError`` is raised.\n\n popitem()\n\n Remove and return an arbitrary ``(key, value)`` pair from the\n dictionary.\n\n ``popitem()`` is useful to destructively iterate over a\n dictionary, as often used in set algorithms. If the dictionary\n is empty, calling ``popitem()`` raises a ``KeyError``.\n\n setdefault(key[, default])\n\n If *key* is in the dictionary, return its value. If not, insert\n *key* with a value of *default* and return *default*. *default*\n defaults to ``None``.\n\n update([other])\n\n Update the dictionary with the key/value pairs from *other*,\n overwriting existing keys. Return ``None``.\n\n ``update()`` accepts either another dictionary object or an\n iterable of key/value pairs (as tuples or other iterables of\n length two). If keyword arguments are specified, the dictionary\n is then updated with those key/value pairs: ``d.update(red=1,\n blue=2)``.\n\n values()\n\n Return a new view of the dictionary\'s values. See below for\n documentation of view objects.\n\n\nDictionary view objects\n=======================\n\nThe objects returned by ``dict.keys()``, ``dict.values()`` and\n``dict.items()`` are *view objects*. They provide a dynamic view on\nthe dictionary\'s entries, which means that when the dictionary\nchanges, the view reflects these changes.\n\nDictionary views can be iterated over to yield their respective data,\nand support membership tests:\n\nlen(dictview)\n\n Return the number of entries in the dictionary.\n\niter(dictview)\n\n Return an iterator over the keys, values or items (represented as\n tuples of ``(key, value)``) in the dictionary.\n\n Keys and values are iterated over in an arbitrary order which is\n non-random, varies across Python implementations, and depends on\n the dictionary\'s history of insertions and deletions. If keys,\n values and items views are iterated over with no intervening\n modifications to the dictionary, the order of items will directly\n correspond. 
This allows the creation of ``(value, key)`` pairs\n using ``zip()``: ``pairs = zip(d.values(), d.keys())``. Another\n way to create the same list is ``pairs = [(v, k) for (k, v) in\n d.items()]``.\n\n Iterating views while adding or deleting entries in the dictionary\n may raise a ``RuntimeError`` or fail to iterate over all entries.\n\nx in dictview\n\n Return ``True`` if *x* is in the underlying dictionary\'s keys,\n values or items (in the latter case, *x* should be a ``(key,\n value)`` tuple).\n\nKeys views are set-like since their entries are unique and hashable.\nIf all values are hashable, so that ``(key, value)`` pairs are unique\nand hashable, then the items view is also set-like. (Values views are\nnot treated as set-like since the entries are generally not unique.)\nFor set-like views, all of the operations defined for the abstract\nbase class ``collections.Set`` are available (for example, ``==``,\n``<``, or ``^``).\n\nAn example of dictionary view usage:\n\n >>> dishes = {\'eggs\': 2, \'sausage\': 1, \'bacon\': 1, \'spam\': 500}\n >>> keys = dishes.keys()\n >>> values = dishes.values()\n\n >>> # iteration\n >>> n = 0\n >>> for val in values:\n ... n += val\n >>> print(n)\n 504\n\n >>> # keys and values are iterated over in the same order\n >>> list(keys)\n [\'eggs\', \'bacon\', \'sausage\', \'spam\']\n >>> list(values)\n [2, 1, 1, 500]\n\n >>> # view objects are dynamic and reflect dict changes\n >>> del dishes[\'eggs\']\n >>> del dishes[\'sausage\']\n >>> list(keys)\n [\'spam\', \'bacon\']\n\n >>> # set operations\n >>> keys & {\'eggs\', \'bacon\', \'salad\'}\n {\'bacon\'}\n >>> keys ^ {\'sausage\', \'juice\'}\n {\'juice\', \'sausage\', \'bacon\', \'spam\'}\n',
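A small doctest sketch (dictionary contents invented for the example) of ``setdefault()`` and ``update()`` as described above:

   >>> d = {'one': 1}
   >>> d.setdefault('two', 2)     # key absent: inserted and returned
   2
   >>> d.setdefault('one', 99)    # key present: existing value returned
   1
   >>> d.update([('three', 3)], four=4)
   >>> sorted(d)
   ['four', 'one', 'three', 'two']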
'typesmethods': "\nMethods\n*******\n\nMethods are functions that are called using the attribute notation.\nThere are two flavors: built-in methods (such as ``append()`` on\nlists) and class instance methods. Built-in methods are described\nwith the types that support them.\n\nIf you access a method (a function defined in a class namespace)\nthrough an instance, you get a special object: a *bound method* (also\ncalled *instance method*) object. When called, it will add the\n``self`` argument to the argument list. Bound methods have two\nspecial read-only attributes: ``m.__self__`` is the object on which\nthe method operates, and ``m.__func__`` is the function implementing\nthe method. Calling ``m(arg-1, arg-2, ..., arg-n)`` is completely\nequivalent to calling ``m.__func__(m.__self__, arg-1, arg-2, ...,\narg-n)``.\n\nLike function objects, bound method objects support getting arbitrary\nattributes. However, since method attributes are actually stored on\nthe underlying function object (``meth.__func__``), setting method\nattributes on bound methods is disallowed. Attempting to set a method\nattribute results in a ``TypeError`` being raised. In order to set a\nmethod attribute, you need to explicitly set it on the underlying\nfunction object:\n\n class C:\n def method(self):\n pass\n\n c = C()\n c.method.__func__.whoami = 'my name is c'\n\nSee *The standard type hierarchy* for more information.\n",
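The equivalence stated above can be checked directly; a minimal sketch (the class and method names are invented here):

   >>> class Greeter:
   ...     def greet(self, name):
   ...         return 'hello ' + name
   >>> m = Greeter().greet
   >>> m('world') == m.__func__(m.__self__, 'world')
   True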
'typesmodules': "\nModules\n*******\n\nThe only special operation on a module is attribute access:\n``m.name``, where *m* is a module and *name* accesses a name defined\nin *m*'s symbol table. Module attributes can be assigned to. (Note\nthat the ``import`` statement is not, strictly speaking, an operation\non a module object; ``import foo`` does not require a module object\nnamed *foo* to exist, rather it requires an (external) *definition*\nfor a module named *foo* somewhere.)\n\nA special attribute of every module is ``__dict__``. This is the\ndictionary containing the module's symbol table. Modifying this\ndictionary will actually change the module's symbol table, but direct\nassignment to the ``__dict__`` attribute is not possible (you can\nwrite ``m.__dict__['a'] = 1``, which defines ``m.a`` to be ``1``, but\nyou can't write ``m.__dict__ = {}``). Modifying ``__dict__`` directly\nis not recommended.\n\nModules built into the interpreter are written like this: ``<module\n'sys' (built-in)>``. If loaded from a file, they are written as\n``<module 'os' from '/usr/local/lib/pythonX.Y/os.pyc'>``.\n",
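A short doctest sketch of attribute access and assignment through the namespace dictionary (using the standard ``math`` module; the attribute ``x`` is invented for the example):

   >>> import math
   >>> math.pi is math.__dict__['pi']
   True
   >>> math.x = 1                  # attribute assignment updates the namespace
   >>> math.__dict__['x']
   1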
- 'typesseq': '\nSequence Types --- ``str``, ``bytes``, ``bytearray``, ``list``, ``tuple``, ``range``\n************************************************************************************\n\nThere are six sequence types: strings, byte sequences (``bytes``\nobjects), byte arrays (``bytearray`` objects), lists, tuples, and\nrange objects. For other containers see the built-in ``dict`` and\n``set`` classes, and the ``collections`` module.\n\nStrings contain Unicode characters. Their literals are written in\nsingle or double quotes: ``\'xyzzy\'``, ``"frobozz"``. See *String and\nBytes literals* for more about string literals. In addition to the\nfunctionality described here, there are also string-specific methods\ndescribed in the *String Methods* section.\n\nBytes and bytearray objects contain single bytes -- the former is\nimmutable while the latter is a mutable sequence. Bytes objects can\nbe constructed with the constructor, ``bytes()``, and from literals; use a\n``b`` prefix with normal string syntax: ``b\'xyzzy\'``. To construct\nbyte arrays, use the ``bytearray()`` function.\n\nWhile string objects are sequences of characters (represented by\nstrings of length 1), bytes and bytearray objects are sequences of\n*integers* (between 0 and 255), representing the ASCII value of single\nbytes. That means that for a bytes or bytearray object *b*, ``b[0]``\nwill be an integer, while ``b[0:1]`` will be a bytes or bytearray\nobject of length 1. The representation of bytes objects uses the\nliteral format (``b\'...\'``) since it is generally more useful than\ne.g. ``bytes([50, 19, 100])``. You can always convert a bytes object\ninto a list of integers using ``list(b)``.\n\nAlso, while in previous Python versions, byte strings and Unicode\nstrings could be exchanged for each other rather freely (barring\nencoding issues), strings and bytes are now completely separate\nconcepts. There\'s no implicit en-/decoding if you pass an object of\nthe wrong type. A string always compares unequal to a bytes or\nbytearray object.\n\nLists are constructed with square brackets, separating items with\ncommas: ``[a, b, c]``. Tuples are constructed by the comma operator\n(not within square brackets), with or without enclosing parentheses,\nbut an empty tuple must have the enclosing parentheses, such as ``a,\nb, c`` or ``()``. A single item tuple must have a trailing comma,\nsuch as ``(d,)``.\n\nObjects of type range are created using the ``range()`` function.\nThey don\'t support concatenation or repetition, and using ``min()`` or\n``max()`` on them is inefficient.\n\nMost sequence types support the following operations. The ``in`` and\n``not in`` operations have the same priorities as the comparison\noperations. The ``+`` and ``*`` operations have the same priority as\nthe corresponding numeric operations. [3] Additional methods are\nprovided for *Mutable Sequence Types*.\n\nThis table lists the sequence operations sorted in ascending priority\n(operations in the same box have the same priority). 
In the table,\n*s* and *t* are sequences of the same type; *n*, *i*, *j* and *k* are\nintegers; and *x* is an arbitrary object.\n\n+--------------------+----------------------------------+------------+\n| Operation | Result | Notes |\n+====================+==================================+============+\n| ``x in s`` | ``True`` if an item of *s* is | (1) |\n| | equal to *x*, else ``False`` | |\n+--------------------+----------------------------------+------------+\n| ``x not in s`` | ``False`` if an item of *s* is | (1) |\n| | equal to *x*, else ``True`` | |\n+--------------------+----------------------------------+------------+\n| ``s + t`` | the concatenation of *s* and *t* | (6) |\n+--------------------+----------------------------------+------------+\n| ``s * n, n * s`` | *n* shallow copies of *s* | (2) |\n| | concatenated | |\n+--------------------+----------------------------------+------------+\n| ``s[i]`` | *i*th item of *s*, origin 0 | (3) |\n+--------------------+----------------------------------+------------+\n| ``s[i:j]`` | slice of *s* from *i* to *j* | (3)(4) |\n+--------------------+----------------------------------+------------+\n| ``s[i:j:k]`` | slice of *s* from *i* to *j* | (3)(5) |\n| | with step *k* | |\n+--------------------+----------------------------------+------------+\n| ``len(s)`` | length of *s* | |\n+--------------------+----------------------------------+------------+\n| ``min(s)`` | smallest item of *s* | |\n+--------------------+----------------------------------+------------+\n| ``max(s)`` | largest item of *s* | |\n+--------------------+----------------------------------+------------+\n| ``s.index(x)`` | index of the first occurrence of | |\n| | *x* in *s* | |\n+--------------------+----------------------------------+------------+\n| ``s.count(x)`` | total number of occurrences of | |\n| | *x* in *s* | |\n+--------------------+----------------------------------+------------+\n\nSequence types also support comparisons. In particular, tuples and\nlists are compared lexicographically by comparing corresponding\nelements. This means that to compare equal, every element must\ncompare equal and the two sequences must be of the same type and have\nthe same length. (For full details see *Comparisons* in the language\nreference.)\n\nNotes:\n\n1. When *s* is a string object, the ``in`` and ``not in`` operations\n act like a substring test.\n\n2. Values of *n* less than ``0`` are treated as ``0`` (which yields an\n empty sequence of the same type as *s*). Note also that the copies\n are shallow; nested structures are not copied. This often haunts\n new Python programmers; consider:\n\n >>> lists = [[]] * 3\n >>> lists\n [[], [], []]\n >>> lists[0].append(3)\n >>> lists\n [[3], [3], [3]]\n\n What has happened is that ``[[]]`` is a one-element list containing\n an empty list, so all three elements of ``[[]] * 3`` are (pointers\n to) this single empty list. Modifying any of the elements of\n ``lists`` modifies this single list. You can create a list of\n different lists this way:\n\n >>> lists = [[] for i in range(3)]\n >>> lists[0].append(3)\n >>> lists[1].append(5)\n >>> lists[2].append(7)\n >>> lists\n [[3], [5], [7]]\n\n3. If *i* or *j* is negative, the index is relative to the end of the\n sequence: ``len(s) + i`` or ``len(s) + j`` is substituted. But note\n that ``-0`` is still ``0``.\n\n4. The slice of *s* from *i* to *j* is defined as the sequence of\n items with index *k* such that ``i <= k < j``. If *i* or *j* is\n greater than ``len(s)``, use ``len(s)``. 
If *i* is omitted or\n ``None``, use ``0``. If *j* is omitted or ``None``, use\n ``len(s)``. If *i* is greater than or equal to *j*, the slice is\n empty.\n\n5. The slice of *s* from *i* to *j* with step *k* is defined as the\n sequence of items with index ``x = i + n*k`` such that ``0 <= n <\n (j-i)/k``. In other words, the indices are ``i``, ``i+k``,\n ``i+2*k``, ``i+3*k`` and so on, stopping when *j* is reached (but\n never including *j*). If *i* or *j* is greater than ``len(s)``,\n use ``len(s)``. If *i* or *j* are omitted or ``None``, they become\n "end" values (which end depends on the sign of *k*). Note, *k*\n cannot be zero. If *k* is ``None``, it is treated like ``1``.\n\n6. Concatenating immutable sequences always results in a new object.\n This means that building up a string by repeated concatenation will\n have a quadratic runtime cost in the total string length. To get a\n linear runtime cost, you must switch to one of the alternatives\n below:\n\n * if concatenating ``str`` objects, you can build a list and use\n ``str.join()`` at the end;\n\n * if concatenating ``bytes`` objects, you can similarly use\n ``bytes.join()``, or you can do in-place concatenation with a\n ``bytearray`` object. ``bytearray`` objects are mutable and have\n an efficient overallocation mechanism.\n\n\nString Methods\n==============\n\nString objects support the methods listed below.\n\nIn addition, Python\'s strings support the sequence type methods\ndescribed in the *Sequence Types --- str, bytes, bytearray, list,\ntuple, range* section. To output formatted strings, see the *String\nFormatting* section. Also, see the ``re`` module for string functions\nbased on regular expressions.\n\nstr.capitalize()\n\n Return a copy of the string with its first character capitalized\n and the rest lowercased.\n\nstr.center(width[, fillchar])\n\n Return the string centered in a string of length *width*. Padding\n is done using the specified *fillchar* (default is a space).\n\nstr.count(sub[, start[, end]])\n\n Return the number of non-overlapping occurrences of substring *sub*\n in the range [*start*, *end*]. Optional arguments *start* and\n *end* are interpreted as in slice notation.\n\nstr.encode(encoding="utf-8", errors="strict")\n\n Return an encoded version of the string as a bytes object. Default\n encoding is ``\'utf-8\'``. *errors* may be given to set a different\n error handling scheme. The default for *errors* is ``\'strict\'``,\n meaning that encoding errors raise a ``UnicodeError``. Other\n possible values are ``\'ignore\'``, ``\'replace\'``,\n ``\'xmlcharrefreplace\'``, ``\'backslashreplace\'`` and any other name\n registered via ``codecs.register_error()``, see section *Codec Base\n Classes*. For a list of possible encodings, see section *Standard\n Encodings*.\n\n Changed in version 3.1: Support for keyword arguments added.\n\nstr.endswith(suffix[, start[, end]])\n\n Return ``True`` if the string ends with the specified *suffix*,\n otherwise return ``False``. *suffix* can also be a tuple of\n suffixes to look for. With optional *start*, test beginning at\n that position. With optional *end*, stop comparing at that\n position.\n\nstr.expandtabs([tabsize])\n\n Return a copy of the string where all tab characters are replaced\n by zero or more spaces, depending on the current column and the\n given tab size. The column number is reset to zero after each\n newline occurring in the string. If *tabsize* is not given, a tab\n size of ``8`` characters is assumed. 
This doesn\'t understand other\n non-printing characters or escape sequences.\n\nstr.find(sub[, start[, end]])\n\n Return the lowest index in the string where substring *sub* is\n found, such that *sub* is contained in the slice ``s[start:end]``.\n Optional arguments *start* and *end* are interpreted as in slice\n notation. Return ``-1`` if *sub* is not found.\n\n Note: The ``find()`` method should be used only if you need to know the\n position of *sub*. To check if *sub* is a substring or not, use\n the ``in`` operator:\n\n >>> \'Py\' in \'Python\'\n True\n\nstr.format(*args, **kwargs)\n\n Perform a string formatting operation. The string on which this\n method is called can contain literal text or replacement fields\n delimited by braces ``{}``. Each replacement field contains either\n the numeric index of a positional argument, or the name of a\n keyword argument. Returns a copy of the string where each\n replacement field is replaced with the string value of the\n corresponding argument.\n\n >>> "The sum of 1 + 2 is {0}".format(1+2)\n \'The sum of 1 + 2 is 3\'\n\n See *Format String Syntax* for a description of the various\n formatting options that can be specified in format strings.\n\nstr.format_map(mapping)\n\n Similar to ``str.format(**mapping)``, except that ``mapping`` is\n used directly and not copied to a ``dict``. This is useful if, for\n example, ``mapping`` is a dict subclass:\n\n >>> class Default(dict):\n ... def __missing__(self, key):\n ... return key\n ...\n >>> \'{name} was born in {country}\'.format_map(Default(name=\'Guido\'))\n \'Guido was born in country\'\n\n New in version 3.2.\n\nstr.index(sub[, start[, end]])\n\n Like ``find()``, but raise ``ValueError`` when the substring is not\n found.\n\nstr.isalnum()\n\n Return true if all characters in the string are alphanumeric and\n there is at least one character, false otherwise. A character\n ``c`` is alphanumeric if one of the following returns ``True``:\n ``c.isalpha()``, ``c.isdecimal()``, ``c.isdigit()``, or\n ``c.isnumeric()``.\n\nstr.isalpha()\n\n Return true if all characters in the string are alphabetic and\n there is at least one character, false otherwise. Alphabetic\n characters are those characters defined in the Unicode character\n database as "Letter", i.e., those with general category property\n being one of "Lm", "Lt", "Lu", "Ll", or "Lo". Note that this is\n different from the "Alphabetic" property defined in the Unicode\n Standard.\n\nstr.isdecimal()\n\n Return true if all characters in the string are decimal characters\n and there is at least one character, false otherwise. Decimal\n characters are those from general category "Nd". This category\n includes digit characters, and all characters that can be used to\n form decimal-radix numbers, e.g. U+0660, ARABIC-INDIC DIGIT ZERO.\n\nstr.isdigit()\n\n Return true if all characters in the string are digits and there is\n at least one character, false otherwise. Digits include decimal\n characters and digits that need special handling, such as the\n compatibility superscript digits. 
Formally, a digit is a character\n that has the property value Numeric_Type=Digit or\n Numeric_Type=Decimal.\n\nstr.isidentifier()\n\n Return true if the string is a valid identifier according to the\n language definition, section *Identifiers and keywords*.\n\nstr.islower()\n\n Return true if all cased characters [4] in the string are lowercase\n and there is at least one cased character, false otherwise.\n\nstr.isnumeric()\n\n Return true if all characters in the string are numeric characters,\n and there is at least one character, false otherwise. Numeric\n characters include digit characters, and all characters that have\n the Unicode numeric value property, e.g. U+2155, VULGAR FRACTION\n ONE FIFTH. Formally, numeric characters are those with the\n property value Numeric_Type=Digit, Numeric_Type=Decimal or\n Numeric_Type=Numeric.\n\nstr.isprintable()\n\n Return true if all characters in the string are printable or the\n string is empty, false otherwise. Nonprintable characters are\n those characters defined in the Unicode character database as\n "Other" or "Separator", excepting the ASCII space (0x20) which is\n considered printable. (Note that printable characters in this\n context are those which should not be escaped when ``repr()`` is\n invoked on a string. It has no bearing on the handling of strings\n written to ``sys.stdout`` or ``sys.stderr``.)\n\nstr.isspace()\n\n Return true if there are only whitespace characters in the string\n and there is at least one character, false otherwise. Whitespace\n characters are those characters defined in the Unicode character\n database as "Other" or "Separator" and those with bidirectional\n property being one of "WS", "B", or "S".\n\nstr.istitle()\n\n Return true if the string is a titlecased string and there is at\n least one character; that is, uppercase characters may only\n follow uncased characters and lowercase characters only cased ones.\n Return false otherwise.\n\nstr.isupper()\n\n Return true if all cased characters [4] in the string are uppercase\n and there is at least one cased character, false otherwise.\n\nstr.join(iterable)\n\n Return a string which is the concatenation of the strings in\n *iterable*. A ``TypeError`` will be raised if there are\n any non-string values in *iterable*, including ``bytes`` objects.\n The separator between elements is the string providing this method.\n\nstr.ljust(width[, fillchar])\n\n Return the string left justified in a string of length *width*.\n Padding is done using the specified *fillchar* (default is a\n space). The original string is returned if *width* is less than or\n equal to ``len(s)``.\n\nstr.lower()\n\n Return a copy of the string with all the cased characters [4]\n converted to lowercase.\n\nstr.lstrip([chars])\n\n Return a copy of the string with leading characters removed. The\n *chars* argument is a string specifying the set of characters to be\n removed. If omitted or ``None``, the *chars* argument defaults to\n removing whitespace. 
The *chars* argument is not a prefix; rather,\n all combinations of its values are stripped:\n\n >>> \' spacious \'.lstrip()\n \'spacious \'\n >>> \'www.example.com\'.lstrip(\'cmowz.\')\n \'example.com\'\n\nstatic str.maketrans(x[, y[, z]])\n\n This static method returns a translation table usable for\n ``str.translate()``.\n\n If there is only one argument, it must be a dictionary mapping\n Unicode ordinals (integers) or characters (strings of length 1) to\n Unicode ordinals, strings (of arbitrary lengths) or None.\n Character keys will then be converted to ordinals.\n\n If there are two arguments, they must be strings of equal length,\n and in the resulting dictionary, each character in x will be mapped\n to the character at the same position in y. If there is a third\n argument, it must be a string, whose characters will be mapped to\n None in the result.\n\nstr.partition(sep)\n\n Split the string at the first occurrence of *sep*, and return a\n 3-tuple containing the part before the separator, the separator\n itself, and the part after the separator. If the separator is not\n found, return a 3-tuple containing the string itself, followed by\n two empty strings.\n\nstr.replace(old, new[, count])\n\n Return a copy of the string with all occurrences of substring *old*\n replaced by *new*. If the optional argument *count* is given, only\n the first *count* occurrences are replaced.\n\nstr.rfind(sub[, start[, end]])\n\n Return the highest index in the string where substring *sub* is\n found, such that *sub* is contained within ``s[start:end]``.\n Optional arguments *start* and *end* are interpreted as in slice\n notation. Return ``-1`` on failure.\n\nstr.rindex(sub[, start[, end]])\n\n Like ``rfind()`` but raises ``ValueError`` when the substring *sub*\n is not found.\n\nstr.rjust(width[, fillchar])\n\n Return the string right justified in a string of length *width*.\n Padding is done using the specified *fillchar* (default is a\n space). The original string is returned if *width* is less than or\n equal to ``len(s)``.\n\nstr.rpartition(sep)\n\n Split the string at the last occurrence of *sep*, and return a\n 3-tuple containing the part before the separator, the separator\n itself, and the part after the separator. If the separator is not\n found, return a 3-tuple containing two empty strings, followed by\n the string itself.\n\nstr.rsplit([sep[, maxsplit]])\n\n Return a list of the words in the string, using *sep* as the\n delimiter string. If *maxsplit* is given, at most *maxsplit* splits\n are done, the *rightmost* ones. If *sep* is not specified or\n ``None``, any whitespace string is a separator. Except for\n splitting from the right, ``rsplit()`` behaves like ``split()``\n which is described in detail below.\n\nstr.rstrip([chars])\n\n Return a copy of the string with trailing characters removed. The\n *chars* argument is a string specifying the set of characters to be\n removed. If omitted or ``None``, the *chars* argument defaults to\n removing whitespace. The *chars* argument is not a suffix; rather,\n all combinations of its values are stripped:\n\n >>> \' spacious \'.rstrip()\n \' spacious\'\n >>> \'mississippi\'.rstrip(\'ipz\')\n \'mississ\'\n\nstr.split([sep[, maxsplit]])\n\n Return a list of the words in the string, using *sep* as the\n delimiter string. If *maxsplit* is given, at most *maxsplit*\n splits are done (thus, the list will have at most ``maxsplit+1``\n elements). 
If *maxsplit* is not specified, then there is no limit\n on the number of splits (all possible splits are made).\n\n If *sep* is given, consecutive delimiters are not grouped together\n and are deemed to delimit empty strings (for example,\n ``\'1,,2\'.split(\',\')`` returns ``[\'1\', \'\', \'2\']``). The *sep*\n argument may consist of multiple characters (for example,\n ``\'1<>2<>3\'.split(\'<>\')`` returns ``[\'1\', \'2\', \'3\']``). Splitting\n an empty string with a specified separator returns ``[\'\']``.\n\n If *sep* is not specified or is ``None``, a different splitting\n algorithm is applied: runs of consecutive whitespace are regarded\n as a single separator, and the result will contain no empty strings\n at the start or end if the string has leading or trailing\n whitespace. Consequently, splitting an empty string or a string\n consisting of just whitespace with a ``None`` separator returns\n ``[]``.\n\n For example, ``\' 1 2 3 \'.split()`` returns ``[\'1\', \'2\', \'3\']``,\n and ``\' 1 2 3 \'.split(None, 1)`` returns ``[\'1\', \'2 3 \']``.\n\nstr.splitlines([keepends])\n\n Return a list of the lines in the string, breaking at line\n boundaries. Line breaks are not included in the resulting list\n unless *keepends* is given and true.\n\nstr.startswith(prefix[, start[, end]])\n\n Return ``True`` if string starts with the *prefix*, otherwise\n return ``False``. *prefix* can also be a tuple of prefixes to look\n for. With optional *start*, test string beginning at that\n position. With optional *end*, stop comparing string at that\n position.\n\nstr.strip([chars])\n\n Return a copy of the string with the leading and trailing\n characters removed. The *chars* argument is a string specifying the\n set of characters to be removed. If omitted or ``None``, the\n *chars* argument defaults to removing whitespace. The *chars*\n argument is not a prefix or suffix; rather, all combinations of its\n values are stripped:\n\n >>> \' spacious \'.strip()\n \'spacious\'\n >>> \'www.example.com\'.strip(\'cmowz.\')\n \'example\'\n\nstr.swapcase()\n\n Return a copy of the string with uppercase characters converted to\n lowercase and vice versa.\n\nstr.title()\n\n Return a titlecased version of the string where words start with an\n uppercase character and the remaining characters are lowercase.\n\n The algorithm uses a simple language-independent definition of a\n word as groups of consecutive letters. The definition works in\n many contexts but it means that apostrophes in contractions and\n possessives form word boundaries, which may not be the desired\n result:\n\n >>> "they\'re bill\'s friends from the UK".title()\n "They\'Re Bill\'S Friends From The Uk"\n\n A workaround for apostrophes can be constructed using regular\n expressions:\n\n >>> import re\n >>> def titlecase(s):\n ... return re.sub(r"[A-Za-z]+(\'[A-Za-z]+)?",\n ... lambda mo: mo.group(0)[0].upper() +\n ... mo.group(0)[1:].lower(),\n ... s)\n\n >>> titlecase("they\'re bill\'s friends.")\n "They\'re Bill\'s Friends."\n\nstr.translate(map)\n\n Return a copy of the string where all characters have been mapped\n through *map*, which must be a dictionary of Unicode ordinals\n (integers) to Unicode ordinals, strings or ``None``. Unmapped\n characters are left untouched. 
Characters mapped to ``None`` are\n deleted.\n\n You can use ``str.maketrans()`` to create a translation map from\n character-to-character mappings in different formats.\n\n Note: An even more flexible approach is to create a custom character\n mapping codec using the ``codecs`` module (see\n ``encodings.cp1251`` for an example).\n\nstr.upper()\n\n Return a copy of the string with all the cased characters [4]\n converted to uppercase. Note that ``str.upper().isupper()`` might\n be ``False`` if ``s`` contains uncased characters or if the Unicode\n category of the resulting character(s) is not "Lu" (Letter,\n uppercase), but e.g. "Lt" (Letter, titlecase).\n\nstr.zfill(width)\n\n Return the numeric string left filled with zeros in a string of\n length *width*. A sign prefix is handled correctly. The original\n string is returned if *width* is less than or equal to ``len(s)``.\n\n\nOld String Formatting Operations\n================================\n\nNote: The formatting operations described here are obsolete and may go\n away in future versions of Python. Use the new *String Formatting*\n in new code.\n\nString objects have one unique built-in operation: the ``%`` operator\n(modulo). This is also known as the string *formatting* or\n*interpolation* operator. Given ``format % values`` (where *format* is\na string), ``%`` conversion specifications in *format* are replaced\nwith zero or more elements of *values*. The effect is similar to the\nusing ``sprintf()`` in the C language.\n\nIf *format* requires a single argument, *values* may be a single non-\ntuple object. [5] Otherwise, *values* must be a tuple with exactly\nthe number of items specified by the format string, or a single\nmapping object (for example, a dictionary).\n\nA conversion specifier contains two or more characters and has the\nfollowing components, which must occur in this order:\n\n1. The ``\'%\'`` character, which marks the start of the specifier.\n\n2. Mapping key (optional), consisting of a parenthesised sequence of\n characters (for example, ``(somename)``).\n\n3. Conversion flags (optional), which affect the result of some\n conversion types.\n\n4. Minimum field width (optional). If specified as an ``\'*\'``\n (asterisk), the actual width is read from the next element of the\n tuple in *values*, and the object to convert comes after the\n minimum field width and optional precision.\n\n5. Precision (optional), given as a ``\'.\'`` (dot) followed by the\n precision. If specified as ``\'*\'`` (an asterisk), the actual\n precision is read from the next element of the tuple in *values*,\n and the value to convert comes after the precision.\n\n6. Length modifier (optional).\n\n7. Conversion type.\n\nWhen the right argument is a dictionary (or other mapping type), then\nthe formats in the string *must* include a parenthesised mapping key\ninto that dictionary inserted immediately after the ``\'%\'`` character.\nThe mapping key selects the value to be formatted from the mapping.\nFor example:\n\n>>> print(\'%(language)s has %(number)03d quote types.\' %\n... 
{\'language\': "Python", "number": 2})\nPython has 002 quote types.\n\nIn this case no ``*`` specifiers may occur in a format (since they\nrequire a sequential parameter list).\n\nThe conversion flag characters are:\n\n+-----------+-----------------------------------------------------------------------+\n| Flag | Meaning |\n+===========+=======================================================================+\n| ``\'#\'`` | The value conversion will use the "alternate form" (where defined |\n| | below). |\n+-----------+-----------------------------------------------------------------------+\n| ``\'0\'`` | The conversion will be zero padded for numeric values. |\n+-----------+-----------------------------------------------------------------------+\n| ``\'-\'`` | The converted value is left adjusted (overrides the ``\'0\'`` |\n| | conversion if both are given). |\n+-----------+-----------------------------------------------------------------------+\n| ``\' \'`` | (a space) A blank should be left before a positive number (or empty |\n| | string) produced by a signed conversion. |\n+-----------+-----------------------------------------------------------------------+\n| ``\'+\'`` | A sign character (``\'+\'`` or ``\'-\'``) will precede the conversion |\n| | (overrides a "space" flag). |\n+-----------+-----------------------------------------------------------------------+\n\nA length modifier (``h``, ``l``, or ``L``) may be present, but is\nignored as it is not necessary for Python -- so e.g. ``%ld`` is\nidentical to ``%d``.\n\nThe conversion types are:\n\n+--------------+-------------------------------------------------------+---------+\n| Conversion | Meaning | Notes |\n+==============+=======================================================+=========+\n| ``\'d\'`` | Signed integer decimal. | |\n+--------------+-------------------------------------------------------+---------+\n| ``\'i\'`` | Signed integer decimal. | |\n+--------------+-------------------------------------------------------+---------+\n| ``\'o\'`` | Signed octal value. | (1) |\n+--------------+-------------------------------------------------------+---------+\n| ``\'u\'`` | Obsolete type -- it is identical to ``\'d\'``. | (7) |\n+--------------+-------------------------------------------------------+---------+\n| ``\'x\'`` | Signed hexadecimal (lowercase). | (2) |\n+--------------+-------------------------------------------------------+---------+\n| ``\'X\'`` | Signed hexadecimal (uppercase). | (2) |\n+--------------+-------------------------------------------------------+---------+\n| ``\'e\'`` | Floating point exponential format (lowercase). | (3) |\n+--------------+-------------------------------------------------------+---------+\n| ``\'E\'`` | Floating point exponential format (uppercase). | (3) |\n+--------------+-------------------------------------------------------+---------+\n| ``\'f\'`` | Floating point decimal format. | (3) |\n+--------------+-------------------------------------------------------+---------+\n| ``\'F\'`` | Floating point decimal format. | (3) |\n+--------------+-------------------------------------------------------+---------+\n| ``\'g\'`` | Floating point format. Uses lowercase exponential | (4) |\n| | format if exponent is less than -4 or not less than | |\n| | precision, decimal format otherwise. | |\n+--------------+-------------------------------------------------------+---------+\n| ``\'G\'`` | Floating point format. 
Uses uppercase exponential | (4) |\n| | format if exponent is less than -4 or not less than | |\n| | precision, decimal format otherwise. | |\n+--------------+-------------------------------------------------------+---------+\n| ``\'c\'`` | Single character (accepts integer or single character | |\n| | string). | |\n+--------------+-------------------------------------------------------+---------+\n| ``\'r\'`` | String (converts any Python object using ``repr()``). | (5) |\n+--------------+-------------------------------------------------------+---------+\n| ``\'s\'`` | String (converts any Python object using ``str()``). | (5) |\n+--------------+-------------------------------------------------------+---------+\n| ``\'a\'`` | String (converts any Python object using | (5) |\n| | ``ascii()``). | |\n+--------------+-------------------------------------------------------+---------+\n| ``\'%\'`` | No argument is converted, results in a ``\'%\'`` | |\n| | character in the result. | |\n+--------------+-------------------------------------------------------+---------+\n\nNotes:\n\n1. The alternate form causes a leading zero (``\'0\'``) to be inserted\n between left-hand padding and the formatting of the number if the\n leading character of the result is not already a zero.\n\n2. The alternate form causes a leading ``\'0x\'`` or ``\'0X\'`` (depending\n on whether the ``\'x\'`` or ``\'X\'`` format was used) to be inserted\n between left-hand padding and the formatting of the number if the\n leading character of the result is not already a zero.\n\n3. The alternate form causes the result to always contain a decimal\n point, even if no digits follow it.\n\n The precision determines the number of digits after the decimal\n point and defaults to 6.\n\n4. The alternate form causes the result to always contain a decimal\n point, and trailing zeroes are not removed as they would otherwise\n be.\n\n The precision determines the number of significant digits before\n and after the decimal point and defaults to 6.\n\n5. If precision is ``N``, the output is truncated to ``N`` characters.\n\n1. See **PEP 237**.\n\nSince Python strings have an explicit length, ``%s`` conversions do\nnot assume that ``\'\\0\'`` is the end of the string.\n\nChanged in version 3.1: ``%f`` conversions for numbers whose absolute\nvalue is over 1e50 are no longer replaced by ``%g`` conversions.\n\nAdditional string operations are defined in standard modules\n``string`` and ``re``.\n\n\nRange Type\n==========\n\nThe ``range`` type is an immutable sequence which is commonly used for\nlooping. The advantage of the ``range`` type is that an ``range``\nobject will always take the same amount of memory, no matter the size\nof the range it represents.\n\nRange objects have relatively little behavior: they support indexing,\ncontains, iteration, the ``len()`` function, and the following\nmethods:\n\nrange.count(x)\n\n Return the number of *i*\'s for which ``s[i] == x``.\n\n New in version 3.2.\n\nrange.index(x)\n\n Return the smallest *i* such that ``s[i] == x``. Raises\n ``ValueError`` when *x* is not in the range.\n\n New in version 3.2.\n\n\nMutable Sequence Types\n======================\n\nList and bytearray objects support additional operations that allow\nin-place modification of the object. Other mutable sequence types\n(when added to the language) should also support these operations.\nStrings and tuples are immutable sequence types: such objects cannot\nbe modified once created. 
The following operations are defined on\nmutable sequence types (where *x* is an arbitrary object).\n\nNote that while lists allow their items to be of any type, bytearray\nobject "items" are all integers in the range 0 <= x < 256.\n\n+--------------------------------+----------------------------------+-----------------------+\n| Operation | Result | Notes |\n+================================+==================================+=======================+\n| ``s[i] = x`` | item *i* of *s* is replaced by | |\n| | *x* | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s[i:j] = t`` | slice of *s* from *i* to *j* is | |\n| | replaced by the contents of the | |\n| | iterable *t* | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``del s[i:j]`` | same as ``s[i:j] = []`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s[i:j:k] = t`` | the elements of ``s[i:j:k]`` are | (1) |\n| | replaced by those of *t* | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``del s[i:j:k]`` | removes the elements of | |\n| | ``s[i:j:k]`` from the list | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.append(x)`` | same as ``s[len(s):len(s)] = | |\n| | [x]`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.extend(x)`` | same as ``s[len(s):len(s)] = x`` | (2) |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.count(x)`` | return number of *i*\'s for which | |\n| | ``s[i] == x`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.index(x[, i[, j]])`` | return smallest *k* such that | (3) |\n| | ``s[k] == x`` and ``i <= k < j`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.insert(i, x)`` | same as ``s[i:i] = [x]`` | (4) |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.pop([i])`` | same as ``x = s[i]; del s[i]; | (5) |\n| | return x`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.remove(x)`` | same as ``del s[s.index(x)]`` | (3) |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.reverse()`` | reverses the items of *s* in | (6) |\n| | place | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.sort([key[, reverse]])`` | sort the items of *s* in place | (6), (7), (8) |\n+--------------------------------+----------------------------------+-----------------------+\n\nNotes:\n\n1. *t* must have the same length as the slice it is replacing.\n\n2. *x* can be any iterable object.\n\n3. Raises ``ValueError`` when *x* is not found in *s*. When a negative\n index is passed as the second or third parameter to the ``index()``\n method, the sequence length is added, as for slice indices. If it\n is still negative, it is truncated to zero, as for slice indices.\n\n4. When a negative index is passed as the first parameter to the\n ``insert()`` method, the sequence length is added, as for slice\n indices. If it is still negative, it is truncated to zero, as for\n slice indices.\n\n5. 
The optional argument *i* defaults to ``-1``, so that by default\n the last item is removed and returned.\n\n6. The ``sort()`` and ``reverse()`` methods modify the sequence in\n place for economy of space when sorting or reversing a large\n sequence. To remind you that they operate by side effect, they\n don\'t return the sorted or reversed sequence.\n\n7. The ``sort()`` method takes optional arguments for controlling the\n comparisons. Each must be specified as a keyword argument.\n\n *key* specifies a function of one argument that is used to extract\n a comparison key from each list element: ``key=str.lower``. The\n default value is ``None``. Use ``functools.cmp_to_key()`` to\n convert an old-style *cmp* function to a *key* function.\n\n *reverse* is a boolean value. If set to ``True``, then the list\n elements are sorted as if each comparison were reversed.\n\n The ``sort()`` method is guaranteed to be stable. A sort is stable\n if it guarantees not to change the relative order of elements that\n compare equal --- this is helpful for sorting in multiple passes\n (for example, sort by department, then by salary grade).\n\n **CPython implementation detail:** While a list is being sorted,\n the effect of attempting to mutate, or even inspect, the list is\n undefined. The C implementation of Python makes the list appear\n empty for the duration, and raises ``ValueError`` if it can detect\n that the list has been mutated during a sort.\n\n8. ``sort()`` is not supported by ``bytearray`` objects.\n\n\nBytes and Byte Array Methods\n============================\n\nBytes and bytearray objects, being "strings of bytes", have all\nmethods found on strings, with the exception of ``encode()``,\n``format()`` and ``isidentifier()``, which do not make sense with\nthese types. For converting the objects to strings, they have a\n``decode()`` method.\n\nWherever one of these methods needs to interpret the bytes as\ncharacters (e.g. the ``is...()`` methods), the ASCII character set is\nassumed.\n\nNote: The methods on bytes and bytearray objects don\'t accept strings as\n their arguments, just as the methods on strings don\'t accept bytes\n as their arguments. For example, you have to write\n\n a = "abc"\n b = a.replace("a", "f")\n\n and\n\n a = b"abc"\n b = a.replace(b"a", b"f")\n\nbytes.decode(encoding="utf-8", errors="strict")\nbytearray.decode(encoding="utf-8", errors="strict")\n\n Return a string decoded from the given bytes. Default encoding is\n ``\'utf-8\'``. *errors* may be given to set a different error\n handling scheme. The default for *errors* is ``\'strict\'``, meaning\n that encoding errors raise a ``UnicodeError``. Other possible\n values are ``\'ignore\'``, ``\'replace\'`` and any other name\n registered via ``codecs.register_error()``, see section *Codec Base\n Classes*. For a list of possible encodings, see section *Standard\n Encodings*.\n\n Changed in version 3.1: Added support for keyword arguments.\n\nThe bytes and bytearray types have an additional class method:\n\nclassmethod bytes.fromhex(string)\nclassmethod bytearray.fromhex(string)\n\n This ``bytes`` class method returns a bytes or bytearray object,\n decoding the given string object. 
The string must contain two\n hexadecimal digits per byte, spaces are ignored.\n\n >>> bytes.fromhex(\'f0 f1f2 \')\n b\'\\xf0\\xf1\\xf2\'\n\nThe maketrans and translate methods differ in semantics from the\nversions available on strings:\n\nbytes.translate(table[, delete])\nbytearray.translate(table[, delete])\n\n Return a copy of the bytes or bytearray object where all bytes\n occurring in the optional argument *delete* are removed, and the\n remaining bytes have been mapped through the given translation\n table, which must be a bytes object of length 256.\n\n You can use the ``bytes.maketrans()`` method to create a\n translation table.\n\n Set the *table* argument to ``None`` for translations that only\n delete characters:\n\n >>> b\'read this short text\'.translate(None, b\'aeiou\')\n b\'rd ths shrt txt\'\n\nstatic bytes.maketrans(from, to)\nstatic bytearray.maketrans(from, to)\n\n This static method returns a translation table usable for\n ``bytes.translate()`` that will map each character in *from* into\n the character at the same position in *to*; *from* and *to* must be\n bytes objects and have the same length.\n\n New in version 3.1.\n',
- 'typesseq-mutable': '\nMutable Sequence Types\n**********************\n\nList and bytearray objects support additional operations that allow\nin-place modification of the object. Other mutable sequence types\n(when added to the language) should also support these operations.\nStrings and tuples are immutable sequence types: such objects cannot\nbe modified once created. The following operations are defined on\nmutable sequence types (where *x* is an arbitrary object).\n\nNote that while lists allow their items to be of any type, bytearray\nobject "items" are all integers in the range 0 <= x < 256.\n\n+--------------------------------+----------------------------------+-----------------------+\n| Operation | Result | Notes |\n+================================+==================================+=======================+\n| ``s[i] = x`` | item *i* of *s* is replaced by | |\n| | *x* | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s[i:j] = t`` | slice of *s* from *i* to *j* is | |\n| | replaced by the contents of the | |\n| | iterable *t* | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``del s[i:j]`` | same as ``s[i:j] = []`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s[i:j:k] = t`` | the elements of ``s[i:j:k]`` are | (1) |\n| | replaced by those of *t* | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``del s[i:j:k]`` | removes the elements of | |\n| | ``s[i:j:k]`` from the list | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.append(x)`` | same as ``s[len(s):len(s)] = | |\n| | [x]`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.extend(x)`` | same as ``s[len(s):len(s)] = x`` | (2) |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.count(x)`` | return number of *i*\'s for which | |\n| | ``s[i] == x`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.index(x[, i[, j]])`` | return smallest *k* such that | (3) |\n| | ``s[k] == x`` and ``i <= k < j`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.insert(i, x)`` | same as ``s[i:i] = [x]`` | (4) |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.pop([i])`` | same as ``x = s[i]; del s[i]; | (5) |\n| | return x`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.remove(x)`` | same as ``del s[s.index(x)]`` | (3) |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.reverse()`` | reverses the items of *s* in | (6) |\n| | place | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.sort([key[, reverse]])`` | sort the items of *s* in place | (6), (7), (8) |\n+--------------------------------+----------------------------------+-----------------------+\n\nNotes:\n\n1. *t* must have the same length as the slice it is replacing.\n\n2. *x* can be any iterable object.\n\n3. Raises ``ValueError`` when *x* is not found in *s*. 
When a negative\n index is passed as the second or third parameter to the ``index()``\n method, the sequence length is added, as for slice indices. If it\n is still negative, it is truncated to zero, as for slice indices.\n\n4. When a negative index is passed as the first parameter to the\n ``insert()`` method, the sequence length is added, as for slice\n indices. If it is still negative, it is truncated to zero, as for\n slice indices.\n\n5. The optional argument *i* defaults to ``-1``, so that by default\n the last item is removed and returned.\n\n6. The ``sort()`` and ``reverse()`` methods modify the sequence in\n place for economy of space when sorting or reversing a large\n sequence. To remind you that they operate by side effect, they\n don\'t return the sorted or reversed sequence.\n\n7. The ``sort()`` method takes optional arguments for controlling the\n comparisons. Each must be specified as a keyword argument.\n\n *key* specifies a function of one argument that is used to extract\n a comparison key from each list element: ``key=str.lower``. The\n default value is ``None``. Use ``functools.cmp_to_key()`` to\n convert an old-style *cmp* function to a *key* function.\n\n *reverse* is a boolean value. If set to ``True``, then the list\n elements are sorted as if each comparison were reversed.\n\n The ``sort()`` method is guaranteed to be stable. A sort is stable\n if it guarantees not to change the relative order of elements that\n compare equal --- this is helpful for sorting in multiple passes\n (for example, sort by department, then by salary grade).\n\n **CPython implementation detail:** While a list is being sorted,\n the effect of attempting to mutate, or even inspect, the list is\n undefined. The C implementation of Python makes the list appear\n empty for the duration, and raises ``ValueError`` if it can detect\n that the list has been mutated during a sort.\n\n8. ``sort()`` is not supported by ``bytearray`` objects.\n',
+ 'typesseq': '\nSequence Types --- ``str``, ``bytes``, ``bytearray``, ``list``, ``tuple``, ``range``\n************************************************************************************\n\nThere are six sequence types: strings, byte sequences (``bytes``\nobjects), byte arrays (``bytearray`` objects), lists, tuples, and\nrange objects. For other containers see the built-in ``dict`` and\n``set`` classes, and the ``collections`` module.\n\nStrings contain Unicode characters. Their literals are written in\nsingle or double quotes: ``\'xyzzy\'``, ``"frobozz"``. See *String and\nBytes literals* for more about string literals. In addition to the\nfunctionality described here, there are also string-specific methods\ndescribed in the *String Methods* section.\n\nBytes and bytearray objects contain single bytes -- the former is\nimmutable while the latter is a mutable sequence. Bytes objects can\nbe constructed using the constructor, ``bytes()``, and from literals;\nuse a ``b`` prefix with normal string syntax: ``b\'xyzzy\'``. To\nconstruct byte arrays, use the ``bytearray()`` function.\n\nWhile string objects are sequences of characters (represented by\nstrings of length 1), bytes and bytearray objects are sequences of\n*integers* (between 0 and 255), representing the ASCII value of single\nbytes. That means that for a bytes or bytearray object *b*, ``b[0]``\nwill be an integer, while ``b[0:1]`` will be a bytes or bytearray\nobject of length 1. The representation of bytes objects uses the\nliteral format (``b\'...\'``) since it is generally more useful than\ne.g. ``bytes([50, 19, 100])``. You can always convert a bytes object\ninto a list of integers using ``list(b)``.\n\nAlso, while in previous Python versions, byte strings and Unicode\nstrings could be exchanged for each other rather freely (barring\nencoding issues), strings and bytes are now completely separate\nconcepts. There\'s no implicit en-/decoding if you pass an object of\nthe wrong type. A string always compares unequal to a bytes or\nbytearray object.\n\nLists are constructed with square brackets, separating items with\ncommas: ``[a, b, c]``. Tuples are constructed by the comma operator\n(not within square brackets), with or without enclosing parentheses,\nbut an empty tuple must have the enclosing parentheses, such as ``a,\nb, c`` or ``()``. A single item tuple must have a trailing comma,\nsuch as ``(d,)``.\n\nObjects of type range are created using the ``range()`` function.\nThey don\'t support concatenation or repetition, and using ``min()`` or\n``max()`` on them is inefficient.\n
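\nFor instance, a minimal sketch of these constructors (the values are\narbitrary illustrations):\n\n >>> t = 12345, 54321, \'hello!\'   # a tuple; parentheses are optional\n >>> single = (\'d\',)              # a one-item tuple needs the comma\n >>> len(single)\n 1\n >>> list(range(4))               # make a range object concrete\n [0, 1, 2, 3]\n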
Most sequence types support the following operations. The ``in`` and\n``not in`` operations have the same priorities as the comparison\noperations. The ``+`` and ``*`` operations have the same priority as\nthe corresponding numeric operations. [3] Additional methods are\nprovided for *Mutable Sequence Types*.\n\nThis table lists the sequence operations sorted in ascending priority\n(operations in the same box have the same priority). In the table,\n*s* and *t* are sequences of the same type; *n*, *i*, *j* and *k* are\nintegers.\n\n+--------------------+----------------------------------+------------+\n| Operation | Result | Notes |\n+====================+==================================+============+\n| ``x in s`` | ``True`` if an item of *s* is | (1) |\n| | equal to *x*, else ``False`` | |\n+--------------------+----------------------------------+------------+\n| ``x not in s`` | ``False`` if an item of *s* is | (1) |\n| | equal to *x*, else ``True`` | |\n+--------------------+----------------------------------+------------+\n| ``s + t`` | the concatenation of *s* and *t* | (6) |\n+--------------------+----------------------------------+------------+\n| ``s * n, n * s`` | *n* shallow copies of *s* | (2) |\n| | concatenated | |\n+--------------------+----------------------------------+------------+\n| ``s[i]`` | *i*th item of *s*, origin 0 | (3) |\n+--------------------+----------------------------------+------------+\n| ``s[i:j]`` | slice of *s* from *i* to *j* | (3)(4) |\n+--------------------+----------------------------------+------------+\n| ``s[i:j:k]`` | slice of *s* from *i* to *j* | (3)(5) |\n| | with step *k* | |\n+--------------------+----------------------------------+------------+\n| ``len(s)`` | length of *s* | |\n+--------------------+----------------------------------+------------+\n| ``min(s)`` | smallest item of *s* | |\n+--------------------+----------------------------------+------------+\n| ``max(s)`` | largest item of *s* | |\n+--------------------+----------------------------------+------------+\n| ``s.index(i)`` | index of the first occurrence of | |\n| | *i* in *s* | |\n+--------------------+----------------------------------+------------+\n| ``s.count(i)`` | total number of occurrences of | |\n| | *i* in *s* | |\n+--------------------+----------------------------------+------------+\n\nSequence types also support comparisons. In particular, tuples and\nlists are compared lexicographically by comparing corresponding\nelements. This means that to compare equal, every element must\ncompare equal and the two sequences must be of the same type and have\nthe same length. (For full details see *Comparisons* in the language\nreference.)\n\nNotes:\n\n1. When *s* is a string object, the ``in`` and ``not in`` operations\n act like a substring test.\n\n2. Values of *n* less than ``0`` are treated as ``0`` (which yields an\n empty sequence of the same type as *s*). Note also that the copies\n are shallow; nested structures are not copied. This often haunts\n new Python programmers; consider:\n\n >>> lists = [[]] * 3\n >>> lists\n [[], [], []]\n >>> lists[0].append(3)\n >>> lists\n [[3], [3], [3]]\n\n What has happened is that ``[[]]`` is a one-element list containing\n an empty list, so all three elements of ``[[]] * 3`` are (pointers\n to) this single empty list. Modifying any of the elements of\n ``lists`` modifies this single list. You can create a list of\n different lists this way:\n\n >>> lists = [[] for i in range(3)]\n >>> lists[0].append(3)\n >>> lists[1].append(5)\n >>> lists[2].append(7)\n >>> lists\n [[3], [5], [7]]\n\n3. If *i* or *j* is negative, the index is relative to the end of the\n string: ``len(s) + i`` or ``len(s) + j`` is substituted. But note\n that ``-0`` is still ``0``.\n\n4. The slice of *s* from *i* to *j* is defined as the sequence of\n items with index *k* such that ``i <= k < j``. If *i* or *j* is\n greater than ``len(s)``, use ``len(s)``. 
If *i* is omitted or\n ``None``, use ``0``. If *j* is omitted or ``None``, use\n ``len(s)``. If *i* is greater than or equal to *j*, the slice is\n empty.\n\n5. The slice of *s* from *i* to *j* with step *k* is defined as the\n sequence of items with index ``x = i + n*k`` such that ``0 <= n <\n (j-i)/k``. In other words, the indices are ``i``, ``i+k``,\n ``i+2*k``, ``i+3*k`` and so on, stopping when *j* is reached (but\n never including *j*). If *i* or *j* is greater than ``len(s)``,\n use ``len(s)``. If *i* or *j* are omitted or ``None``, they become\n "end" values (which end depends on the sign of *k*). Note, *k*\n cannot be zero. If *k* is ``None``, it is treated like ``1``.\n\n6. Concatenating immutable strings always results in a new object.\n This means that building up a string by repeated concatenation will\n have a quadratic runtime cost in the total string length. To get a\n linear runtime cost, you must switch to one of the alternatives\n below:\n\n * if concatenating ``str`` objects, you can build a list and use\n ``str.join()`` at the end;\n\n * if concatenating ``bytes`` objects, you can similarly use\n ``bytes.join()``, or you can do in-place concatenation with a\n ``bytearray`` object. ``bytearray`` objects are mutable and have\n an efficient overallocation mechanism.\n\n\nString Methods\n==============\n\nString objects support the methods listed below.\n\nIn addition, Python\'s strings support the sequence type methods\ndescribed in the *Sequence Types --- str, bytes, bytearray, list,\ntuple, range* section. To output formatted strings, see the *String\nFormatting* section. Also, see the ``re`` module for string functions\nbased on regular expressions.\n\nstr.capitalize()\n\n Return a copy of the string with its first character capitalized\n and the rest lowercased.\n\nstr.casefold()\n\n Return a casefolded copy of the string. Casefolded strings may be\n used for caseless matching.\n\n Casefolding is similar to lowercasing but more aggressive because\n it is intended to remove all case distinctions in a string. For\n example, the German lowercase letter ``\'\xc3\x9f\'`` is equivalent to\n ``"ss"``. Since it is already lowercase, ``lower()`` would do\n nothing to ``\'\xc3\x9f\'``; ``casefold()`` converts it to ``"ss"``.\n\n The casefolding algorithm is described in section 3.13 of the\n Unicode Standard.\n\n New in version 3.3.\n\nstr.center(width[, fillchar])\n\n Return the string centered in a string of length *width*. Padding\n is done using the specified *fillchar* (default is a space).\n\nstr.count(sub[, start[, end]])\n\n Return the number of non-overlapping occurrences of substring *sub*\n in the range [*start*, *end*]. Optional arguments *start* and\n *end* are interpreted as in slice notation.\n\nstr.encode(encoding="utf-8", errors="strict")\n\n Return an encoded version of the string as a bytes object. Default\n encoding is ``\'utf-8\'``. *errors* may be given to set a different\n error handling scheme. The default for *errors* is ``\'strict\'``,\n meaning that encoding errors raise a ``UnicodeError``. Other\n possible values are ``\'ignore\'``, ``\'replace\'``,\n ``\'xmlcharrefreplace\'``, ``\'backslashreplace\'`` and any other name\n registered via ``codecs.register_error()``, see section *Codec Base\n Classes*. 
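\n\n A minimal sketch of the default codec and one alternative error\n handler (the sample word is an arbitrary illustration):\n\n >>> \'stra\xc3\x9fe\'.encode()\n b\'stra\\xc3\\x9fe\'\n >>> \'stra\xc3\x9fe\'.encode(\'ascii\', \'replace\')\n b\'stra?e\'\n\n 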
For a list of possible encodings, see section *Standard\n Encodings*.\n\n Changed in version 3.1: Support for keyword arguments added.\n\nstr.endswith(suffix[, start[, end]])\n\n Return ``True`` if the string ends with the specified *suffix*,\n otherwise return ``False``. *suffix* can also be a tuple of\n suffixes to look for. With optional *start*, test beginning at\n that position. With optional *end*, stop comparing at that\n position.\n\nstr.expandtabs([tabsize])\n\n Return a copy of the string where all tab characters are replaced\n by zero or more spaces, depending on the current column and the\n given tab size. The column number is reset to zero after each\n newline occurring in the string. If *tabsize* is not given, a tab\n size of ``8`` characters is assumed. This doesn\'t understand other\n non-printing characters or escape sequences.\n\nstr.find(sub[, start[, end]])\n\n Return the lowest index in the string where substring *sub* is\n found, such that *sub* is contained in the slice ``s[start:end]``.\n Optional arguments *start* and *end* are interpreted as in slice\n notation. Return ``-1`` if *sub* is not found.\n\n Note: The ``find()`` method should be used only if you need to know the\n position of *sub*. To check if *sub* is a substring or not, use\n the ``in`` operator:\n\n >>> \'Py\' in \'Python\'\n True\n\nstr.format(*args, **kwargs)\n\n Perform a string formatting operation. The string on which this\n method is called can contain literal text or replacement fields\n delimited by braces ``{}``. Each replacement field contains either\n the numeric index of a positional argument, or the name of a\n keyword argument. Returns a copy of the string where each\n replacement field is replaced with the string value of the\n corresponding argument.\n\n >>> "The sum of 1 + 2 is {0}".format(1+2)\n \'The sum of 1 + 2 is 3\'\n\n See *Format String Syntax* for a description of the various\n formatting options that can be specified in format strings.\n\nstr.format_map(mapping)\n\n Similar to ``str.format(**mapping)``, except that ``mapping`` is\n used directly and not copied to a ``dict``. This is useful if, for\n example, ``mapping`` is a dict subclass:\n\n >>> class Default(dict):\n ... def __missing__(self, key):\n ... return key\n ...\n >>> \'{name} was born in {country}\'.format_map(Default(name=\'Guido\'))\n \'Guido was born in country\'\n\n New in version 3.2.\n\nstr.index(sub[, start[, end]])\n\n Like ``find()``, but raise ``ValueError`` when the substring is not\n found.\n\nstr.isalnum()\n\n Return true if all characters in the string are alphanumeric and\n there is at least one character, false otherwise. A character\n ``c`` is alphanumeric if one of the following returns ``True``:\n ``c.isalpha()``, ``c.isdecimal()``, ``c.isdigit()``, or\n ``c.isnumeric()``.\n\nstr.isalpha()\n\n Return true if all characters in the string are alphabetic and\n there is at least one character, false otherwise. Alphabetic\n characters are those characters defined in the Unicode character\n database as "Letter", i.e., those with general category property\n being one of "Lm", "Lt", "Lu", "Ll", or "Lo". Note that this is\n different from the "Alphabetic" property defined in the Unicode\n Standard.\n\nstr.isdecimal()\n\n Return true if all characters in the string are decimal characters\n and there is at least one character, false otherwise. Decimal\n characters are those from general category "Nd". 
This category\n includes digit characters, and all characters that can be used to\n form decimal-radix numbers, e.g. U+0660, ARABIC-INDIC DIGIT ZERO.\n\nstr.isdigit()\n\n Return true if all characters in the string are digits and there is\n at least one character, false otherwise. Digits include decimal\n characters and digits that need special handling, such as the\n compatibility superscript digits. Formally, a digit is a character\n that has the property value Numeric_Type=Digit or\n Numeric_Type=Decimal.\n\nstr.isidentifier()\n\n Return true if the string is a valid identifier according to the\n language definition, section *Identifiers and keywords*.\n\nstr.islower()\n\n Return true if all cased characters [4] in the string are lowercase\n and there is at least one cased character, false otherwise.\n\nstr.isnumeric()\n\n Return true if all characters in the string are numeric characters,\n and there is at least one character, false otherwise. Numeric\n characters include digit characters, and all characters that have\n the Unicode numeric value property, e.g. U+2155, VULGAR FRACTION\n ONE FIFTH. Formally, numeric characters are those with the\n property value Numeric_Type=Digit, Numeric_Type=Decimal or\n Numeric_Type=Numeric.\n\nstr.isprintable()\n\n Return true if all characters in the string are printable or the\n string is empty, false otherwise. Nonprintable characters are\n those characters defined in the Unicode character database as\n "Other" or "Separator", excepting the ASCII space (0x20) which is\n considered printable. (Note that printable characters in this\n context are those which should not be escaped when ``repr()`` is\n invoked on a string. It has no bearing on the handling of strings\n written to ``sys.stdout`` or ``sys.stderr``.)\n\nstr.isspace()\n\n Return true if there are only whitespace characters in the string\n and there is at least one character, false otherwise. Whitespace\n characters are those characters defined in the Unicode character\n database as "Other" or "Separator" and those with bidirectional\n property being one of "WS", "B", or "S".\n\nstr.istitle()\n\n Return true if the string is a titlecased string and there is at\n least one character, for example uppercase characters may only\n follow uncased characters and lowercase characters only cased ones.\n Return false otherwise.\n\nstr.isupper()\n\n Return true if all cased characters [4] in the string are uppercase\n and there is at least one cased character, false otherwise.\n\nstr.join(iterable)\n\n Return a string which is the concatenation of the strings in the\n iterable *iterable*. A ``TypeError`` will be raised if there are\n any non-string values in *iterable*, including ``bytes`` objects.\n The separator between elements is the string providing this method.\n\nstr.ljust(width[, fillchar])\n\n Return the string left justified in a string of length *width*.\n Padding is done using the specified *fillchar* (default is a\n space). The original string is returned if *width* is less than or\n equal to ``len(s)``.\n\nstr.lower()\n\n Return a copy of the string with all the cased characters [4]\n converted to lowercase.\n\n The lowercasing algorithm used is described in section 3.13 of the\n Unicode Standard.\n\nstr.lstrip([chars])\n\n Return a copy of the string with leading characters removed. The\n *chars* argument is a string specifying the set of characters to be\n removed. If omitted or ``None``, the *chars* argument defaults to\n removing whitespace. 
The *chars* argument is not a prefix; rather,\n all combinations of its values are stripped:\n\n >>> \' spacious \'.lstrip()\n \'spacious \'\n >>> \'www.example.com\'.lstrip(\'cmowz.\')\n \'example.com\'\n\nstatic str.maketrans(x[, y[, z]])\n\n This static method returns a translation table usable for\n ``str.translate()``.\n\n If there is only one argument, it must be a dictionary mapping\n Unicode ordinals (integers) or characters (strings of length 1) to\n Unicode ordinals, strings (of arbitrary lengths) or None.\n Character keys will then be converted to ordinals.\n\n If there are two arguments, they must be strings of equal length,\n and in the resulting dictionary, each character in x will be mapped\n to the character at the same position in y. If there is a third\n argument, it must be a string, whose characters will be mapped to\n None in the result.\n\nstr.partition(sep)\n\n Split the string at the first occurrence of *sep*, and return a\n 3-tuple containing the part before the separator, the separator\n itself, and the part after the separator. If the separator is not\n found, return a 3-tuple containing the string itself, followed by\n two empty strings.\n\nstr.replace(old, new[, count])\n\n Return a copy of the string with all occurrences of substring *old*\n replaced by *new*. If the optional argument *count* is given, only\n the first *count* occurrences are replaced.\n\nstr.rfind(sub[, start[, end]])\n\n Return the highest index in the string where substring *sub* is\n found, such that *sub* is contained within ``s[start:end]``.\n Optional arguments *start* and *end* are interpreted as in slice\n notation. Return ``-1`` on failure.\n\nstr.rindex(sub[, start[, end]])\n\n Like ``rfind()`` but raises ``ValueError`` when the substring *sub*\n is not found.\n\nstr.rjust(width[, fillchar])\n\n Return the string right justified in a string of length *width*.\n Padding is done using the specified *fillchar* (default is a\n space). The original string is returned if *width* is less than or\n equal to ``len(s)``.\n\nstr.rpartition(sep)\n\n Split the string at the last occurrence of *sep*, and return a\n 3-tuple containing the part before the separator, the separator\n itself, and the part after the separator. If the separator is not\n found, return a 3-tuple containing two empty strings, followed by\n the string itself.\n\nstr.rsplit(sep=None, maxsplit=-1)\n\n Return a list of the words in the string, using *sep* as the\n delimiter string. If *maxsplit* is given, at most *maxsplit* splits\n are done, the *rightmost* ones. If *sep* is not specified or\n ``None``, any whitespace string is a separator. Except for\n splitting from the right, ``rsplit()`` behaves like ``split()``\n which is described in detail below.\n\nstr.rstrip([chars])\n\n Return a copy of the string with trailing characters removed. The\n *chars* argument is a string specifying the set of characters to be\n removed. If omitted or ``None``, the *chars* argument defaults to\n removing whitespace. The *chars* argument is not a suffix; rather,\n all combinations of its values are stripped:\n\n >>> \' spacious \'.rstrip()\n \' spacious\'\n >>> \'mississippi\'.rstrip(\'ipz\')\n \'mississ\'\n\nstr.split(sep=None, maxsplit=-1)\n\n Return a list of the words in the string, using *sep* as the\n delimiter string. If *maxsplit* is given, at most *maxsplit*\n splits are done (thus, the list will have at most ``maxsplit+1``\n elements). 
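\n\n For instance (a small sketch; these strings are arbitrary\n illustrations):\n\n >>> \'a,b,c\'.split(\',\', 1)\n [\'a\', \'b,c\']\n >>> \'a,b,c\'.rsplit(\',\', 1)\n [\'a,b\', \'c\']\n\n 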
If *maxsplit* is not specified, then there is no limit\n on the number of splits (all possible splits are made).\n\n If *sep* is given, consecutive delimiters are not grouped together\n and are deemed to delimit empty strings (for example,\n ``\'1,,2\'.split(\',\')`` returns ``[\'1\', \'\', \'2\']``). The *sep*\n argument may consist of multiple characters (for example,\n ``\'1<>2<>3\'.split(\'<>\')`` returns ``[\'1\', \'2\', \'3\']``). Splitting\n an empty string with a specified separator returns ``[\'\']``.\n\n If *sep* is not specified or is ``None``, a different splitting\n algorithm is applied: runs of consecutive whitespace are regarded\n as a single separator, and the result will contain no empty strings\n at the start or end if the string has leading or trailing\n whitespace. Consequently, splitting an empty string or a string\n consisting of just whitespace with a ``None`` separator returns\n ``[]``.\n\n For example, ``\' 1 2 3 \'.split()`` returns ``[\'1\', \'2\', \'3\']``,\n and ``\' 1 2 3 \'.split(None, 1)`` returns ``[\'1\', \'2 3 \']``.\n\nstr.splitlines([keepends])\n\n Return a list of the lines in the string, breaking at line\n boundaries. Line breaks are not included in the resulting list\n unless *keepends* is given and true.\n\nstr.startswith(prefix[, start[, end]])\n\n Return ``True`` if string starts with the *prefix*, otherwise\n return ``False``. *prefix* can also be a tuple of prefixes to look\n for. With optional *start*, test string beginning at that\n position. With optional *end*, stop comparing string at that\n position.\n\nstr.strip([chars])\n\n Return a copy of the string with the leading and trailing\n characters removed. The *chars* argument is a string specifying the\n set of characters to be removed. If omitted or ``None``, the\n *chars* argument defaults to removing whitespace. The *chars*\n argument is not a prefix or suffix; rather, all combinations of its\n values are stripped:\n\n >>> \' spacious \'.strip()\n \'spacious\'\n >>> \'www.example.com\'.strip(\'cmowz.\')\n \'example\'\n\nstr.swapcase()\n\n Return a copy of the string with uppercase characters converted to\n lowercase and vice versa. Note that it is not necessarily true that\n ``s.swapcase().swapcase() == s``.\n\nstr.title()\n\n Return a titlecased version of the string where words start with an\n uppercase character and the remaining characters are lowercase.\n\n The algorithm uses a simple language-independent definition of a\n word as groups of consecutive letters. The definition works in\n many contexts but it means that apostrophes in contractions and\n possessives form word boundaries, which may not be the desired\n result:\n\n >>> "they\'re bill\'s friends from the UK".title()\n "They\'Re Bill\'S Friends From The Uk"\n\n A workaround for apostrophes can be constructed using regular\n expressions:\n\n >>> import re\n >>> def titlecase(s):\n return re.sub(r"[A-Za-z]+(\'[A-Za-z]+)?",\n lambda mo: mo.group(0)[0].upper() +\n mo.group(0)[1:].lower(),\n s)\n\n >>> titlecase("they\'re bill\'s friends.")\n "They\'re Bill\'s Friends."\n\nstr.translate(map)\n\n Return a copy of the string *s* where all characters have been\n mapped through *map*, which must be a dictionary of Unicode\n ordinals (integers) to Unicode ordinals, strings or ``None``.\n Unmapped characters are left untouched. Characters mapped to\n ``None`` are deleted.\n\n You can use ``str.maketrans()`` to create a translation map from\n character-to-character mappings in different formats.\n
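\n For example (a minimal sketch; this particular mapping is an\n arbitrary illustration):\n\n >>> table = str.maketrans(\'ab\', \'AB\', \'c\')\n >>> \'abcabc\'.translate(table)\n \'ABAB\'\n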
\n Note: An even more flexible approach is to create a custom character\n mapping codec using the ``codecs`` module (see\n ``encodings.cp1251`` for an example).\n\nstr.upper()\n\n Return a copy of the string with all the cased characters [4]\n converted to uppercase. Note that ``str.upper().isupper()`` might\n be ``False`` if ``s`` contains uncased characters or if the Unicode\n category of the resulting character(s) is not "Lu" (Letter,\n uppercase), but e.g. "Lt" (Letter, titlecase).\n\n The uppercasing algorithm used is described in section 3.13 of the\n Unicode Standard.\n\nstr.zfill(width)\n\n Return the numeric string left filled with zeros in a string of\n length *width*. A sign prefix is handled correctly. The original\n string is returned if *width* is less than or equal to ``len(s)``.\n\n\nOld String Formatting Operations\n================================\n\nNote: The formatting operations described here are modelled on C\'s\n printf() syntax. They only support formatting of certain builtin\n types. The use of a binary operator means that care may be needed\n in order to format tuples and dictionaries correctly. As the new\n *String Formatting* syntax is more flexible and handles tuples and\n dictionaries naturally, it is recommended for new code. However,\n there are no current plans to deprecate printf-style formatting.\n\nString objects have one unique built-in operation: the ``%`` operator\n(modulo). This is also known as the string *formatting* or\n*interpolation* operator. Given ``format % values`` (where *format* is\na string), ``%`` conversion specifications in *format* are replaced\nwith zero or more elements of *values*. The effect is similar to\nusing ``sprintf()`` in the C language.\n\nIf *format* requires a single argument, *values* may be a single non-\ntuple object. [5] Otherwise, *values* must be a tuple with exactly\nthe number of items specified by the format string, or a single\nmapping object (for example, a dictionary).\n\nA conversion specifier contains two or more characters and has the\nfollowing components, which must occur in this order:\n\n1. The ``\'%\'`` character, which marks the start of the specifier.\n\n2. Mapping key (optional), consisting of a parenthesised sequence of\n characters (for example, ``(somename)``).\n\n3. Conversion flags (optional), which affect the result of some\n conversion types.\n\n4. Minimum field width (optional). If specified as an ``\'*\'``\n (asterisk), the actual width is read from the next element of the\n tuple in *values*, and the object to convert comes after the\n minimum field width and optional precision.\n\n5. Precision (optional), given as a ``\'.\'`` (dot) followed by the\n precision. If specified as ``\'*\'`` (an asterisk), the actual\n precision is read from the next element of the tuple in *values*,\n and the value to convert comes after the precision.\n\n6. Length modifier (optional).\n\n7. Conversion type.\n\nWhen the right argument is a dictionary (or other mapping type), then\nthe formats in the string *must* include a parenthesised mapping key\ninto that dictionary inserted immediately after the ``\'%\'`` character.\nThe mapping key selects the value to be formatted from the mapping.\nFor example:\n\n>>> print(\'%(language)s has %(number)03d quote types.\' %\n... 
{\'language\': "Python", "number": 2})\nPython has 002 quote types.\n\nIn this case no ``*`` specifiers may occur in a format (since they\nrequire a sequential parameter list).\n\nThe conversion flag characters are:\n\n+-----------+-----------------------------------------------------------------------+\n| Flag | Meaning |\n+===========+=======================================================================+\n| ``\'#\'`` | The value conversion will use the "alternate form" (where defined |\n| | below). |\n+-----------+-----------------------------------------------------------------------+\n| ``\'0\'`` | The conversion will be zero padded for numeric values. |\n+-----------+-----------------------------------------------------------------------+\n| ``\'-\'`` | The converted value is left adjusted (overrides the ``\'0\'`` |\n| | conversion if both are given). |\n+-----------+-----------------------------------------------------------------------+\n| ``\' \'`` | (a space) A blank should be left before a positive number (or empty |\n| | string) produced by a signed conversion. |\n+-----------+-----------------------------------------------------------------------+\n| ``\'+\'`` | A sign character (``\'+\'`` or ``\'-\'``) will precede the conversion |\n| | (overrides a "space" flag). |\n+-----------+-----------------------------------------------------------------------+\n\nA length modifier (``h``, ``l``, or ``L``) may be present, but is\nignored as it is not necessary for Python -- so e.g. ``%ld`` is\nidentical to ``%d``.\n\nThe conversion types are:\n\n+--------------+-------------------------------------------------------+---------+\n| Conversion | Meaning | Notes |\n+==============+=======================================================+=========+\n| ``\'d\'`` | Signed integer decimal. | |\n+--------------+-------------------------------------------------------+---------+\n| ``\'i\'`` | Signed integer decimal. | |\n+--------------+-------------------------------------------------------+---------+\n| ``\'o\'`` | Signed octal value. | (1) |\n+--------------+-------------------------------------------------------+---------+\n| ``\'u\'`` | Obsolete type -- it is identical to ``\'d\'``. | (7) |\n+--------------+-------------------------------------------------------+---------+\n| ``\'x\'`` | Signed hexadecimal (lowercase). | (2) |\n+--------------+-------------------------------------------------------+---------+\n| ``\'X\'`` | Signed hexadecimal (uppercase). | (2) |\n+--------------+-------------------------------------------------------+---------+\n| ``\'e\'`` | Floating point exponential format (lowercase). | (3) |\n+--------------+-------------------------------------------------------+---------+\n| ``\'E\'`` | Floating point exponential format (uppercase). | (3) |\n+--------------+-------------------------------------------------------+---------+\n| ``\'f\'`` | Floating point decimal format. | (3) |\n+--------------+-------------------------------------------------------+---------+\n| ``\'F\'`` | Floating point decimal format. | (3) |\n+--------------+-------------------------------------------------------+---------+\n| ``\'g\'`` | Floating point format. Uses lowercase exponential | (4) |\n| | format if exponent is less than -4 or not less than | |\n| | precision, decimal format otherwise. | |\n+--------------+-------------------------------------------------------+---------+\n| ``\'G\'`` | Floating point format. 
Uses uppercase exponential | (4) |\n| | format if exponent is less than -4 or not less than | |\n| | precision, decimal format otherwise. | |\n+--------------+-------------------------------------------------------+---------+\n| ``\'c\'`` | Single character (accepts integer or single character | |\n| | string). | |\n+--------------+-------------------------------------------------------+---------+\n| ``\'r\'`` | String (converts any Python object using ``repr()``). | (5) |\n+--------------+-------------------------------------------------------+---------+\n| ``\'s\'`` | String (converts any Python object using ``str()``). | (5) |\n+--------------+-------------------------------------------------------+---------+\n| ``\'a\'`` | String (converts any Python object using | (5) |\n| | ``ascii()``). | |\n+--------------+-------------------------------------------------------+---------+\n| ``\'%\'`` | No argument is converted, results in a ``\'%\'`` | |\n| | character in the result. | |\n+--------------+-------------------------------------------------------+---------+\n\nNotes:\n\n1. The alternate form causes a leading zero (``\'0\'``) to be inserted\n between left-hand padding and the formatting of the number if the\n leading character of the result is not already a zero.\n\n2. The alternate form causes a leading ``\'0x\'`` or ``\'0X\'`` (depending\n on whether the ``\'x\'`` or ``\'X\'`` format was used) to be inserted\n between left-hand padding and the formatting of the number if the\n leading character of the result is not already a zero.\n\n3. The alternate form causes the result to always contain a decimal\n point, even if no digits follow it.\n\n The precision determines the number of digits after the decimal\n point and defaults to 6.\n\n4. The alternate form causes the result to always contain a decimal\n point, and trailing zeroes are not removed as they would otherwise\n be.\n\n The precision determines the number of significant digits before\n and after the decimal point and defaults to 6.\n\n5. If precision is ``N``, the output is truncated to ``N`` characters.\n\n7. See **PEP 237**.\n\nSince Python strings have an explicit length, ``%s`` conversions do\nnot assume that ``\'\\0\'`` is the end of the string.\n\nChanged in version 3.1: ``%f`` conversions for numbers whose absolute\nvalue is over 1e50 are no longer replaced by ``%g`` conversions.\n\nAdditional string operations are defined in standard modules\n``string`` and ``re``.\n\n\nRange Type\n==========\n\nThe ``range`` type is an immutable sequence which is commonly used for\nlooping. The advantage of the ``range`` type is that a ``range``\nobject will always take the same amount of memory, no matter the size\nof the range it represents.\n\nRange objects have relatively little behavior: they support indexing,\ncontainment tests, iteration, the ``len()`` function, and the\nfollowing methods:\n\nrange.count(x)\n\n Return the number of *i*\'s for which ``s[i] == x``.\n\n New in version 3.2.\n\nrange.index(x)\n\n Return the smallest *i* such that ``s[i] == x``. Raises\n ``ValueError`` when *x* is not in the range.\n\n New in version 3.2.\n
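\nFor illustration (a short sketch; the particular range is an\narbitrary example):\n\n >>> r = range(0, 20, 5)\n >>> len(r)\n 4\n >>> r.index(10)\n 2\n >>> 3 in r\n False\n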
The following operations are defined on\nmutable sequence types (where *x* is an arbitrary object).\n\nNote that while lists allow their items to be of any type, bytearray\nobject "items" are all integers in the range 0 <= x < 256.\n\n+--------------------------------+----------------------------------+-----------------------+\n| Operation | Result | Notes |\n+================================+==================================+=======================+\n| ``s[i] = x`` | item *i* of *s* is replaced by | |\n| | *x* | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s[i:j] = t`` | slice of *s* from *i* to *j* is | |\n| | replaced by the contents of the | |\n| | iterable *t* | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``del s[i:j]`` | same as ``s[i:j] = []`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s[i:j:k] = t`` | the elements of ``s[i:j:k]`` are | (1) |\n| | replaced by those of *t* | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``del s[i:j:k]`` | removes the elements of | |\n| | ``s[i:j:k]`` from the list | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.append(x)`` | same as ``s[len(s):len(s)] = | |\n| | [x]`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.extend(x)`` | same as ``s[len(s):len(s)] = x`` | (2) |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.clear()`` | remove all items from ``s`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.copy()`` | return a shallow copy of ``s`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.count(x)`` | return number of *i*\'s for which | |\n| | ``s[i] == x`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.index(x[, i[, j]])`` | return smallest *k* such that | (3) |\n| | ``s[k] == x`` and ``i <= k < j`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.insert(i, x)`` | same as ``s[i:i] = [x]`` | (4) |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.pop([i])`` | same as ``x = s[i]; del s[i]; | (5) |\n| | return x`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.remove(x)`` | same as ``del s[s.index(x)]`` | (3) |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.reverse()`` | reverses the items of *s* in | (6) |\n| | place | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.sort([key[, reverse]])`` | sort the items of *s* in place | (6), (7), (8) |\n+--------------------------------+----------------------------------+-----------------------+\n\nNotes:\n\n1. *t* must have the same length as the slice it is replacing.\n\n2. *x* can be any iterable object.\n\n3. Raises ``ValueError`` when *x* is not found in *s*. When a negative\n index is passed as the second or third parameter to the ``index()``\n method, the sequence length is added, as for slice indices. 
If it\n is still negative, it is truncated to zero, as for slice indices.\n\n4. When a negative index is passed as the first parameter to the\n ``insert()`` method, the sequence length is added, as for slice\n indices. If it is still negative, it is truncated to zero, as for\n slice indices.\n\n5. The optional argument *i* defaults to ``-1``, so that by default\n the last item is removed and returned.\n\n6. The ``sort()`` and ``reverse()`` methods modify the sequence in\n place for economy of space when sorting or reversing a large\n sequence. To remind you that they operate by side effect, they\n don\'t return the sorted or reversed sequence.\n\n7. The ``sort()`` method takes optional arguments for controlling the\n comparisons. Each must be specified as a keyword argument.\n\n *key* specifies a function of one argument that is used to extract\n a comparison key from each list element: ``key=str.lower``. The\n default value is ``None``. Use ``functools.cmp_to_key()`` to\n convert an old-style *cmp* function to a *key* function.\n\n *reverse* is a boolean value. If set to ``True``, then the list\n elements are sorted as if each comparison were reversed.\n\n The ``sort()`` method is guaranteed to be stable. A sort is stable\n if it guarantees not to change the relative order of elements that\n compare equal --- this is helpful for sorting in multiple passes\n (for example, sort by department, then by salary grade).\n\n **CPython implementation detail:** While a list is being sorted,\n the effect of attempting to mutate, or even inspect, the list is\n undefined. The C implementation of Python makes the list appear\n empty for the duration, and raises ``ValueError`` if it can detect\n that the list has been mutated during a sort.\n\n8. ``sort()`` is not supported by ``bytearray`` objects.\n\n New in version 3.3: ``clear()`` and ``copy()`` methods.\n\n\nBytes and Byte Array Methods\n============================\n\nBytes and bytearray objects, being "strings of bytes", have all\nmethods found on strings, with the exception of ``encode()``,\n``format()`` and ``isidentifier()``, which do not make sense with\nthese types. For converting the objects to strings, they have a\n``decode()`` method.\n\nWherever one of these methods needs to interpret the bytes as\ncharacters (e.g. the ``is...()`` methods), the ASCII character set is\nassumed.\n\nNew in version 3.3: The functions ``count()``, ``find()``,\n``index()``, ``rfind()`` and ``rindex()`` have additional semantics\ncompared to the corresponding string functions: They also accept an\ninteger in range 0 to 255 (a byte) as their first argument.\n\nNote: The methods on bytes and bytearray objects don\'t accept strings as\n their arguments, just as the methods on strings don\'t accept bytes\n as their arguments. For example, you have to write\n\n a = "abc"\n b = a.replace("a", "f")\n\n and\n\n a = b"abc"\n b = a.replace(b"a", b"f")\n\nbytes.decode(encoding="utf-8", errors="strict")\nbytearray.decode(encoding="utf-8", errors="strict")\n\n Return a string decoded from the given bytes. Default encoding is\n ``\'utf-8\'``. *errors* may be given to set a different error\n handling scheme. The default for *errors* is ``\'strict\'``, meaning\n that encoding errors raise a ``UnicodeError``. Other possible\n values are ``\'ignore\'``, ``\'replace\'`` and any other name\n registered via ``codecs.register_error()``, see section *Codec Base\n Classes*. 
For a list of possible encodings, see section *Standard\n Encodings*.\n\n Changed in version 3.1: Added support for keyword arguments.\n\nThe bytes and bytearray types have an additional class method:\n\nclassmethod bytes.fromhex(string)\nclassmethod bytearray.fromhex(string)\n\n This ``bytes`` class method returns a bytes or bytearray object,\n decoding the given string object. The string must contain two\n hexadecimal digits per byte, spaces are ignored.\n\n >>> bytes.fromhex(\'f0 f1f2 \')\n b\'\\xf0\\xf1\\xf2\'\n\nThe maketrans and translate methods differ in semantics from the\nversions available on strings:\n\nbytes.translate(table[, delete])\nbytearray.translate(table[, delete])\n\n Return a copy of the bytes or bytearray object where all bytes\n occurring in the optional argument *delete* are removed, and the\n remaining bytes have been mapped through the given translation\n table, which must be a bytes object of length 256.\n\n You can use the ``bytes.maketrans()`` method to create a\n translation table.\n\n Set the *table* argument to ``None`` for translations that only\n delete characters:\n\n >>> b\'read this short text\'.translate(None, b\'aeiou\')\n b\'rd ths shrt txt\'\n\nstatic bytes.maketrans(from, to)\nstatic bytearray.maketrans(from, to)\n\n This static method returns a translation table usable for\n ``bytes.translate()`` that will map each character in *from* into\n the character at the same position in *to*; *from* and *to* must be\n bytes objects and have the same length.\n\n New in version 3.1.\n',
+ 'typesseq-mutable': '\nMutable Sequence Types\n**********************\n\nList and bytearray objects support additional operations that allow\nin-place modification of the object. Other mutable sequence types\n(when added to the language) should also support these operations.\nStrings and tuples are immutable sequence types: such objects cannot\nbe modified once created. The following operations are defined on\nmutable sequence types (where *x* is an arbitrary object).\n\nNote that while lists allow their items to be of any type, bytearray\nobject "items" are all integers in the range 0 <= x < 256.\n\n+--------------------------------+----------------------------------+-----------------------+\n| Operation | Result | Notes |\n+================================+==================================+=======================+\n| ``s[i] = x`` | item *i* of *s* is replaced by | |\n| | *x* | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s[i:j] = t`` | slice of *s* from *i* to *j* is | |\n| | replaced by the contents of the | |\n| | iterable *t* | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``del s[i:j]`` | same as ``s[i:j] = []`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s[i:j:k] = t`` | the elements of ``s[i:j:k]`` are | (1) |\n| | replaced by those of *t* | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``del s[i:j:k]`` | removes the elements of | |\n| | ``s[i:j:k]`` from the list | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.append(x)`` | same as ``s[len(s):len(s)] = | |\n| | [x]`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.extend(x)`` | same as ``s[len(s):len(s)] = x`` | (2) |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.clear()`` | remove all items from ``s`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.copy()`` | return a shallow copy of ``s`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.count(x)`` | return number of *i*\'s for which | |\n| | ``s[i] == x`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.index(x[, i[, j]])`` | return smallest *k* such that | (3) |\n| | ``s[k] == x`` and ``i <= k < j`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.insert(i, x)`` | same as ``s[i:i] = [x]`` | (4) |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.pop([i])`` | same as ``x = s[i]; del s[i]; | (5) |\n| | return x`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.remove(x)`` | same as ``del s[s.index(x)]`` | (3) |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.reverse()`` | reverses the items of *s* in | (6) |\n| | place | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.sort([key[, reverse]])`` | sort the items of *s* in place | (6), (7), (8) 
|\n+--------------------------------+----------------------------------+-----------------------+\n\nNotes:\n\n1. *t* must have the same length as the slice it is replacing.\n\n2. *x* can be any iterable object.\n\n3. Raises ``ValueError`` when *x* is not found in *s*. When a negative\n index is passed as the second or third parameter to the ``index()``\n method, the sequence length is added, as for slice indices. If it\n is still negative, it is truncated to zero, as for slice indices.\n\n4. When a negative index is passed as the first parameter to the\n ``insert()`` method, the sequence length is added, as for slice\n indices. If it is still negative, it is truncated to zero, as for\n slice indices.\n\n5. The optional argument *i* defaults to ``-1``, so that by default\n the last item is removed and returned.\n\n6. The ``sort()`` and ``reverse()`` methods modify the sequence in\n place for economy of space when sorting or reversing a large\n sequence. To remind you that they operate by side effect, they\n don\'t return the sorted or reversed sequence.\n\n7. The ``sort()`` method takes optional arguments for controlling the\n comparisons. Each must be specified as a keyword argument.\n\n *key* specifies a function of one argument that is used to extract\n a comparison key from each list element: ``key=str.lower``. The\n default value is ``None``. Use ``functools.cmp_to_key()`` to\n convert an old-style *cmp* function to a *key* function.\n\n *reverse* is a boolean value. If set to ``True``, then the list\n elements are sorted as if each comparison were reversed.\n\n The ``sort()`` method is guaranteed to be stable. A sort is stable\n if it guarantees not to change the relative order of elements that\n compare equal --- this is helpful for sorting in multiple passes\n (for example, sort by department, then by salary grade).\n\n **CPython implementation detail:** While a list is being sorted,\n the effect of attempting to mutate, or even inspect, the list is\n undefined. The C implementation of Python makes the list appear\n empty for the duration, and raises ``ValueError`` if it can detect\n that the list has been mutated during a sort.\n\n8. ``sort()`` is not supported by ``bytearray`` objects.\n\n New in version 3.3: ``clear()`` and ``copy()`` methods.\n',
'unary': '\nUnary arithmetic and bitwise operations\n***************************************\n\nAll unary arithmetic and bitwise operations have the same priority:\n\n u_expr ::= power | "-" u_expr | "+" u_expr | "~" u_expr\n\nThe unary ``-`` (minus) operator yields the negation of its numeric\nargument.\n\nThe unary ``+`` (plus) operator yields its numeric argument unchanged.\n\nThe unary ``~`` (invert) operator yields the bitwise inversion of its\ninteger argument. The bitwise inversion of ``x`` is defined as\n``-(x+1)``. It only applies to integral numbers.\n\nIn all three cases, if the argument does not have the proper type, a\n``TypeError`` exception is raised.\n',
'while': '\nThe ``while`` statement\n***********************\n\nThe ``while`` statement is used for repeated execution as long as an\nexpression is true:\n\n while_stmt ::= "while" expression ":" suite\n ["else" ":" suite]\n\nThis repeatedly tests the expression and, if it is true, executes the\nfirst suite; if the expression is false (which may be the first time\nit is tested) the suite of the ``else`` clause, if present, is\nexecuted and the loop terminates.\n\nA ``break`` statement executed in the first suite terminates the loop\nwithout executing the ``else`` clause\'s suite. A ``continue``\nstatement executed in the first suite skips the rest of the suite and\ngoes back to testing the expression.\n',
'with': '\nThe ``with`` statement\n**********************\n\nThe ``with`` statement is used to wrap the execution of a block with\nmethods defined by a context manager (see section *With Statement\nContext Managers*). This allows common\n``try``...``except``...``finally`` usage patterns to be encapsulated\nfor convenient reuse.\n\n with_stmt ::= "with" with_item ("," with_item)* ":" suite\n with_item ::= expression ["as" target]\n\nThe execution of the ``with`` statement with one "item" proceeds as\nfollows:\n\n1. The context expression (the expression given in the ``with_item``)\n is evaluated to obtain a context manager.\n\n2. The context manager\'s ``__exit__()`` is loaded for later use.\n\n3. The context manager\'s ``__enter__()`` method is invoked.\n\n4. If a target was included in the ``with`` statement, the return\n value from ``__enter__()`` is assigned to it.\n\n Note: The ``with`` statement guarantees that if the ``__enter__()``\n method returns without an error, then ``__exit__()`` will always\n be called. Thus, if an error occurs during the assignment to the\n target list, it will be treated the same as an error occurring\n within the suite would be. See step 6 below.\n\n5. The suite is executed.\n\n6. The context manager\'s ``__exit__()`` method is invoked. If an\n exception caused the suite to be exited, its type, value, and\n traceback are passed as arguments to ``__exit__()``. Otherwise,\n three ``None`` arguments are supplied.\n\n If the suite was exited due to an exception, and the return value\n from the ``__exit__()`` method was false, the exception is\n reraised. If the return value was true, the exception is\n suppressed, and execution continues with the statement following\n the ``with`` statement.\n\n If the suite was exited for any reason other than an exception, the\n return value from ``__exit__()`` is ignored, and execution proceeds\n at the normal location for the kind of exit that was taken.\n\nWith more than one item, the context managers are processed as if\nmultiple ``with`` statements were nested:\n\n with A() as a, B() as b:\n suite\n\nis equivalent to\n\n with A() as a:\n with B() as b:\n suite\n\nChanged in version 3.1: Support for multiple context expressions.\n\nSee also:\n\n **PEP 0343** - The "with" statement\n The specification, background, and examples for the Python\n ``with`` statement.\n',
- 'yield': '\nThe ``yield`` statement\n***********************\n\n yield_stmt ::= yield_expression\n\nThe ``yield`` statement is only used when defining a generator\nfunction, and is only used in the body of the generator function.\nUsing a ``yield`` statement in a function definition is sufficient to\ncause that definition to create a generator function instead of a\nnormal function. When a generator function is called, it returns an\niterator known as a generator iterator, or more commonly, a generator.\nThe body of the generator function is executed by calling the\n``next()`` function on the generator repeatedly until it raises an\nexception.\n\nWhen a ``yield`` statement is executed, the state of the generator is\nfrozen and the value of ``expression_list`` is returned to\n``next()``\'s caller. By "frozen" we mean that all local state is\nretained, including the current bindings of local variables, the\ninstruction pointer, and the internal evaluation stack: enough\ninformation is saved so that the next time ``next()`` is invoked, the\nfunction can proceed exactly as if the ``yield`` statement were just\nanother external call.\n\nThe ``yield`` statement is allowed in the ``try`` clause of a ``try``\n... ``finally`` construct. If the generator is not resumed before it\nis finalized (by reaching a zero reference count or by being garbage\ncollected), the generator-iterator\'s ``close()`` method will be\ncalled, allowing any pending ``finally`` clauses to execute.\n\nSee also:\n\n **PEP 0255** - Simple Generators\n The proposal for adding generators and the ``yield`` statement\n to Python.\n\n **PEP 0342** - Coroutines via Enhanced Generators\n The proposal that, among other generator enhancements, proposed\n allowing ``yield`` to appear inside a ``try`` ... ``finally``\n block.\n'}
+ 'yield': '\nThe ``yield`` statement\n***********************\n\n yield_stmt ::= yield_expression\n\nThe ``yield`` statement is only used when defining a generator\nfunction, and is only used in the body of the generator function.\nUsing a ``yield`` statement in a function definition is sufficient to\ncause that definition to create a generator function instead of a\nnormal function.\n\nWhen a generator function is called, it returns an iterator known as a\ngenerator iterator, or more commonly, a generator. The body of the\ngenerator function is executed by calling the ``next()`` function on\nthe generator repeatedly until it raises an exception.\n\nWhen a ``yield`` statement is executed, the state of the generator is\nfrozen and the value of ``expression_list`` is returned to\n``next()``\'s caller. By "frozen" we mean that all local state is\nretained, including the current bindings of local variables, the\ninstruction pointer, and the internal evaluation stack: enough\ninformation is saved so that the next time ``next()`` is invoked, the\nfunction can proceed exactly as if the ``yield`` statement were just\nanother external call.\n\nThe ``yield`` statement is allowed in the ``try`` clause of a ``try``\n... ``finally`` construct. If the generator is not resumed before it\nis finalized (by reaching a zero reference count or by being garbage\ncollected), the generator-iterator\'s ``close()`` method will be\ncalled, allowing any pending ``finally`` clauses to execute.\n\nWhen ``yield from <expr>`` is used, it treats the supplied expression\nas a subiterator, producing values from it until the underlying\niterator is exhausted.\n\n Changed in version 3.3: Added ``yield from <expr>`` to delegate\n control flow to a subiterator\n\nFor full details of ``yield`` semantics, refer to the *Yield\nexpressions* section.\n\nSee also:\n\n **PEP 0255** - Simple Generators\n The proposal for adding generators and the ``yield`` statement\n to Python.\n\n **PEP 0342** - Coroutines via Enhanced Generators\n The proposal to enhance the API and syntax of generators, making\n them usable as simple coroutines.\n\n **PEP 0380** - Syntax for Delegating to a Subgenerator\n The proposal to introduce the ``yield_from`` syntax, making\n delegation to sub-generators easy.\n'}
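The delegation added to the 'yield' topic above (``yield from``, PEP 380) can be illustrated with a minimal sketch; the generator names are illustrative, not from the patch:

    def inner():
        yield 1
        yield 2

    def outer():
        yield 0
        yield from inner()   # delegate until the subiterator is exhausted
        yield 3

    print(list(outer()))     # [0, 1, 2, 3]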
diff --git a/Lib/queue.py b/Lib/queue.py
index bee7ed4..1dc72c4 100644
--- a/Lib/queue.py
+++ b/Lib/queue.py
@@ -1,49 +1,54 @@
-"""A multi-producer, multi-consumer queue."""
+'''A multi-producer, multi-consumer queue.'''
-from time import time as _time
try:
- import threading as _threading
+ import threading
except ImportError:
- import dummy_threading as _threading
+ import dummy_threading as threading
from collections import deque
-import heapq
+from heapq import heappush, heappop
+from time import steady as time
__all__ = ['Empty', 'Full', 'Queue', 'PriorityQueue', 'LifoQueue']
class Empty(Exception):
- "Exception raised by Queue.get(block=0)/get_nowait()."
+ 'Exception raised by Queue.get(block=0)/get_nowait().'
pass
class Full(Exception):
- "Exception raised by Queue.put(block=0)/put_nowait()."
+ 'Exception raised by Queue.put(block=0)/put_nowait().'
pass
class Queue:
- """Create a queue object with a given maximum size.
+ '''Create a queue object with a given maximum size.
If maxsize is <= 0, the queue size is infinite.
- """
+ '''
+
def __init__(self, maxsize=0):
self.maxsize = maxsize
self._init(maxsize)
+
# mutex must be held whenever the queue is mutating. All methods
# that acquire mutex must release it before returning. mutex
# is shared between the three conditions, so acquiring and
# releasing the conditions also acquires and releases mutex.
- self.mutex = _threading.Lock()
+ self.mutex = threading.Lock()
+
# Notify not_empty whenever an item is added to the queue; a
# thread waiting to get is notified then.
- self.not_empty = _threading.Condition(self.mutex)
+ self.not_empty = threading.Condition(self.mutex)
+
# Notify not_full whenever an item is removed from the queue;
# a thread waiting to put is notified then.
- self.not_full = _threading.Condition(self.mutex)
+ self.not_full = threading.Condition(self.mutex)
+
# Notify all_tasks_done whenever the number of unfinished tasks
# drops to zero; thread waiting to join() is notified to resume
- self.all_tasks_done = _threading.Condition(self.mutex)
+ self.all_tasks_done = threading.Condition(self.mutex)
self.unfinished_tasks = 0
def task_done(self):
- """Indicate that a formerly enqueued task is complete.
+ '''Indicate that a formerly enqueued task is complete.
Used by Queue consumer threads. For each get() used to fetch a task,
a subsequent call to task_done() tells the queue that the processing
@@ -55,43 +60,35 @@ class Queue:
Raises a ValueError if called more times than there were items
placed in the queue.
- """
- self.all_tasks_done.acquire()
- try:
+ '''
+ with self.all_tasks_done:
unfinished = self.unfinished_tasks - 1
if unfinished <= 0:
if unfinished < 0:
raise ValueError('task_done() called too many times')
self.all_tasks_done.notify_all()
self.unfinished_tasks = unfinished
- finally:
- self.all_tasks_done.release()
def join(self):
- """Blocks until all items in the Queue have been gotten and processed.
+ '''Blocks until all items in the Queue have been gotten and processed.
The count of unfinished tasks goes up whenever an item is added to the
queue. The count goes down whenever a consumer thread calls task_done()
to indicate the item was retrieved and all work on it is complete.
When the count of unfinished tasks drops to zero, join() unblocks.
- """
- self.all_tasks_done.acquire()
- try:
+ '''
+ with self.all_tasks_done:
while self.unfinished_tasks:
self.all_tasks_done.wait()
- finally:
- self.all_tasks_done.release()
def qsize(self):
- """Return the approximate size of the queue (not reliable!)."""
- self.mutex.acquire()
- n = self._qsize()
- self.mutex.release()
- return n
+ '''Return the approximate size of the queue (not reliable!).'''
+ with self.mutex:
+ return self._qsize()
def empty(self):
- """Return True if the queue is empty, False otherwise (not reliable!).
+ '''Return True if the queue is empty, False otherwise (not reliable!).
This method is likely to be removed at some point. Use qsize() == 0
as a direct substitute, but be aware that either approach risks a race
@@ -100,29 +97,23 @@ class Queue:
To create code that needs to wait for all queued tasks to be
completed, the preferred technique is to use the join() method.
-
- """
- self.mutex.acquire()
- n = not self._qsize()
- self.mutex.release()
- return n
+ '''
+ with self.mutex:
+ return not self._qsize()
def full(self):
- """Return True if the queue is full, False otherwise (not reliable!).
+ '''Return True if the queue is full, False otherwise (not reliable!).
This method is likely to be removed at some point. Use qsize() >= n
as a direct substitute, but be aware that either approach risks a race
condition where a queue can shrink before the result of full() or
qsize() can be used.
-
- """
- self.mutex.acquire()
- n = 0 < self.maxsize <= self._qsize()
- self.mutex.release()
- return n
+ '''
+ with self.mutex:
+ return 0 < self.maxsize <= self._qsize()
def put(self, item, block=True, timeout=None):
- """Put an item into the queue.
+ '''Put an item into the queue.
If optional args 'block' is true and 'timeout' is None (the default),
block if necessary until a free slot is available. If 'timeout' is
@@ -131,9 +122,8 @@ class Queue:
Otherwise ('block' is false), put an item on the queue if a free slot
is immediately available, else raise the Full exception ('timeout'
is ignored in that case).
- """
- self.not_full.acquire()
- try:
+ '''
+ with self.not_full:
if self.maxsize > 0:
if not block:
if self._qsize() >= self.maxsize:
@@ -144,28 +134,18 @@ class Queue:
elif timeout < 0:
raise ValueError("'timeout' must be a positive number")
else:
- endtime = _time() + timeout
+ endtime = time() + timeout
while self._qsize() >= self.maxsize:
- remaining = endtime - _time()
+ remaining = endtime - time()
if remaining <= 0.0:
raise Full
self.not_full.wait(remaining)
self._put(item)
self.unfinished_tasks += 1
self.not_empty.notify()
- finally:
- self.not_full.release()
-
- def put_nowait(self, item):
- """Put an item into the queue without blocking.
-
- Only enqueue the item if a free slot is immediately available.
- Otherwise raise the Full exception.
- """
- return self.put(item, False)
def get(self, block=True, timeout=None):
- """Remove and return an item from the queue.
+ '''Remove and return an item from the queue.
If optional args 'block' is true and 'timeout' is None (the default),
block if necessary until an item is available. If 'timeout' is
@@ -174,9 +154,8 @@ class Queue:
Otherwise ('block' is false), return an item if one is immediately
available, else raise the Empty exception ('timeout' is ignored
in that case).
- """
- self.not_empty.acquire()
- try:
+ '''
+ with self.not_empty:
if not block:
if not self._qsize():
raise Empty
@@ -186,25 +165,31 @@ class Queue:
elif timeout < 0:
raise ValueError("'timeout' must be a positive number")
else:
- endtime = _time() + timeout
+ endtime = time() + timeout
while not self._qsize():
- remaining = endtime - _time()
+ remaining = endtime - time()
if remaining <= 0.0:
raise Empty
self.not_empty.wait(remaining)
item = self._get()
self.not_full.notify()
return item
- finally:
- self.not_empty.release()
+
+ def put_nowait(self, item):
+ '''Put an item into the queue without blocking.
+
+ Only enqueue the item if a free slot is immediately available.
+ Otherwise raise the Full exception.
+ '''
+ return self.put(item, block=False)
def get_nowait(self):
- """Remove and return an item from the queue without blocking.
+ '''Remove and return an item from the queue without blocking.
Only get an item if one is immediately available. Otherwise
raise the Empty exception.
- """
- return self.get(False)
+ '''
+ return self.get(block=False)
# Override these methods to implement other queue organizations
# (e.g. stack or priority queue).
@@ -214,7 +199,7 @@ class Queue:
def _init(self, maxsize):
self.queue = deque()
- def _qsize(self, len=len):
+ def _qsize(self):
return len(self.queue)
# Put a new item in the queue
@@ -235,13 +220,13 @@ class PriorityQueue(Queue):
def _init(self, maxsize):
self.queue = []
- def _qsize(self, len=len):
+ def _qsize(self):
return len(self.queue)
- def _put(self, item, heappush=heapq.heappush):
+ def _put(self, item):
heappush(self.queue, item)
- def _get(self, heappop=heapq.heappop):
+ def _get(self):
return heappop(self.queue)
@@ -251,7 +236,7 @@ class LifoQueue(Queue):
def _init(self, maxsize):
self.queue = []
- def _qsize(self, len=len):
+ def _qsize(self):
return len(self.queue)
def _put(self, item):
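The queue.py changes above replace explicit acquire()/try/finally/release() pairs with the context-manager form of locks and conditions. The two spellings are equivalent; a minimal sketch, not part of the patch:

    import threading

    lock = threading.Lock()

    # old style, as removed by the patch
    lock.acquire()
    try:
        pass  # critical section
    finally:
        lock.release()

    # new style, as introduced by the patch
    with lock:
        pass  # critical section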
diff --git a/Lib/random.py b/Lib/random.py
index d4006e5..ac80ca7 100644
--- a/Lib/random.py
+++ b/Lib/random.py
@@ -41,7 +41,7 @@ from types import MethodType as _MethodType, BuiltinMethodType as _BuiltinMethod
from math import log as _log, exp as _exp, pi as _pi, e as _e, ceil as _ceil
from math import sqrt as _sqrt, acos as _acos, cos as _cos, sin as _sin
from os import urandom as _urandom
-from collections import Set as _Set, Sequence as _Sequence
+from collections.abc import Set as _Set, Sequence as _Sequence
from hashlib import sha512 as _sha512
__all__ = ["Random","seed","random","uniform","randint","choice","sample",
diff --git a/Lib/re.py b/Lib/re.py
index 3fd59df..9195e0a 100644
--- a/Lib/re.py
+++ b/Lib/re.py
@@ -212,7 +212,7 @@ def compile(pattern, flags=0):
def purge():
"Clear the regular expression caches"
- _compile_typed.cache_clear()
+ _compile.cache_clear()
_compile_repl.cache_clear()
def template(pattern, flags=0):
@@ -220,12 +220,14 @@ def template(pattern, flags=0):
return _compile(pattern, flags|T)
_alphanum_str = frozenset(
- "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234567890")
+ "_abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234567890")
_alphanum_bytes = frozenset(
- b"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234567890")
+ b"_abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234567890")
def escape(pattern):
- "Escape all non-alphanumeric characters in pattern."
+ """
+ Escape all the characters in pattern except ASCII letters, numbers and '_'.
+ """
if isinstance(pattern, str):
alphanum = _alphanum_str
s = list(pattern)
@@ -256,11 +258,8 @@ def escape(pattern):
_pattern_type = type(sre_compile.compile("", 0))
+@functools.lru_cache(maxsize=500, typed=True)
def _compile(pattern, flags):
- return _compile_typed(type(pattern), pattern, flags)
-
-@functools.lru_cache(maxsize=500)
-def _compile_typed(text_bytes_type, pattern, flags):
# internal: compile pattern
if isinstance(pattern, _pattern_type):
if flags:
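The re.py change above collapses the two-level cache into a single ``functools.lru_cache`` with ``typed=True``, which keys cache entries on argument types as well as values, preserving the old behavior of caching str and bytes patterns separately. A minimal demonstration of ``typed=True``, independent of re internals:

    import functools

    @functools.lru_cache(maxsize=None, typed=True)
    def identity(x):
        return x

    identity(3)      # cached under an int key
    identity(3.0)    # equal value, but cached separately as a float
    print(identity.cache_info().currsize)  # 2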
diff --git a/Lib/sched.py b/Lib/sched.py
index a119892..a89a118 100644
--- a/Lib/sched.py
+++ b/Lib/sched.py
@@ -28,12 +28,17 @@ has another way to reference private data (besides global variables).
# XXX instead of having to define a module or class just to hold
# XXX the global state of your particular time and delay functions.
+import time
import heapq
from collections import namedtuple
+try:
+ import threading
+except ImportError:
+ import dummy_threading as threading
__all__ = ["scheduler"]
-class Event(namedtuple('Event', 'time, priority, action, argument')):
+class Event(namedtuple('Event', 'time, priority, action, argument, kwargs')):
def __eq__(s, o): return (s.time, s.priority) == (o.time, o.priority)
def __ne__(s, o): return (s.time, s.priority) != (o.time, o.priority)
def __lt__(s, o): return (s.time, s.priority) < (o.time, o.priority)
@@ -42,32 +47,36 @@ class Event(namedtuple('Event', 'time, priority, action, argument')):
def __ge__(s, o): return (s.time, s.priority) >= (o.time, o.priority)
class scheduler:
- def __init__(self, timefunc, delayfunc):
+
+ def __init__(self, timefunc=time.time, delayfunc=time.sleep):
"""Initialize a new instance, passing the time and delay
functions"""
self._queue = []
+ self._lock = threading.RLock()
self.timefunc = timefunc
self.delayfunc = delayfunc
- def enterabs(self, time, priority, action, argument):
+ def enterabs(self, time, priority, action, argument=[], kwargs={}):
"""Enter a new event in the queue at an absolute time.
Returns an ID for the event which can be used to remove it,
if necessary.
"""
- event = Event(time, priority, action, argument)
- heapq.heappush(self._queue, event)
- return event # The ID
+ with self._lock:
+ event = Event(time, priority, action, argument, kwargs)
+ heapq.heappush(self._queue, event)
+ return event # The ID
- def enter(self, delay, priority, action, argument):
+ def enter(self, delay, priority, action, argument=[], kwargs={}):
"""A variant that specifies the time as a relative time.
This is actually the more commonly used interface.
"""
- time = self.timefunc() + delay
- return self.enterabs(time, priority, action, argument)
+ with self._lock:
+ time = self.timefunc() + delay
+ return self.enterabs(time, priority, action, argument, kwargs)
def cancel(self, event):
"""Remove an event from the queue.
@@ -76,15 +85,20 @@ class scheduler:
If the event is not in the queue, this raises ValueError.
"""
- self._queue.remove(event)
- heapq.heapify(self._queue)
+ with self._lock:
+ self._queue.remove(event)
+ heapq.heapify(self._queue)
def empty(self):
"""Check whether the queue is empty."""
- return not self._queue
+ with self._lock:
+ return not self._queue
- def run(self):
+ def run(self, blocking=True):
"""Execute events until the queue is empty.
+ If blocking is False, executes the scheduled events due to
+ expire soonest (if any) and then returns the number of seconds
+ until the next scheduled call in the scheduler.
When there is a positive delay until the first event, the
delay function is called and the event is left in the queue;
@@ -106,24 +120,27 @@ class scheduler:
"""
# localize variable access to minimize overhead
# and to improve thread safety
- q = self._queue
- delayfunc = self.delayfunc
- timefunc = self.timefunc
- pop = heapq.heappop
- while q:
- time, priority, action, argument = checked_event = q[0]
- now = timefunc()
- if now < time:
- delayfunc(time - now)
- else:
- event = pop(q)
- # Verify that the event was not removed or altered
- # by another thread after we last looked at q[0].
- if event is checked_event:
- action(*argument)
- delayfunc(0) # Let other threads run
+ with self._lock:
+ q = self._queue
+ delayfunc = self.delayfunc
+ timefunc = self.timefunc
+ pop = heapq.heappop
+ while q:
+ time, priority, action, argument, kwargs = checked_event = q[0]
+ now = timefunc()
+ if now < time:
+ if not blocking:
+ return time - now
+ delayfunc(time - now)
else:
- heapq.heappush(q, event)
+ event = pop(q)
+ # Verify that the event was not removed or altered
+ # by another thread after we last looked at q[0].
+ if event is checked_event:
+ action(*argument, **kwargs)
+ delayfunc(0) # Let other threads run
+ else:
+ heapq.heappush(q, event)
@property
def queue(self):
@@ -136,5 +153,6 @@ class scheduler:
# Use heapq to sort the queue rather than using 'sorted(self._queue)'.
# With heapq, two events scheduled at the same time will show in
# the actual order they would be retrieved.
- events = self._queue[:]
- return map(heapq.heappop, [events]*len(events))
+ with self._lock:
+ events = self._queue[:]
+ return map(heapq.heappop, [events]*len(events))
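With the defaults, lock, and keyword-argument support added to sched above, a scheduler can be used roughly as follows (a sketch against the patched API):

    import sched

    s = sched.scheduler()          # time.time/time.sleep by default now
    s.enter(0.5, 1, print,
            argument=("hello", "world"), kwargs={"sep": ", "})
    print(s.run(blocking=False))   # event not yet due: seconds remaining
    s.run()                        # blocks, then prints "hello, world"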
diff --git a/Lib/shlex.py b/Lib/shlex.py
index 3edd3db..69f3b45 100644
--- a/Lib/shlex.py
+++ b/Lib/shlex.py
@@ -6,13 +6,14 @@
# Posix compliance, split(), string arguments, and
# iterator interface by Gustavo Niemeyer, April 2003.
-import os.path
+import os
+import re
import sys
from collections import deque
from io import StringIO
-__all__ = ["shlex", "split"]
+__all__ = ["shlex", "split", "quote"]
class shlex:
"A lexical analyzer class for simple shell-like syntaxes."
@@ -274,6 +275,21 @@ def split(s, comments=False, posix=True):
lex.commenters = ''
return list(lex)
+
+_find_unsafe = re.compile(r'[^\w@%+=:,./-]', re.ASCII).search
+
+def quote(s):
+ """Return a shell-escaped version of the string *s*."""
+ if not s:
+ return "''"
+ if _find_unsafe(s) is None:
+ return s
+
+ # use single quotes, and put single quotes into double quotes
+ # the string $'b is then quoted as '$'"'"'b'
+ return "'" + s.replace("'", "'\"'\"'") + "'"
+
+
if __name__ == '__main__':
if len(sys.argv) == 1:
lexer = shlex()
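The new ``shlex.quote()`` returns a string that is safe to interpolate into a shell command line: strings containing characters outside the whitelist are wrapped in single quotes, with embedded single quotes escaped. For example:

    from shlex import quote

    filename = "somefile; rm -rf ~"
    print("ls -l " + quote(filename))   # ls -l 'somefile; rm -rf ~'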
diff --git a/Lib/shutil.py b/Lib/shutil.py
index d1b1af3..6664599 100644
--- a/Lib/shutil.py
+++ b/Lib/shutil.py
@@ -15,6 +15,7 @@ import tarfile
try:
import bz2
+ del bz2
_BZ2_SUPPORTED = True
except ImportError:
_BZ2_SUPPORTED = False
@@ -34,7 +35,9 @@ __all__ = ["copyfileobj", "copyfile", "copymode", "copystat", "copy", "copy2",
"ExecError", "make_archive", "get_archive_formats",
"register_archive_format", "unregister_archive_format",
"get_unpack_formats", "register_unpack_format",
- "unregister_unpack_format", "unpack_archive", "ignore_patterns"]
+ "unregister_unpack_format", "unpack_archive",
+ "ignore_patterns", "chown"]
+ # disk_usage is added later, if available on the platform
class Error(EnvironmentError):
pass
@@ -79,8 +82,13 @@ def _samefile(src, dst):
return (os.path.normcase(os.path.abspath(src)) ==
os.path.normcase(os.path.abspath(dst)))
-def copyfile(src, dst):
- """Copy data from src to dst"""
+def copyfile(src, dst, symlinks=False):
+ """Copy data from src to dst.
+
+ If optional flag `symlinks` is set and `src` is a symbolic link, a new
+ symlink will be created instead of copying the file it points to.
+
+ """
if _samefile(src, dst):
raise Error("`%s` and `%s` are the same file" % (src, dst))
@@ -95,54 +103,94 @@ def copyfile(src, dst):
if stat.S_ISFIFO(st.st_mode):
raise SpecialFileError("`%s` is a named pipe" % fn)
- with open(src, 'rb') as fsrc:
- with open(dst, 'wb') as fdst:
- copyfileobj(fsrc, fdst)
+ if symlinks and os.path.islink(src):
+ os.symlink(os.readlink(src), dst)
+ else:
+ with open(src, 'rb') as fsrc:
+ with open(dst, 'wb') as fdst:
+ copyfileobj(fsrc, fdst)
+
+def copymode(src, dst, symlinks=False):
+ """Copy mode bits from src to dst.
+
+ If the optional flag `symlinks` is set, symlinks aren't followed if and
+ only if both `src` and `dst` are symlinks. In that case, if `lchmod`
+ isn't available (e.g. on Linux), this method does nothing.
+
+ """
+ if symlinks and os.path.islink(src) and os.path.islink(dst):
+ if hasattr(os, 'lchmod'):
+ stat_func, chmod_func = os.lstat, os.lchmod
+ else:
+ return
+ elif hasattr(os, 'chmod'):
+ stat_func, chmod_func = os.stat, os.chmod
+ else:
+ return
+
+ st = stat_func(src)
+ chmod_func(dst, stat.S_IMODE(st.st_mode))
-def copymode(src, dst):
- """Copy mode bits from src to dst"""
- if hasattr(os, 'chmod'):
- st = os.stat(src)
- mode = stat.S_IMODE(st.st_mode)
- os.chmod(dst, mode)
+def copystat(src, dst, symlinks=False):
+ """Copy all stat info (mode bits, atime, mtime, flags) from src to dst.
-def copystat(src, dst):
- """Copy all stat info (mode bits, atime, mtime, flags) from src to dst"""
- st = os.stat(src)
+ If the optional flag `symlinks` is set, symlinks aren't followed if and
+ only if both `src` and `dst` are symlinks.
+
+ """
+ def _nop(*args):
+ pass
+
+ if symlinks and os.path.islink(src) and os.path.islink(dst):
+ stat_func = os.lstat
+ utime_func = os.lutimes if hasattr(os, 'lutimes') else _nop
+ chmod_func = os.lchmod if hasattr(os, 'lchmod') else _nop
+ chflags_func = os.lchflags if hasattr(os, 'lchflags') else _nop
+ else:
+ stat_func = os.stat
+ utime_func = os.utime if hasattr(os, 'utime') else _nop
+ chmod_func = os.chmod if hasattr(os, 'chmod') else _nop
+ chflags_func = os.chflags if hasattr(os, 'chflags') else _nop
+
+ st = stat_func(src)
mode = stat.S_IMODE(st.st_mode)
- if hasattr(os, 'utime'):
- os.utime(dst, (st.st_atime, st.st_mtime))
- if hasattr(os, 'chmod'):
- os.chmod(dst, mode)
- if hasattr(os, 'chflags') and hasattr(st, 'st_flags'):
+ utime_func(dst, (st.st_atime, st.st_mtime))
+ chmod_func(dst, mode)
+ if hasattr(st, 'st_flags'):
try:
- os.chflags(dst, st.st_flags)
+ chflags_func(dst, st.st_flags)
except OSError as why:
if (not hasattr(errno, 'EOPNOTSUPP') or
why.errno != errno.EOPNOTSUPP):
raise
-def copy(src, dst):
+def copy(src, dst, symlinks=False):
"""Copy data and mode bits ("cp src dst").
The destination may be a directory.
+ If the optional flag `symlinks` is set, symlinks won't be followed. This
+ resembles GNU's "cp -P src dst".
+
"""
if os.path.isdir(dst):
dst = os.path.join(dst, os.path.basename(src))
- copyfile(src, dst)
- copymode(src, dst)
+ copyfile(src, dst, symlinks=symlinks)
+ copymode(src, dst, symlinks=symlinks)
-def copy2(src, dst):
+def copy2(src, dst, symlinks=False):
"""Copy data and all stat info ("cp -p src dst").
The destination may be a directory.
+ If the optional flag `symlinks` is set, symlinks won't be followed. This
+ resembles GNU's "cp -P src dst".
+
"""
if os.path.isdir(dst):
dst = os.path.join(dst, os.path.basename(src))
- copyfile(src, dst)
- copystat(src, dst)
+ copyfile(src, dst, symlinks=symlinks)
+ copystat(src, dst, symlinks=symlinks)
def ignore_patterns(*patterns):
"""Function that can be used as copytree() ignore parameter.
@@ -209,7 +257,11 @@ def copytree(src, dst, symlinks=False, ignore=None, copy_function=copy2,
if os.path.islink(srcname):
linkto = os.readlink(srcname)
if symlinks:
+ # We can't just leave it to `copy_function` because legacy
+ # code with a custom `copy_function` may rely on copytree
+ # doing the right thing.
os.symlink(linkto, dstname)
+ copystat(srcname, dstname, symlinks=symlinks)
else:
# ignore dangling symlink if the flag is on
if not os.path.exists(linkto) and ignore_dangling_symlinks:
@@ -266,7 +318,7 @@ def rmtree(path, ignore_errors=False, onerror=None):
names = []
try:
names = os.listdir(path)
- except os.error as err:
+ except os.error:
onerror(os.listdir, path, sys.exc_info())
for name in names:
fullname = os.path.join(path, name)
@@ -279,7 +331,7 @@ def rmtree(path, ignore_errors=False, onerror=None):
else:
try:
os.remove(fullname)
- except os.error as err:
+ except os.error:
onerror(os.remove, fullname, sys.exc_info())
try:
os.rmdir(path)
@@ -304,7 +356,10 @@ def move(src, dst):
overwritten depending on os.rename() semantics.
If the destination is on our current filesystem, then rename() is used.
- Otherwise, src is copied to the destination and then removed.
+ Otherwise, src is copied to the destination and then removed. Symlinks are
+ recreated under the new name if os.rename() fails because of cross
+ filesystem renames.
+
A lot more could be done here... A look at a mv.c shows a lot of
the issues this implementation glosses over.
@@ -322,8 +377,12 @@ def move(src, dst):
raise Error("Destination path '%s' already exists" % real_dst)
try:
os.rename(src, real_dst)
- except OSError as exc:
- if os.path.isdir(src):
+ except OSError:
+ if os.path.islink(src):
+ linkto = os.readlink(src)
+ os.symlink(linkto, real_dst)
+ os.unlink(src)
+ elif os.path.isdir(src):
if _destinsrc(src, dst):
raise Error("Cannot move a directory '%s' into itself '%s'." % (src, dst))
copytree(src, real_dst, symlinks=True)
@@ -389,7 +448,7 @@ def _make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0,
compress_ext['bzip2'] = '.bz2'
# flags for compression program, each element of list will be an argument
- if compress is not None and compress not in compress_ext.keys():
+ if compress is not None and compress not in compress_ext:
raise ValueError("bad value for 'compress', or compression format not "
"supported : {0}".format(compress))
@@ -494,7 +553,7 @@ def _make_zipfile(base_name, base_dir, verbose=0, dry_run=0, logger=None):
_ARCHIVE_FORMATS = {
'gztar': (_make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"),
'tar': (_make_tarball, [('compress', None)], "uncompressed tar file"),
- 'zip': (_make_zipfile, [],"ZIP file")
+ 'zip': (_make_zipfile, [], "ZIP file")
}
if _BZ2_SUPPORTED:
@@ -527,7 +586,7 @@ def register_archive_format(name, function, extra_args=None, description=''):
if not isinstance(extra_args, (tuple, list)):
raise TypeError('extra_args needs to be a sequence')
for element in extra_args:
- if not isinstance(element, (tuple, list)) or len(element) !=2 :
+ if not isinstance(element, (tuple, list)) or len(element) !=2:
raise TypeError('extra_args elements are : (arg_name, value)')
_ARCHIVE_FORMATS[name] = (function, extra_args, description)
@@ -679,7 +738,7 @@ def _unpack_zipfile(filename, extract_dir):
if not name.endswith('/'):
# file
data = zip.read(info.filename)
- f = open(target,'wb')
+ f = open(target, 'wb')
try:
f.write(data)
finally:
@@ -753,3 +812,112 @@ def unpack_archive(filename, extract_dir=None, format=None):
func = _UNPACK_FORMATS[format][1]
kwargs = dict(_UNPACK_FORMATS[format][2])
func(filename, extract_dir, **kwargs)
+
+
+if hasattr(os, 'statvfs'):
+
+ __all__.append('disk_usage')
+ _ntuple_diskusage = collections.namedtuple('usage', 'total used free')
+
+ def disk_usage(path):
+ """Return disk usage statistics about the given path.
+
+ Returned value is a named tuple with attributes 'total', 'used' and
+ 'free', which are the amount of total, used and free space, in bytes.
+ """
+ st = os.statvfs(path)
+ free = st.f_bavail * st.f_frsize
+ total = st.f_blocks * st.f_frsize
+ used = (st.f_blocks - st.f_bfree) * st.f_frsize
+ return _ntuple_diskusage(total, used, free)
+
+elif os.name == 'nt':
+
+ import nt
+ __all__.append('disk_usage')
+ _ntuple_diskusage = collections.namedtuple('usage', 'total used free')
+
+ def disk_usage(path):
+ """Return disk usage statistics about the given path.
+
+ Returned value is a named tuple with attributes 'total', 'used' and
+ 'free', which are the amount of total, used and free space, in bytes.
+ """
+ total, free = nt._getdiskusage(path)
+ used = total - free
+ return _ntuple_diskusage(total, used, free)
+
+
+def chown(path, user=None, group=None):
+ """Change owner user and group of the given path.
+
+ user and group can be the uid/gid or the user/group names; in the latter case,
+ they are converted to their respective uid/gid.
+ """
+
+ if user is None and group is None:
+ raise ValueError("user and/or group must be set")
+
+ _user = user
+ _group = group
+
+ # -1 means don't change it
+ if user is None:
+ _user = -1
+ # user can either be an int (the uid) or a string (the system username)
+ elif isinstance(user, str):
+ _user = _get_uid(user)
+ if _user is None:
+ raise LookupError("no such user: {!r}".format(user))
+
+ if group is None:
+ _group = -1
+ elif not isinstance(group, int):
+ _group = _get_gid(group)
+ if _group is None:
+ raise LookupError("no such group: {!r}".format(group))
+
+ os.chown(path, _user, _group)
+
+def get_terminal_size(fallback=(80, 24)):
+ """Get the size of the terminal window.
+
+ For each of the two dimensions, the environment variable, COLUMNS
+ and LINES respectively, is checked. If the variable is defined and
+ the value is a positive integer, it is used.
+
+ When COLUMNS or LINES is not defined, which is the common case,
+ the terminal connected to sys.__stdout__ is queried
+ by invoking os.get_terminal_size.
+
+ If the terminal size cannot be successfully queried, either because
+ the system doesn't support querying, or because we are not
+ connected to a terminal, the value given in the fallback parameter
+ is used. Fallback defaults to (80, 24) which is the default
+ size used by many terminal emulators.
+
+ The value returned is a named tuple of type os.terminal_size.
+ """
+ # columns, lines are the working values
+ try:
+ columns = int(os.environ['COLUMNS'])
+ except (KeyError, ValueError):
+ columns = 0
+
+ try:
+ lines = int(os.environ['LINES'])
+ except (KeyError, ValueError):
+ lines = 0
+
+ # only query if necessary
+ if columns <= 0 or lines <= 0:
+ try:
+ size = os.get_terminal_size(sys.__stdout__.fileno())
+ except (NameError, OSError):
+ size = os.terminal_size(fallback)
+ if columns <= 0:
+ columns = size.columns
+ if lines <= 0:
+ lines = size.lines
+
+ return os.terminal_size((columns, lines))
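Together, the new shutil helpers can be exercised as in the sketch below. ``disk_usage`` only exists where ``statvfs`` or the Windows ``nt._getdiskusage`` hook is available, and ``chown`` requires sufficient privileges, so that call is left commented out:

    import shutil

    total, used, free = shutil.disk_usage(".")
    print(free // 2**20, "MiB free of", total // 2**20)

    columns, lines = shutil.get_terminal_size()
    print("terminal:", columns, "x", lines)

    # shutil.chown("somefile", user="nobody", group="nogroup")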
diff --git a/Lib/site.py b/Lib/site.py
index a2c0bec..b83498e 100644
--- a/Lib/site.py
+++ b/Lib/site.py
@@ -55,7 +55,6 @@ ImportError exception, it is silently ignored.
import sys
import os
import builtins
-import traceback
# Prefixes for site-packages; add additional prefixes like /usr/local here
PREFIXES = [sys.prefix, sys.exec_prefix]
@@ -138,7 +137,7 @@ def addpackage(sitedir, name, known_paths):
reset = 0
fullname = os.path.join(sitedir, name)
try:
- f = open(fullname, "rU")
+ f = open(fullname, "r")
except IOError:
return
with f:
@@ -154,9 +153,10 @@ def addpackage(sitedir, name, known_paths):
if not dircase in known_paths and os.path.exists(dir):
sys.path.append(dir)
known_paths.add(dircase)
- except Exception as err:
+ except Exception:
print("Error processing line {:d} of {}:\n".format(n+1, fullname),
file=sys.stderr)
+ import traceback
for record in traceback.format_exception(*sys.exc_info()):
for line in record.splitlines():
print(' '+line, file=sys.stderr)
@@ -241,7 +241,6 @@ def getusersitepackages():
return USER_SITE
from sysconfig import get_path
- import os
if sys.platform == 'darwin':
from sysconfig import get_config_var
@@ -385,7 +384,7 @@ class _Printer(object):
for filename in self.__files:
filename = os.path.join(dir, filename)
try:
- fp = open(filename, "rU")
+ fp = open(filename, "r")
data = fp.read()
fp.close()
break
@@ -508,6 +507,11 @@ def execusercustomize():
def main():
+ """Add standard site-specific directories to the module search path.
+
+ This function is called automatically when this module is imported,
+ unless the python interpreter was started with the -S flag.
+ """
global ENABLE_USER_SITE
abs_paths()
@@ -526,7 +530,10 @@ def main():
if ENABLE_USER_SITE:
execusercustomize()
-main()
+# Prevent modification of sys.path when python was started with -S and
+# site is imported later.
+if not sys.flags.no_site:
+ main()
def _script():
help = """\
diff --git a/Lib/smtpd.py b/Lib/smtpd.py
index 8cd405c..d66b0d7 100755
--- a/Lib/smtpd.py
+++ b/Lib/smtpd.py
@@ -142,122 +142,122 @@ class SMTPChannel(asynchat.async_chat):
@property
def __server(self):
warn("Access to __server attribute on SMTPChannel is deprecated, "
- "use 'smtp_server' instead", PendingDeprecationWarning, 2)
+ "use 'smtp_server' instead", DeprecationWarning, 2)
return self.smtp_server
@__server.setter
def __server(self, value):
warn("Setting __server attribute on SMTPChannel is deprecated, "
- "set 'smtp_server' instead", PendingDeprecationWarning, 2)
+ "set 'smtp_server' instead", DeprecationWarning, 2)
self.smtp_server = value
@property
def __line(self):
warn("Access to __line attribute on SMTPChannel is deprecated, "
- "use 'received_lines' instead", PendingDeprecationWarning, 2)
+ "use 'received_lines' instead", DeprecationWarning, 2)
return self.received_lines
@__line.setter
def __line(self, value):
warn("Setting __line attribute on SMTPChannel is deprecated, "
- "set 'received_lines' instead", PendingDeprecationWarning, 2)
+ "set 'received_lines' instead", DeprecationWarning, 2)
self.received_lines = value
@property
def __state(self):
warn("Access to __state attribute on SMTPChannel is deprecated, "
- "use 'smtp_state' instead", PendingDeprecationWarning, 2)
+ "use 'smtp_state' instead", DeprecationWarning, 2)
return self.smtp_state
@__state.setter
def __state(self, value):
warn("Setting __state attribute on SMTPChannel is deprecated, "
- "set 'smtp_state' instead", PendingDeprecationWarning, 2)
+ "set 'smtp_state' instead", DeprecationWarning, 2)
self.smtp_state = value
@property
def __greeting(self):
warn("Access to __greeting attribute on SMTPChannel is deprecated, "
- "use 'seen_greeting' instead", PendingDeprecationWarning, 2)
+ "use 'seen_greeting' instead", DeprecationWarning, 2)
return self.seen_greeting
@__greeting.setter
def __greeting(self, value):
warn("Setting __greeting attribute on SMTPChannel is deprecated, "
- "set 'seen_greeting' instead", PendingDeprecationWarning, 2)
+ "set 'seen_greeting' instead", DeprecationWarning, 2)
self.seen_greeting = value
@property
def __mailfrom(self):
warn("Access to __mailfrom attribute on SMTPChannel is deprecated, "
- "use 'mailfrom' instead", PendingDeprecationWarning, 2)
+ "use 'mailfrom' instead", DeprecationWarning, 2)
return self.mailfrom
@__mailfrom.setter
def __mailfrom(self, value):
warn("Setting __mailfrom attribute on SMTPChannel is deprecated, "
- "set 'mailfrom' instead", PendingDeprecationWarning, 2)
+ "set 'mailfrom' instead", DeprecationWarning, 2)
self.mailfrom = value
@property
def __rcpttos(self):
warn("Access to __rcpttos attribute on SMTPChannel is deprecated, "
- "use 'rcpttos' instead", PendingDeprecationWarning, 2)
+ "use 'rcpttos' instead", DeprecationWarning, 2)
return self.rcpttos
@__rcpttos.setter
def __rcpttos(self, value):
warn("Setting __rcpttos attribute on SMTPChannel is deprecated, "
- "set 'rcpttos' instead", PendingDeprecationWarning, 2)
+ "set 'rcpttos' instead", DeprecationWarning, 2)
self.rcpttos = value
@property
def __data(self):
warn("Access to __data attribute on SMTPChannel is deprecated, "
- "use 'received_data' instead", PendingDeprecationWarning, 2)
+ "use 'received_data' instead", DeprecationWarning, 2)
return self.received_data
@__data.setter
def __data(self, value):
warn("Setting __data attribute on SMTPChannel is deprecated, "
- "set 'received_data' instead", PendingDeprecationWarning, 2)
+ "set 'received_data' instead", DeprecationWarning, 2)
self.received_data = value
@property
def __fqdn(self):
warn("Access to __fqdn attribute on SMTPChannel is deprecated, "
- "use 'fqdn' instead", PendingDeprecationWarning, 2)
+ "use 'fqdn' instead", DeprecationWarning, 2)
return self.fqdn
@__fqdn.setter
def __fqdn(self, value):
warn("Setting __fqdn attribute on SMTPChannel is deprecated, "
- "set 'fqdn' instead", PendingDeprecationWarning, 2)
+ "set 'fqdn' instead", DeprecationWarning, 2)
self.fqdn = value
@property
def __peer(self):
warn("Access to __peer attribute on SMTPChannel is deprecated, "
- "use 'peer' instead", PendingDeprecationWarning, 2)
+ "use 'peer' instead", DeprecationWarning, 2)
return self.peer
@__peer.setter
def __peer(self, value):
warn("Setting __peer attribute on SMTPChannel is deprecated, "
- "set 'peer' instead", PendingDeprecationWarning, 2)
+ "set 'peer' instead", DeprecationWarning, 2)
self.peer = value
@property
def __conn(self):
warn("Access to __conn attribute on SMTPChannel is deprecated, "
- "use 'conn' instead", PendingDeprecationWarning, 2)
+ "use 'conn' instead", DeprecationWarning, 2)
return self.conn
@__conn.setter
def __conn(self, value):
warn("Setting __conn attribute on SMTPChannel is deprecated, "
- "set 'conn' instead", PendingDeprecationWarning, 2)
+ "set 'conn' instead", DeprecationWarning, 2)
self.conn = value
@property
def __addr(self):
warn("Access to __addr attribute on SMTPChannel is deprecated, "
- "use 'addr' instead", PendingDeprecationWarning, 2)
+ "use 'addr' instead", DeprecationWarning, 2)
return self.addr
@__addr.setter
def __addr(self, value):
warn("Setting __addr attribute on SMTPChannel is deprecated, "
- "set 'addr' instead", PendingDeprecationWarning, 2)
+ "set 'addr' instead", DeprecationWarning, 2)
self.addr = value
# Overrides base class for convenience
@@ -275,7 +275,7 @@ class SMTPChannel(asynchat.async_chat):
return
elif limit:
self.num_bytes += len(data)
- self.received_lines.append(str(data, "utf8"))
+ self.received_lines.append(str(data, "utf-8"))
# Implementation of base class abstract method
def found_terminator(self):
diff --git a/Lib/smtplib.py b/Lib/smtplib.py
index fbef96e..d37b0e2 100644
--- a/Lib/smtplib.py
+++ b/Lib/smtplib.py
@@ -133,24 +133,18 @@ class SMTPAuthenticationError(SMTPResponseException):
combination provided.
"""
-def quoteaddr(addr):
+def quoteaddr(addrstring):
"""Quote a subset of the email addresses defined by RFC 821.
Should be able to handle anything email.utils.parseaddr can handle.
"""
- m = (None, None)
- try:
- m = email.utils.parseaddr(addr)[1]
- except AttributeError:
- pass
- if m == (None, None): # Indicates parse failure or AttributeError
- # something weird here.. punt -ddm
- return "<%s>" % addr
- elif m is None:
- # the sender wants an empty return address
- return "<>"
- else:
- return "<%s>" % m
+ displayname, addr = email.utils.parseaddr(addrstring)
+ if (displayname, addr) == ('', ''):
+ # parseaddr couldn't parse it, use it as is and hope for the best.
+ if addrstring.strip().startswith('<'):
+ return addrstring
+ return "<%s>" % addrstring
+ return "<%s>" % addr
def _addr_only(addrstring):
displayname, addr = email.utils.parseaddr(addrstring)
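The rewritten quoteaddr() above leans entirely on email.utils.parseaddr(). A minimal sketch of the resulting behavior, inferred from the new code (interpreter session):

    >>> from smtplib import quoteaddr
    >>> quoteaddr('Alice <alice@example.com>')
    '<alice@example.com>'
    >>> quoteaddr('')   # parseaddr returns ('', ''); the fallback yields an empty return path
    '<>'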
@@ -180,27 +174,6 @@ try:
except ImportError:
_have_ssl = False
else:
- class SSLFakeFile:
- """A fake file like object that really wraps a SSLObject.
-
- It only supports what is needed in smtplib.
- """
- def __init__(self, sslobj):
- self.sslobj = sslobj
-
- def readline(self):
- str = b""
- chr = None
- while chr != b"\n":
- chr = self.sslobj.read(1)
- if not chr:
- break
- str += chr
- return str
-
- def close(self):
- pass
-
_have_ssl = True
@@ -242,7 +215,8 @@ class SMTP:
default_port = SMTP_PORT
def __init__(self, host='', port=0, local_hostname=None,
- timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
+ timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
+ source_address=None):
"""Initialize a new instance.
If specified, `host' is the name of the remote host to which to
@@ -250,11 +224,16 @@ class SMTP:
By default, smtplib.SMTP_PORT is used. An SMTPConnectError is raised
if the specified `host' doesn't respond correctly. If specified,
`local_hostname` is used as the FQDN of the local host. By default,
- the local hostname is found using socket.getfqdn().
+ the local hostname is found using socket.getfqdn(). The
+ `source_address` parameter takes a 2-tuple (host, port) for the socket
+ to bind to as its source address before connecting. If the host is ''
+ and port is 0, the OS default behavior will be used.
"""
self.timeout = timeout
self.esmtp_features = {}
+ self.source_address = source_address
+
if host:
(code, msg) = self.connect(host, port)
if code != 220:
@@ -277,6 +256,19 @@ class SMTP:
pass
self.local_hostname = '[%s]' % addr
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *args):
+ try:
+ code, message = self.docmd("QUIT")
+ if code != 221:
+ raise SMTPResponseException(code, message)
+ except SMTPServerDisconnected:
+ pass
+ finally:
+ self.close()
+
def set_debuglevel(self, debuglevel):
"""Set the debug output level.
@@ -290,10 +282,12 @@ class SMTP:
# This makes it simpler for SMTP_SSL to use the SMTP connect code
# and just alter the socket connection bit.
if self.debuglevel > 0:
- print('connect:', (host, port), file=stderr)
- return socket.create_connection((host, port), timeout)
+ print('connect: to', (host, port), self.source_address,
+ file=stderr)
+ return socket.create_connection((host, port), timeout,
+ self.source_address)
- def connect(self, host='localhost', port=0):
+ def connect(self, host='localhost', port=0, source_address=None):
"""Connect to a host on a given port.
If the hostname ends with a colon (`:') followed by a number, and
@@ -304,6 +298,10 @@ class SMTP:
specified during instantiation.
"""
+
+ if source_address:
+ self.source_address = source_address
+
if not port and (host.find(':') == host.rfind(':')):
i = host.rfind(':')
if i >= 0:
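Taken together with the constructor change above, source_address lets the caller pin the local endpoint before connecting. A sketch, with a hypothetical local address:

    from smtplib import SMTP

    # bind the outgoing socket to a specific local interface; port 0
    # keeps the OS-chosen ephemeral port
    smtp = SMTP("mail.example.com", source_address=("192.0.2.10", 0))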
@@ -317,6 +315,7 @@ class SMTP:
if self.debuglevel > 0:
print('connect:', (host, port), file=stderr)
self.sock = self._get_socket(host, port, self.timeout)
+ self.file = None
(code, msg) = self.getreply()
if self.debuglevel > 0:
print("connect:", msg, file=stderr)
@@ -388,7 +387,8 @@ class SMTP:
errmsg = b"\n".join(resp)
if self.debuglevel > 0:
- print('reply: retcode (%s); Msg: %s' % (errcode, errmsg), file=stderr)
+ print('reply: retcode (%s); Msg: %s' % (errcode, errmsg),
+ file=stderr)
return errcode, errmsg
def docmd(self, cmd, args=""):
@@ -632,7 +632,7 @@ class SMTP:
# We could not login successfully. Return result of last attempt.
raise SMTPAuthenticationError(code, resp)
- def starttls(self, keyfile=None, certfile=None):
+ def starttls(self, keyfile=None, certfile=None, context=None):
"""Puts the connection to the SMTP server into TLS mode.
If there has been no previous EHLO or HELO command this session, this
@@ -656,8 +656,17 @@ class SMTP:
if resp == 220:
if not _have_ssl:
raise RuntimeError("No SSL support included in this Python")
- self.sock = ssl.wrap_socket(self.sock, keyfile, certfile)
- self.file = SSLFakeFile(self.sock)
+ if context is not None and keyfile is not None:
+ raise ValueError("context and keyfile arguments are mutually "
+ "exclusive")
+ if context is not None and certfile is not None:
+ raise ValueError("context and certfile arguments are mutually "
+ "exclusive")
+ if context is not None:
+ self.sock = context.wrap_socket(self.sock)
+ else:
+ self.sock = ssl.wrap_socket(self.sock, keyfile, certfile)
+ self.file = None
# RFC 3207:
# The client MUST discard any knowledge obtained from
# the server, such as the list of SMTP service extensions,
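The new context parameter gives callers full control over the TLS settings; it cannot be combined with keyfile/certfile. A sketch of the call, assuming a server that advertises STARTTLS:

    import ssl
    from smtplib import SMTP

    ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
    smtp = SMTP("mail.example.com", 587)
    smtp.ehlo()
    smtp.starttls(context=ctx)   # mutually exclusive with keyfile/certfile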
@@ -786,7 +795,8 @@ class SMTP:
# TODO implement heuristics to guess the correct Resent-* block with an
# option allowing the user to enable the heuristics. (It should be
# possible to guess correctly almost all of the time.)
- resent =msg.get_all('Resent-Date')
+
+ resent = msg.get_all('Resent-Date')
if resent is None:
header_prefix = ''
elif len(resent) == 1:
@@ -795,13 +805,13 @@ class SMTP:
raise ValueError("message has more than one 'Resent-' header block")
if from_addr is None:
# Prefer the sender field per RFC 2822:3.6.2.
- from_addr = (msg[header_prefix+'Sender']
- if (header_prefix+'Sender') in msg
- else msg[header_prefix+'From'])
+ from_addr = (msg[header_prefix + 'Sender']
+ if (header_prefix + 'Sender') in msg
+ else msg[header_prefix + 'From'])
if to_addrs is None:
- addr_fields = [f for f in (msg[header_prefix+'To'],
- msg[header_prefix+'Bcc'],
- msg[header_prefix+'Cc']) if f is not None]
+ addr_fields = [f for f in (msg[header_prefix + 'To'],
+ msg[header_prefix + 'Bcc'],
+ msg[header_prefix + 'Cc']) if f is not None]
to_addrs = [a[1] for a in email.utils.getaddresses(addr_fields)]
# Make a local copy so we can delete the bcc headers.
msg_copy = copy.copy(msg)
@@ -835,26 +845,41 @@ if _have_ssl:
""" This is a subclass derived from SMTP that connects over an SSL encrypted
socket (to use this class you need a socket module that was compiled with SSL
support). If host is not specified, '' (the local host) is used. If port is
- omitted, the standard SMTP-over-SSL port (465) is used. keyfile and certfile
+ omitted, the standard SMTP-over-SSL port (465) is used. The optional
+ source_address takes a 2-tuple (host, port) for the socket to bind to. keyfile and certfile
are also optional - they can contain a PEM formatted private key and
- certificate chain file for the SSL connection.
+ certificate chain file for the SSL connection. context is also optional, and
+ can contain an SSLContext; it is an alternative to keyfile and certfile. If
+ it is specified, both keyfile and certfile must be None.
"""
default_port = SMTP_SSL_PORT
def __init__(self, host='', port=0, local_hostname=None,
keyfile=None, certfile=None,
- timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
+ timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
+ source_address=None, context=None):
+ if context is not None and keyfile is not None:
+ raise ValueError("context and keyfile arguments are mutually "
+ "exclusive")
+ if context is not None and certfile is not None:
+ raise ValueError("context and certfile arguments are mutually "
+ "exclusive")
self.keyfile = keyfile
self.certfile = certfile
- SMTP.__init__(self, host, port, local_hostname, timeout)
+ self.context = context
+ SMTP.__init__(self, host, port, local_hostname, timeout,
+ source_address)
def _get_socket(self, host, port, timeout):
if self.debuglevel > 0:
print('connect:', (host, port), file=stderr)
- new_socket = socket.create_connection((host, port), timeout)
- new_socket = ssl.wrap_socket(new_socket, self.keyfile, self.certfile)
- self.file = SSLFakeFile(new_socket)
+ new_socket = socket.create_connection((host, port), timeout,
+ self.source_address)
+ if self.context is not None:
+ new_socket = self.context.wrap_socket(new_socket)
+ else:
+ new_socket = ssl.wrap_socket(new_socket, self.keyfile, self.certfile)
return new_socket
__all__.append("SMTP_SSL")
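SMTP_SSL grows the same two parameters. A sketch combining them (addresses are hypothetical):

    import ssl
    from smtplib import SMTP_SSL

    ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
    smtp = SMTP_SSL("mail.example.com",
                    source_address=("192.0.2.10", 0),
                    context=ctx)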
@@ -879,18 +904,21 @@ class LMTP(SMTP):
ehlo_msg = "lhlo"
- def __init__(self, host='', port=LMTP_PORT, local_hostname=None):
+ def __init__(self, host='', port=LMTP_PORT, local_hostname=None,
+ source_address=None):
"""Initialize a new instance."""
- SMTP.__init__(self, host, port, local_hostname)
+ SMTP.__init__(self, host, port, local_hostname=local_hostname,
+ source_address=source_address)
- def connect(self, host='localhost', port=0):
+ def connect(self, host='localhost', port=0, source_address=None):
"""Connect to the LMTP daemon, on either a Unix or a TCP socket."""
if host[0] != '/':
- return SMTP.connect(self, host, port)
+ return SMTP.connect(self, host, port, source_address=source_address)
# Handle Unix-domain sockets.
try:
self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
+ self.file = None
self.sock.connect(host)
except socket.error as msg:
if self.debuglevel > 0:
diff --git a/Lib/socket.py b/Lib/socket.py
index 1e28549..b2954b5 100644
--- a/Lib/socket.py
+++ b/Lib/socket.py
@@ -53,7 +53,6 @@ try:
except ImportError:
errno = None
EBADF = getattr(errno, 'EBADF', 9)
-EINTR = getattr(errno, 'EINTR', 4)
EAGAIN = getattr(errno, 'EAGAIN', 11)
EWOULDBLOCK = getattr(errno, 'EWOULDBLOCK', 11)
@@ -112,6 +111,9 @@ class socket(_socket.socket):
s[7:])
return s
+ def __getstate__(self):
+ raise TypeError("Cannot serialize socket object")
+
def dup(self):
"""dup() -> socket object
@@ -277,11 +279,10 @@ class SocketIO(io.RawIOBase):
except timeout:
self._timeout_occurred = True
raise
+ except InterruptedError:
+ continue
except error as e:
- n = e.args[0]
- if n == EINTR:
- continue
- if n in _blocking_errnos:
+ if e.args[0] in _blocking_errnos:
return None
raise
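The SocketIO change above relies on the PEP 3151 exception hierarchy: EINTR now surfaces as InterruptedError, so the explicit errno test disappears. The same retry idiom, sketched standalone:

    # retry a system call interrupted by a signal (a sketch of the idiom)
    def recv_retrying(sock, n):
        while True:
            try:
                return sock.recv(n)
            except InterruptedError:   # EINTR
                continue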
diff --git a/Lib/socketserver.py b/Lib/socketserver.py
index cd367f1..a487e63 100644
--- a/Lib/socketserver.py
+++ b/Lib/socketserver.py
@@ -170,6 +170,7 @@ class BaseServer:
- process_request(request, client_address)
- shutdown_request(request)
- close_request(request)
+ - service_actions()
- handle_error()
Methods for derived classes:
@@ -225,6 +226,8 @@ class BaseServer:
r, w, e = select.select([self], [], [], poll_interval)
if self in r:
self._handle_request_noblock()
+
+ self.service_actions()
finally:
self.__shutdown_request = False
self.__is_shut_down.set()
@@ -239,6 +242,14 @@ class BaseServer:
self.__shutdown_request = True
self.__is_shut_down.wait()
+ def service_actions(self):
+ """Called by the serve_forever() loop.
+
+ May be overridden by a subclass / Mixin to implement any code that
+ needs to be run during the loop.
+ """
+ pass
+
# The distinction between handling, getting, processing and
# finishing a request is fairly arbitrary. Remember:
#
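A subclass can hook the serve_forever() loop through the new method; a minimal, hypothetical override:

    import socketserver

    class HeartbeatTCPServer(socketserver.TCPServer):
        def service_actions(self):
            # runs once per serve_forever() iteration, request or not
            print("loop tick")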
@@ -539,9 +550,15 @@ class ForkingMixIn:
"""
self.collect_children()
+ def service_actions(self):
+ """Collect the zombie child processes regularly in the ForkingMixin.
+
+ service_actions is called in the BaseServer's serve_forver loop.
+ """
+ self.collect_children()
+
def process_request(self, request, client_address):
"""Fork a new subprocess to process the request."""
- self.collect_children()
pid = os.fork()
if pid:
# Parent process
@@ -549,6 +566,7 @@ class ForkingMixIn:
self.active_children = []
self.active_children.append(pid)
self.close_request(request)
+ return
else:
# Child process.
# This must never return, hence os._exit()!
diff --git a/Lib/sqlite3/test/dbapi.py b/Lib/sqlite3/test/dbapi.py
index 202bd38..b7ec1ad 100644
--- a/Lib/sqlite3/test/dbapi.py
+++ b/Lib/sqlite3/test/dbapi.py
@@ -1,4 +1,4 @@
-#-*- coding: ISO-8859-1 -*-
+#-*- coding: iso-8859-1 -*-
# pysqlite2/test/dbapi.py: tests for DB-API compliance
#
# Copyright (C) 2004-2010 Gerhard Häring <gh@ghaering.de>
diff --git a/Lib/sqlite3/test/factory.py b/Lib/sqlite3/test/factory.py
index 7f6f347..9e833ae 100644
--- a/Lib/sqlite3/test/factory.py
+++ b/Lib/sqlite3/test/factory.py
@@ -1,4 +1,4 @@
-#-*- coding: ISO-8859-1 -*-
+#-*- coding: iso-8859-1 -*-
# pysqlite2/test/factory.py: tests for the various factories in pysqlite
#
# Copyright (C) 2005-2007 Gerhard Häring <gh@ghaering.de>
@@ -178,6 +178,8 @@ class TextFactoryTests(unittest.TestCase):
self.assertTrue(row[0].endswith("reich"), "column must contain original data")
def CheckOptimizedUnicode(self):
+ # In py3k, str objects are always returned when text_factory
+ # is OptimizedUnicode
self.con.text_factory = sqlite.OptimizedUnicode
austria = "sterreich"
germany = "Deutchland"
diff --git a/Lib/sqlite3/test/hooks.py b/Lib/sqlite3/test/hooks.py
index a92e838..3dc44f6 100644
--- a/Lib/sqlite3/test/hooks.py
+++ b/Lib/sqlite3/test/hooks.py
@@ -1,4 +1,4 @@
-#-*- coding: ISO-8859-1 -*-
+#-*- coding: iso-8859-1 -*-
# pysqlite2/test/hooks.py: tests for various SQLite-specific hooks
#
# Copyright (C) 2006-2007 Gerhard Häring <gh@ghaering.de>
@@ -176,10 +176,60 @@ class ProgressTests(unittest.TestCase):
con.execute("select 1 union select 2 union select 3").fetchall()
self.assertEqual(action, 0, "progress handler was not cleared")
+class TraceCallbackTests(unittest.TestCase):
+ def CheckTraceCallbackUsed(self):
+ """
+ Test that the trace callback is invoked once it is set.
+ """
+ con = sqlite.connect(":memory:")
+ traced_statements = []
+ def trace(statement):
+ traced_statements.append(statement)
+ con.set_trace_callback(trace)
+ con.execute("create table foo(a, b)")
+ self.assertTrue(traced_statements)
+ self.assertTrue(any("create table foo" in stmt for stmt in traced_statements))
+
+ def CheckClearTraceCallback(self):
+ """
+ Test that setting the trace callback to None clears the previously set callback.
+ """
+ con = sqlite.connect(":memory:")
+ traced_statements = []
+ def trace(statement):
+ traced_statements.append(statement)
+ con.set_trace_callback(trace)
+ con.set_trace_callback(None)
+ con.execute("create table foo(a, b)")
+ self.assertFalse(traced_statements, "trace callback was not cleared")
+
+ def CheckUnicodeContent(self):
+ """
+ Test that the statement can contain unicode literals.
+ """
+ unicode_value = '\xf6\xe4\xfc\xd6\xc4\xdc\xdf\u20ac'
+ con = sqlite.connect(":memory:")
+ traced_statements = []
+ def trace(statement):
+ traced_statements.append(statement)
+ con.set_trace_callback(trace)
+ con.execute("create table foo(x)")
+ # Can't execute bound parameters as their values don't appear
+ # in traced statements before SQLite 3.6.21
+ # (cf. http://www.sqlite.org/draft/releaselog/3_6_21.html)
+ con.execute('insert into foo(x) values ("%s")' % unicode_value)
+ con.commit()
+ self.assertTrue(any(unicode_value in stmt for stmt in traced_statements),
+ "Unicode data %s garbled in trace callback: %s"
+ % (ascii(unicode_value), ', '.join(map(ascii, traced_statements))))
+
+
+
def suite():
collation_suite = unittest.makeSuite(CollationTests, "Check")
progress_suite = unittest.makeSuite(ProgressTests, "Check")
- return unittest.TestSuite((collation_suite, progress_suite))
+ trace_suite = unittest.makeSuite(TraceCallbackTests, "Check")
+ return unittest.TestSuite((collation_suite, progress_suite, trace_suite))
def test():
runner = unittest.TextTestRunner()
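Outside the test suite, the traced API exercised above looks like this; a sketch against an in-memory database:

    import sqlite3

    con = sqlite3.connect(":memory:")
    con.set_trace_callback(print)      # every statement is passed to the callback
    con.execute("create table t(x)")
    con.set_trace_callback(None)       # clears the callback again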
diff --git a/Lib/sqlite3/test/regression.py b/Lib/sqlite3/test/regression.py
index c7551e3..9d7b276 100644
--- a/Lib/sqlite3/test/regression.py
+++ b/Lib/sqlite3/test/regression.py
@@ -1,4 +1,4 @@
-#-*- coding: ISO-8859-1 -*-
+#-*- coding: iso-8859-1 -*-
# pysqlite2/test/regression.py: pysqlite regression tests
#
# Copyright (C) 2006-2010 Gerhard Häring <gh@ghaering.de>
diff --git a/Lib/sqlite3/test/transactions.py b/Lib/sqlite3/test/transactions.py
index 70e96a1..feb4fa1 100644
--- a/Lib/sqlite3/test/transactions.py
+++ b/Lib/sqlite3/test/transactions.py
@@ -1,4 +1,4 @@
-#-*- coding: ISO-8859-1 -*-
+#-*- coding: iso-8859-1 -*-
# pysqlite2/test/transactions.py: tests transactions
#
# Copyright (C) 2005-2007 Gerhard Häring <gh@ghaering.de>
diff --git a/Lib/sqlite3/test/types.py b/Lib/sqlite3/test/types.py
index 29413e1..3b4cb6d 100644
--- a/Lib/sqlite3/test/types.py
+++ b/Lib/sqlite3/test/types.py
@@ -1,4 +1,4 @@
-#-*- coding: ISO-8859-1 -*-
+#-*- coding: iso-8859-1 -*-
# pysqlite2/test/types.py: tests for type conversion and detection
#
# Copyright (C) 2005 Gerhard Häring <gh@ghaering.de>
@@ -85,7 +85,7 @@ class DeclTypesTests(unittest.TestCase):
if isinstance(_val, bytes):
# sqlite3 always calls __init__ with a bytes created from a
# UTF-8 string when __conform__ was used to store the object.
- _val = _val.decode('utf8')
+ _val = _val.decode('utf-8')
self.val = _val
def __cmp__(self, other):
diff --git a/Lib/sqlite3/test/userfunctions.py b/Lib/sqlite3/test/userfunctions.py
index e01341e..14f6b65 100644
--- a/Lib/sqlite3/test/userfunctions.py
+++ b/Lib/sqlite3/test/userfunctions.py
@@ -1,4 +1,4 @@
-#-*- coding: ISO-8859-1 -*-
+#-*- coding: iso-8859-1 -*-
# pysqlite2/test/userfunctions.py: tests for user-defined functions and
# aggregates.
#
diff --git a/Lib/sre_compile.py b/Lib/sre_compile.py
index f52ea01..75f3a09 100644
--- a/Lib/sre_compile.py
+++ b/Lib/sre_compile.py
@@ -318,11 +318,13 @@ def _optimize_unicode(charset, fixup):
# XXX: could expand category
return charset # cannot compress
except IndexError:
- # non-BMP characters
+ # non-BMP characters; XXX now they should work
return charset
if negate:
if sys.maxunicode != 65535:
# XXX: negation does not work with big charsets
+ # XXX2: now they should work, but removing this will make the
+ # charmap 17 times bigger
return charset
for i in range(65536):
charmap[i] = not charmap[i]
diff --git a/Lib/sre_parse.py b/Lib/sre_parse.py
index 13737ca..ae63c31 100644
--- a/Lib/sre_parse.py
+++ b/Lib/sre_parse.py
@@ -791,7 +791,7 @@ def parse_template(source, pattern):
else:
# The tokenizer implicitly decodes bytes objects as latin-1, we must
# therefore re-encode the final representation.
- encode = lambda x: x.encode('latin1')
+ encode = lambda x: x.encode('latin-1')
for c, s in p:
if c is MARK:
groupsappend((i, s))
diff --git a/Lib/ssl.py b/Lib/ssl.py
index 8137231..e899059 100644
--- a/Lib/ssl.py
+++ b/Lib/ssl.py
@@ -60,10 +60,25 @@ import re
import _ssl # if we can't import it, let the error propagate
from _ssl import OPENSSL_VERSION_NUMBER, OPENSSL_VERSION_INFO, OPENSSL_VERSION
-from _ssl import _SSLContext, SSLError
+from _ssl import _SSLContext
+from _ssl import (
+ SSLError, SSLZeroReturnError, SSLWantReadError, SSLWantWriteError,
+ SSLSyscallError, SSLEOFError,
+ )
from _ssl import CERT_NONE, CERT_OPTIONAL, CERT_REQUIRED
-from _ssl import OP_ALL, OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_TLSv1
-from _ssl import RAND_status, RAND_egd, RAND_add
+from _ssl import (
+ OP_ALL, OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_TLSv1,
+ OP_CIPHER_SERVER_PREFERENCE, OP_SINGLE_DH_USE
+ )
+try:
+ from _ssl import OP_NO_COMPRESSION
+except ImportError:
+ pass
+try:
+ from _ssl import OP_SINGLE_ECDH_USE
+except ImportError:
+ pass
+from _ssl import RAND_status, RAND_egd, RAND_add, RAND_bytes, RAND_pseudo_bytes
from _ssl import (
SSL_ERROR_ZERO_RETURN,
SSL_ERROR_WANT_READ,
@@ -75,8 +90,9 @@ from _ssl import (
SSL_ERROR_EOF,
SSL_ERROR_INVALID_ERROR_CODE,
)
-from _ssl import HAS_SNI
-from _ssl import PROTOCOL_SSLv3, PROTOCOL_SSLv23, PROTOCOL_TLSv1
+from _ssl import HAS_SNI, HAS_ECDH
+from _ssl import (PROTOCOL_SSLv3, PROTOCOL_SSLv23,
+ PROTOCOL_TLSv1)
from _ssl import _OPENSSL_API_VERSION
_PROTOCOL_NAMES = {
@@ -94,11 +110,16 @@ else:
from socket import getnameinfo as _getnameinfo
from socket import error as socket_error
-from socket import socket, AF_INET, SOCK_STREAM
+from socket import socket, AF_INET, SOCK_STREAM, create_connection
import base64 # for DER-to-PEM translation
import traceback
import errno
+if _ssl.HAS_TLS_UNIQUE:
+ CHANNEL_BINDING_TYPES = ['tls-unique']
+else:
+ CHANNEL_BINDING_TYPES = []
+
# Disable weak or insecure ciphers by default
# (OpenSSL's default setting is 'DEFAULT:!aNULL:!eNULL')
_DEFAULT_CIPHERS = 'DEFAULT:!aNULL:!eNULL:!LOW:!EXPORT:!SSLv2'
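Because several of the new constants only exist with a sufficiently recent OpenSSL, the module imports them inside try/except blocks. Callers should feature-test the same way; a sketch:

    import ssl

    ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
    if hasattr(ssl, "OP_NO_COMPRESSION"):        # absent on older OpenSSL builds
        ctx.options |= ssl.OP_NO_COMPRESSION
    if ssl.CHANNEL_BINDING_TYPES:                # empty when tls-unique is unavailable
        print("channel binding supported")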
@@ -326,6 +347,13 @@ class SSLSocket(socket):
else:
return self._sslobj.cipher()
+ def compression(self):
+ self._checkClosed()
+ if not self._sslobj:
+ return None
+ else:
+ return self._sslobj.compression()
+
def send(self, data, flags=0):
self._checkClosed()
if self._sslobj:
@@ -358,6 +386,12 @@ class SSLSocket(socket):
else:
return socket.sendto(self, data, flags_or_addr, addr)
+ def sendmsg(self, *args, **kwargs):
+ # Ensure programs don't send data unencrypted if they try to
+ # use this method.
+ raise NotImplementedError("sendmsg not allowed on instances of %s" %
+ self.__class__)
+
def sendall(self, data, flags=0):
self._checkClosed()
if self._sslobj:
@@ -416,6 +450,14 @@ class SSLSocket(socket):
else:
return socket.recvfrom_into(self, buffer, nbytes, flags)
+ def recvmsg(self, *args, **kwargs):
+ raise NotImplementedError("recvmsg not allowed on instances of %s" %
+ self.__class__)
+
+ def recvmsg_into(self, *args, **kwargs):
+ raise NotImplementedError("recvmsg_into not allowed on instances of "
+ "%s" % self.__class__)
+
def pending(self):
self._checkClosed()
if self._sslobj:
@@ -502,6 +544,21 @@ class SSLSocket(socket):
self.do_handshake_on_connect),
addr)
+ def get_channel_binding(self, cb_type="tls-unique"):
+ """Get channel binding data for current connection. Raise ValueError
+ if the requested `cb_type` is not supported. Return bytes of the data
+ or None if the data is not available (e.g. before the handshake).
+ """
+ if cb_type not in CHANNEL_BINDING_TYPES:
+ raise ValueError("Unsupported channel binding type")
+ if cb_type != "tls-unique":
+ raise NotImplementedError(
+ "{0} channel binding type not implemented"
+ .format(cb_type))
+ if self._sslobj is None:
+ return None
+ return self._sslobj.tls_unique_cb()
+
def __del__(self):
# sys.stderr.write("__del__ on %s\n" % repr(self))
self._real_close()
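After a completed handshake, the tls-unique data can be fetched from the wrapped socket; a sketch (the host is hypothetical and wrap_socket defaults are kept):

    import socket, ssl

    raw = socket.create_connection(("example.com", 443))
    tls = ssl.wrap_socket(raw)                        # handshake runs here
    token = tls.get_channel_binding("tls-unique")     # None before the handshake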
@@ -566,9 +623,9 @@ def get_server_certificate(addr, ssl_version=PROTOCOL_SSLv3, ca_certs=None):
cert_reqs = CERT_REQUIRED
else:
cert_reqs = CERT_NONE
- s = wrap_socket(socket(), ssl_version=ssl_version,
+ s = create_connection(addr)
+ s = wrap_socket(s, ssl_version=ssl_version,
cert_reqs=cert_reqs, ca_certs=ca_certs)
- s.connect(addr)
dercert = s.getpeercert(True)
s.close()
return DER_cert_to_PEM_cert(dercert)
diff --git a/Lib/string.py b/Lib/string.py
index ef0334c..8bcd1dc 100644
--- a/Lib/string.py
+++ b/Lib/string.py
@@ -46,23 +46,7 @@ def capwords(s, sep=None):
####################################################################
import re as _re
-
-class _multimap:
- """Helper class for combining multiple mappings.
-
- Used by .{safe_,}substitute() to combine the mapping and keyword
- arguments.
- """
- def __init__(self, primary, secondary):
- self._primary = primary
- self._secondary = secondary
-
- def __getitem__(self, key):
- try:
- return self._primary[key]
- except KeyError:
- return self._secondary[key]
-
+from collections import ChainMap
class _TemplateMetaclass(type):
pattern = r"""
@@ -100,7 +84,7 @@ class Template(metaclass=_TemplateMetaclass):
def _invalid(self, mo):
i = mo.start('invalid')
- lines = self.template[:i].splitlines(True)
+ lines = self.template[:i].splitlines(keepends=True)
if not lines:
colno = 1
lineno = 1
@@ -116,7 +100,7 @@ class Template(metaclass=_TemplateMetaclass):
if not args:
mapping = kws
elif kws:
- mapping = _multimap(kws, args[0])
+ mapping = ChainMap(kws, args[0])
else:
mapping = args[0]
# Helper function for .sub()
@@ -142,7 +126,7 @@ class Template(metaclass=_TemplateMetaclass):
if not args:
mapping = kws
elif kws:
- mapping = _multimap(kws, args[0])
+ mapping = ChainMap(kws, args[0])
else:
mapping = args[0]
# Helper function for .sub()
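collections.ChainMap reproduces the lookup order the removed _multimap provided: keyword arguments shadow the positional mapping. A standalone sketch:

    from collections import ChainMap

    kws = {"who": "kwarg"}
    mapping = {"who": "positional", "what": "value"}
    combined = ChainMap(kws, mapping)
    assert combined["who"] == "kwarg"     # first mapping wins
    assert combined["what"] == "value"    # falls through to the second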
diff --git a/Lib/subprocess.py b/Lib/subprocess.py
index 179f41a..684086b 100644
--- a/Lib/subprocess.py
+++ b/Lib/subprocess.py
@@ -191,8 +191,10 @@ should prepare for OSErrors.
A ValueError will be raised if Popen is called with invalid arguments.
-check_call() and check_output() will raise CalledProcessError, if the
-called process returns a non-zero return code.
+Exceptions defined within this module inherit from SubprocessError.
+check_call() and check_output() will raise CalledProcessError if the
+called process returns a non-zero return code. TimeoutExpired will
+be raised if a timeout was specified and expired.
Security
@@ -340,6 +342,7 @@ mswindows = (sys.platform == "win32")
import io
import os
+import time
import traceback
import gc
import signal
@@ -348,7 +351,10 @@ import warnings
import errno
# Exception classes used by this module.
-class CalledProcessError(Exception):
+class SubprocessError(Exception): pass
+
+
+class CalledProcessError(SubprocessError):
"""This exception is raised when a process run by check_call() or
check_output() returns a non-zero exit status.
The exit status will be stored in the returncode attribute;
@@ -362,6 +368,20 @@ class CalledProcessError(Exception):
return "Command '%s' returned non-zero exit status %d" % (self.cmd, self.returncode)
+class TimeoutExpired(SubprocessError):
+ """This exception is raised when the timeout expires while waiting for a
+ child process.
+ """
+ def __init__(self, cmd, timeout, output=None):
+ self.cmd = cmd
+ self.timeout = timeout
+ self.output = output
+
+ def __str__(self):
+ return ("Command '%s' timed out after %s seconds" %
+ (self.cmd, self.timeout))
+
+
if mswindows:
import threading
import msvcrt
@@ -377,42 +397,17 @@ if mswindows:
else:
import select
_has_poll = hasattr(select, 'poll')
- import fcntl
- import pickle
-
- try:
- import _posixsubprocess
- except ImportError:
- _posixsubprocess = None
- warnings.warn("The _posixsubprocess module is not being used. "
- "Child process reliability may suffer if your "
- "program uses threads.", RuntimeWarning)
+ import _posixsubprocess
+ _create_pipe = _posixsubprocess.cloexec_pipe
# When select or poll has indicated that the file is writable,
# we can write up to _PIPE_BUF bytes without risk of blocking.
# POSIX defines PIPE_BUF as >= 512.
_PIPE_BUF = getattr(select, 'PIPE_BUF', 512)
- _FD_CLOEXEC = getattr(fcntl, 'FD_CLOEXEC', 1)
-
- def _set_cloexec(fd, cloexec):
- old = fcntl.fcntl(fd, fcntl.F_GETFD)
- if cloexec:
- fcntl.fcntl(fd, fcntl.F_SETFD, old | _FD_CLOEXEC)
- else:
- fcntl.fcntl(fd, fcntl.F_SETFD, old & ~_FD_CLOEXEC)
-
- if _posixsubprocess:
- _create_pipe = _posixsubprocess.cloexec_pipe
- else:
- def _create_pipe():
- fds = os.pipe()
- _set_cloexec(fds[0], True)
- _set_cloexec(fds[1], True)
- return fds
__all__ = ["Popen", "PIPE", "STDOUT", "call", "check_call", "getstatusoutput",
- "getoutput", "check_output", "CalledProcessError"]
+ "getoutput", "check_output", "CalledProcessError", "DEVNULL"]
if mswindows:
from _subprocess import (CREATE_NEW_CONSOLE, CREATE_NEW_PROCESS_GROUP,
@@ -448,27 +443,32 @@ def _cleanup():
PIPE = -1
STDOUT = -2
+DEVNULL = -3
def _eintr_retry_call(func, *args):
while True:
try:
return func(*args)
- except (OSError, IOError) as e:
- if e.errno == errno.EINTR:
- continue
- raise
+ except InterruptedError:
+ continue
-def call(*popenargs, **kwargs):
- """Run command with arguments. Wait for command to complete, then
- return the returncode attribute.
+def call(*popenargs, timeout=None, **kwargs):
+ """Run command with arguments. Wait for command to complete or
+ timeout, then return the returncode attribute.
The arguments are the same as for the Popen constructor. Example:
retcode = call(["ls", "-l"])
"""
- return Popen(*popenargs, **kwargs).wait()
+ with Popen(*popenargs, **kwargs) as p:
+ try:
+ return p.wait(timeout=timeout)
+ except:
+ p.kill()
+ p.wait()
+ raise
def check_call(*popenargs, **kwargs):
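The timeout keyword added to call() above combines naturally with the new DEVNULL constant; on expiry, call() kills the child before re-raising. A sketch:

    from subprocess import call, DEVNULL, TimeoutExpired

    try:
        rc = call(["sleep", "10"], stdout=DEVNULL, timeout=1)
    except TimeoutExpired:
        print("child exceeded the deadline and was killed")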
@@ -477,7 +477,7 @@ def check_call(*popenargs, **kwargs):
CalledProcessError. The CalledProcessError object will have the
return code in the returncode attribute.
- The arguments are the same as for the Popen constructor. Example:
+ The arguments are the same as for the call function. Example:
check_call(["ls", "-l"])
"""
@@ -490,7 +490,7 @@ def check_call(*popenargs, **kwargs):
return 0
-def check_output(*popenargs, **kwargs):
+def check_output(*popenargs, timeout=None, **kwargs):
r"""Run command with arguments and return its output as a byte string.
If the exit code was non-zero it raises a CalledProcessError. The
@@ -512,14 +512,20 @@ def check_output(*popenargs, **kwargs):
"""
if 'stdout' in kwargs:
raise ValueError('stdout argument not allowed, it will be overridden.')
- process = Popen(*popenargs, stdout=PIPE, **kwargs)
- output, unused_err = process.communicate()
- retcode = process.poll()
- if retcode:
- cmd = kwargs.get("args")
- if cmd is None:
- cmd = popenargs[0]
- raise CalledProcessError(retcode, cmd, output=output)
+ with Popen(*popenargs, stdout=PIPE, **kwargs) as process:
+ try:
+ output, unused_err = process.communicate(timeout=timeout)
+ except TimeoutExpired:
+ process.kill()
+ output, unused_err = process.communicate()
+ raise TimeoutExpired(process.args, timeout, output=output)
+ except:
+ process.kill()
+ process.wait()
+ raise
+ retcode = process.poll()
+ if retcode:
+ raise CalledProcessError(retcode, process.args, output=output)
return output
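check_output() mirrors this: on timeout the child is killed, its remaining output collected, and TimeoutExpired re-raised with that output attached. A sketch:

    from subprocess import check_output, TimeoutExpired

    try:
        out = check_output(["ls", "-l"], timeout=5)
    except TimeoutExpired as exc:
        out = exc.output     # whatever the child produced before the kill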
@@ -614,11 +620,19 @@ def getstatusoutput(cmd):
>>> subprocess.getstatusoutput('/bin/junk')
(256, 'sh: /bin/junk: not found')
"""
- pipe = os.popen('{ ' + cmd + '; } 2>&1', 'r')
- text = pipe.read()
- sts = pipe.close()
- if sts is None: sts = 0
- if text[-1:] == '\n': text = text[:-1]
+ with os.popen('{ ' + cmd + '; } 2>&1', 'r') as pipe:
+ try:
+ text = pipe.read()
+ sts = pipe.close()
+ except:
+ process = pipe._proc
+ process.kill()
+ process.wait()
+ raise
+ if sts is None:
+ sts = 0
+ if text[-1:] == '\n':
+ text = text[:-1]
return sts, text
@@ -650,6 +664,8 @@ class Popen(object):
_cleanup()
self._child_created = False
+ self._input = None
+ self._communication_started = False
if bufsize is None:
bufsize = 0 # Restore default
if not isinstance(bufsize, int):
@@ -684,6 +700,7 @@ class Popen(object):
raise ValueError("creationflags is only supported on Windows "
"platforms")
+ self.args = args
self.stdin = None
self.stdout = None
self.stderr = None
@@ -784,19 +801,28 @@ class Popen(object):
# Child is still running, keep us alive until we can wait on it.
_active.append(self)
+ def _get_devnull(self):
+ if not hasattr(self, '_devnull'):
+ self._devnull = os.open(os.devnull, os.O_RDWR)
+ return self._devnull
- def communicate(self, input=None):
+ def communicate(self, input=None, timeout=None):
"""Interact with process: Send data to stdin. Read data from
stdout and stderr, until end-of-file is reached. Wait for
- process to terminate. The optional input argument should be a
- string to be sent to the child process, or None, if no data
+ process to terminate. The optional input argument should be
+ bytes to be sent to the child process, or None, if no data
should be sent to the child.
communicate() returns a tuple (stdout, stderr)."""
- # Optimization: If we are only using one pipe, or no pipe at
- # all, using select() or threads is unnecessary.
- if [self.stdin, self.stdout, self.stderr].count(None) >= 2:
+ if self._communication_started and input:
+ raise ValueError("Cannot send input after starting communication")
+
+ # Optimization: If we are not worried about timeouts, we haven't
+ # started communicating, and we have one or zero pipes, using select()
+ # or threads is unnecessary.
+ if (timeout is None and not self._communication_started and
+ [self.stdin, self.stdout, self.stderr].count(None) >= 2):
stdout = None
stderr = None
if self.stdin:
@@ -814,15 +840,42 @@ class Popen(object):
stderr = _eintr_retry_call(self.stderr.read)
self.stderr.close()
self.wait()
- return (stdout, stderr)
+ else:
+ if timeout is not None:
+ endtime = time.time() + timeout
+ else:
+ endtime = None
+
+ try:
+ stdout, stderr = self._communicate(input, endtime, timeout)
+ finally:
+ self._communication_started = True
+
+ sts = self.wait(timeout=self._remaining_time(endtime))
- return self._communicate(input)
+ return (stdout, stderr)
def poll(self):
return self._internal_poll()
+ def _remaining_time(self, endtime):
+ """Convenience for _communicate when computing timeouts."""
+ if endtime is None:
+ return None
+ else:
+ return endtime - time.time()
+
+
+ def _check_timeout(self, endtime, orig_timeout):
+ """Convenience for checking if a timeout has expired."""
+ if endtime is None:
+ return
+ if time.time() > endtime:
+ raise TimeoutExpired(self.args, orig_timeout)
+
+
if mswindows:
#
# Windows methods
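When communicate() times out, the pipes and reader state are kept so the call can be retried; input, however, may only be supplied on the first attempt. A sketch:

    import subprocess

    p = subprocess.Popen(["cat"], stdin=subprocess.PIPE,
                         stdout=subprocess.PIPE)
    try:
        out, err = p.communicate(b"data", timeout=1)
    except subprocess.TimeoutExpired:
        out, err = p.communicate()   # resume; no new input allowed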
@@ -844,6 +897,8 @@ class Popen(object):
p2cread, _ = _subprocess.CreatePipe(None, 0)
elif stdin == PIPE:
p2cread, p2cwrite = _subprocess.CreatePipe(None, 0)
+ elif stdin == DEVNULL:
+ p2cread = msvcrt.get_osfhandle(self._get_devnull())
elif isinstance(stdin, int):
p2cread = msvcrt.get_osfhandle(stdin)
else:
@@ -857,6 +912,8 @@ class Popen(object):
_, c2pwrite = _subprocess.CreatePipe(None, 0)
elif stdout == PIPE:
c2pread, c2pwrite = _subprocess.CreatePipe(None, 0)
+ elif stdout == DEVNULL:
+ c2pwrite = msvcrt.get_osfhandle(self._get_devnull())
elif isinstance(stdout, int):
c2pwrite = msvcrt.get_osfhandle(stdout)
else:
@@ -872,6 +929,8 @@ class Popen(object):
errread, errwrite = _subprocess.CreatePipe(None, 0)
elif stderr == STDOUT:
errwrite = c2pwrite
+ elif stderr == DEVNULL:
+ errwrite = msvcrt.get_osfhandle(self._get_devnull())
elif isinstance(stderr, int):
errwrite = msvcrt.get_osfhandle(stderr)
else:
@@ -981,6 +1040,8 @@ class Popen(object):
c2pwrite.Close()
if errwrite != -1:
errwrite.Close()
+ if hasattr(self, '_devnull'):
+ os.close(self._devnull)
# Retain the process handle, but close the thread handle
self._child_created = True
@@ -1005,12 +1066,20 @@ class Popen(object):
return self.returncode
- def wait(self):
+ def wait(self, timeout=None, endtime=None):
"""Wait for child process to terminate. Returns returncode
attribute."""
+ if endtime is not None:
+ timeout = self._remaining_time(endtime)
+ if timeout is None:
+ timeout_millis = _subprocess.INFINITE
+ else:
+ timeout_millis = int(timeout * 1000)
if self.returncode is None:
- _subprocess.WaitForSingleObject(self._handle,
- _subprocess.INFINITE)
+ result = _subprocess.WaitForSingleObject(self._handle,
+ timeout_millis)
+ if result == _subprocess.WAIT_TIMEOUT:
+ raise TimeoutExpired(self.args, timeout)
self.returncode = _subprocess.GetExitCodeProcess(self._handle)
return self.returncode
@@ -1020,22 +1089,23 @@ class Popen(object):
fh.close()
- def _communicate(self, input):
- stdout = None # Return
- stderr = None # Return
-
- if self.stdout:
- stdout = []
- stdout_thread = threading.Thread(target=self._readerthread,
- args=(self.stdout, stdout))
- stdout_thread.daemon = True
- stdout_thread.start()
- if self.stderr:
- stderr = []
- stderr_thread = threading.Thread(target=self._readerthread,
- args=(self.stderr, stderr))
- stderr_thread.daemon = True
- stderr_thread.start()
+ def _communicate(self, input, endtime, orig_timeout):
+ # Start reader threads feeding into a list hanging off of this
+ # object, unless they've already been started.
+ if self.stdout and not hasattr(self, "_stdout_buff"):
+ self._stdout_buff = []
+ self.stdout_thread = \
+ threading.Thread(target=self._readerthread,
+ args=(self.stdout, self._stdout_buff))
+ self.stdout_thread.daemon = True
+ self.stdout_thread.start()
+ if self.stderr and not hasattr(self, "_stderr_buff"):
+ self._stderr_buff = []
+ self.stderr_thread = \
+ threading.Thread(target=self._readerthread,
+ args=(self.stderr, self._stderr_buff))
+ self.stderr_thread.daemon = True
+ self.stderr_thread.start()
if self.stdin:
if input is not None:
@@ -1046,10 +1116,28 @@ class Popen(object):
raise
self.stdin.close()
+ # Wait for the reader threads, or time out. If we time out, the
+ # threads remain reading and the fds left open in case the user
+ # calls communicate again.
+ if self.stdout is not None:
+ self.stdout_thread.join(self._remaining_time(endtime))
+ if self.stdout_thread.isAlive():
+ raise TimeoutExpired(self.args, orig_timeout)
+ if self.stderr is not None:
+ self.stderr_thread.join(self._remaining_time(endtime))
+ if self.stderr_thread.isAlive():
+ raise TimeoutExpired(self.args, orig_timeout)
+
+ # Collect the output from and close both pipes, now that we know
+ # both have been read successfully.
+ stdout = None
+ stderr = None
if self.stdout:
- stdout_thread.join()
+ stdout = self._stdout_buff
+ self.stdout.close()
if self.stderr:
- stderr_thread.join()
+ stderr = self._stderr_buff
+ self.stderr.close()
# All data exchanged. Translate lists into strings.
if stdout is not None:
@@ -1057,7 +1145,6 @@ class Popen(object):
if stderr is not None:
stderr = stderr[0]
- self.wait()
return (stdout, stderr)
def send_signal(self, sig):
@@ -1077,11 +1164,9 @@ class Popen(object):
"""
try:
_subprocess.TerminateProcess(self._handle, 1)
- except OSError as e:
+ except PermissionError:
# ERROR_ACCESS_DENIED (winerror 5) is received when the
# process already died.
- if e.winerror != 5:
- raise
rc = _subprocess.GetExitCodeProcess(self._handle)
if rc == _subprocess.STILL_ACTIVE:
raise
@@ -1105,6 +1190,8 @@ class Popen(object):
pass
elif stdin == PIPE:
p2cread, p2cwrite = _create_pipe()
+ elif stdin == DEVNULL:
+ p2cread = self._get_devnull()
elif isinstance(stdin, int):
p2cread = stdin
else:
@@ -1115,6 +1202,8 @@ class Popen(object):
pass
elif stdout == PIPE:
c2pread, c2pwrite = _create_pipe()
+ elif stdout == DEVNULL:
+ c2pwrite = self._get_devnull()
elif isinstance(stdout, int):
c2pwrite = stdout
else:
@@ -1127,6 +1216,8 @@ class Popen(object):
errread, errwrite = _create_pipe()
elif stderr == STDOUT:
errwrite = c2pwrite
+ elif stderr == DEVNULL:
+ errwrite = self._get_devnull()
elif isinstance(stderr, int):
errwrite = stderr
else:
@@ -1157,7 +1248,7 @@ class Popen(object):
restore_signals, start_new_session):
"""Execute program (POSIX version)"""
- if isinstance(args, str):
+ if isinstance(args, (str, bytes)):
args = [args]
else:
args = list(args)
@@ -1176,149 +1267,34 @@ class Popen(object):
errpipe_read, errpipe_write = _create_pipe()
try:
try:
-
- if _posixsubprocess:
- # We must avoid complex work that could involve
- # malloc or free in the child process to avoid
- # potential deadlocks, thus we do all this here.
- # and pass it to fork_exec()
-
- if env is not None:
- env_list = [os.fsencode(k) + b'=' + os.fsencode(v)
- for k, v in env.items()]
- else:
- env_list = None # Use execv instead of execve.
- executable = os.fsencode(executable)
- if os.path.dirname(executable):
- executable_list = (executable,)
- else:
- # This matches the behavior of os._execvpe().
- executable_list = tuple(
- os.path.join(os.fsencode(dir), executable)
- for dir in os.get_exec_path(env))
- fds_to_keep = set(pass_fds)
- fds_to_keep.add(errpipe_write)
- self.pid = _posixsubprocess.fork_exec(
- args, executable_list,
- close_fds, sorted(fds_to_keep), cwd, env_list,
- p2cread, p2cwrite, c2pread, c2pwrite,
- errread, errwrite,
- errpipe_read, errpipe_write,
- restore_signals, start_new_session, preexec_fn)
- self._child_created = True
+ # We must avoid complex work that could involve
+ # malloc or free in the child process to avoid
+ # potential deadlocks, thus we do all this here.
+ # and pass it to fork_exec()
+
+ if env is not None:
+ env_list = [os.fsencode(k) + b'=' + os.fsencode(v)
+ for k, v in env.items()]
else:
- # Pure Python implementation: It is not thread safe.
- # This implementation may deadlock in the child if your
- # parent process has any other threads running.
-
- gc_was_enabled = gc.isenabled()
- # Disable gc to avoid bug where gc -> file_dealloc ->
- # write to stderr -> hang. See issue1336
- gc.disable()
- try:
- self.pid = os.fork()
- except:
- if gc_was_enabled:
- gc.enable()
- raise
- self._child_created = True
- if self.pid == 0:
- # Child
- try:
- # Close parent's pipe ends
- if p2cwrite != -1:
- os.close(p2cwrite)
- if c2pread != -1:
- os.close(c2pread)
- if errread != -1:
- os.close(errread)
- os.close(errpipe_read)
-
- # When duping fds, if there arises a situation
- # where one of the fds is either 0, 1 or 2, it
- # is possible that it is overwritten (#12607).
- if c2pwrite == 0:
- c2pwrite = os.dup(c2pwrite)
- if errwrite == 0 or errwrite == 1:
- errwrite = os.dup(errwrite)
-
- # Dup fds for child
- def _dup2(a, b):
- # dup2() removes the CLOEXEC flag but
- # we must do it ourselves if dup2()
- # would be a no-op (issue #10806).
- if a == b:
- _set_cloexec(a, False)
- elif a != -1:
- os.dup2(a, b)
- _dup2(p2cread, 0)
- _dup2(c2pwrite, 1)
- _dup2(errwrite, 2)
-
- # Close pipe fds. Make sure we don't close the
- # same fd more than once, or standard fds.
- closed = set()
- for fd in [p2cread, c2pwrite, errwrite]:
- if fd > 2 and fd not in closed:
- os.close(fd)
- closed.add(fd)
-
- # Close all other fds, if asked for
- if close_fds:
- fds_to_keep = set(pass_fds)
- fds_to_keep.add(errpipe_write)
- self._close_fds(fds_to_keep)
-
-
- if cwd is not None:
- os.chdir(cwd)
-
- # This is a copy of Python/pythonrun.c
- # _Py_RestoreSignals(). If that were exposed
- # as a sys._py_restoresignals func it would be
- # better.. but this pure python implementation
- # isn't likely to be used much anymore.
- if restore_signals:
- signals = ('SIGPIPE', 'SIGXFZ', 'SIGXFSZ')
- for sig in signals:
- if hasattr(signal, sig):
- signal.signal(getattr(signal, sig),
- signal.SIG_DFL)
-
- if start_new_session and hasattr(os, 'setsid'):
- os.setsid()
-
- if preexec_fn:
- preexec_fn()
-
- if env is None:
- os.execvp(executable, args)
- else:
- os.execvpe(executable, args, env)
-
- except:
- try:
- exc_type, exc_value = sys.exc_info()[:2]
- if isinstance(exc_value, OSError):
- errno_num = exc_value.errno
- else:
- errno_num = 0
- message = '%s:%x:%s' % (exc_type.__name__,
- errno_num, exc_value)
- message = message.encode(errors="surrogatepass")
- os.write(errpipe_write, message)
- except Exception:
- # We MUST not allow anything odd happening
- # above to prevent us from exiting below.
- pass
-
- # This exitcode won't be reported to applications
- # so it really doesn't matter what we return.
- os._exit(255)
-
- # Parent
- if gc_was_enabled:
- gc.enable()
+ env_list = None # Use execv instead of execve.
+ executable = os.fsencode(executable)
+ if os.path.dirname(executable):
+ executable_list = (executable,)
+ else:
+ # This matches the behavior of os._execvpe().
+ executable_list = tuple(
+ os.path.join(os.fsencode(dir), executable)
+ for dir in os.get_exec_path(env))
+ fds_to_keep = set(pass_fds)
+ fds_to_keep.add(errpipe_write)
+ self.pid = _posixsubprocess.fork_exec(
+ args, executable_list,
+ close_fds, sorted(fds_to_keep), cwd, env_list,
+ p2cread, p2cwrite, c2pread, c2pwrite,
+ errread, errwrite,
+ errpipe_read, errpipe_write,
+ restore_signals, start_new_session, preexec_fn)
+ self._child_created = True
finally:
# be sure the FD is closed no matter what
os.close(errpipe_write)
@@ -1329,6 +1305,8 @@ class Popen(object):
os.close(c2pwrite)
if errwrite != -1 and errread != -1:
os.close(errwrite)
+ if hasattr(self, '_devnull'):
+ os.close(self._devnull)
# Wait for exec to fail or succeed; possibly raising an
# exception (limited in size)
@@ -1406,25 +1384,57 @@ class Popen(object):
return self.returncode
- def wait(self):
+ def _try_wait(self, wait_flags):
+ try:
+ (pid, sts) = _eintr_retry_call(os.waitpid, self.pid, wait_flags)
+ except OSError as e:
+ if e.errno != errno.ECHILD:
+ raise
+ # This happens if SIGCLD is set to be ignored or waiting
+ # for child processes has otherwise been disabled for our
+ # process. This child is dead, we can't get the status.
+ pid = self.pid
+ sts = 0
+ return (pid, sts)
+
+
+ def wait(self, timeout=None, endtime=None):
"""Wait for child process to terminate. Returns returncode
attribute."""
- if self.returncode is None:
- try:
- pid, sts = _eintr_retry_call(os.waitpid, self.pid, 0)
- except OSError as e:
- if e.errno != errno.ECHILD:
- raise
- # This happens if SIGCLD is set to be ignored or waiting
- # for child processes has otherwise been disabled for our
- # process. This child is dead, we can't get the status.
- sts = 0
+ if self.returncode is not None:
+ return self.returncode
+
+ # endtime is preferred to timeout. timeout is only used for
+ # printing.
+ if endtime is not None or timeout is not None:
+ if endtime is None:
+ endtime = time.time() + timeout
+ elif timeout is None:
+ timeout = self._remaining_time(endtime)
+
+ if endtime is not None:
+ # Enter a busy loop if we have a timeout. This busy loop was
+ # cribbed from Lib/threading.py in Thread.wait() at r71065.
+ delay = 0.0005 # 500 us -> initial delay of 1 ms
+ while True:
+ (pid, sts) = self._try_wait(os.WNOHANG)
+ assert pid == self.pid or pid == 0
+ if pid == self.pid:
+ self._handle_exitstatus(sts)
+ break
+ remaining = self._remaining_time(endtime)
+ if remaining <= 0:
+ raise TimeoutExpired(self.args, timeout)
+ delay = min(delay * 2, remaining, .05)
+ time.sleep(delay)
+ elif self.returncode is None:
+ (pid, sts) = self._try_wait(0)
self._handle_exitstatus(sts)
return self.returncode
- def _communicate(self, input):
- if self.stdin:
+ def _communicate(self, input, endtime, orig_timeout):
+ if self.stdin and not self._communication_started:
# Flush stdio buffer. This might block, if the user has
# been writing to .stdin in an uncontrolled fashion.
self.stdin.flush()
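On POSIX, wait(timeout=...) above polls with os.WNOHANG and an exponentially growing sleep rather than blocking in waitpid. Usage is the same on both platforms; a sketch:

    import subprocess

    p = subprocess.Popen(["sleep", "30"])
    try:
        p.wait(timeout=2)
    except subprocess.TimeoutExpired:
        p.terminate()
        p.wait()             # reap after the signal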
@@ -1432,9 +1442,13 @@ class Popen(object):
self.stdin.close()
if _has_poll:
- stdout, stderr = self._communicate_with_poll(input)
+ stdout, stderr = self._communicate_with_poll(input, endtime,
+ orig_timeout)
else:
- stdout, stderr = self._communicate_with_select(input)
+ stdout, stderr = self._communicate_with_select(input, endtime,
+ orig_timeout)
+
+ self.wait(timeout=self._remaining_time(endtime))
# All data exchanged. Translate lists into strings.
if stdout is not None:
@@ -1452,67 +1466,87 @@ class Popen(object):
stderr = self._translate_newlines(stderr,
self.stderr.encoding)
- self.wait()
return (stdout, stderr)
- def _communicate_with_poll(self, input):
+ def _communicate_with_poll(self, input, endtime, orig_timeout):
stdout = None # Return
stderr = None # Return
- fd2file = {}
- fd2output = {}
+
+ if not self._communication_started:
+ self._fd2file = {}
poller = select.poll()
def register_and_append(file_obj, eventmask):
poller.register(file_obj.fileno(), eventmask)
- fd2file[file_obj.fileno()] = file_obj
+ self._fd2file[file_obj.fileno()] = file_obj
def close_unregister_and_remove(fd):
poller.unregister(fd)
- fd2file[fd].close()
- fd2file.pop(fd)
+ self._fd2file[fd].close()
+ self._fd2file.pop(fd)
if self.stdin and input:
register_and_append(self.stdin, select.POLLOUT)
+ # Only create this mapping if we haven't already.
+ if not self._communication_started:
+ self._fd2output = {}
+ if self.stdout:
+ self._fd2output[self.stdout.fileno()] = []
+ if self.stderr:
+ self._fd2output[self.stderr.fileno()] = []
+
select_POLLIN_POLLPRI = select.POLLIN | select.POLLPRI
if self.stdout:
register_and_append(self.stdout, select_POLLIN_POLLPRI)
- fd2output[self.stdout.fileno()] = stdout = []
+ stdout = self._fd2output[self.stdout.fileno()]
if self.stderr:
register_and_append(self.stderr, select_POLLIN_POLLPRI)
- fd2output[self.stderr.fileno()] = stderr = []
-
- input_offset = 0
- while fd2file:
+ stderr = self._fd2output[self.stderr.fileno()]
+
+ # Save the input here so that if we time out while communicating,
+ # we can continue sending input if we retry.
+ if self.stdin and self._input is None:
+ self._input_offset = 0
+ self._input = input
+ if self.universal_newlines:
+ self._input = self._input.encode(self.stdin.encoding)
+
+ while self._fd2file:
+ timeout = self._remaining_time(endtime)
+ if timeout is not None and timeout < 0:
+ raise TimeoutExpired(self.args, orig_timeout)
try:
- ready = poller.poll()
+ ready = poller.poll(timeout)
except select.error as e:
if e.args[0] == errno.EINTR:
continue
raise
+ self._check_timeout(endtime, orig_timeout)
# XXX Rewrite these to use non-blocking I/O on the
# file objects; they are no longer using C stdio!
for fd, mode in ready:
if mode & select.POLLOUT:
- chunk = input[input_offset : input_offset + _PIPE_BUF]
+ chunk = self._input[self._input_offset :
+ self._input_offset + _PIPE_BUF]
try:
- input_offset += os.write(fd, chunk)
+ self._input_offset += os.write(fd, chunk)
except OSError as e:
if e.errno == errno.EPIPE:
close_unregister_and_remove(fd)
else:
raise
else:
- if input_offset >= len(input):
+ if self._input_offset >= len(self._input):
close_unregister_and_remove(fd)
elif mode & select_POLLIN_POLLPRI:
data = os.read(fd, 4096)
if not data:
close_unregister_and_remove(fd)
- fd2output[fd].append(data)
+ self._fd2output[fd].append(data)
else:
# Ignore hang up or errors.
close_unregister_and_remove(fd)
@@ -1520,61 +1554,87 @@ class Popen(object):
return (stdout, stderr)
- def _communicate_with_select(self, input):
- read_set = []
- write_set = []
+ def _communicate_with_select(self, input, endtime, orig_timeout):
+ if not self._communication_started:
+ self._read_set = []
+ self._write_set = []
+ if self.stdin and input:
+ self._write_set.append(self.stdin)
+ if self.stdout:
+ self._read_set.append(self.stdout)
+ if self.stderr:
+ self._read_set.append(self.stderr)
+
+ if self.stdin and self._input is None:
+ self._input_offset = 0
+ self._input = input
+ if self.universal_newlines:
+ self._input = self._input.encode(self.stdin.encoding)
+
stdout = None # Return
stderr = None # Return
- if self.stdin and input:
- write_set.append(self.stdin)
if self.stdout:
- read_set.append(self.stdout)
- stdout = []
+ if not self._communication_started:
+ self._stdout_buff = []
+ stdout = self._stdout_buff
if self.stderr:
- read_set.append(self.stderr)
- stderr = []
-
- input_offset = 0
- while read_set or write_set:
+ if not self._communication_started:
+ self._stderr_buff = []
+ stderr = self._stderr_buff
+
+ while self._read_set or self._write_set:
+ timeout = self._remaining_time(endtime)
+ if timeout is not None and timeout < 0:
+ raise TimeoutExpired(self.args, orig_timeout)
try:
- rlist, wlist, xlist = select.select(read_set, write_set, [])
+ (rlist, wlist, xlist) = \
+ select.select(self._read_set, self._write_set, [],
+ timeout)
except select.error as e:
if e.args[0] == errno.EINTR:
continue
raise
+ # According to the docs, returning three empty lists indicates
+ # that the timeout expired.
+ if not (rlist or wlist or xlist):
+ raise TimeoutExpired(self.args, orig_timeout)
+ # We also check what time it is ourselves for good measure.
+ self._check_timeout(endtime, orig_timeout)
+
# XXX Rewrite these to use non-blocking I/O on the
# file objects; they are no longer using C stdio!
if self.stdin in wlist:
- chunk = input[input_offset : input_offset + _PIPE_BUF]
+ chunk = self._input[self._input_offset :
+ self._input_offset + _PIPE_BUF]
try:
bytes_written = os.write(self.stdin.fileno(), chunk)
except OSError as e:
if e.errno == errno.EPIPE:
self.stdin.close()
- write_set.remove(self.stdin)
+ self._write_set.remove(self.stdin)
else:
raise
else:
- input_offset += bytes_written
- if input_offset >= len(input):
+ self._input_offset += bytes_written
+ if self._input_offset >= len(self._input):
self.stdin.close()
- write_set.remove(self.stdin)
+ self._write_set.remove(self.stdin)
if self.stdout in rlist:
data = os.read(self.stdout.fileno(), 1024)
if not data:
self.stdout.close()
- read_set.remove(self.stdout)
+ self._read_set.remove(self.stdout)
stdout.append(data)
if self.stderr in rlist:
data = os.read(self.stderr.fileno(), 1024)
if not data:
self.stderr.close()
- read_set.remove(self.stderr)
+ self._read_set.remove(self.stderr)
stderr.append(data)
return (stdout, stderr)
@@ -1594,68 +1654,3 @@ class Popen(object):
"""Kill the process with SIGKILL
"""
self.send_signal(signal.SIGKILL)
-
-
-def _demo_posix():
- #
- # Example 1: Simple redirection: Get process list
- #
- plist = Popen(["ps"], stdout=PIPE).communicate()[0]
- print("Process list:")
- print(plist)
-
- #
- # Example 2: Change uid before executing child
- #
- if os.getuid() == 0:
- p = Popen(["id"], preexec_fn=lambda: os.setuid(100))
- p.wait()
-
- #
- # Example 3: Connecting several subprocesses
- #
- print("Looking for 'hda'...")
- p1 = Popen(["dmesg"], stdout=PIPE)
- p2 = Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE)
- print(repr(p2.communicate()[0]))
-
- #
- # Example 4: Catch execution error
- #
- print()
- print("Trying a weird file...")
- try:
- print(Popen(["/this/path/does/not/exist"]).communicate())
- except OSError as e:
- if e.errno == errno.ENOENT:
- print("The file didn't exist. I thought so...")
- print("Child traceback:")
- print(e.child_traceback)
- else:
- print("Error", e.errno)
- else:
- print("Gosh. No error.", file=sys.stderr)
-
-
-def _demo_windows():
- #
- # Example 1: Connecting several subprocesses
- #
- print("Looking for 'PROMPT' in set output...")
- p1 = Popen("set", stdout=PIPE, shell=True)
- p2 = Popen('find "PROMPT"', stdin=p1.stdout, stdout=PIPE)
- print(repr(p2.communicate()[0]))
-
- #
- # Example 2: Simple execution of program
- #
- print("Executing calc...")
- p = Popen("calc")
- p.wait()
-
-
-if __name__ == "__main__":
- if mswindows:
- _demo_windows()
- else:
- _demo_posix()
diff --git a/Lib/sysconfig.cfg b/Lib/sysconfig.cfg
new file mode 100644
index 0000000..565c0eb
--- /dev/null
+++ b/Lib/sysconfig.cfg
@@ -0,0 +1,111 @@
+[globals]
+# These are useful categories that can be referenced at run time,
+# using packaging.database.get_file.
+# Configuration files
+config = {confdir}/{distribution.name}
+# Non-writable data that is independent of architecture (images, many xml/text files)
+appdata = {datadir}/{distribution.name}
+# Non-writable data that is architecture-dependent (some binary data formats)
+appdata.arch = {libdir}/{distribution.name}
+# Data, written by the app/lib, that must be preserved (databases)
+appdata.persistent = {statedir}/lib/{distribution.name}
+# Data, written by the app/lib, that can be safely discarded (cache)
+appdata.disposable = {statedir}/cache/{distribution.name}
+# Help or documentation files
+help = {datadir}/{distribution.name}
+icon = {datadir}/pixmaps
+scripts = {base}/bin
+
+# Non-runtime files. These are valid categories for marking files for
+# install, but they should not be referenced by the app/lib at run time.
+# Help or documentation files
+doc = {datadir}/doc/{distribution.name}
+# GNU info documentation files
+info = {datadir}/info
+# man pages
+man = {datadir}/man
+
+[posix_prefix]
+# Configuration directories. Some of these come straight out of the
+# configure script. They are for implementing the other variables, not to
+# be used directly in [resource_locations].
+confdir = /etc
+datadir = /usr/share
+libdir = /usr/lib
+statedir = /var
+# User resource directory
+local = ~/.local/{distribution.name}
+
+stdlib = {base}/lib/python{py_version_short}
+platstdlib = {platbase}/lib/python{py_version_short}
+purelib = {base}/lib/python{py_version_short}/site-packages
+platlib = {platbase}/lib/python{py_version_short}/site-packages
+include = {base}/include/python{py_version_short}{abiflags}
+platinclude = {platbase}/include/python{py_version_short}{abiflags}
+data = {base}
+
+[posix_home]
+stdlib = {base}/lib/python
+platstdlib = {base}/lib/python
+purelib = {base}/lib/python
+platlib = {base}/lib/python
+include = {base}/include/python
+platinclude = {base}/include/python
+scripts = {base}/bin
+data = {base}
+
+[nt]
+stdlib = {base}/Lib
+platstdlib = {base}/Lib
+purelib = {base}/Lib/site-packages
+platlib = {base}/Lib/site-packages
+include = {base}/Include
+platinclude = {base}/Include
+scripts = {base}/Scripts
+data = {base}
+
+[os2]
+stdlib = {base}/Lib
+platstdlib = {base}/Lib
+purelib = {base}/Lib/site-packages
+platlib = {base}/Lib/site-packages
+include = {base}/Include
+platinclude = {base}/Include
+scripts = {base}/Scripts
+data = {base}
+
+[os2_home]
+stdlib = {userbase}/lib/python{py_version_short}
+platstdlib = {userbase}/lib/python{py_version_short}
+purelib = {userbase}/lib/python{py_version_short}/site-packages
+platlib = {userbase}/lib/python{py_version_short}/site-packages
+include = {userbase}/include/python{py_version_short}
+scripts = {userbase}/bin
+data = {userbase}
+
+[nt_user]
+stdlib = {userbase}/Python{py_version_nodot}
+platstdlib = {userbase}/Python{py_version_nodot}
+purelib = {userbase}/Python{py_version_nodot}/site-packages
+platlib = {userbase}/Python{py_version_nodot}/site-packages
+include = {userbase}/Python{py_version_nodot}/Include
+scripts = {userbase}/Scripts
+data = {userbase}
+
+[posix_user]
+stdlib = {userbase}/lib/python{py_version_short}
+platstdlib = {userbase}/lib/python{py_version_short}
+purelib = {userbase}/lib/python{py_version_short}/site-packages
+platlib = {userbase}/lib/python{py_version_short}/site-packages
+include = {userbase}/include/python{py_version_short}
+scripts = {userbase}/bin
+data = {userbase}
+
+[osx_framework_user]
+stdlib = {userbase}/lib/python
+platstdlib = {userbase}/lib/python
+purelib = {userbase}/lib/python/site-packages
+platlib = {userbase}/lib/python/site-packages
+include = {userbase}/include
+scripts = {userbase}/bin
+data = {userbase}
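
Note: placeholders such as {base} and {py_version_short} in the scheme sections
above are expanded at run time by sysconfig. A minimal sketch of how a scheme
resolves, using the public API (the 'base' override is purely illustrative):

    import sysconfig

    # Expand the posix_prefix scheme; overriding 'base' shows that
    # caller-supplied vars take precedence over the computed ones.
    paths = sysconfig.get_paths('posix_prefix', vars={'base': '/opt/py'})
    print(paths['purelib'])  # e.g. /opt/py/lib/python3.3/site-packages
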
diff --git a/Lib/sysconfig.py b/Lib/sysconfig.py
index b2183d8..e5c1e60 100644
--- a/Lib/sysconfig.py
+++ b/Lib/sysconfig.py
@@ -1,9 +1,10 @@
-"""Provide access to Python's configuration information.
+"""Access to Python's configuration information."""
-"""
-import sys
import os
+import re
+import sys
from os.path import pardir, realpath
+from configparser import RawConfigParser
__all__ = [
'get_config_h_filename',
@@ -17,91 +18,51 @@ __all__ = [
'get_python_version',
'get_scheme_names',
'parse_config_h',
- ]
-
-_INSTALL_SCHEMES = {
- 'posix_prefix': {
- 'stdlib': '{base}/lib/python{py_version_short}',
- 'platstdlib': '{platbase}/lib/python{py_version_short}',
- 'purelib': '{base}/lib/python{py_version_short}/site-packages',
- 'platlib': '{platbase}/lib/python{py_version_short}/site-packages',
- 'include':
- '{base}/include/python{py_version_short}{abiflags}',
- 'platinclude':
- '{platbase}/include/python{py_version_short}{abiflags}',
- 'scripts': '{base}/bin',
- 'data': '{base}',
- },
- 'posix_home': {
- 'stdlib': '{base}/lib/python',
- 'platstdlib': '{base}/lib/python',
- 'purelib': '{base}/lib/python',
- 'platlib': '{base}/lib/python',
- 'include': '{base}/include/python',
- 'platinclude': '{base}/include/python',
- 'scripts': '{base}/bin',
- 'data' : '{base}',
- },
- 'nt': {
- 'stdlib': '{base}/Lib',
- 'platstdlib': '{base}/Lib',
- 'purelib': '{base}/Lib/site-packages',
- 'platlib': '{base}/Lib/site-packages',
- 'include': '{base}/Include',
- 'platinclude': '{base}/Include',
- 'scripts': '{base}/Scripts',
- 'data' : '{base}',
- },
- 'os2': {
- 'stdlib': '{base}/Lib',
- 'platstdlib': '{base}/Lib',
- 'purelib': '{base}/Lib/site-packages',
- 'platlib': '{base}/Lib/site-packages',
- 'include': '{base}/Include',
- 'platinclude': '{base}/Include',
- 'scripts': '{base}/Scripts',
- 'data' : '{base}',
- },
- 'os2_home': {
- 'stdlib': '{userbase}/lib/python{py_version_short}',
- 'platstdlib': '{userbase}/lib/python{py_version_short}',
- 'purelib': '{userbase}/lib/python{py_version_short}/site-packages',
- 'platlib': '{userbase}/lib/python{py_version_short}/site-packages',
- 'include': '{userbase}/include/python{py_version_short}',
- 'scripts': '{userbase}/bin',
- 'data' : '{userbase}',
- },
- 'nt_user': {
- 'stdlib': '{userbase}/Python{py_version_nodot}',
- 'platstdlib': '{userbase}/Python{py_version_nodot}',
- 'purelib': '{userbase}/Python{py_version_nodot}/site-packages',
- 'platlib': '{userbase}/Python{py_version_nodot}/site-packages',
- 'include': '{userbase}/Python{py_version_nodot}/Include',
- 'scripts': '{userbase}/Scripts',
- 'data' : '{userbase}',
- },
- 'posix_user': {
- 'stdlib': '{userbase}/lib/python{py_version_short}',
- 'platstdlib': '{userbase}/lib/python{py_version_short}',
- 'purelib': '{userbase}/lib/python{py_version_short}/site-packages',
- 'platlib': '{userbase}/lib/python{py_version_short}/site-packages',
- 'include': '{userbase}/include/python{py_version_short}',
- 'scripts': '{userbase}/bin',
- 'data' : '{userbase}',
- },
- 'osx_framework_user': {
- 'stdlib': '{userbase}/lib/python',
- 'platstdlib': '{userbase}/lib/python',
- 'purelib': '{userbase}/lib/python/site-packages',
- 'platlib': '{userbase}/lib/python/site-packages',
- 'include': '{userbase}/include',
- 'scripts': '{userbase}/bin',
- 'data' : '{userbase}',
- },
- }
-
-_SCHEME_KEYS = ('stdlib', 'platstdlib', 'purelib', 'platlib', 'include',
- 'scripts', 'data')
+]
+
+# let's read the configuration file
+# XXX _CONFIG_DIR will be set by the Makefile later
+_CONFIG_DIR = os.path.normpath(os.path.dirname(__file__))
+_CONFIG_FILE = os.path.join(_CONFIG_DIR, 'sysconfig.cfg')
+_SCHEMES = RawConfigParser(dict_type=dict) # Faster than OrderedDict
+_SCHEMES.read(_CONFIG_FILE)
+_VAR_REPL = re.compile(r'\{([^{]*?)\}')
+
+
+def _expand_globals(config):
+ if config.has_section('globals'):
+ globals = config.items('globals')
+ else:
+ globals = tuple()
+
+ sections = config.sections()
+ for section in sections:
+ if section == 'globals':
+ continue
+ for option, value in globals:
+ if config.has_option(section, option):
+ continue
+ config.set(section, option, value)
+ config.remove_section('globals')
+
+ # now expanding local variables defined in the cfg file
+ #
+ for section in config.sections():
+ variables = dict(config.items(section))
+
+ def _replacer(matchobj):
+ name = matchobj.group(1)
+ if name in variables:
+ return variables[name]
+ return matchobj.group(0)
+
+ for option, value in config.items(section):
+ config.set(section, option, _VAR_REPL.sub(_replacer, value))
+
+_expand_globals(_SCHEMES)
+
+ # FIXME don't rely on sys.version here, its format is an implementation detail
+ # of CPython, use sys.version_info or sys.hexversion
_PY_VERSION = sys.version.split()[0]
_PY_VERSION_SHORT = sys.version[:3]
_PY_VERSION_SHORT_NO_DOT = _PY_VERSION[0] + _PY_VERSION[2]
@@ -110,6 +71,7 @@ _EXEC_PREFIX = os.path.normpath(sys.exec_prefix)
_CONFIG_VARS = None
_USER_BASE = None
+
def _safe_realpath(path):
try:
return realpath(path)
@@ -132,6 +94,7 @@ if os.name == "nt" and "\\pc\\v" in _PROJECT_BASE[-10:].lower():
if os.name == "nt" and "\\pcbuild\\amd64" in _PROJECT_BASE[-14:].lower():
_PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir))
+
def is_python_build():
for fn in ("Setup.dist", "Setup.local"):
if os.path.isfile(os.path.join(_PROJECT_BASE, "Modules", fn)):
@@ -142,17 +105,25 @@ _PYTHON_BUILD = is_python_build()
if _PYTHON_BUILD:
for scheme in ('posix_prefix', 'posix_home'):
- _INSTALL_SCHEMES[scheme]['include'] = '{srcdir}/Include'
- _INSTALL_SCHEMES[scheme]['platinclude'] = '{projectbase}/.'
+ _SCHEMES.set(scheme, 'include', '{srcdir}/Include')
+ _SCHEMES.set(scheme, 'platinclude', '{projectbase}/.')
+
+
+def _subst_vars(path, local_vars):
+ """In the string `path`, replace tokens like {some.thing} with the
+ corresponding value from the map `local_vars`.
+
+ If there is no corresponding value, leave the token unchanged.
+ """
+ def _replacer(matchobj):
+ name = matchobj.group(1)
+ if name in local_vars:
+ return local_vars[name]
+ elif name in os.environ:
+ return os.environ[name]
+ return matchobj.group(0)
+ return _VAR_REPL.sub(_replacer, path)
-def _subst_vars(s, local_vars):
- try:
- return s.format(**local_vars)
- except KeyError:
- try:
- return s.format(**os.environ)
- except KeyError as var:
- raise AttributeError('{%s}' % var)
def _extend_dict(target_dict, other_dict):
target_keys = target_dict.keys()
@@ -161,41 +132,63 @@ def _extend_dict(target_dict, other_dict):
continue
target_dict[key] = value
+
def _expand_vars(scheme, vars):
res = {}
if vars is None:
vars = {}
_extend_dict(vars, get_config_vars())
- for key, value in _INSTALL_SCHEMES[scheme].items():
+ for key, value in _SCHEMES.items(scheme):
if os.name in ('posix', 'nt'):
value = os.path.expanduser(value)
res[key] = os.path.normpath(_subst_vars(value, vars))
return res
+
+def format_value(value, vars):
+ def _replacer(matchobj):
+ name = matchobj.group(1)
+ if name in vars:
+ return vars[name]
+ return matchobj.group(0)
+ return _VAR_REPL.sub(_replacer, value)
+
+
def _get_default_scheme():
if os.name == 'posix':
# the default scheme for posix is posix_prefix
return 'posix_prefix'
return os.name
+
def _getuserbase():
env_base = os.environ.get("PYTHONUSERBASE", None)
+
def joinuser(*args):
return os.path.expanduser(os.path.join(*args))
# what about 'os2emx', 'riscos' ?
if os.name == "nt":
base = os.environ.get("APPDATA") or "~"
- return env_base if env_base else joinuser(base, "Python")
+ if env_base:
+ return env_base
+ else:
+ return joinuser(base, "Python")
if sys.platform == "darwin":
framework = get_config_var("PYTHONFRAMEWORK")
if framework:
- return env_base if env_base else joinuser("~", "Library", framework, "%d.%d"%(
- sys.version_info[:2]))
+ if env_base:
+ return env_base
+ else:
+ return joinuser("~", "Library", framework, "%d.%d" %
+ sys.version_info[:2])
- return env_base if env_base else joinuser("~", ".local")
+ if env_base:
+ return env_base
+ else:
+ return joinuser("~", ".local")
def _parse_makefile(filename, vars=None):
@@ -205,7 +198,6 @@ def _parse_makefile(filename, vars=None):
optional dictionary is passed in as the second argument, it is
used instead of a new dictionary.
"""
- import re
# Regexes needed for parsing Makefile (and similar syntaxes,
# like old-style Setup files).
_variable_rx = re.compile("([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)")
@@ -267,7 +259,8 @@ def _parse_makefile(filename, vars=None):
item = os.environ[n]
elif n in renamed_variables:
- if name.startswith('PY_') and name[3:] in renamed_variables:
+ if (name.startswith('PY_') and
+ name[3:] in renamed_variables):
item = ""
elif 'PY_' + n in notdone:
@@ -300,7 +293,6 @@ def _parse_makefile(filename, vars=None):
if name not in done:
done[name] = value
-
else:
# bogus variable reference (e.g. "prefix=$/opt/python");
# just drop it since we can't deal
@@ -321,13 +313,16 @@ def get_makefile_filename():
"""Return the path of the Makefile."""
if _PYTHON_BUILD:
return os.path.join(_PROJECT_BASE, "Makefile")
- return os.path.join(get_path('stdlib'),
- 'config-{}{}'.format(_PY_VERSION_SHORT, sys.abiflags),
- 'Makefile')
-
+ if hasattr(sys, 'abiflags'):
+ config_dir_name = 'config-%s%s' % (_PY_VERSION_SHORT, sys.abiflags)
+ else:
+ config_dir_name = 'config'
+ return os.path.join(get_path('stdlib'), config_dir_name, 'Makefile')
-def _init_posix(vars):
- """Initialize the module as appropriate for POSIX systems."""
+def _generate_posix_vars():
+ """Generate the Python module containing build-time variables."""
+ import pprint
+ vars = {}
# load the installed Makefile:
makefile = get_makefile_filename()
try:
@@ -353,6 +348,19 @@ def _init_posix(vars):
if _PYTHON_BUILD:
vars['LDSHARED'] = vars['BLDSHARED']
+ destfile = os.path.join(os.path.dirname(__file__), '_sysconfigdata.py')
+ with open(destfile, 'w', encoding='utf8') as f:
+ f.write('# system configuration generated and used by'
+ ' the sysconfig module\n')
+ f.write('build_time_vars = ')
+ pprint.pprint(vars, stream=f)
+
+def _init_posix(vars):
+ """Initialize the module as appropriate for POSIX systems."""
+ # _sysconfigdata is generated at build time, see _generate_posix_vars()
+ from _sysconfigdata import build_time_vars
+ vars.update(build_time_vars)
+
def _init_non_posix(vars):
"""Initialize the module as appropriate for NT"""
# set basic install directories
@@ -376,7 +384,6 @@ def parse_config_h(fp, vars=None):
optional dictionary is passed in as the second argument, it is
used instead of a new dictionary.
"""
- import re
if vars is None:
vars = {}
define_rx = re.compile("#define ([A-Z][A-Za-z0-9_]+) (.*)\n")
@@ -389,8 +396,10 @@ def parse_config_h(fp, vars=None):
m = define_rx.match(line)
if m:
n, v = m.group(1, 2)
- try: v = int(v)
- except ValueError: pass
+ try:
+ v = int(v)
+ except ValueError:
+ pass
vars[n] = v
else:
m = undef_rx.match(line)
@@ -398,6 +407,7 @@ def parse_config_h(fp, vars=None):
vars[m.group(1)] = 0
return vars
+
def get_config_h_filename():
"""Return the path of pyconfig.h."""
if _PYTHON_BUILD:
@@ -409,15 +419,17 @@ def get_config_h_filename():
inc_dir = get_path('platinclude')
return os.path.join(inc_dir, 'pyconfig.h')
+
def get_scheme_names():
"""Return a tuple containing the schemes names."""
- schemes = list(_INSTALL_SCHEMES.keys())
- schemes.sort()
- return tuple(schemes)
+ return tuple(sorted(_SCHEMES.sections()))
+
def get_path_names():
"""Return a tuple containing the paths names."""
- return _SCHEME_KEYS
+ # xxx see if we want a static list
+ return _SCHEMES.options('posix_prefix')
+
def get_paths(scheme=_get_default_scheme(), vars=None, expand=True):
"""Return a mapping containing an install scheme.
@@ -428,7 +440,8 @@ def get_paths(scheme=_get_default_scheme(), vars=None, expand=True):
if expand:
return _expand_vars(scheme, vars)
else:
- return _INSTALL_SCHEMES[scheme]
+ return dict(_SCHEMES.items(scheme))
+
def get_path(name, scheme=_get_default_scheme(), vars=None, expand=True):
"""Return a path corresponding to the scheme.
@@ -437,6 +450,7 @@ def get_path(name, scheme=_get_default_scheme(), vars=None, expand=True):
"""
return get_paths(scheme, vars, expand)[name]
+
def get_config_vars(*args):
"""With no arguments, return a dictionary of all configuration
variables relevant for the current platform.
@@ -447,13 +461,12 @@ def get_config_vars(*args):
With arguments, return a list of values that result from looking up
each argument in the configuration variable dictionary.
"""
- import re
global _CONFIG_VARS
if _CONFIG_VARS is None:
_CONFIG_VARS = {}
# Normalized versions of prefix and exec_prefix are handy to have;
# in fact, these are the standard versions used most places in the
- # Distutils.
+ # packaging module.
_CONFIG_VARS['prefix'] = _PREFIX
_CONFIG_VARS['exec_prefix'] = _EXEC_PREFIX
_CONFIG_VARS['py_version'] = _PY_VERSION
@@ -482,7 +495,6 @@ def get_config_vars(*args):
else:
_CONFIG_VARS['srcdir'] = _safe_realpath(_CONFIG_VARS['srcdir'])
-
# Convert srcdir into an absolute path if it appears necessary.
# Normally it is relative to the build directory. However, during
# testing, for example, we might be running a non-installed python
@@ -502,7 +514,7 @@ def get_config_vars(*args):
_CONFIG_VARS['srcdir'] = os.path.normpath(srcdir)
if sys.platform == 'darwin':
- kernel_version = os.uname()[2] # Kernel version (8.4.3)
+ kernel_version = os.uname()[2] # Kernel version (8.4.3)
major_version = int(kernel_version.split('.')[0])
if major_version < 8:
@@ -568,6 +580,7 @@ def get_config_vars(*args):
else:
return _CONFIG_VARS
+
def get_config_var(name):
"""Return the value of a single variable using the dictionary returned by
'get_config_vars()'.
@@ -576,6 +589,7 @@ def get_config_var(name):
"""
return get_config_vars().get(name)
+
def get_platform():
"""Return a string that identifies the current platform.
@@ -601,7 +615,6 @@ def get_platform():
For other non-POSIX platforms, currently just returns 'sys.platform'.
"""
- import re
if os.name == 'nt':
# sniff sys.version for architecture.
prefix = " bit ("
@@ -651,7 +664,7 @@ def get_platform():
return "%s-%s.%s" % (osname, version, release)
elif osname[:6] == "cygwin":
osname = "cygwin"
- rel_re = re.compile (r'[\d.]+')
+ rel_re = re.compile(r'[\d.]+')
m = rel_re.match(release)
if m:
release = m.group()
@@ -662,11 +675,10 @@ def get_platform():
# to. This makes the compatibility story a bit more sane because the
# machine is going to compile and link as if it were
# MACOSX_DEPLOYMENT_TARGET.
- #
cfgvars = get_config_vars()
macver = cfgvars.get('MACOSX_DEPLOYMENT_TARGET')
- if 1:
+ if True:
# Always calculate the release of the running machine,
# needed to determine if we can build fat binaries or not.
@@ -682,14 +694,13 @@ def get_platform():
pass
else:
try:
- m = re.search(
- r'<key>ProductUserVisibleVersion</key>\s*' +
- r'<string>(.*?)</string>', f.read())
- if m is not None:
- macrelease = '.'.join(m.group(1).split('.')[:2])
- # else: fall back to the default behaviour
+ m = re.search(r'<key>ProductUserVisibleVersion</key>\s*'
+ r'<string>(.*?)</string>', f.read())
finally:
f.close()
+ if m is not None:
+ macrelease = '.'.join(m.group(1).split('.')[:2])
+ # else: fall back to the default behaviour
if not macver:
macver = macrelease
@@ -698,8 +709,8 @@ def get_platform():
release = macver
osname = "macosx"
- if (macrelease + '.') >= '10.4.' and \
- '-arch' in get_config_vars().get('CFLAGS', '').strip():
+ if ((macrelease + '.') >= '10.4.' and
+ '-arch' in get_config_vars().get('CFLAGS', '').strip()):
# The universal build will build fat binaries, but not on
# systems before 10.4
#
@@ -726,7 +737,7 @@ def get_platform():
machine = 'universal'
else:
raise ValueError(
- "Don't know machine value for archs=%r"%(archs,))
+ "Don't know machine value for archs=%r" % (archs,))
elif machine == 'i386':
# On OSX the machine type returned by uname is always the
@@ -749,21 +760,27 @@ def get_platform():
def get_python_version():
return _PY_VERSION_SHORT
+
def _print_dict(title, data):
for index, (key, value) in enumerate(sorted(data.items())):
if index == 0:
- print('{0}: '.format(title))
- print('\t{0} = "{1}"'.format(key, value))
+ print('%s: ' % (title))
+ print('\t%s = "%s"' % (key, value))
+
def _main():
"""Display all information sysconfig detains."""
- print('Platform: "{0}"'.format(get_platform()))
- print('Python version: "{0}"'.format(get_python_version()))
- print('Current installation scheme: "{0}"'.format(_get_default_scheme()))
- print('')
+ if '--generate-posix-vars' in sys.argv:
+ _generate_posix_vars()
+ return
+ print('Platform: "%s"' % get_platform())
+ print('Python version: "%s"' % get_python_version())
+ print('Current installation scheme: "%s"' % _get_default_scheme())
+ print()
_print_dict('Paths', get_paths())
- print('')
+ print()
_print_dict('Variables', get_config_vars())
+
if __name__ == '__main__':
_main()
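
With this rewrite, running the module with --generate-posix-vars dumps the
Makefile-derived variables into _sysconfigdata.py at build time, and
_init_posix() only imports that module. A rough sketch of the consumer side,
assuming a tree where _sysconfigdata has been generated:

    import sysconfig

    # These lookups are now served from the generated _sysconfigdata
    # module instead of re-parsing the Makefile at import time.
    print(sysconfig.get_platform())
    print(sysconfig.get_config_var('prefix'))
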
diff --git a/Lib/tabnanny.py b/Lib/tabnanny.py
index 46f8163..4a54f89 100755
--- a/Lib/tabnanny.py
+++ b/Lib/tabnanny.py
@@ -126,6 +126,9 @@ def check(file):
else: print(file, badline, repr(line))
return
+ finally:
+ f.close()
+
if verbose:
print("%r: Clean bill of health." % (file,))
diff --git a/Lib/tarfile.py b/Lib/tarfile.py
index af32b0f..e3380a2 100644
--- a/Lib/tarfile.py
+++ b/Lib/tarfile.py
@@ -29,8 +29,6 @@
"""Read from and write to tar format archives.
"""
-__version__ = "$Revision$"
-
version = "0.9.0"
__author__ = "Lars Gust\u00e4bel (lars@gustaebel.de)"
__date__ = "$Date: 2011-02-25 17:42:01 +0200 (Fri, 25 Feb 2011) $"
@@ -42,9 +40,9 @@ __credits__ = "Gustavo Niemeyer, Niels Gust\u00e4bel, Richard Townsend."
#---------
import sys
import os
+import io
import shutil
import stat
-import errno
import time
import struct
import copy
@@ -423,10 +421,11 @@ class _Stream:
self.crc = zlib.crc32(b"")
if mode == "r":
self._init_read_gz()
+ self.exception = zlib.error
else:
self._init_write_gz()
- if comptype == "bz2":
+ elif comptype == "bz2":
try:
import bz2
except ImportError:
@@ -434,8 +433,25 @@ class _Stream:
if mode == "r":
self.dbuf = b""
self.cmp = bz2.BZ2Decompressor()
+ self.exception = IOError
else:
self.cmp = bz2.BZ2Compressor()
+
+ elif comptype == "xz":
+ try:
+ import lzma
+ except ImportError:
+ raise CompressionError("lzma module is not available")
+ if mode == "r":
+ self.dbuf = b""
+ self.cmp = lzma.LZMADecompressor()
+ self.exception = lzma.LZMAError
+ else:
+ self.cmp = lzma.LZMACompressor()
+
+ elif comptype != "tar":
+ raise CompressionError("unknown compression type %r" % comptype)
+
except:
if not self._extfileobj:
self.fileobj.close()
@@ -587,7 +603,7 @@ class _Stream:
break
try:
buf = self.cmp.decompress(buf)
- except IOError:
+ except self.exception:
raise ReadError("invalid compressed data")
self.dbuf += buf
c += len(buf)
@@ -625,76 +641,19 @@ class _StreamProxy(object):
return self.buf
def getcomptype(self):
- if self.buf.startswith(b"\037\213\010"):
+ if self.buf.startswith(b"\x1f\x8b\x08"):
return "gz"
- if self.buf[0:3] == b"BZh" and self.buf[4:10] == b"1AY&SY":
+ elif self.buf[0:3] == b"BZh" and self.buf[4:10] == b"1AY&SY":
return "bz2"
- return "tar"
+ elif self.buf.startswith((b"\x5d\x00\x00\x80", b"\xfd7zXZ")):
+ return "xz"
+ else:
+ return "tar"
def close(self):
self.fileobj.close()
# class StreamProxy
-class _BZ2Proxy(object):
- """Small proxy class that enables external file object
- support for "r:bz2" and "w:bz2" modes. This is actually
- a workaround for a limitation in bz2 module's BZ2File
- class which (unlike gzip.GzipFile) has no support for
- a file object argument.
- """
-
- blocksize = 16 * 1024
-
- def __init__(self, fileobj, mode):
- self.fileobj = fileobj
- self.mode = mode
- self.name = getattr(self.fileobj, "name", None)
- self.init()
-
- def init(self):
- import bz2
- self.pos = 0
- if self.mode == "r":
- self.bz2obj = bz2.BZ2Decompressor()
- self.fileobj.seek(0)
- self.buf = b""
- else:
- self.bz2obj = bz2.BZ2Compressor()
-
- def read(self, size):
- x = len(self.buf)
- while x < size:
- raw = self.fileobj.read(self.blocksize)
- if not raw:
- break
- data = self.bz2obj.decompress(raw)
- self.buf += data
- x += len(data)
-
- buf = self.buf[:size]
- self.buf = self.buf[size:]
- self.pos += len(buf)
- return buf
-
- def seek(self, pos):
- if pos < self.pos:
- self.init()
- self.read(pos - self.pos)
-
- def tell(self):
- return self.pos
-
- def write(self, data):
- self.pos += len(data)
- raw = self.bz2obj.compress(data)
- self.fileobj.write(raw)
-
- def close(self):
- if self.mode == "w":
- raw = self.bz2obj.flush()
- self.fileobj.write(raw)
-# class _BZ2Proxy
-
#------------------------
# Extraction file object
#------------------------
@@ -875,20 +834,20 @@ class ExFileObject(object):
return self.position
- def seek(self, pos, whence=os.SEEK_SET):
+ def seek(self, pos, whence=io.SEEK_SET):
"""Seek to a position in the file.
"""
if self.closed:
raise ValueError("I/O operation on closed file")
- if whence == os.SEEK_SET:
+ if whence == io.SEEK_SET:
self.position = min(max(pos, 0), self.size)
- elif whence == os.SEEK_CUR:
+ elif whence == io.SEEK_CUR:
if pos < 0:
self.position = max(self.position + pos, 0)
else:
self.position = min(self.position + pos, self.size)
- elif whence == os.SEEK_END:
+ elif whence == io.SEEK_END:
self.position = max(min(self.size + pos, self.size), 0)
else:
raise ValueError("Invalid argument")
@@ -1087,7 +1046,7 @@ class TarInfo(object):
def create_pax_global_header(cls, pax_headers):
"""Return the object as a pax global header block sequence.
"""
- return cls._create_pax_generic_header(pax_headers, XGLTYPE, "utf8")
+ return cls._create_pax_generic_header(pax_headers, XGLTYPE, "utf-8")
def _posix_split_name(self, name):
"""Split a name longer than 100 chars into a prefix
@@ -1170,7 +1129,7 @@ class TarInfo(object):
binary = False
for keyword, value in pax_headers.items():
try:
- value.encode("utf8", "strict")
+ value.encode("utf-8", "strict")
except UnicodeEncodeError:
binary = True
break
@@ -1181,13 +1140,13 @@ class TarInfo(object):
records += b"21 hdrcharset=BINARY\n"
for keyword, value in pax_headers.items():
- keyword = keyword.encode("utf8")
+ keyword = keyword.encode("utf-8")
if binary:
# Try to restore the original byte representation of `value'.
# Needless to say, that the encoding must match the string.
value = value.encode(encoding, "surrogateescape")
else:
- value = value.encode("utf8")
+ value = value.encode("utf-8")
l = len(keyword) + len(value) + 3 # ' ' + '=' + '\n'
n = p = 0
@@ -1396,7 +1355,7 @@ class TarInfo(object):
# the translation to UTF-8 fails.
match = re.search(br"\d+ hdrcharset=([^\n]+)\n", buf)
if match is not None:
- pax_headers["hdrcharset"] = match.group(1).decode("utf8")
+ pax_headers["hdrcharset"] = match.group(1).decode("utf-8")
# For the time being, we don't care about anything other than "BINARY".
# The only other value that is currently allowed by the standard is
@@ -1405,7 +1364,7 @@ class TarInfo(object):
if hdrcharset == "BINARY":
encoding = tarfile.encoding
else:
- encoding = "utf8"
+ encoding = "utf-8"
# Parse pax header information. A record looks like that:
# "%d %s=%s\n" % (length, keyword, value). length is the size
@@ -1422,20 +1381,20 @@ class TarInfo(object):
length = int(length)
value = buf[match.end(2) + 1:match.start(1) + length - 1]
- # Normally, we could just use "utf8" as the encoding and "strict"
+ # Normally, we could just use "utf-8" as the encoding and "strict"
# as the error handler, but we better not take the risk. For
# example, GNU tar <= 1.23 is known to store filenames it cannot
# translate to UTF-8 as raw strings (unfortunately without a
# hdrcharset=BINARY header).
# We first try the strict standard encoding, and if that fails we
# fall back on the user's encoding and error handler.
- keyword = self._decode_pax_field(keyword, "utf8", "utf8",
+ keyword = self._decode_pax_field(keyword, "utf-8", "utf-8",
tarfile.errors)
if keyword in PAX_NAME_FIELDS:
value = self._decode_pax_field(value, encoding, tarfile.encoding,
tarfile.errors)
else:
- value = self._decode_pax_field(value, "utf8", "utf8",
+ value = self._decode_pax_field(value, "utf-8", "utf-8",
tarfile.errors)
pax_headers[keyword] = value
@@ -1714,18 +1673,22 @@ class TarFile(object):
'r:' open for reading exclusively uncompressed
'r:gz' open for reading with gzip compression
'r:bz2' open for reading with bzip2 compression
+ 'r:xz' open for reading with lzma compression
'a' or 'a:' open for appending, creating the file if necessary
'w' or 'w:' open for writing without compression
'w:gz' open for writing with gzip compression
'w:bz2' open for writing with bzip2 compression
+ 'w:xz' open for writing with lzma compression
'r|*' open a stream of tar blocks with transparent compression
'r|' open an uncompressed stream of tar blocks for reading
'r|gz' open a gzip compressed stream of tar blocks
'r|bz2' open a bzip2 compressed stream of tar blocks
+ 'r|xz' open an lzma compressed stream of tar blocks
'w|' open an uncompressed stream for writing
'w|gz' open a gzip compressed stream for writing
'w|bz2' open a bzip2 compressed stream for writing
+ 'w|xz' open an lzma compressed stream for writing
"""
if not name and not fileobj:
@@ -1832,10 +1795,8 @@ class TarFile(object):
except ImportError:
raise CompressionError("bz2 module is not available")
- if fileobj is not None:
- fileobj = _BZ2Proxy(fileobj, mode)
- else:
- fileobj = bz2.BZ2File(name, mode, compresslevel=compresslevel)
+ fileobj = bz2.BZ2File(filename=name if fileobj is None else None,
+ mode=mode, fileobj=fileobj, compresslevel=compresslevel)
try:
t = cls.taropen(name, mode, fileobj, **kwargs)
@@ -1845,11 +1806,36 @@ class TarFile(object):
t._extfileobj = False
return t
+ @classmethod
+ def xzopen(cls, name, mode="r", fileobj=None, preset=None, **kwargs):
+ """Open lzma compressed tar archive name for reading or writing.
+ Appending is not allowed.
+ """
+ if mode not in ("r", "w"):
+ raise ValueError("mode must be 'r' or 'w'")
+
+ try:
+ import lzma
+ except ImportError:
+ raise CompressionError("lzma module is not available")
+
+ fileobj = lzma.LZMAFile(filename=name if fileobj is None else None,
+ mode=mode, fileobj=fileobj, preset=preset)
+
+ try:
+ t = cls.taropen(name, mode, fileobj, **kwargs)
+ except (lzma.LZMAError, EOFError):
+ fileobj.close()
+ raise ReadError("not an lzma file")
+ t._extfileobj = False
+ return t
+
# All *open() methods are registered here.
OPEN_METH = {
"tar": "taropen", # uncompressed tar
"gz": "gzopen", # gzip compressed tar
- "bz2": "bz2open" # bzip2 compressed tar
+ "bz2": "bz2open", # bzip2 compressed tar
+ "xz": "xzopen" # lzma compressed tar
}
#--------------------------------------------------------------------------
@@ -2283,9 +2269,8 @@ class TarFile(object):
# Use a safe mode for the directory, the real mode is set
# later in _extract_member().
os.mkdir(targetpath, 0o700)
- except EnvironmentError as e:
- if e.errno != errno.EEXIST:
- raise
+ except FileExistsError:
+ pass
def makefile(self, tarinfo, targetpath):
"""Make a file called targetpath.
diff --git a/Lib/tempfile.py b/Lib/tempfile.py
index 34dff30..39ebf5a 100644
--- a/Lib/tempfile.py
+++ b/Lib/tempfile.py
@@ -10,8 +10,6 @@ This module also provides some data items to the user:
TMP_MAX - maximum number of names that will be tried before
giving up.
- template - the default prefix for all temporary names.
- You may change this to control the default prefix.
tempdir - If this is set to a string before the first use of
any routine from this module, it will be considered as
another candidate location to store temporary files.
@@ -33,7 +31,6 @@ import warnings as _warnings
import sys as _sys
import io as _io
import os as _os
-import errno as _errno
from random import Random as _Random
try:
@@ -45,7 +42,7 @@ else:
def _set_cloexec(fd):
try:
flags = _fcntl.fcntl(fd, _fcntl.F_GETFD, 0)
- except IOError:
+ except OSError:
pass
else:
# flags read successfully, modify
@@ -74,6 +71,8 @@ if hasattr(_os, 'TMP_MAX'):
else:
TMP_MAX = 10000
+# Although it does not have an underscore for historical reasons, this
+# variable is an internal implementation detail (see issue 10354).
template = "tmp"
# Internal routines.
@@ -85,19 +84,16 @@ if hasattr(_os, "lstat"):
elif hasattr(_os, "stat"):
_stat = _os.stat
else:
- # Fallback. All we need is something that raises os.error if the
+ # Fallback. All we need is something that raises OSError if the
# file doesn't exist.
def _stat(fn):
- try:
- f = open(fn)
- except IOError:
- raise _os.error
+ f = open(fn)
f.close()
def _exists(fn):
try:
_stat(fn)
- except _os.error:
+ except OSError:
return False
else:
return True
@@ -149,7 +145,7 @@ def _candidate_tempdir_list():
# As a last resort, the current directory.
try:
dirlist.append(_os.getcwd())
- except (AttributeError, _os.error):
+ except (AttributeError, OSError):
dirlist.append(_os.curdir)
return dirlist
@@ -181,12 +177,11 @@ def _get_default_tempdir():
_os.unlink(filename)
del fp, fd
return dir
- except (OSError, IOError) as e:
- if e.args[0] != _errno.EEXIST:
- break # no point trying more names in this directory
+ except FileExistsError:
pass
- raise IOError(_errno.ENOENT,
- "No usable temporary directory found in %s" % dirlist)
+ except OSError:
+ break # no point trying more names in this directory
+ raise FileNotFoundError("No usable temporary directory found in %s" % dirlist)
_name_sequence = None
@@ -216,12 +211,10 @@ def _mkstemp_inner(dir, pre, suf, flags):
fd = _os.open(file, flags, 0o600)
_set_cloexec(fd)
return (fd, _os.path.abspath(file))
- except OSError as e:
- if e.errno == _errno.EEXIST:
- continue # try again
- raise
+ except FileExistsError:
+ continue # try again
- raise IOError(_errno.EEXIST, "No usable temporary file name found")
+ raise FileExistsError("No usable temporary file name found")
# User visible interfaces.
@@ -305,12 +298,10 @@ def mkdtemp(suffix="", prefix=template, dir=None):
try:
_os.mkdir(file, 0o700)
return file
- except OSError as e:
- if e.errno == _errno.EEXIST:
- continue # try again
- raise
+ except FileExistsError:
+ continue # try again
- raise IOError(_errno.EEXIST, "No usable temporary directory name found")
+ raise FileExistsError("No usable temporary directory name found")
def mktemp(suffix="", prefix=template, dir=None):
"""User-callable function to return a unique temporary file name. The
@@ -339,7 +330,7 @@ def mktemp(suffix="", prefix=template, dir=None):
if not _exists(file):
return file
- raise IOError(_errno.EEXIST, "No usable temporary filename found")
+ raise FileExistsError("No usable temporary filename found")
class _TemporaryFileWrapper:
@@ -592,8 +583,13 @@ class SpooledTemporaryFile:
def tell(self):
return self._file.tell()
- def truncate(self):
- self._file.truncate()
+ def truncate(self, size=None):
+ if size is None:
+ self._file.truncate()
+ else:
+ if size > self._max_size:
+ self.rollover()
+ self._file.truncate(size)
def write(self, s):
file = self._file
@@ -669,7 +665,7 @@ class TemporaryDirectory(object):
_islink = staticmethod(_os.path.islink)
_remove = staticmethod(_os.remove)
_rmdir = staticmethod(_os.rmdir)
- _os_error = _os.error
+ _os_error = OSError
_warn = _warnings.warn
def _rmtree(self, path):
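
The tempfile changes replace manual errno checks with the PEP 3151 exception
subclasses. The pattern in isolation (the candidate name is hypothetical):

    import os

    # Before: catch OSError and re-raise unless e.errno == errno.EEXIST.
    # After: FileExistsError carries that meaning by itself.
    try:
        fd = os.open('candidate.tmp', os.O_RDWR | os.O_CREAT | os.O_EXCL, 0o600)
    except FileExistsError:
        pass  # name already taken; a caller would try the next candidate
    else:
        os.close(fd)
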
diff --git a/Lib/test/buffer_tests.py b/Lib/test/buffer_tests.py
index 6d20f7d..cf54c28 100644
--- a/Lib/test/buffer_tests.py
+++ b/Lib/test/buffer_tests.py
@@ -200,7 +200,13 @@ class MixinBytesBufferCommonTests(object):
self.marshal(b'abc\ndef\r\nghi\n\r').splitlines())
self.assertEqual([b'', b'abc', b'def', b'ghi', b''],
self.marshal(b'\nabc\ndef\r\nghi\n\r').splitlines())
+ self.assertEqual([b'', b'abc', b'def', b'ghi', b''],
+ self.marshal(b'\nabc\ndef\r\nghi\n\r').splitlines(False))
+ self.assertEqual([b'\n', b'abc\n', b'def\r\n', b'ghi\n', b'\r'],
+ self.marshal(b'\nabc\ndef\r\nghi\n\r').splitlines(True))
+ self.assertEqual([b'', b'abc', b'def', b'ghi', b''],
+ self.marshal(b'\nabc\ndef\r\nghi\n\r').splitlines(keepends=False))
self.assertEqual([b'\n', b'abc\n', b'def\r\n', b'ghi\n', b'\r'],
- self.marshal(b'\nabc\ndef\r\nghi\n\r').splitlines(1))
+ self.marshal(b'\nabc\ndef\r\nghi\n\r').splitlines(keepends=True))
self.assertRaises(TypeError, self.marshal(b'abc').splitlines, 42, 42)
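
For reference, the keepends behaviour pinned down by the new assertions:

    >>> b'\nabc\ndef\r\n'.splitlines()
    [b'', b'abc', b'def']
    >>> b'\nabc\ndef\r\n'.splitlines(keepends=True)
    [b'\n', b'abc\n', b'def\r\n']
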
diff --git a/Lib/test/crashers/README b/Lib/test/crashers/README
index 2a73e1b..0259a06 100644
--- a/Lib/test/crashers/README
+++ b/Lib/test/crashers/README
@@ -14,3 +14,7 @@ note if the cause is system or environment dependent and what the variables are.
Once the crash is fixed, the test case should be moved into an appropriate test
(even if it was originally from the test suite). This ensures the regression
doesn't happen again. And if it does, it should be easier to track down.
+
+Also see Lib/test_crashers.py, which exercises the crashers in this directory.
+In particular, make sure to add any new infinite-loop crashers to the
+blacklist so it doesn't try to run them.
diff --git a/Lib/test/crashers/borrowed_ref_1.py b/Lib/test/crashers/borrowed_ref_1.py
deleted file mode 100644
index b82f464..0000000
--- a/Lib/test/crashers/borrowed_ref_1.py
+++ /dev/null
@@ -1,29 +0,0 @@
-"""
-_PyType_Lookup() returns a borrowed reference.
-This attacks the call in dictobject.c.
-"""
-
-class A(object):
- pass
-
-class B(object):
- def __del__(self):
- print('hi')
- del D.__missing__
-
-class D(dict):
- class __missing__:
- def __init__(self, *args):
- pass
-
-
-d = D()
-a = A()
-a.cycle = a
-a.other = B()
-del a
-
-prev = None
-while 1:
- d[5]
- prev = (prev,)
diff --git a/Lib/test/crashers/borrowed_ref_2.py b/Lib/test/crashers/borrowed_ref_2.py
deleted file mode 100644
index 6e403eb..0000000
--- a/Lib/test/crashers/borrowed_ref_2.py
+++ /dev/null
@@ -1,38 +0,0 @@
-"""
-_PyType_Lookup() returns a borrowed reference.
-This attacks PyObject_GenericSetAttr().
-
-NB. on my machine this crashes in 2.5 debug but not release.
-"""
-
-class A(object):
- pass
-
-class B(object):
- def __del__(self):
- print("hi")
- del C.d
-
-class D(object):
- def __set__(self, obj, value):
- self.hello = 42
-
-class C(object):
- d = D()
-
- def g():
- pass
-
-
-c = C()
-a = A()
-a.cycle = a
-a.other = B()
-
-lst = [None] * 1000000
-i = 0
-del a
-while 1:
- c.d = 42 # segfaults in PyMethod_New(__func__=D.__set__, __self__=d)
- lst[i] = c.g # consume the free list of instancemethod objects
- i += 1
diff --git a/Lib/test/crashers/compiler_recursion.py b/Lib/test/crashers/compiler_recursion.py
index 4954bdd..31f28a9 100644
--- a/Lib/test/crashers/compiler_recursion.py
+++ b/Lib/test/crashers/compiler_recursion.py
@@ -1,5 +1,13 @@
"""
-The compiler (>= 2.5) recurses happily.
+The compiler (>= 2.5) recurses happily until it blows the stack.
+
+Recorded on the tracker as http://bugs.python.org/issue11383
"""
-compile('()'*9**5, '?', 'exec')
+# The variant below blows up in compiler_call, but there are assorted
+# other variations that blow up in other functions
+# e.g. '1*'*10**5+'1' will die in compiler_visit_expr
+
+# The exact limit to destroy the stack will vary by platform
+# but 10M should do the trick even with huge stack allocations
+compile('()'*10**7, '?', 'exec')
diff --git a/Lib/test/crashers/loosing_mro_ref.py b/Lib/test/crashers/loosing_mro_ref.py
deleted file mode 100644
index b3bcd32..0000000
--- a/Lib/test/crashers/loosing_mro_ref.py
+++ /dev/null
@@ -1,35 +0,0 @@
-"""
-There is a way to put keys of any type in a type's dictionary.
-I think this allows various kinds of crashes, but so far I have only
-found a convoluted attack of _PyType_Lookup(), which uses the mro of the
-type without holding a strong reference to it. Probably works with
-super.__getattribute__() too, which uses the same kind of code.
-"""
-
-class MyKey(object):
- def __hash__(self):
- return hash('mykey')
-
- def __eq__(self, other):
- # the following line decrefs the previous X.__mro__
- X.__bases__ = (Base2,)
- # trash all tuples of length 3, to make sure that the items of
- # the previous X.__mro__ are really garbage
- z = []
- for i in range(1000):
- z.append((i, None, None))
- return 0
-
-
-class Base(object):
- mykey = 'from Base'
-
-class Base2(object):
- mykey = 'from Base2'
-
-# you can't add a non-string key to X.__dict__, but it can be
-# there from the beginning :-)
-X = type('X', (Base,), {MyKey(): 5})
-
-print(X.mykey)
-# I get a segfault, or a slightly wrong assertion error in a debug build.
diff --git a/Lib/test/crashers/nasty_eq_vs_dict.py b/Lib/test/crashers/nasty_eq_vs_dict.py
deleted file mode 100644
index 85f7caf..0000000
--- a/Lib/test/crashers/nasty_eq_vs_dict.py
+++ /dev/null
@@ -1,47 +0,0 @@
-# from http://mail.python.org/pipermail/python-dev/2001-June/015239.html
-
-# if you keep changing a dictionary while looking up a key, you can
-# provoke an infinite recursion in C
-
-# At the time neither Tim nor Michael could be bothered to think of a
-# way to fix it.
-
-class Yuck:
- def __init__(self):
- self.i = 0
-
- def make_dangerous(self):
- self.i = 1
-
- def __hash__(self):
- # direct to slot 4 in table of size 8; slot 12 when size 16
- return 4 + 8
-
- def __eq__(self, other):
- if self.i == 0:
- # leave dict alone
- pass
- elif self.i == 1:
- # fiddle to 16 slots
- self.__fill_dict(6)
- self.i = 2
- else:
- # fiddle to 8 slots
- self.__fill_dict(4)
- self.i = 1
-
- return 1
-
- def __fill_dict(self, n):
- self.i = 0
- dict.clear()
- for i in range(n):
- dict[i] = i
- dict[self] = "OK!"
-
-y = Yuck()
-dict = {y: "OK!"}
-
-z = Yuck()
-y.make_dangerous()
-print(dict[z])
diff --git a/Lib/test/datetimetester.py b/Lib/test/datetimetester.py
index 3fd6799..853806b 100644
--- a/Lib/test/datetimetester.py
+++ b/Lib/test/datetimetester.py
@@ -977,7 +977,7 @@ class TestDate(HarmlessMixedComparison, unittest.TestCase):
# exempt such platforms (provided they return reasonable
# results!).
for insane in -1e200, 1e200:
- self.assertRaises(ValueError, self.theclass.fromtimestamp,
+ self.assertRaises(OverflowError, self.theclass.fromtimestamp,
insane)
def test_today(self):
@@ -1289,12 +1289,18 @@ class TestDate(HarmlessMixedComparison, unittest.TestCase):
self.assertTrue(self.theclass.min)
self.assertTrue(self.theclass.max)
- def test_strftime_out_of_range(self):
- # For nasty technical reasons, we can't handle years before 1000.
- cls = self.theclass
- self.assertEqual(cls(1000, 1, 1).strftime("%Y"), "1000")
- for y in 1, 49, 51, 99, 100, 999:
- self.assertRaises(ValueError, cls(y, 1, 1).strftime, "%Y")
+ def test_strftime_y2k(self):
+ for y in (1, 49, 70, 99, 100, 999, 1000, 1970):
+ d = self.theclass(y, 1, 1)
+ # Issue 13305: For years < 1000, the value is not always
+ # padded to 4 digits across platforms. The C standard
+ # assumes year >= 1900, so it does not specify the number
+ # of digits.
+ if d.strftime("%Y") != '%04d' % y:
+ # Year 42 returns '42', not padded
+ self.assertEqual(d.strftime("%Y"), '%d' % y)
+ # '0042' is obtained anyway
+ self.assertEqual(d.strftime("%4Y"), '%04d' % y)
def test_replace(self):
cls = self.theclass
@@ -1730,12 +1736,37 @@ class TestDateTime(TestDate):
self.verify_field_equality(expected, got)
def test_microsecond_rounding(self):
- # Test whether fromtimestamp "rounds up" floats that are less
- # than 1/2 microsecond smaller than an integer.
for fts in [self.theclass.fromtimestamp,
self.theclass.utcfromtimestamp]:
- self.assertEqual(fts(0.9999999), fts(1))
- self.assertEqual(fts(0.99999949).microsecond, 999999)
+ zero = fts(0)
+ self.assertEqual(zero.second, 0)
+ self.assertEqual(zero.microsecond, 0)
+ try:
+ minus_one = fts(-1e-6)
+ except OSError:
+ # localtime(-1) and gmtime(-1) is not supported on Windows
+ pass
+ else:
+ self.assertEqual(minus_one.second, 59)
+ self.assertEqual(minus_one.microsecond, 999999)
+
+ t = fts(-1e-8)
+ self.assertEqual(t, minus_one)
+ t = fts(-9e-7)
+ self.assertEqual(t, minus_one)
+ t = fts(-1e-7)
+ self.assertEqual(t, minus_one)
+
+ t = fts(1e-7)
+ self.assertEqual(t, zero)
+ t = fts(9e-7)
+ self.assertEqual(t, zero)
+ t = fts(0.99999949)
+ self.assertEqual(t.second, 0)
+ self.assertEqual(t.microsecond, 999999)
+ t = fts(0.9999999)
+ self.assertEqual(t.second, 0)
+ self.assertEqual(t.microsecond, 999999)
def test_insane_fromtimestamp(self):
# It's possible that some platform maps time_t to double,
@@ -1743,7 +1774,7 @@ class TestDateTime(TestDate):
# exempt such platforms (provided they return reasonable
# results!).
for insane in -1e200, 1e200:
- self.assertRaises(ValueError, self.theclass.fromtimestamp,
+ self.assertRaises(OverflowError, self.theclass.fromtimestamp,
insane)
def test_insane_utcfromtimestamp(self):
@@ -1752,7 +1783,7 @@ class TestDateTime(TestDate):
# exempt such platforms (provided they return reasonable
# results!).
for insane in -1e200, 1e200:
- self.assertRaises(ValueError, self.theclass.utcfromtimestamp,
+ self.assertRaises(OverflowError, self.theclass.utcfromtimestamp,
insane)
@unittest.skipIf(sys.platform == "win32", "Windows doesn't accept negative timestamps")
def test_negative_float_fromtimestamp(self):
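
The rewritten rounding test translates directly to interactive use; the values
below come from the assertions above:

    from datetime import datetime

    t = datetime.utcfromtimestamp(0.9999999)
    print(t.second, t.microsecond)  # 0 999999 -- no longer rounds up to 1s

    # Out-of-range timestamps now raise OverflowError, not ValueError:
    try:
        datetime.utcfromtimestamp(1e200)
    except OverflowError:
        print('out of range')
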
diff --git a/Lib/test/decimaltestdata/extra.decTest b/Lib/test/decimaltestdata/extra.decTest
index fe8b77a..b630d8e 100644
--- a/Lib/test/decimaltestdata/extra.decTest
+++ b/Lib/test/decimaltestdata/extra.decTest
@@ -222,12 +222,25 @@ extr1700 power 10 1e-999999999 -> 1.000000000000000 Inexact Rounded
extr1701 power 100.0 -557.71e-742888888 -> 1.000000000000000 Inexact Rounded
extr1702 power 10 1e-100 -> 1.000000000000000 Inexact Rounded
+-- Another one (see issue #12080). Thanks again to Stefan Krah.
+extr1703 power 4 -1.2e-999999999 -> 1.000000000000000 Inexact Rounded
+
-- A couple of interesting exact cases for power. Note that the specification
-- requires these to be reported as Inexact.
extr1710 power 1e375 56e-3 -> 1.000000000000000E+21 Inexact Rounded
extr1711 power 10000 0.75 -> 1000.000000000000 Inexact Rounded
extr1712 power 1e-24 0.875 -> 1.000000000000000E-21 Inexact Rounded
+-- Some more exact cases, exercising power with negative second argument.
+extr1720 power 400 -0.5 -> 0.05000000000000000 Inexact Rounded
+extr1721 power 4096 -0.75 -> 0.001953125000000000 Inexact Rounded
+extr1722 power 625e4 -0.25 -> 0.02000000000000000 Inexact Rounded
+
+-- Nonexact cases, to exercise some of the early exit conditions from
+-- _power_exact.
+extr1730 power 2048 -0.75 -> 0.003284751622084822 Inexact Rounded
+
+
-- Tests for the is_* boolean operations
precision: 9
maxExponent: 999
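
The exact-power cases translate directly to the decimal module; a quick spot
check with the default rounding and the precision set to 16:

    from decimal import Decimal, getcontext

    getcontext().prec = 16
    print(Decimal(400) ** Decimal('-0.5'))    # 0.05000000000000000
    print(Decimal(4096) ** Decimal('-0.75'))  # 0.001953125000000000
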
diff --git a/Lib/test/dh512.pem b/Lib/test/dh512.pem
new file mode 100644
index 0000000..200d16c
--- /dev/null
+++ b/Lib/test/dh512.pem
@@ -0,0 +1,9 @@
+-----BEGIN DH PARAMETERS-----
+MEYCQQD1Kv884bEpQBgRjXyEpwpy1obEAxnIByl6ypUM2Zafq9AKUJsCRtMIPWak
+XUGfnHy9iUsiGSa6q6Jew1XpKgVfAgEC
+-----END DH PARAMETERS-----
+
+These are the 512-bit DH parameters from "Assigned Number for SKIP Protocols"
+(http://www.skip-vpn.org/spec/numbers.html).
+See there for how they were generated.
+Note that g is not a generator, but this is not a problem since p is a safe prime.
diff --git a/Lib/test/exception_hierarchy.txt b/Lib/test/exception_hierarchy.txt
index 5037b33..1c1f69f 100644
--- a/Lib/test/exception_hierarchy.txt
+++ b/Lib/test/exception_hierarchy.txt
@@ -11,11 +11,6 @@ BaseException
+-- AssertionError
+-- AttributeError
+-- BufferError
- +-- EnvironmentError
- | +-- IOError
- | +-- OSError
- | +-- WindowsError (Windows)
- | +-- VMSError (VMS)
+-- EOFError
+-- ImportError
+-- LookupError
@@ -24,6 +19,22 @@ BaseException
+-- MemoryError
+-- NameError
| +-- UnboundLocalError
+ +-- OSError
+ | +-- BlockingIOError
+ | +-- ChildProcessError
+ | +-- ConnectionError
+ | | +-- BrokenPipeError
+ | | +-- ConnectionAbortedError
+ | | +-- ConnectionRefusedError
+ | | +-- ConnectionResetError
+ | +-- FileExistsError
+ | +-- FileNotFoundError
+ | +-- InterruptedError
+ | +-- IsADirectoryError
+ | +-- NotADirectoryError
+ | +-- PermissionError
+ | +-- ProcessLookupError
+ | +-- TimeoutError
+-- ReferenceError
+-- RuntimeError
| +-- NotImplementedError
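
This is the PEP 3151 shape of the hierarchy: EnvironmentError, IOError and the
platform-specific variants collapse into OSError, and common errno conditions
get dedicated subclasses. What that means for calling code, in brief:

    # IOError and EnvironmentError are now plain aliases:
    assert IOError is OSError and EnvironmentError is OSError

    try:
        open('/no/such/file')
    except FileNotFoundError as e:
        print(e.errno)  # errno is still available when needed
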
diff --git a/Lib/test/fork_wait.py b/Lib/test/fork_wait.py
index 1caab1c..88527df 100644
--- a/Lib/test/fork_wait.py
+++ b/Lib/test/fork_wait.py
@@ -43,6 +43,7 @@ class ForkWait(unittest.TestCase):
self.assertEqual(spid, cpid)
self.assertEqual(status, 0, "cause = %d, exit = %d" % (status&0xff, status>>8))
+ @support.reap_threads
def test_wait(self):
for i in range(NUM_THREADS):
_thread.start_new(self.f, (i,))
@@ -69,7 +70,8 @@ class ForkWait(unittest.TestCase):
os._exit(n)
else:
# Parent
- self.wait_impl(cpid)
- # Tell threads to die
- self.stop = 1
- time.sleep(2*SHORTSLEEP) # Wait for threads to die
+ try:
+ self.wait_impl(cpid)
+ finally:
+ # Tell threads to die
+ self.stop = 1
diff --git a/Lib/test/test_future1.py b/Lib/test/future_test1.py
index 297c2e0..297c2e0 100644
--- a/Lib/test/test_future1.py
+++ b/Lib/test/future_test1.py
diff --git a/Lib/test/test_future2.py b/Lib/test/future_test2.py
index 3d7fc86..3d7fc86 100644
--- a/Lib/test/test_future2.py
+++ b/Lib/test/future_test2.py
diff --git a/Lib/test/json_tests/test_dump.py b/Lib/test/json_tests/test_dump.py
index 083c11f..4b3386f 100644
--- a/Lib/test/json_tests/test_dump.py
+++ b/Lib/test/json_tests/test_dump.py
@@ -1,6 +1,7 @@
from io import StringIO
from test.json_tests import PyTest, CTest
+from test.support import bigmemtest, _1G
class TestDump:
def test_dump(self):
@@ -21,4 +22,20 @@ class TestDump:
class TestPyDump(TestDump, PyTest): pass
-class TestCDump(TestDump, CTest): pass
+
+class TestCDump(TestDump, CTest):
+
+ # The size requirement here is hopefully over-estimated (actual
+ # memory consumption depends on implementation details and also on
+ # system memory management, since this may allocate a lot of
+ # small objects).
+
+ @bigmemtest(size=_1G, memuse=1)
+ def test_large_list(self, size):
+ N = int(30 * 1024 * 1024 * (size / _1G))
+ l = [1] * N
+ encoded = self.dumps(l)
+ self.assertEqual(len(encoded), N * 3)
+ self.assertEqual(encoded[:1], "[")
+ self.assertEqual(encoded[-2:], "1]")
+ self.assertEqual(encoded[1:-2], "1, " * (N - 1))
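
The length arithmetic in the new bigmem test holds at any scale, since the
encoding is '[' + '1, ' * (N - 1) + '1]'; for example:

    import json

    N = 5
    encoded = json.dumps([1] * N)
    assert encoded == '[1, 1, 1, 1, 1]'
    assert len(encoded) == N * 3
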
diff --git a/Lib/test/json_tests/test_scanstring.py b/Lib/test/json_tests/test_scanstring.py
index f82cdee..426c8dd 100644
--- a/Lib/test/json_tests/test_scanstring.py
+++ b/Lib/test/json_tests/test_scanstring.py
@@ -9,14 +9,9 @@ class TestScanstring:
scanstring('"z\\ud834\\udd20x"', 1, True),
('z\U0001d120x', 16))
- if sys.maxunicode == 65535:
- self.assertEqual(
- scanstring('"z\U0001d120x"', 1, True),
- ('z\U0001d120x', 6))
- else:
- self.assertEqual(
- scanstring('"z\U0001d120x"', 1, True),
- ('z\U0001d120x', 5))
+ self.assertEqual(
+ scanstring('"z\U0001d120x"', 1, True),
+ ('z\U0001d120x', 5))
self.assertEqual(
scanstring('"\\u007b"', 1, True),
diff --git a/Lib/test/keycert.passwd.pem b/Lib/test/keycert.passwd.pem
new file mode 100644
index 0000000..e905748
--- /dev/null
+++ b/Lib/test/keycert.passwd.pem
@@ -0,0 +1,33 @@
+-----BEGIN RSA PRIVATE KEY-----
+Proc-Type: 4,ENCRYPTED
+DEK-Info: DES-EDE3-CBC,1A8D9D2A02EC698A
+
+kJYbfZ8L0sfe9Oty3gw0aloNnY5E8fegRfQLZlNoxTl6jNt0nIwI8kDJ36CZgR9c
+u3FDJm/KqrfUoz8vW+qEnWhSG7QPX2wWGPHd4K94Yz/FgrRzZ0DoK7XxXq9gOtVA
+AVGQhnz32p+6WhfGsCr9ArXEwRZrTk/FvzEPaU5fHcoSkrNVAGX8IpSVkSDwEDQr
+Gv17+cfk99UV1OCza6yKHoFkTtrC+PZU71LomBabivS2Oc4B9hYuSR2hF01wTHP+
+YlWNagZOOVtNz4oKK9x9eNQpmfQXQvPPTfusexKIbKfZrMvJoxcm1gfcZ0H/wK6P
+6wmXSG35qMOOztCZNtperjs1wzEBXznyK8QmLcAJBjkfarABJX9vBEzZV0OUKhy+
+noORFwHTllphbmydLhu6ehLUZMHPhzAS5UN7srtpSN81eerDMy0RMUAwA7/PofX1
+94Me85Q8jP0PC9ETdsJcPqLzAPETEYu0ELewKRcrdyWi+tlLFrpE5KT/s5ecbl9l
+7B61U4Kfd1PIXc/siINhU3A3bYK+845YyUArUOnKf1kEox7p1RpD7yFqVT04lRTo
+cibNKATBusXSuBrp2G6GNuhWEOSafWCKJQAzgCYIp6ZTV2khhMUGppc/2H3CF6cO
+zX0KtlPVZC7hLkB6HT8SxYUwF1zqWY7+/XPPdc37MeEZ87Q3UuZwqORLY+Z0hpgt
+L5JXBCoklZhCAaN2GqwFLXtGiRSRFGY7xXIhbDTlE65Wv1WGGgDLMKGE1gOz3yAo
+2jjG1+yAHJUdE69XTFHSqSkvaloA1W03LdMXZ9VuQJ/ySXCie6ABAQ==
+-----END RSA PRIVATE KEY-----
+-----BEGIN CERTIFICATE-----
+MIICVDCCAb2gAwIBAgIJANfHOBkZr8JOMA0GCSqGSIb3DQEBBQUAMF8xCzAJBgNV
+BAYTAlhZMRcwFQYDVQQHEw5DYXN0bGUgQW50aHJheDEjMCEGA1UEChMaUHl0aG9u
+IFNvZnR3YXJlIEZvdW5kYXRpb24xEjAQBgNVBAMTCWxvY2FsaG9zdDAeFw0xMDEw
+MDgyMzAxNTZaFw0yMDEwMDUyMzAxNTZaMF8xCzAJBgNVBAYTAlhZMRcwFQYDVQQH
+Ew5DYXN0bGUgQW50aHJheDEjMCEGA1UEChMaUHl0aG9uIFNvZnR3YXJlIEZvdW5k
+YXRpb24xEjAQBgNVBAMTCWxvY2FsaG9zdDCBnzANBgkqhkiG9w0BAQEFAAOBjQAw
+gYkCgYEA21vT5isq7F68amYuuNpSFlKDPrMUCa4YWYqZRt2OZ+/3NKaZ2xAiSwr7
+6MrQF70t5nLbSPpqE5+5VrS58SY+g/sXLiFd6AplH1wJZwh78DofbFYXUggktFMt
+pTyiX8jtP66bkcPkDADA089RI1TQR6Ca+n7HFa7c1fabVV6i3zkCAwEAAaMYMBYw
+FAYDVR0RBA0wC4IJbG9jYWxob3N0MA0GCSqGSIb3DQEBBQUAA4GBAHPctQBEQ4wd
+BJ6+JcpIraopLn8BGhbjNWj40mmRqWB/NAWF6M5ne7KpGAu7tLeG4hb1zLaldK8G
+lxy2GPSRF6LFS48dpEj2HbMv2nvv6xxalDMJ9+DicWgAKTQ6bcX2j3GUkCR0g/T1
+CRlNBAAlvhKzO7Clpf9l0YKBEfraJByX
+-----END CERTIFICATE-----
diff --git a/Lib/test/list_tests.py b/Lib/test/list_tests.py
index be054ea..42e118b 100644
--- a/Lib/test/list_tests.py
+++ b/Lib/test/list_tests.py
@@ -418,6 +418,47 @@ class CommonTest(seq_tests.CommonTest):
self.assertRaises(TypeError, u.reverse, 42)
+ def test_clear(self):
+ u = self.type2test([2, 3, 4])
+ u.clear()
+ self.assertEqual(u, [])
+
+ u = self.type2test([])
+ u.clear()
+ self.assertEqual(u, [])
+
+ u = self.type2test([])
+ u.append(1)
+ u.clear()
+ u.append(2)
+ self.assertEqual(u, [2])
+
+ self.assertRaises(TypeError, u.clear, None)
+
+ def test_copy(self):
+ u = self.type2test([1, 2, 3])
+ v = u.copy()
+ self.assertEqual(v, [1, 2, 3])
+
+ u = self.type2test([])
+ v = u.copy()
+ self.assertEqual(v, [])
+
+ # test that it's indeed a copy and not a reference
+ u = self.type2test(['a', 'b'])
+ v = u.copy()
+ v.append('i')
+ self.assertEqual(u, ['a', 'b'])
+ self.assertEqual(v, u + ['i'])
+
+ # test that it's a shallow, not a deep copy
+ u = self.type2test([1, 2, [3, 4], 5])
+ v = u.copy()
+ self.assertEqual(u, v)
+ self.assertIs(v[3], u[3])
+
+ self.assertRaises(TypeError, u.copy, None)
+
def test_sort(self):
u = self.type2test([1, 0])
u.sort()
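
In short, the semantics the new tests pin down for the clear() and copy()
methods being added here:

    u = [1, 2, [3, 4]]
    v = u.copy()       # shallow copy: v[2] is the same object as u[2]
    u.clear()          # empties in place, equivalent to del u[:]
    print(u, v)        # [] [1, 2, [3, 4]]
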
diff --git a/Lib/test/lock_tests.py b/Lib/test/lock_tests.py
index 094cc7a..d88f364 100644
--- a/Lib/test/lock_tests.py
+++ b/Lib/test/lock_tests.py
@@ -4,7 +4,7 @@ Various tests for synchronization primitives.
import sys
import time
-from _thread import start_new_thread, get_ident, TIMEOUT_MAX
+from _thread import start_new_thread, TIMEOUT_MAX
import threading
import unittest
@@ -31,7 +31,7 @@ class Bunch(object):
self.finished = []
self._can_exit = not wait_before_exit
def task():
- tid = get_ident()
+ tid = threading.get_ident()
self.started.append(tid)
try:
f()
@@ -247,6 +247,7 @@ class RLockTests(BaseLockTests):
# Cannot release an unacquired lock
lock = self.locktype()
self.assertRaises(RuntimeError, lock.release)
+ self.assertRaises(RuntimeError, lock._release_save)
lock.acquire()
lock.acquire()
lock.release()
@@ -254,6 +255,7 @@ class RLockTests(BaseLockTests):
lock.release()
lock.release()
self.assertRaises(RuntimeError, lock.release)
+ self.assertRaises(RuntimeError, lock._release_save)
def test_different_thread(self):
# Cannot release from a different thread
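
threading.get_ident() is the supported spelling adopted above in place of the
private _thread.get_ident:

    import threading

    print(threading.get_ident())  # opaque integer id of the calling thread
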
diff --git a/Lib/test/mailcap.txt b/Lib/test/mailcap.txt
new file mode 100644
index 0000000..f61135d
--- /dev/null
+++ b/Lib/test/mailcap.txt
@@ -0,0 +1,39 @@
+# Mailcap file for test_mailcap; based on RFC 1524
+# Referred to by test_mailcap.py
+
+#
+# This is a comment.
+#
+
+application/frame; showframe %s; print="cat %s | lp"
+application/postscript; ps-to-terminal %s;\
+ needsterminal
+application/postscript; ps-to-terminal %s; \
+ compose=idraw %s
+application/x-dvi; xdvi %s
+application/x-movie; movieplayer %s; compose=moviemaker %s; \
+ description="Movie"; \
+ x11-bitmap="/usr/lib/Zmail/bitmaps/movie.xbm"
+application/*; echo "This is \"%t\" but \
+ is 50 \% Greek to me" \; cat %s; copiousoutput
+
+audio/basic; showaudio %s; compose=audiocompose %s; edit=audiocompose %s;\
+description="An audio fragment"
+audio/* ; /usr/local/bin/showaudio %t
+
+image/rgb; display %s
+#image/gif; display %s
+image/x-xwindowdump; display %s
+
+# The continuation char shouldn't \
+# make a difference in a comment.
+
+message/external-body; showexternal %s %{access-type} %{name} %{site} \
+ %{directory} %{mode} %{server}; needsterminal; composetyped = extcompose %s; \
+ description="A reference to data stored in an external location"
+
+text/richtext; shownonascii iso-8859-8 -e richtext -p %s; test=test "`echo \
+ %{charset} | tr '[A-Z]' '[a-z]'`" = iso-8859-8; copiousoutput
+
+video/mpeg; mpeg_play %s
+video/*; animate %s
diff --git a/Lib/test/math_testcases.txt b/Lib/test/math_testcases.txt
index 5e24335..9585188 100644
--- a/Lib/test/math_testcases.txt
+++ b/Lib/test/math_testcases.txt
@@ -517,3 +517,117 @@ expm10306 expm1 1.79e308 -> inf overflow
-- weaker version of expm10302
expm10307 expm1 709.5 -> 1.3549863193146328e+308
+
+-------------------------
+-- log2: log to base 2 --
+-------------------------
+
+-- special values
+log20000 log2 0.0 -> -inf divide-by-zero
+log20001 log2 -0.0 -> -inf divide-by-zero
+log20002 log2 inf -> inf
+log20003 log2 -inf -> nan invalid
+log20004 log2 nan -> nan
+
+-- exact value at 1.0
+log20010 log2 1.0 -> 0.0
+
+-- negatives
+log20020 log2 -5e-324 -> nan invalid
+log20021 log2 -1.0 -> nan invalid
+log20022 log2 -1.7e-308 -> nan invalid
+
+-- exact values at powers of 2
+log20100 log2 2.0 -> 1.0
+log20101 log2 4.0 -> 2.0
+log20102 log2 8.0 -> 3.0
+log20103 log2 16.0 -> 4.0
+log20104 log2 32.0 -> 5.0
+log20105 log2 64.0 -> 6.0
+log20106 log2 128.0 -> 7.0
+log20107 log2 256.0 -> 8.0
+log20108 log2 512.0 -> 9.0
+log20109 log2 1024.0 -> 10.0
+log20110 log2 2048.0 -> 11.0
+
+log20200 log2 0.5 -> -1.0
+log20201 log2 0.25 -> -2.0
+log20202 log2 0.125 -> -3.0
+log20203 log2 0.0625 -> -4.0
+
+-- values close to 1.0
+log20300 log2 1.0000000000000002 -> 3.2034265038149171e-16
+log20301 log2 1.0000000001 -> 1.4426951601859516e-10
+log20302 log2 1.00001 -> 1.4426878274712997e-5
+
+log20310 log2 0.9999999999999999 -> -1.6017132519074588e-16
+log20311 log2 0.9999999999 -> -1.4426951603302210e-10
+log20312 log2 0.99999 -> -1.4427022544056922e-5
+
+-- tiny values
+log20400 log2 5e-324 -> -1074.0
+log20401 log2 1e-323 -> -1073.0
+log20402 log2 1.5e-323 -> -1072.4150374992789
+log20403 log2 2e-323 -> -1072.0
+
+log20410 log2 1e-308 -> -1023.1538532253076
+log20411 log2 2.2250738585072014e-308 -> -1022.0
+log20412 log2 4.4501477170144028e-308 -> -1021.0
+log20413 log2 1e-307 -> -1019.8319251304202
+
+-- huge values
+log20500 log2 1.7976931348623157e+308 -> 1024.0
+log20501 log2 1.7e+308 -> 1023.9193879716706
+log20502 log2 8.9884656743115795e+307 -> 1023.0
+
+-- selection of random values
+log20600 log2 -7.2174324841039838e+289 -> nan invalid
+log20601 log2 -2.861319734089617e+265 -> nan invalid
+log20602 log2 -4.3507646894008962e+257 -> nan invalid
+log20603 log2 -6.6717265307520224e+234 -> nan invalid
+log20604 log2 -3.9118023786619294e+229 -> nan invalid
+log20605 log2 -1.5478221302505161e+206 -> nan invalid
+log20606 log2 -1.4380485131364602e+200 -> nan invalid
+log20607 log2 -3.7235198730382645e+185 -> nan invalid
+log20608 log2 -1.0472242235095724e+184 -> nan invalid
+log20609 log2 -5.0141781956163884e+160 -> nan invalid
+log20610 log2 -2.1157958031160324e+124 -> nan invalid
+log20611 log2 -7.9677558612567718e+90 -> nan invalid
+log20612 log2 -5.5553906194063732e+45 -> nan invalid
+log20613 log2 -16573900952607.953 -> nan invalid
+log20614 log2 -37198371019.888618 -> nan invalid
+log20615 log2 -6.0727115121422674e-32 -> nan invalid
+log20616 log2 -2.5406841656526057e-38 -> nan invalid
+log20617 log2 -4.9056766703267657e-43 -> nan invalid
+log20618 log2 -2.1646786075228305e-71 -> nan invalid
+log20619 log2 -2.470826790488573e-78 -> nan invalid
+log20620 log2 -3.8661709303489064e-165 -> nan invalid
+log20621 log2 -1.0516496976649986e-182 -> nan invalid
+log20622 log2 -1.5935458614317996e-255 -> nan invalid
+log20623 log2 -2.8750977267336654e-293 -> nan invalid
+log20624 log2 -7.6079466794732585e-296 -> nan invalid
+log20625 log2 3.2073253539988545e-307 -> -1018.1505544209213
+log20626 log2 1.674937885472249e-244 -> -809.80634755783126
+log20627 log2 1.0911259044931283e-214 -> -710.76679472274213
+log20628 log2 2.0275372624809709e-154 -> -510.55719818383272
+log20629 log2 7.3926087369631841e-115 -> -379.13564735312292
+log20630 log2 1.3480198206342423e-86 -> -285.25497445094436
+log20631 log2 8.9927384655719947e-83 -> -272.55127136401637
+log20632 log2 3.1452398713597487e-60 -> -197.66251564496875
+log20633 log2 7.0706573215457351e-55 -> -179.88420087782217
+log20634 log2 3.1258285390731669e-49 -> -161.13023800505653
+log20635 log2 8.2253046627829942e-41 -> -133.15898277355879
+log20636 log2 7.8691367397519897e+49 -> 165.75068202732419
+log20637 log2 2.9920561983925013e+64 -> 214.18453534573757
+log20638 log2 4.7827254553946841e+77 -> 258.04629628445673
+log20639 log2 3.1903566496481868e+105 -> 350.47616767491166
+log20640 log2 5.6195082449502419e+113 -> 377.86831861008250
+log20641 log2 9.9625658250651047e+125 -> 418.55752921228753
+log20642 log2 2.7358945220961532e+145 -> 483.13158636923413
+log20643 log2 2.785842387926931e+174 -> 579.49360214860280
+log20644 log2 2.4169172507252751e+193 -> 642.40529039289652
+log20645 log2 3.1689091206395632e+205 -> 682.65924573798395
+log20646 log2 2.535995592365391e+208 -> 692.30359597460460
+log20647 log2 6.2011236566089916e+233 -> 776.64177576730913
+log20648 log2 2.1843274820677632e+253 -> 841.57499717289647
+log20649 log2 8.7493931063474791e+297 -> 989.74182713073981
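
Each row above reads: test id, function name, input, "->", expected result, then optional IEEE flags; the test_math harness maps 'invalid' and 'divide-by-zero' to ValueError and 'overflow' to OverflowError at the Python level. A few rows replayed by hand, assuming a build where math.log2 exists (3.3+, which this patch targets):

import math

# log20100: exact results at powers of two
assert math.log2(2048.0) == 11.0
# log20010: exact result at 1.0
assert math.log2(1.0) == 0.0
# log20021: the 'invalid' flag surfaces as ValueError in the math module
try:
    math.log2(-1.0)
except ValueError:
    pass
else:
    raise AssertionError("expected ValueError for log2 of a negative")
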
diff --git a/Lib/test/mock_socket.py b/Lib/test/mock_socket.py
index 8036932..d09e78c 100644
--- a/Lib/test/mock_socket.py
+++ b/Lib/test/mock_socket.py
@@ -106,7 +106,8 @@ def socket(family=None, type=None, proto=None):
return MockSocket()
-def create_connection(address, timeout=socket_module._GLOBAL_DEFAULT_TIMEOUT):
+def create_connection(address, timeout=socket_module._GLOBAL_DEFAULT_TIMEOUT,
+ source_address=None):
try:
int_port = int(address[1])
except ValueError:
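
The extra parameter keeps the mock in step with the real socket.create_connection(), whose signature grew a source_address argument for binding the local end of the connection; callers that pass it no longer fail with TypeError against the mock. A self-contained sketch of the real call being mirrored, using a loopback listener so it runs without a network:

import socket

srv = socket.socket()
srv.bind(('127.0.0.1', 0))      # let the OS pick a free port
srv.listen(1)
# source_address binds the local end before connecting; port 0 means "any"
conn = socket.create_connection(srv.getsockname(), timeout=5,
                                source_address=('127.0.0.1', 0))
conn.close()
srv.close()
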
diff --git a/Lib/test/pickletester.py b/Lib/test/pickletester.py
index cab0523..3686a62 100644
--- a/Lib/test/pickletester.py
+++ b/Lib/test/pickletester.py
@@ -4,10 +4,11 @@ import pickle
import pickletools
import sys
import copyreg
+import weakref
from http.cookies import SimpleCookie
from test.support import (
- TestFailed, TESTFN, run_with_locale,
+ TestFailed, TESTFN, run_with_locale, no_tracing,
_2G, _4G, bigmemtest,
)
@@ -18,7 +19,7 @@ from pickle import bytes_types
# kind of outer loop.
protocols = range(pickle.HIGHEST_PROTOCOL + 1)
-character_size = 4 if sys.maxunicode > 0xFFFF else 2
+ascii_char_size = 1
# Return True if opcode code appears in the pickle, else False.
@@ -742,6 +743,18 @@ class AbstractPickleTests(unittest.TestCase):
u = self.loads(s)
self.assertEqual(t, u)
+ def test_ellipsis(self):
+ for proto in protocols:
+ s = self.dumps(..., proto)
+ u = self.loads(s)
+ self.assertEqual(..., u)
+
+ def test_notimplemented(self):
+ for proto in protocols:
+ s = self.dumps(NotImplemented, proto)
+ u = self.loads(s)
+ self.assertEqual(NotImplemented, u)
+
# Tests for protocol 2
def test_proto(self):
@@ -875,6 +888,25 @@ class AbstractPickleTests(unittest.TestCase):
self.assertEqual(B(x), B(y), detail)
self.assertEqual(x.__dict__, y.__dict__, detail)
+ def test_newobj_proxies(self):
+ # NEWOBJ should use the __class__ rather than the raw type
+ classes = myclasses[:]
+ # Cannot create weakproxies to these classes
+ for c in (MyInt, MyTuple):
+ classes.remove(c)
+ for proto in protocols:
+ for C in classes:
+ B = C.__base__
+ x = C(C.sample)
+ x.foo = 42
+ p = weakref.proxy(x)
+ s = self.dumps(p, proto)
+ y = self.loads(s)
+ self.assertEqual(type(y), type(x)) # rather than type(p)
+ detail = (proto, C, B, x, y, type(y))
+ self.assertEqual(B(x), B(y), detail)
+ self.assertEqual(x.__dict__, y.__dict__, detail)
+
# Register a type with copyreg, with extension code extcode. Pickle
# an object of that type. Check that the resulting pickle uses opcode
# (EXT[124]) under proto 2, and not in proto 1.
@@ -1035,13 +1067,13 @@ class AbstractPickleTests(unittest.TestCase):
y = self.loads(s)
self.assertEqual(y._reduce_called, 1)
+ @no_tracing
def test_bad_getattr(self):
x = BadGetattr()
for proto in 0, 1:
self.assertRaises(RuntimeError, self.dumps, x, proto)
         # protocol 2 doesn't raise a RuntimeError.
d = self.dumps(x, 2)
- self.assertRaises(RuntimeError, self.loads, d)
def test_reduce_bad_iterator(self):
# Issue4176: crash when 4th and 5th items of __reduce__()
@@ -1131,6 +1163,15 @@ class AbstractPickleTests(unittest.TestCase):
empty = self.loads(b'\x80\x03U\x00q\x00.', encoding='koi8-r')
self.assertEqual(empty, '')
+ def test_int_pickling_efficiency(self):
+        # Test the compactness of the int representation (see issue #12744)
+ for proto in protocols:
+ sizes = [len(self.dumps(2**n, proto)) for n in range(70)]
+ # the size function is monotonic
+ self.assertEqual(sorted(sizes), sizes)
+ if proto >= 2:
+ self.assertLessEqual(sizes[-1], 14)
+
def check_negative_32b_binXXX(self, dumped):
if sys.maxsize > 2**32:
self.skipTest("test is only meaningful on 32-bit builds")
@@ -1212,7 +1253,7 @@ class BigmemPickleTests(unittest.TestCase):
# All protocols use 1-byte per printable ASCII character; we add another
# byte because the encoded form has to be copied into the internal buffer.
- @bigmemtest(size=_2G, memuse=2 + character_size, dry_run=False)
+ @bigmemtest(size=_2G, memuse=2 + ascii_char_size, dry_run=False)
def test_huge_str_32b(self, size):
data = "abcd" * (size // 4)
try:
@@ -1229,7 +1270,7 @@ class BigmemPickleTests(unittest.TestCase):
# BINUNICODE (protocols 1, 2 and 3) cannot carry more than
# 2**32 - 1 bytes of utf-8 encoded unicode.
- @bigmemtest(size=_4G, memuse=1 + character_size, dry_run=False)
+ @bigmemtest(size=_4G, memuse=1 + ascii_char_size, dry_run=False)
def test_huge_str_64b(self, size):
data = "a" * size
try:
@@ -1576,6 +1617,105 @@ class AbstractPicklerUnpicklerObjectTests(unittest.TestCase):
self.assertEqual(unpickler.load(), data)
+# Tests for dispatch_table attribute
+
+REDUCE_A = 'reduce_A'
+
+class AAA(object):
+ def __reduce__(self):
+ return str, (REDUCE_A,)
+
+class BBB(object):
+ pass
+
+class AbstractDispatchTableTests(unittest.TestCase):
+
+ def test_default_dispatch_table(self):
+ # No dispatch_table attribute by default
+ f = io.BytesIO()
+ p = self.pickler_class(f, 0)
+ with self.assertRaises(AttributeError):
+ p.dispatch_table
+ self.assertFalse(hasattr(p, 'dispatch_table'))
+
+ def test_class_dispatch_table(self):
+ # A dispatch_table attribute can be specified class-wide
+ dt = self.get_dispatch_table()
+
+ class MyPickler(self.pickler_class):
+ dispatch_table = dt
+
+ def dumps(obj, protocol=None):
+ f = io.BytesIO()
+ p = MyPickler(f, protocol)
+ self.assertEqual(p.dispatch_table, dt)
+ p.dump(obj)
+ return f.getvalue()
+
+ self._test_dispatch_table(dumps, dt)
+
+ def test_instance_dispatch_table(self):
+ # A dispatch_table attribute can also be specified instance-wide
+ dt = self.get_dispatch_table()
+
+ def dumps(obj, protocol=None):
+ f = io.BytesIO()
+ p = self.pickler_class(f, protocol)
+ p.dispatch_table = dt
+ self.assertEqual(p.dispatch_table, dt)
+ p.dump(obj)
+ return f.getvalue()
+
+ self._test_dispatch_table(dumps, dt)
+
+ def _test_dispatch_table(self, dumps, dispatch_table):
+ def custom_load_dump(obj):
+ return pickle.loads(dumps(obj, 0))
+
+ def default_load_dump(obj):
+ return pickle.loads(pickle.dumps(obj, 0))
+
+ # pickling complex numbers using protocol 0 relies on copyreg
+ # so check pickling a complex number still works
+ z = 1 + 2j
+ self.assertEqual(custom_load_dump(z), z)
+ self.assertEqual(default_load_dump(z), z)
+
+ # modify pickling of complex
+ REDUCE_1 = 'reduce_1'
+ def reduce_1(obj):
+ return str, (REDUCE_1,)
+ dispatch_table[complex] = reduce_1
+ self.assertEqual(custom_load_dump(z), REDUCE_1)
+ self.assertEqual(default_load_dump(z), z)
+
+ # check picklability of AAA and BBB
+ a = AAA()
+ b = BBB()
+ self.assertEqual(custom_load_dump(a), REDUCE_A)
+ self.assertIsInstance(custom_load_dump(b), BBB)
+ self.assertEqual(default_load_dump(a), REDUCE_A)
+ self.assertIsInstance(default_load_dump(b), BBB)
+
+ # modify pickling of BBB
+ dispatch_table[BBB] = reduce_1
+ self.assertEqual(custom_load_dump(a), REDUCE_A)
+ self.assertEqual(custom_load_dump(b), REDUCE_1)
+ self.assertEqual(default_load_dump(a), REDUCE_A)
+ self.assertIsInstance(default_load_dump(b), BBB)
+
+ # revert pickling of BBB and modify pickling of AAA
+ REDUCE_2 = 'reduce_2'
+ def reduce_2(obj):
+ return str, (REDUCE_2,)
+ dispatch_table[AAA] = reduce_2
+ del dispatch_table[BBB]
+ self.assertEqual(custom_load_dump(a), REDUCE_2)
+ self.assertIsInstance(custom_load_dump(b), BBB)
+ self.assertEqual(default_load_dump(a), REDUCE_A)
+ self.assertIsInstance(default_load_dump(b), BBB)
+
+
if __name__ == "__main__":
# Print some stuff that can be used to rewrite DATA{0,1,2}
from pickletools import dis
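
The new AbstractDispatchTableTests exercise the per-pickler dispatch_table attribute: a mapping from type to reduction function that overrides copyreg for a single pickler (or pickler subclass) while leaving global state alone. A minimal sketch of that API outside the test harness:

import io
import pickle

def reduce_complex(obj):
    # rebuild the complex number from its two parts on unpickling
    return (complex, (obj.real, obj.imag))

f = io.BytesIO()
p = pickle.Pickler(f)
p.dispatch_table = {complex: reduce_complex}   # instance-wide override
p.dump(1 + 2j)
assert pickle.loads(f.getvalue()) == 1 + 2j

# the module-level entry points are unaffected by the instance table
assert pickle.loads(pickle.dumps(1 + 2j)) == 1 + 2j
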
diff --git a/Lib/test/regrtest.py b/Lib/test/regrtest.py
index 714a116..44d3426 100755
--- a/Lib/test/regrtest.py
+++ b/Lib/test/regrtest.py
@@ -20,6 +20,11 @@ python -E -Wd -m test [options] [test_name1 ...]
Options:
-h/--help -- print this text and exit
+--timeout TIMEOUT
+ -- dump the traceback and exit if a test takes more
+ than TIMEOUT seconds; disabled if TIMEOUT is negative
+                   or equal to zero
+--wait -- wait for user input, e.g., allow a debugger to be attached
Verbosity
@@ -44,6 +49,9 @@ Selecting tests
-- specify which special resource intensive tests to run
-M/--memlimit LIMIT
-- run very large memory-consuming tests
+ --testdir DIR
+ -- execute test files in the specified directory (instead
+ of the Python stdlib test suite)
Special runs
@@ -125,6 +133,8 @@ resources to test. Currently only the following are defined:
all - Enable all special resources.
+ none - Disable all special resources (this is the default).
+
audio - Tests that use the audio device. (There are known
cases of broken audio drivers that can crash Python or
even the Linux kernel.)
@@ -156,16 +166,19 @@ option '-uall,-gui'.
"""
import builtins
-import errno
+import faulthandler
import getopt
import io
import json
import logging
import os
+import packaging.command
+import packaging.database
import platform
import random
import re
import shutil
+import signal
import sys
import sysconfig
import tempfile
@@ -225,6 +238,7 @@ ENV_CHANGED = -1
SKIPPED = -2
RESOURCE_DENIED = -3
INTERRUPTED = -4
+CHILD_ERROR = -5 # error in a child process
from test import support
@@ -268,6 +282,18 @@ def main(tests=None, testdir=None, verbose=0, quiet=False,
on the command line.
"""
+ # Display the Python traceback on fatal errors (e.g. segfault)
+ faulthandler.enable(all_threads=True)
+
+ # Display the Python traceback on SIGALRM or SIGUSR1 signal
+ signals = []
+ if hasattr(signal, 'SIGALRM'):
+ signals.append(signal.SIGALRM)
+ if hasattr(signal, 'SIGUSR1'):
+ signals.append(signal.SIGUSR1)
+ for signum in signals:
+ faulthandler.register(signum, chain=True)
+
replace_stdout()
support.record_original_stdout(sys.stdout)
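
The block above is stock faulthandler usage and can be lifted into any long-running script; a condensed sketch (faulthandler.register() only exists on Unix, hence the guards):

import faulthandler
import signal

# dump the Python tracebacks of all threads on fatal errors (e.g. segfault)
faulthandler.enable(all_threads=True)

# and on demand, e.g. via `kill -USR1 <pid>` from another terminal;
# chain=True keeps any previously installed signal handler running too
if hasattr(faulthandler, 'register') and hasattr(signal, 'SIGUSR1'):
    faulthandler.register(signal.SIGUSR1, chain=True)
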
@@ -278,7 +304,8 @@ def main(tests=None, testdir=None, verbose=0, quiet=False,
'use=', 'threshold=', 'coverdir=', 'nocoverdir',
'runleaks', 'huntrleaks=', 'memlimit=', 'randseed=',
'multiprocess=', 'coverage', 'slaveargs=', 'forever', 'debug',
- 'start=', 'nowindows', 'header', 'failfast', 'match'])
+ 'start=', 'nowindows', 'header', 'testdir=', 'timeout=', 'wait',
+ 'failfast', 'match'])
except getopt.error as msg:
usage(msg)
@@ -289,6 +316,7 @@ def main(tests=None, testdir=None, verbose=0, quiet=False,
use_resources = []
debug = False
start = None
+ timeout = None
for o, a in opts:
if o in ('-h', '--help'):
print(__doc__)
@@ -332,7 +360,9 @@ def main(tests=None, testdir=None, verbose=0, quiet=False,
elif o in ('-T', '--coverage'):
trace = True
elif o in ('-D', '--coverdir'):
- coverdir = os.path.join(os.getcwd(), a)
+ # CWD is replaced with a temporary dir before calling main(), so we
+            # need to join it with the saved CWD so it goes where the user expects.
+ coverdir = os.path.join(support.SAVEDCWD, a)
elif o in ('-N', '--nocoverdir'):
coverdir = None
elif o in ('-R', '--huntrleaks'):
@@ -361,6 +391,9 @@ def main(tests=None, testdir=None, verbose=0, quiet=False,
if r == 'all':
use_resources[:] = RESOURCE_NAMES
continue
+ if r == 'none':
+ del use_resources[:]
+ continue
remove = False
if r[0] == '-':
remove = True
@@ -391,6 +424,15 @@ def main(tests=None, testdir=None, verbose=0, quiet=False,
forever = True
elif o in ('-j', '--multiprocess'):
use_mp = int(a)
+ if use_mp <= 0:
+ try:
+ import multiprocessing
+ # Use all cores + extras for tests that like to sleep
+ use_mp = 2 + multiprocessing.cpu_count()
+ except (ImportError, NotImplementedError):
+ use_mp = 3
+ if use_mp == 1:
+ use_mp = None
elif o == '--header':
header = True
elif o == '--slaveargs':
@@ -403,6 +445,21 @@ def main(tests=None, testdir=None, verbose=0, quiet=False,
print() # Force a newline (just in case)
print(json.dumps(result))
sys.exit(0)
+ elif o == '--testdir':
+ # CWD is replaced with a temporary dir before calling main(), so we
+ # join it with the saved CWD so it ends up where the user expects.
+ testdir = os.path.join(support.SAVEDCWD, a)
+ elif o == '--timeout':
+ if hasattr(faulthandler, 'dump_tracebacks_later'):
+ timeout = float(a)
+ if timeout <= 0:
+ timeout = None
+ else:
+ print("Warning: The timeout option requires "
+ "faulthandler.dump_tracebacks_later")
+ timeout = None
+ elif o == '--wait':
+ input("Press any key to continue...")
else:
print(("No handler for option {}. Please report this as a bug "
"at http://bugs.python.org.").format(o), file=sys.stderr)
@@ -481,7 +538,13 @@ def main(tests=None, testdir=None, verbose=0, quiet=False,
print("== ", os.getcwd())
print("Testing with flags:", sys.flags)
- alltests = findtests(testdir, stdtests, nottests)
+    # if testdir is set, then we are not running the Python test suite, so
+ # don't add default tests to be executed or skipped (pass empty values)
+ if testdir:
+ alltests = findtests(testdir, list(), set())
+ else:
+ alltests = findtests(testdir, stdtests, nottests)
+
selected = tests or args or alltests
if single:
selected = selected[:1]
@@ -556,7 +619,8 @@ def main(tests=None, testdir=None, verbose=0, quiet=False,
(test, verbose, quiet),
dict(huntrleaks=huntrleaks, use_resources=use_resources,
debug=debug, output_on_failure=verbose3,
- failfast=failfast, match_tests=match_tests)
+ timeout=timeout, failfast=failfast,
+ match_tests=match_tests)
)
yield (test, args_tuple)
pending = tests_and_args()
@@ -577,10 +641,15 @@ def main(tests=None, testdir=None, verbose=0, quiet=False,
universal_newlines=True,
close_fds=(os.name != 'nt'))
stdout, stderr = popen.communicate()
+ retcode = popen.wait()
# Strip last refcount output line if it exists, since it
# comes from the shutdown of the interpreter in the subcommand.
stderr = debug_output_pat.sub("", stderr)
stdout, _, result = stdout.strip().rpartition("\n")
+ if retcode != 0:
+ result = (CHILD_ERROR, "Exit code %s" % retcode)
+ output.put((test, stdout.rstrip(), stderr.rstrip(), result))
+ return
if not result:
output.put((None, None, None, None))
return
@@ -613,6 +682,8 @@ def main(tests=None, testdir=None, verbose=0, quiet=False,
if result[0] == INTERRUPTED:
assert result[1] == 'KeyboardInterrupt'
raise KeyboardInterrupt # What else?
+ if result[0] == CHILD_ERROR:
+ raise Exception("Child error on {}: {}".format(test, result[1]))
test_index += 1
except KeyboardInterrupt:
interrupted = True
@@ -629,13 +700,14 @@ def main(tests=None, testdir=None, verbose=0, quiet=False,
if trace:
# If we're tracing code coverage, then we don't exit with status
# if on a false return value from main.
- tracer.runctx('runtest(test, verbose, quiet)',
+ tracer.runctx('runtest(test, verbose, quiet, timeout=timeout)',
globals=globals(), locals=vars())
else:
try:
result = runtest(test, verbose, quiet, huntrleaks, debug,
output_on_failure=verbose3,
- failfast=failfast, match_tests=match_tests)
+ timeout=timeout, failfast=failfast,
+ match_tests=match_tests)
accumulate_result(test, result)
except KeyboardInterrupt:
interrupted = True
@@ -706,7 +778,7 @@ def main(tests=None, testdir=None, verbose=0, quiet=False,
sys.stdout.flush()
try:
verbose = True
- ok = runtest(test, True, quiet, huntrleaks, debug)
+ ok = runtest(test, True, quiet, huntrleaks, debug, timeout=timeout)
except KeyboardInterrupt:
# print a newline separate from the ^C
print()
@@ -731,6 +803,8 @@ def main(tests=None, testdir=None, verbose=0, quiet=False,
sys.exit(len(bad) > 0 or interrupted)
+# small set of tests to determine if we have a basically functioning interpreter
+# (i.e. if any of these fail, then everything else is likely to fail too)
STDTESTS = [
'test_grammar',
'test_opcodes',
@@ -741,12 +815,11 @@ STDTESTS = [
'test_unittest',
'test_doctest',
'test_doctest2',
+ 'test_support'
]
-NOTTESTS = {
- 'test_future1',
- 'test_future2',
-}
+# set of tests that we don't want to be executed when using regrtest
+NOTTESTS = set()
def findtests(testdir=None, stdtests=STDTESTS, nottests=NOTTESTS):
"""Return a list of all applicable test modules."""
@@ -755,25 +828,22 @@ def findtests(testdir=None, stdtests=STDTESTS, nottests=NOTTESTS):
tests = []
others = set(stdtests) | nottests
for name in names:
- modname, ext = os.path.splitext(name)
- if modname[:5] == "test_" and ext == ".py" and modname not in others:
- tests.append(modname)
+ mod, ext = os.path.splitext(name)
+ if mod[:5] == "test_" and ext in (".py", "") and mod not in others:
+ tests.append(mod)
return stdtests + sorted(tests)
def replace_stdout():
"""Set stdout encoder error handler to backslashreplace (as stderr error
handler) to avoid UnicodeEncodeError when printing a traceback"""
- if os.name == "nt":
- # Replace sys.stdout breaks the stdout newlines on Windows: issue #8533
- return
-
import atexit
stdout = sys.stdout
sys.stdout = open(stdout.fileno(), 'w',
encoding=stdout.encoding,
errors="backslashreplace",
- closefd=False)
+ closefd=False,
+ newline='\n')
def restore_stdout():
sys.stdout.close()
@@ -782,7 +852,8 @@ def replace_stdout():
def runtest(test, verbose, quiet,
huntrleaks=False, debug=False, use_resources=None,
- output_on_failure=False, failfast=False, match_tests=None):
+ output_on_failure=False, failfast=False, match_tests=None,
+ timeout=None):
"""Run a single test.
test -- the name of the test
@@ -792,6 +863,8 @@ def runtest(test, verbose, quiet,
huntrleaks -- run multiple times to test for leaks; requires a debug
build; a triple corresponding to -R's three arguments
output_on_failure -- if true, display test output on failure
+ timeout -- dump the traceback and exit if a test takes more than
+ timeout seconds
Returns one of the test result constants:
INTERRUPTED KeyboardInterrupt when run under -j
@@ -804,6 +877,9 @@ def runtest(test, verbose, quiet,
if use_resources is not None:
support.use_resources = use_resources
+ use_timeout = (timeout is not None)
+ if use_timeout:
+ faulthandler.dump_tracebacks_later(timeout, exit=True)
try:
support.match_tests = match_tests
if failfast:
@@ -842,6 +918,8 @@ def runtest(test, verbose, quiet,
display_failure=not verbose)
return result
finally:
+ if use_timeout:
+ faulthandler.cancel_dump_tracebacks_later()
cleanup_test_droppings(test, verbose)
runtest.stringio = None
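
The timeout support in runtest() is a thin wrapper around a faulthandler watchdog, guarded the same way the option parsing above guards it (this tree spells the call dump_tracebacks_later; later releases renamed it to dump_traceback_later). Isolated:

import faulthandler

def run_with_watchdog(func, timeout=60.0):
    # dump all tracebacks and exit if func() runs longer than `timeout`
    use_timeout = hasattr(faulthandler, 'dump_tracebacks_later')
    if use_timeout:
        faulthandler.dump_tracebacks_later(timeout, exit=True)
    try:
        return func()
    finally:
        if use_timeout:
            faulthandler.cancel_dump_tracebacks_later()

print(run_with_watchdog(lambda: sum(range(10**6))))
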
@@ -887,10 +965,11 @@ class saved_test_environment:
resources = ('sys.argv', 'cwd', 'sys.stdin', 'sys.stdout', 'sys.stderr',
'os.environ', 'sys.path', 'sys.path_hooks', '__import__',
'warnings.filters', 'asyncore.socket_map',
- 'logging._handlers', 'logging._handlerList',
- 'shutil.archive_formats', 'shutil.unpack_formats',
+ 'logging._handlers', 'logging._handlerList', 'sys.gettrace',
'sys.warnoptions', 'threading._dangling',
'multiprocessing.process._dangling',
+ 'sysconfig._CONFIG_VARS', 'sysconfig._SCHEMES',
+ 'packaging.command._COMMANDS', 'packaging.database_caches',
'support.TESTFN',
)
@@ -939,6 +1018,11 @@ class saved_test_environment:
sys.path_hooks = saved_hooks[1]
sys.path_hooks[:] = saved_hooks[2]
+ def get_sys_gettrace(self):
+ return sys.gettrace()
+ def restore_sys_gettrace(self, trace_fxn):
+ sys.settrace(trace_fxn)
+
def get___import__(self):
return builtins.__import__
def restore___import__(self, import_):
@@ -991,6 +1075,44 @@ class saved_test_environment:
# Can't easily revert the logging state
pass
+ def get_packaging_command__COMMANDS(self):
+ # registry mapping command names to full dotted path or to the actual
+ # class (resolved on demand); this check only looks at the names, not
+ # the types of the values (IOW, if a value changes from a string
+        # (dotted path) to a class it's okay, but if a key (i.e. a command
+        # name) is added we complain)
+ id_ = id(packaging.command._COMMANDS)
+ keys = set(packaging.command._COMMANDS)
+ return id_, keys
+ def restore_packaging_command__COMMANDS(self, saved):
+ # if command._COMMANDS was bound to another dict object, we can't
+ # restore the previous object and contents, because the get_ method
+ # above does not return the dict object (to ignore changes in values)
+ for key in packaging.command._COMMANDS.keys() - saved[1]:
+ del packaging.command._COMMANDS[key]
+
+ def get_packaging_database_caches(self):
+ # caching system used by the PEP 376 implementation
+ # we have one boolean and four dictionaries, initially empty
+ switch = packaging.database._cache_enabled
+ saved = []
+ for name in ('_cache_name', '_cache_name_egg',
+ '_cache_path', '_cache_path_egg'):
+ cache = getattr(packaging.database, name)
+ saved.append((id(cache), cache, cache.copy()))
+ return switch, saved
+ def restore_packaging_database_caches(self, saved):
+ switch, saved_caches = saved
+ packaging.database._cache_enabled = switch
+ for offset, name in enumerate(('_cache_name', '_cache_name_egg',
+ '_cache_path', '_cache_path_egg')):
+ _, cache, items = saved_caches[offset]
+ # put back the same object in place
+ setattr(packaging.database, name, cache)
+ # now restore its items
+ cache.clear()
+ cache.update(items)
+
def get_sys_warnoptions(self):
return id(sys.warnoptions), sys.warnoptions, sys.warnoptions[:]
def restore_sys_warnoptions(self, saved_options):
@@ -1022,6 +1144,26 @@ class saved_test_environment:
multiprocessing.process._dangling.clear()
multiprocessing.process._dangling.update(saved)
+ def get_sysconfig__CONFIG_VARS(self):
+ # make sure the dict is initialized
+ sysconfig.get_config_var('prefix')
+ return (id(sysconfig._CONFIG_VARS), sysconfig._CONFIG_VARS,
+ dict(sysconfig._CONFIG_VARS))
+ def restore_sysconfig__CONFIG_VARS(self, saved):
+ sysconfig._CONFIG_VARS = saved[1]
+ sysconfig._CONFIG_VARS.clear()
+ sysconfig._CONFIG_VARS.update(saved[2])
+
+ def get_sysconfig__SCHEMES(self):
+ # it's mildly evil to look at the internal attribute, but it's easier
+ # than copying a RawConfigParser object
+ return (id(sysconfig._SCHEMES), sysconfig._SCHEMES._sections,
+ sysconfig._SCHEMES._sections.copy())
+ def restore_sysconfig__SCHEMES(self, saved):
+ sysconfig._SCHEMES._sections = saved[1]
+ sysconfig._SCHEMES._sections.clear()
+ sysconfig._SCHEMES._sections.update(saved[2])
+
def get_support_TESTFN(self):
if os.path.isfile(support.TESTFN):
result = 'f'
@@ -1176,7 +1318,8 @@ def dash_R(the_module, test, indirect_test, huntrleaks):
False if the test didn't leak references; True if we detected refleaks.
"""
# This code is hackish and inelegant, but it seems to do the job.
- import copyreg, _abcoll
+ import copyreg
+ import collections.abc
if not hasattr(sys, 'gettotalrefcount'):
raise Exception("Tracking reference leaks requires a debug build "
@@ -1193,7 +1336,7 @@ def dash_R(the_module, test, indirect_test, huntrleaks):
else:
zdc = zipimport._zip_directory_cache.copy()
abcs = {}
- for abc in [getattr(_abcoll, a) for a in _abcoll.__all__]:
+ for abc in [getattr(collections.abc, a) for a in collections.abc.__all__]:
if not isabstract(abc):
continue
for obj in abc.__subclasses__() + [abc]:
@@ -1239,7 +1382,7 @@ def dash_R_cleanup(fs, ps, pic, zdc, abcs):
import gc, copyreg
import _strptime, linecache
import urllib.parse, urllib.request, mimetypes, doctest
- import struct, filecmp, _abcoll
+ import struct, filecmp, collections.abc
from distutils.dir_util import _path_created
from weakref import WeakSet
@@ -1266,7 +1409,7 @@ def dash_R_cleanup(fs, ps, pic, zdc, abcs):
sys._clear_type_cache()
# Clear ABC registries, restoring previously saved ABC registries.
- for abc in [getattr(_abcoll, a) for a in _abcoll.__all__]:
+ for abc in [getattr(collections.abc, a) for a in collections.abc.__all__]:
if not isabstract(abc):
continue
for obj in abc.__subclasses__() + [abc]:
@@ -1352,13 +1495,14 @@ def printlist(x, width=70, indent=4):
# Tests that are expected to be skipped everywhere except on one platform
# are also handled separately.
-_expectations = {
- 'win32':
+_expectations = (
+ ('win32',
"""
test__locale
test_crypt
test_curses
test_dbm
+ test_devpoll
test_fcntl
test_fork1
test_epoll
@@ -1381,15 +1525,16 @@ _expectations = {
test_threadsignals
test_wait3
test_wait4
- """,
- 'linux2':
+ """),
+ ('linux',
"""
test_curses
+ test_devpoll
test_largefile
test_kqueue
test_ossaudiodev
- """,
- 'unixware7':
+ """),
+ ('unixware',
"""
test_epoll
test_largefile
@@ -1399,8 +1544,8 @@ _expectations = {
test_pyexpat
test_sax
test_sundry
- """,
- 'openunix8':
+ """),
+ ('openunix',
"""
test_epoll
test_largefile
@@ -1410,8 +1555,8 @@ _expectations = {
test_pyexpat
test_sax
test_sundry
- """,
- 'sco_sv3':
+ """),
+ ('sco_sv',
"""
test_asynchat
test_fork1
@@ -1430,11 +1575,12 @@ _expectations = {
test_threaded_import
test_threadedtempfile
test_threading
- """,
- 'darwin':
+ """),
+ ('darwin',
"""
test__locale
test_curses
+ test_devpoll
test_epoll
test_dbm_gnu
test_gdb
@@ -1443,8 +1589,8 @@ _expectations = {
test_minidom
test_ossaudiodev
test_poll
- """,
- 'sunos5':
+ """),
+ ('sunos',
"""
test_curses
test_dbm
@@ -1455,8 +1601,8 @@ _expectations = {
test_openpty
test_zipfile
test_zlib
- """,
- 'hp-ux11':
+ """),
+ ('hp-ux',
"""
test_curses
test_epoll
@@ -1471,11 +1617,12 @@ _expectations = {
test_sax
test_zipfile
test_zlib
- """,
- 'cygwin':
+ """),
+ ('cygwin',
"""
test_curses
test_dbm
+ test_devpoll
test_epoll
test_ioctl
test_kqueue
@@ -1483,8 +1630,8 @@ _expectations = {
test_locale
test_ossaudiodev
test_socketserver
- """,
- 'os2emx':
+ """),
+ ('os2emx',
"""
test_audioop
test_curses
@@ -1497,9 +1644,10 @@ _expectations = {
test_pty
test_resource
test_signal
- """,
- 'freebsd4':
+ """),
+ ('freebsd',
"""
+ test_devpoll
test_epoll
test_dbm_gnu
test_locale
@@ -1514,8 +1662,8 @@ _expectations = {
test_timeout
test_urllibnet
test_multiprocessing
- """,
- 'aix5':
+ """),
+ ('aix',
"""
test_bz2
test_epoll
@@ -1529,10 +1677,11 @@ _expectations = {
test_ttk_textonly
test_zipimport
test_zlib
- """,
- 'openbsd3':
+ """),
+ ('openbsd',
"""
test_ctypes
+ test_devpoll
test_epoll
test_dbm_gnu
test_locale
@@ -1544,11 +1693,12 @@ _expectations = {
test_ttk_guionly
test_ttk_textonly
test_multiprocessing
- """,
- 'netbsd3':
+ """),
+ ('netbsd',
"""
test_ctypes
test_curses
+ test_devpoll
test_epoll
test_dbm_gnu
test_locale
@@ -1559,12 +1709,8 @@ _expectations = {
test_ttk_guionly
test_ttk_textonly
test_multiprocessing
- """,
-}
-_expectations['freebsd5'] = _expectations['freebsd4']
-_expectations['freebsd6'] = _expectations['freebsd4']
-_expectations['freebsd7'] = _expectations['freebsd4']
-_expectations['freebsd8'] = _expectations['freebsd4']
+ """),
+)
class _ExpectedSkips:
def __init__(self):
@@ -1572,9 +1718,13 @@ class _ExpectedSkips:
from test import test_timeout
self.valid = False
- if sys.platform in _expectations:
- s = _expectations[sys.platform]
- self.expected = set(s.split())
+ expected = None
+ for item in _expectations:
+ if sys.platform.startswith(item[0]):
+ expected = item[1]
+ break
+ if expected is not None:
+ self.expected = set(expected.split())
# These are broken tests, for now skipped on every platform.
# XXX Fix these!
@@ -1634,9 +1784,8 @@ def _make_temp_dir_for_build(TEMPDIR):
TEMPDIR = os.path.abspath(TEMPDIR)
try:
os.mkdir(TEMPDIR)
- except OSError as e:
- if e.errno != errno.EEXIST:
- raise
+ except FileExistsError:
+ pass
# Define a writable temp dir that will be used as cwd while running
# the tests. The name of the dir includes the pid to allow parallel
diff --git a/Lib/test/script_helper.py b/Lib/test/script_helper.py
index ba446cd..10ada6d 100644
--- a/Lib/test/script_helper.py
+++ b/Lib/test/script_helper.py
@@ -59,11 +59,12 @@ def assert_python_failure(*args, **env_vars):
"""
return _assert_python(False, *args, **env_vars)
-def spawn_python(*args):
+def spawn_python(*args, **kw):
cmd_line = [sys.executable, '-E']
cmd_line.extend(args)
return subprocess.Popen(cmd_line, stdin=subprocess.PIPE,
- stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
+ **kw)
def kill_python(p):
p.stdin.close()
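
Forwarding **kw lets callers adjust the child process, e.g. its working directory or environment, without widening the helper's signature for every Popen option. A usage sketch, assuming the test package (and thus test.script_helper) is importable:

import tempfile
from test.script_helper import spawn_python, kill_python

# run the child interpreter in a scratch directory via the new **kw
with tempfile.TemporaryDirectory() as scratch:
    p = spawn_python('-c', 'import os; print(os.getcwd())', cwd=scratch)
    out = kill_python(p)    # closes stdin, drains stdout, reaps the child
print(out)
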
diff --git a/Lib/test/ssl_key.passwd.pem b/Lib/test/ssl_key.passwd.pem
new file mode 100644
index 0000000..2524672
--- /dev/null
+++ b/Lib/test/ssl_key.passwd.pem
@@ -0,0 +1,18 @@
+-----BEGIN RSA PRIVATE KEY-----
+Proc-Type: 4,ENCRYPTED
+DEK-Info: DES-EDE3-CBC,1A8D9D2A02EC698A
+
+kJYbfZ8L0sfe9Oty3gw0aloNnY5E8fegRfQLZlNoxTl6jNt0nIwI8kDJ36CZgR9c
+u3FDJm/KqrfUoz8vW+qEnWhSG7QPX2wWGPHd4K94Yz/FgrRzZ0DoK7XxXq9gOtVA
+AVGQhnz32p+6WhfGsCr9ArXEwRZrTk/FvzEPaU5fHcoSkrNVAGX8IpSVkSDwEDQr
+Gv17+cfk99UV1OCza6yKHoFkTtrC+PZU71LomBabivS2Oc4B9hYuSR2hF01wTHP+
+YlWNagZOOVtNz4oKK9x9eNQpmfQXQvPPTfusexKIbKfZrMvJoxcm1gfcZ0H/wK6P
+6wmXSG35qMOOztCZNtperjs1wzEBXznyK8QmLcAJBjkfarABJX9vBEzZV0OUKhy+
+noORFwHTllphbmydLhu6ehLUZMHPhzAS5UN7srtpSN81eerDMy0RMUAwA7/PofX1
+94Me85Q8jP0PC9ETdsJcPqLzAPETEYu0ELewKRcrdyWi+tlLFrpE5KT/s5ecbl9l
+7B61U4Kfd1PIXc/siINhU3A3bYK+845YyUArUOnKf1kEox7p1RpD7yFqVT04lRTo
+cibNKATBusXSuBrp2G6GNuhWEOSafWCKJQAzgCYIp6ZTV2khhMUGppc/2H3CF6cO
+zX0KtlPVZC7hLkB6HT8SxYUwF1zqWY7+/XPPdc37MeEZ87Q3UuZwqORLY+Z0hpgt
+L5JXBCoklZhCAaN2GqwFLXtGiRSRFGY7xXIhbDTlE65Wv1WGGgDLMKGE1gOz3yAo
+2jjG1+yAHJUdE69XTFHSqSkvaloA1W03LdMXZ9VuQJ/ySXCie6ABAQ==
+-----END RSA PRIVATE KEY-----
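
The key is deliberately 3DES-encrypted: it gives the ssl tests a fixture for the password argument of SSLContext.load_cert_chain(). A sketch of loading such a key; the certificate file name and the passphrase here are placeholders, not the values the suite actually uses:

import ssl

ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
# password may be str, bytes, or a zero-argument callable returning either
ctx.load_cert_chain(certfile='keycert.pem',           # hypothetical cert
                    keyfile='ssl_key.passwd.pem',
                    password='somepass')              # hypothetical passphrase
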
diff --git a/Lib/test/ssl_servers.py b/Lib/test/ssl_servers.py
index 77c0542..8686153 100644
--- a/Lib/test/ssl_servers.py
+++ b/Lib/test/ssl_servers.py
@@ -94,7 +94,12 @@ class StatsRequestHandler(BaseHTTPRequestHandler):
"""Serve a GET request."""
sock = self.rfile.raw._sock
context = sock.context
- body = pprint.pformat(context.session_stats())
+ stats = {
+ 'session_cache': context.session_stats(),
+ 'cipher': sock.cipher(),
+ 'compression': sock.compression(),
+ }
+ body = pprint.pformat(stats)
body = body.encode('utf-8')
self.send_response(200)
self.send_header("Content-type", "text/plain; charset=utf-8")
@@ -172,6 +177,11 @@ if __name__ == "__main__":
action='store_false', help='be less verbose')
parser.add_argument('-s', '--stats', dest='use_stats_handler', default=False,
action='store_true', help='always return stats page')
+ parser.add_argument('--curve-name', dest='curve_name', type=str,
+ action='store',
+ help='curve name for EC-based Diffie-Hellman')
+ parser.add_argument('--dh', dest='dh_file', type=str, action='store',
+ help='PEM file containing DH parameters')
args = parser.parse_args()
support.verbose = args.verbose
@@ -182,6 +192,10 @@ if __name__ == "__main__":
handler_class.root = os.getcwd()
context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
context.load_cert_chain(CERTFILE)
+ if args.curve_name:
+ context.set_ecdh_curve(args.curve_name)
+ if args.dh_file:
+ context.load_dh_params(args.dh_file)
server = HTTPSServer(("", args.port), handler_class, context)
if args.verbose:
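
Both new flags map one-to-one onto SSLContext methods that configure ephemeral Diffie-Hellman key exchange; a minimal sketch with placeholder inputs:

import ssl

ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
ctx.set_ecdh_curve('prime256v1')     # curve name as OpenSSL spells it
ctx.load_dh_params('dhparams.pem')   # hypothetical PEM file of DH parameters
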
diff --git a/Lib/test/string_tests.py b/Lib/test/string_tests.py
index d792529..b7246eb 100644
--- a/Lib/test/string_tests.py
+++ b/Lib/test/string_tests.py
@@ -28,6 +28,11 @@ class BaseTest(unittest.TestCase):
# Change in subclasses to change the behaviour of fixtesttype()
type2test = None
+ # Whether the "contained items" of the container are integers in
+ # range(0, 256) (i.e. bytes, bytearray) or strings of length 1
+ # (str)
+ contains_bytes = False
+
# All tests pass their arguments to the testing methods
# as str objects. fixtesttype() can be used to propagate
# these arguments to the appropriate type
@@ -47,11 +52,12 @@ class BaseTest(unittest.TestCase):
return obj
# check that obj.method(*args) returns result
- def checkequal(self, result, obj, methodname, *args):
+ def checkequal(self, result, obj, methodname, *args, **kwargs):
result = self.fixtype(result)
obj = self.fixtype(obj)
args = self.fixtype(args)
- realresult = getattr(obj, methodname)(*args)
+ kwargs = {k: self.fixtype(v) for k,v in kwargs.items()}
+ realresult = getattr(obj, methodname)(*args, **kwargs)
self.assertEqual(
result,
realresult
@@ -116,7 +122,11 @@ class BaseTest(unittest.TestCase):
self.checkequal(0, '', 'count', 'xx', sys.maxsize, 0)
self.checkraises(TypeError, 'hello', 'count')
- self.checkraises(TypeError, 'hello', 'count', 42)
+
+ if self.contains_bytes:
+ self.checkequal(0, 'hello', 'count', 42)
+ else:
+ self.checkraises(TypeError, 'hello', 'count', 42)
# For a variety of combinations,
# verify that str.count() matches an equivalent function
@@ -162,7 +172,11 @@ class BaseTest(unittest.TestCase):
self.checkequal( 2, 'rrarrrrrrrrra', 'find', 'a', None, 6)
self.checkraises(TypeError, 'hello', 'find')
- self.checkraises(TypeError, 'hello', 'find', 42)
+
+ if self.contains_bytes:
+ self.checkequal(-1, 'hello', 'find', 42)
+ else:
+ self.checkraises(TypeError, 'hello', 'find', 42)
self.checkequal(0, '', 'find', '')
self.checkequal(-1, '', 'find', '', 1, 1)
@@ -216,7 +230,11 @@ class BaseTest(unittest.TestCase):
self.checkequal( 2, 'rrarrrrrrrrra', 'rfind', 'a', None, 6)
self.checkraises(TypeError, 'hello', 'rfind')
- self.checkraises(TypeError, 'hello', 'rfind', 42)
+
+ if self.contains_bytes:
+ self.checkequal(-1, 'hello', 'rfind', 42)
+ else:
+ self.checkraises(TypeError, 'hello', 'rfind', 42)
# For a variety of combinations,
# verify that str.rfind() matches __contains__
@@ -263,7 +281,11 @@ class BaseTest(unittest.TestCase):
self.checkequal( 2, 'rrarrrrrrrrra', 'index', 'a', None, 6)
self.checkraises(TypeError, 'hello', 'index')
- self.checkraises(TypeError, 'hello', 'index', 42)
+
+ if self.contains_bytes:
+ self.checkraises(ValueError, 'hello', 'index', 42)
+ else:
+ self.checkraises(TypeError, 'hello', 'index', 42)
def test_rindex(self):
self.checkequal(12, 'abcdefghiabc', 'rindex', '')
@@ -285,7 +307,11 @@ class BaseTest(unittest.TestCase):
self.checkequal( 2, 'rrarrrrrrrrra', 'rindex', 'a', None, 6)
self.checkraises(TypeError, 'hello', 'rindex')
- self.checkraises(TypeError, 'hello', 'rindex', 42)
+
+ if self.contains_bytes:
+ self.checkraises(ValueError, 'hello', 'rindex', 42)
+ else:
+ self.checkraises(TypeError, 'hello', 'rindex', 42)
def test_lower(self):
self.checkequal('hello', 'HeLLo', 'lower')
@@ -363,6 +389,17 @@ class BaseTest(unittest.TestCase):
self.checkequal(['a']*18 + ['aBLAHa'], ('aBLAH'*20)[:-4],
'split', 'BLAH', 18)
+ # with keyword args
+ self.checkequal(['a', 'b', 'c', 'd'], 'a|b|c|d', 'split', sep='|')
+ self.checkequal(['a', 'b|c|d'],
+ 'a|b|c|d', 'split', '|', maxsplit=1)
+ self.checkequal(['a', 'b|c|d'],
+ 'a|b|c|d', 'split', sep='|', maxsplit=1)
+ self.checkequal(['a', 'b|c|d'],
+ 'a|b|c|d', 'split', maxsplit=1, sep='|')
+ self.checkequal(['a', 'b c d'],
+ 'a b c d', 'split', maxsplit=1)
+
# argument type
self.checkraises(TypeError, 'hello', 'split', 42, 42, 42)
@@ -420,6 +457,17 @@ class BaseTest(unittest.TestCase):
self.checkequal(['aBLAHa'] + ['a']*18, ('aBLAH'*20)[:-4],
'rsplit', 'BLAH', 18)
+ # with keyword args
+ self.checkequal(['a', 'b', 'c', 'd'], 'a|b|c|d', 'rsplit', sep='|')
+ self.checkequal(['a|b|c', 'd'],
+ 'a|b|c|d', 'rsplit', '|', maxsplit=1)
+ self.checkequal(['a|b|c', 'd'],
+ 'a|b|c|d', 'rsplit', sep='|', maxsplit=1)
+ self.checkequal(['a|b|c', 'd'],
+ 'a|b|c|d', 'rsplit', maxsplit=1, sep='|')
+ self.checkequal(['a b c', 'd'],
+ 'a b c d', 'rsplit', maxsplit=1)
+
# argument type
self.checkraises(TypeError, 'hello', 'rsplit', 42, 42, 42)
@@ -643,7 +691,7 @@ class CommonTest(BaseTest):
# check that titlecased chars are lowered correctly
# \u1ffc is the titlecased char
- self.checkequal('\u1ffc\u1ff3\u1ff3\u1ff3',
+ self.checkequal('\u03a9\u0399\u1ff3\u1ff3\u1ff3',
'\u1ff3\u1ff3\u1ffc\u1ffc', 'capitalize')
# check with cased non-letter chars
self.checkequal('\u24c5\u24e8\u24e3\u24d7\u24de\u24dd',
@@ -908,7 +956,14 @@ class MixinStrUnicodeUserStringTest:
self.checkequal(['abc', 'def', 'ghi'], "abc\ndef\r\nghi\n", 'splitlines')
self.checkequal(['abc', 'def', 'ghi', ''], "abc\ndef\r\nghi\n\r", 'splitlines')
self.checkequal(['', 'abc', 'def', 'ghi', ''], "\nabc\ndef\r\nghi\n\r", 'splitlines')
- self.checkequal(['\n', 'abc\n', 'def\r\n', 'ghi\n', '\r'], "\nabc\ndef\r\nghi\n\r", 'splitlines', 1)
+ self.checkequal(['', 'abc', 'def', 'ghi', ''],
+ "\nabc\ndef\r\nghi\n\r", 'splitlines', False)
+ self.checkequal(['\n', 'abc\n', 'def\r\n', 'ghi\n', '\r'],
+ "\nabc\ndef\r\nghi\n\r", 'splitlines', True)
+ self.checkequal(['', 'abc', 'def', 'ghi', ''], "\nabc\ndef\r\nghi\n\r",
+ 'splitlines', keepends=False)
+ self.checkequal(['\n', 'abc\n', 'def\r\n', 'ghi\n', '\r'],
+ "\nabc\ndef\r\nghi\n\r", 'splitlines', keepends=True)
self.checkraises(TypeError, 'abc', 'splitlines', 42, 42)
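
The new cases pin down the keyword spellings of split(), rsplit() and splitlines() that these methods now accept; in plain interpreter terms:

assert 'a|b|c|d'.split(sep='|', maxsplit=1) == ['a', 'b|c|d']
assert 'a|b|c|d'.rsplit(sep='|', maxsplit=1) == ['a|b|c', 'd']
assert 'one\ntwo\r\n'.splitlines(keepends=True) == ['one\n', 'two\r\n']
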
diff --git a/Lib/test/support.py b/Lib/test/support.py
index 01cd203..c384222 100644
--- a/Lib/test/support.py
+++ b/Lib/test/support.py
@@ -15,7 +15,7 @@ import shutil
import warnings
import unittest
import importlib
-import collections
+import collections.abc
import re
import subprocess
import imp
@@ -23,6 +23,7 @@ import time
import sysconfig
import fnmatch
import logging.handlers
+import struct
try:
import _thread, threading
@@ -34,26 +35,39 @@ try:
except ImportError:
multiprocessing = None
+try:
+ import faulthandler
+except ImportError:
+ faulthandler = None
+
+try:
+ import zlib
+except ImportError:
+ zlib = None
+
+try:
+ import fcntl
+except ImportError:
+ fcntl = None
__all__ = [
"Error", "TestFailed", "ResourceDenied", "import_module",
"verbose", "use_resources", "max_memuse", "record_original_stdout",
"get_original_stdout", "unload", "unlink", "rmtree", "forget",
- "is_resource_enabled", "requires", "requires_mac_ver",
- "find_unused_port", "bind_port",
- "fcmp", "is_jython", "TESTFN", "HOST", "FUZZ", "SAVEDCWD", "temp_cwd",
- "findfile", "sortdict", "check_syntax_error", "open_urlresource",
- "check_warnings", "CleanImport", "EnvironmentVarGuard",
- "TransientResource", "captured_output", "captured_stdout",
- "captured_stdin", "captured_stderr",
- "time_out", "socket_peer_reset", "ioerror_peer_reset",
- "run_with_locale", 'temp_umask', "transient_internet",
- "set_memlimit", "bigmemtest", "bigaddrspacetest", "BasicTestRunner",
- "run_unittest", "run_doctest", "threading_setup", "threading_cleanup",
- "reap_children", "cpython_only", "check_impl_detail", "get_attribute",
- "swap_item", "swap_attr", "requires_IEEE_754",
+ "is_resource_enabled", "requires", "requires_freebsd_version",
+ "requires_linux_version", "requires_mac_ver", "find_unused_port", "bind_port",
+ "IPV6_ENABLED", "is_jython", "TESTFN", "HOST", "SAVEDCWD", "temp_cwd",
+ "findfile", "create_empty_file", "sortdict", "check_syntax_error", "open_urlresource",
+ "check_warnings", "CleanImport", "EnvironmentVarGuard", "TransientResource",
+ "captured_stdout", "captured_stdin", "captured_stderr", "time_out",
+ "socket_peer_reset", "ioerror_peer_reset", "run_with_locale", 'temp_umask',
+ "transient_internet", "set_memlimit", "bigmemtest", "bigaddrspacetest",
+ "BasicTestRunner", "run_unittest", "run_doctest", "threading_setup",
+ "threading_cleanup", "reap_children", "cpython_only", "check_impl_detail",
+ "get_attribute", "swap_item", "swap_attr", "requires_IEEE_754",
"TestHandler", "Matcher", "can_symlink", "skip_unless_symlink",
- "import_fresh_module", "failfast",
+ "import_fresh_module", "requires_zlib", "PIPE_MAX_SIZE", "failfast",
+ "anticipate_failure"
]
class Error(Exception):
@@ -124,6 +138,17 @@ def _save_and_block_module(name, orig_modules):
return saved
+def anticipate_failure(condition):
+ """Decorator to mark a test that is known to be broken in some cases
+
+ Any use of this decorator should have a comment identifying the
+ associated tracker issue.
+ """
+ if condition:
+ return unittest.expectedFailure
+ return lambda f: f
+
+
def import_fresh_module(name, fresh=(), blocked=(), deprecated=False):
"""Imports and returns a module, deliberately bypassing the sys.modules cache
and importing a fresh copy of the module. Once the import is complete,
@@ -167,8 +192,7 @@ def get_attribute(obj, name):
try:
attribute = getattr(obj, name)
except AttributeError:
- raise unittest.SkipTest("module %s has no attribute %s" % (
- obj.__name__, name))
+ raise unittest.SkipTest("object %r has no attribute %r" % (obj, name))
else:
return attribute
@@ -209,8 +233,7 @@ def rmtree(path):
try:
shutil.rmtree(path)
except OSError as error:
- # Unix returns ENOENT, Windows returns ESRCH.
- if error.errno not in (errno.ENOENT, errno.ESRCH):
+ if error.errno != errno.ENOENT:
raise
def make_legacy_pyc(source):
@@ -295,9 +318,52 @@ def requires(resource, msg=None):
return
if not is_resource_enabled(resource):
if msg is None:
- msg = "Use of the `%s' resource not enabled" % resource
+ msg = "Use of the %r resource not enabled" % resource
raise ResourceDenied(msg)
+def _requires_unix_version(sysname, min_version):
+ """Decorator raising SkipTest if the OS is `sysname` and the version is less
+ than `min_version`.
+
+ For example, @_requires_unix_version('FreeBSD', (7, 2)) raises SkipTest if
+ the FreeBSD version is less than 7.2.
+ """
+ def decorator(func):
+ @functools.wraps(func)
+ def wrapper(*args, **kw):
+ if platform.system() == sysname:
+ version_txt = platform.release().split('-', 1)[0]
+ try:
+ version = tuple(map(int, version_txt.split('.')))
+ except ValueError:
+ pass
+ else:
+ if version < min_version:
+ min_version_txt = '.'.join(map(str, min_version))
+ raise unittest.SkipTest(
+ "%s version %s or higher required, not %s"
+ % (sysname, min_version_txt, version_txt))
+            return func(*args, **kw)
+        return wrapper
+ return decorator
+
+def requires_freebsd_version(*min_version):
+ """Decorator raising SkipTest if the OS is FreeBSD and the FreeBSD version is
+ less than `min_version`.
+
+ For example, @requires_freebsd_version(7, 2) raises SkipTest if the FreeBSD
+ version is less than 7.2.
+ """
+ return _requires_unix_version('FreeBSD', min_version)
+
+def requires_linux_version(*min_version):
+ """Decorator raising SkipTest if the OS is Linux and the Linux version is
+ less than `min_version`.
+
+ For example, @requires_linux_version(2, 6, 32) raises SkipTest if the Linux
+ version is less than 2.6.32.
+ """
+ return _requires_unix_version('Linux', min_version)
+
def requires_mac_ver(*min_version):
"""Decorator raising SkipTest if the OS is Mac OS X and the OS X
    version is less than min_version.
@@ -325,6 +391,7 @@ def requires_mac_ver(*min_version):
return wrapper
return decorator
+
HOST = 'localhost'
def find_unused_port(family=socket.AF_INET, socktype=socket.SOCK_STREAM):
@@ -420,29 +487,35 @@ def bind_port(sock, host=HOST):
port = sock.getsockname()[1]
return port
-FUZZ = 1e-6
-
-def fcmp(x, y): # fuzzy comparison function
- if isinstance(x, float) or isinstance(y, float):
+def _is_ipv6_enabled():
+ """Check whether IPv6 is enabled on this host."""
+ if socket.has_ipv6:
try:
- fuzz = (abs(x) + abs(y)) * FUZZ
- if abs(x-y) <= fuzz:
- return 0
- except:
+ sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
+ sock.bind(('::1', 0))
+ except (socket.error, socket.gaierror):
pass
- elif type(x) == type(y) and isinstance(x, (tuple, list)):
- for i in range(min(len(x), len(y))):
- outcome = fcmp(x[i], y[i])
- if outcome != 0:
- return outcome
- return (len(x) > len(y)) - (len(x) < len(y))
- return (x > y) - (x < y)
+ else:
+ sock.close()
+ return True
+ return False
+
+IPV6_ENABLED = _is_ipv6_enabled()
+
+
+# A constant likely larger than the underlying OS pipe buffer size.
+# Windows limit seems to be around 512B, and most Unix kernels have a 64K pipe
+# buffer size: take 1M to be sure.
+PIPE_MAX_SIZE = 1024 * 1024
+
# decorator for skipping tests on non-IEEE 754 platforms
requires_IEEE_754 = unittest.skipUnless(
float.__getformat__("double").startswith("IEEE"),
"test requires IEEE 754 doubles")
+requires_zlib = unittest.skipUnless(zlib, 'requires zlib')
+
is_jython = sys.platform.startswith('java')
# Filename used for testing
@@ -543,14 +616,15 @@ def temp_cwd(name='tempcwd', quiet=False, path=None):
rmtree(name)
-@contextlib.contextmanager
-def temp_umask(umask):
- """Context manager that temporarily sets the process umask."""
- oldmask = os.umask(umask)
- try:
- yield
- finally:
- os.umask(oldmask)
+if hasattr(os, "umask"):
+ @contextlib.contextmanager
+ def temp_umask(umask):
+ """Context manager that temporarily sets the process umask."""
+ oldmask = os.umask(umask)
+ try:
+ yield
+ finally:
+ os.umask(oldmask)
def findfile(file, here=__file__, subdir=None):
@@ -568,6 +642,11 @@ def findfile(file, here=__file__, subdir=None):
if os.path.exists(fn): return fn
return file
+def create_empty_file(filename):
+ """Create an empty file. If the file already exists, truncate it."""
+ fd = os.open(filename, os.O_WRONLY | os.O_CREAT | os.O_TRUNC)
+ os.close(fd)
+
def sortdict(dict):
"Like repr(dict), but in sorted order."
items = sorted(dict.items())
@@ -632,7 +711,7 @@ def open_urlresource(url, *args, **kw):
f = check_valid_file(fn)
if f is not None:
return f
- raise TestFailed('invalid resource "%s"' % fn)
+ raise TestFailed('invalid resource %r' % fn)
class WarningsRecorder(object):
@@ -753,7 +832,7 @@ class CleanImport(object):
sys.modules.update(self.original_modules)
-class EnvironmentVarGuard(collections.MutableMapping):
+class EnvironmentVarGuard(collections.abc.MutableMapping):
"""Class to help protect the environment variable properly. Can be used as
a context manager."""
@@ -884,7 +963,7 @@ def transient_internet(resource_name, *, timeout=30.0, errnos=()):
('WSANO_DATA', 11004),
]
- denied = ResourceDenied("Resource '%s' is not available" % resource_name)
+ denied = ResourceDenied("Resource %r is not available" % resource_name)
captured_errnos = errnos
gai_errnos = []
if not captured_errnos:
@@ -973,6 +1052,16 @@ def gc_collect():
gc.collect()
gc.collect()
+@contextlib.contextmanager
+def disable_gc():
+ have_gc = gc.isenabled()
+ gc.disable()
+ try:
+ yield
+ finally:
+ if have_gc:
+ gc.enable()
+
def python_is_optimized():
"""Find if Python was built with optimizations."""
@@ -981,7 +1070,7 @@ def python_is_optimized():
for opt in cflags.split():
if opt.startswith('-O'):
final_opt = opt
- return final_opt and final_opt != '-O0'
+ return final_opt != '' and final_opt != '-O0'
#=======================================================================
@@ -1054,41 +1143,71 @@ def set_memlimit(limit):
raise ValueError('Memory limit %r too low to be useful' % (limit,))
max_memuse = memlimit
-def _memory_watchdog(start_evt, finish_evt, period=10.0):
- """A function which periodically watches the process' memory consumption
+class _MemoryWatchdog:
+ """An object which periodically watches the process' memory consumption
and prints it out.
"""
- # XXX: because of the GIL, and because the very long operations tested
- # in most bigmem tests are uninterruptible, the loop below gets woken up
- # much less often than expected.
- # The polling code should be rewritten in raw C, without holding the GIL,
- # and push results onto an anonymous pipe.
- try:
- page_size = os.sysconf('SC_PAGESIZE')
- except (ValueError, AttributeError):
+
+ def __init__(self):
+ self.procfile = '/proc/{pid}/statm'.format(pid=os.getpid())
+ self.started = False
+ self.thread = None
try:
- page_size = os.sysconf('SC_PAGE_SIZE')
+ self.page_size = os.sysconf('SC_PAGESIZE')
except (ValueError, AttributeError):
- page_size = 4096
- procfile = '/proc/{pid}/statm'.format(pid=os.getpid())
- try:
- f = open(procfile, 'rb')
- except IOError as e:
- warnings.warn('/proc not available for stats: {}'.format(e),
- RuntimeWarning)
- sys.stderr.flush()
- return
- with f:
- start_evt.set()
- old_data = -1
- while not finish_evt.wait(period):
- f.seek(0)
- statm = f.read().decode('ascii')
- data = int(statm.split()[5])
- if data != old_data:
- old_data = data
+ try:
+ self.page_size = os.sysconf('SC_PAGE_SIZE')
+ except (ValueError, AttributeError):
+ self.page_size = 4096
+
+ def consumer(self, fd):
+ HEADER = "l"
+ header_size = struct.calcsize(HEADER)
+ try:
+ while True:
+ header = os.read(fd, header_size)
+ if len(header) < header_size:
+ # Pipe closed on other end
+ break
+ data_len, = struct.unpack(HEADER, header)
+ data = os.read(fd, data_len)
+ statm = data.decode('ascii')
+ data = int(statm.split()[5])
print(" ... process data size: {data:.1f}G"
- .format(data=data * page_size / (1024 ** 3)))
+ .format(data=data * self.page_size / (1024 ** 3)))
+ finally:
+ os.close(fd)
+
+ def start(self):
+ if not faulthandler or not hasattr(faulthandler, '_file_watchdog'):
+ return
+ try:
+ rfd = os.open(self.procfile, os.O_RDONLY)
+ except OSError as e:
+ warnings.warn('/proc not available for stats: {}'.format(e),
+ RuntimeWarning)
+ sys.stderr.flush()
+ return
+ pipe_fd, wfd = os.pipe()
+ # set the write end of the pipe non-blocking to avoid blocking the
+ # watchdog thread when the consumer doesn't drain the pipe fast enough
+ if fcntl:
+ flags = fcntl.fcntl(wfd, fcntl.F_GETFL)
+ fcntl.fcntl(wfd, fcntl.F_SETFL, flags|os.O_NONBLOCK)
+ # _file_watchdog() doesn't take the GIL in its child thread, and
+    # therefore collects statistics in a timely fashion
+ faulthandler._file_watchdog(rfd, wfd, 1.0)
+ self.started = True
+ self.thread = threading.Thread(target=self.consumer, args=(pipe_fd,))
+ self.thread.daemon = True
+ self.thread.start()
+
+ def stop(self):
+ if not self.started:
+ return
+ faulthandler._cancel_file_watchdog()
+ self.thread.join()
+
def bigmemtest(size, memuse, dry_run=True):
"""Decorator for bigmem tests.
@@ -1115,27 +1234,20 @@ def bigmemtest(size, memuse, dry_run=True):
"not enough memory: %.1fG minimum needed"
% (size * memuse / (1024 ** 3)))
- if real_max_memuse and verbose and threading:
+ if real_max_memuse and verbose and faulthandler and threading:
print()
print(" ... expected peak memory use: {peak:.1f}G"
.format(peak=size * memuse / (1024 ** 3)))
- sys.stdout.flush()
- start_evt = threading.Event()
- finish_evt = threading.Event()
- t = threading.Thread(target=_memory_watchdog,
- args=(start_evt, finish_evt, 0.5))
- t.daemon = True
- t.start()
- start_evt.set()
+ watchdog = _MemoryWatchdog()
+ watchdog.start()
else:
- t = None
+ watchdog = None
try:
return f(self, maxsize)
finally:
- if t:
- finish_evt.set()
- t.join()
+ if watchdog:
+ watchdog.stop()
wrapper.size = size
wrapper.memuse = memuse
@@ -1217,6 +1329,33 @@ def check_impl_detail(**guards):
return guards.get(platform.python_implementation().lower(), default)
+def no_tracing(func):
+ """Decorator to temporarily turn off tracing for the duration of a test."""
+ if not hasattr(sys, 'gettrace'):
+ return func
+ else:
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ original_trace = sys.gettrace()
+ try:
+ sys.settrace(None)
+ return func(*args, **kwargs)
+ finally:
+ sys.settrace(original_trace)
+ return wrapper
+
+
+def refcount_test(test):
+ """Decorator for tests which involve reference counting.
+
+    To start, the decorator does not run the test if it is not run by CPython.
+ After that, any trace function is unset during the test to prevent
+ unexpected refcounts caused by the trace function.
+
+ """
+ return no_tracing(cpython_only(test))
+
+
def _filter_suite(suite, pred):
"""Recursively filter test cases in a suite based on a predicate."""
newtests = []
@@ -1229,7 +1368,6 @@ def _filter_suite(suite, pred):
newtests.append(test)
suite._tests = newtests
-
def _run_suite(suite):
"""Run tests from a unittest.TestSuite-derived class."""
if verbose:
@@ -1456,7 +1594,7 @@ def strip_python_stderr(stderr):
def args_from_interpreter_flags():
"""Return a list of command-line arguments reproducing the current
- settings in sys.flags."""
+ settings in sys.flags and sys.warnoptions."""
flag_opt_map = {
'bytes_warning': 'b',
'dont_write_bytecode': 'B',
@@ -1472,6 +1610,8 @@ def args_from_interpreter_flags():
v = getattr(sys.flags, flag)
if v > 0:
args.append('-' + opt * v)
+ for opt in sys.warnoptions:
+ args.append('-W' + opt)
return args
#============================================================
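
Most of the additions to support are decorators meant to be stacked on test methods; a short usage sketch with import paths as in this tree:

import unittest
from test.support import (anticipate_failure, no_tracing,
                          requires_linux_version)

class Examples(unittest.TestCase):

    @no_tracing                 # unsets any sys.settrace() hook for the test
    def test_without_tracing(self):
        self.assertEqual(1 + 1, 2)

    @anticipate_failure(True)   # real uses cite a tracker issue here
    def test_known_broken(self):
        self.fail('tracked elsewhere')

    @requires_linux_version(2, 6, 32)   # skipped on older Linux kernels
    def test_recent_linux_feature(self):
        pass

if __name__ == '__main__':
    unittest.main()
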
diff --git a/Lib/test/test__locale.py b/Lib/test/test__locale.py
index 3fadb57..f7f1abd 100644
--- a/Lib/test/test__locale.py
+++ b/Lib/test/test__locale.py
@@ -1,13 +1,15 @@
-from test.support import run_unittest
from _locale import (setlocale, LC_ALL, LC_CTYPE, LC_NUMERIC, localeconv, Error)
try:
from _locale import (RADIXCHAR, THOUSEP, nl_langinfo)
except ImportError:
nl_langinfo = None
-import unittest
+import codecs
+import locale
import sys
+import unittest
from platform import uname
+from test.support import run_unittest
if uname()[0] == "Darwin":
maj, min, mic = [int(part) for part in uname()[2].split(".")]
@@ -17,7 +19,7 @@ if uname()[0] == "Darwin":
candidate_locales = ['es_UY', 'fr_FR', 'fi_FI', 'es_CO', 'pt_PT', 'it_IT',
'et_EE', 'es_PY', 'no_NO', 'nl_NL', 'lv_LV', 'el_GR', 'be_BY', 'fr_BE',
'ro_RO', 'ru_UA', 'ru_RU', 'es_VE', 'ca_ES', 'se_NO', 'es_EC', 'id_ID',
- 'ka_GE', 'es_CL', 'hu_HU', 'wa_BE', 'lt_LT', 'sl_SI', 'hr_HR', 'es_AR',
+ 'ka_GE', 'es_CL', 'wa_BE', 'hu_HU', 'lt_LT', 'sl_SI', 'hr_HR', 'es_AR',
'es_ES', 'oc_FR', 'gl_ES', 'bg_BG', 'is_IS', 'mk_MK', 'de_AT', 'pt_BR',
'da_DK', 'nn_NO', 'cs_CZ', 'de_LU', 'es_BO', 'sq_AL', 'sk_SK', 'fr_CH',
'de_DE', 'sr_YU', 'br_FR', 'nl_BE', 'sv_FI', 'pl_PL', 'fr_CA', 'fo_FO',
@@ -25,6 +27,31 @@ candidate_locales = ['es_UY', 'fr_FR', 'fi_FI', 'es_CO', 'pt_PT', 'it_IT',
'eu_ES', 'vi_VN', 'af_ZA', 'nb_NO', 'en_DK', 'tg_TJ', 'en_US',
'es_ES.ISO8859-1', 'fr_FR.ISO8859-15', 'ru_RU.KOI8-R', 'ko_KR.eucKR']
+# Issue #13441: Skip some locales (e.g. cs_CZ and hu_HU) on Solaris to
+# work around an mbstowcs() bug. For example, on Solaris, the hu_HU locale uses
+# the locale encoding ISO-8859-2, the thousands separator is b'\xA0' and it is
+# decoded as U+30000020 (an invalid character) by mbstowcs().
+if sys.platform == 'sunos5':
+ old_locale = locale.setlocale(locale.LC_ALL)
+ try:
+ locales = []
+ for loc in candidate_locales:
+ try:
+ locale.setlocale(locale.LC_ALL, loc)
+ except Error:
+ continue
+ encoding = locale.getpreferredencoding(False)
+ try:
+ localeconv()
+ except Exception as err:
+ print("WARNING: Skip locale %s (encoding %s): [%s] %s"
+ % (loc, encoding, type(err), err))
+ else:
+ locales.append(loc)
+ candidate_locales = locales
+ finally:
+ locale.setlocale(locale.LC_ALL, old_locale)
+
# Workaround for MSVC6(debug) crash bug
if "MSC v.1200" in sys.version:
def accept(loc):
@@ -86,9 +113,10 @@ class _LocaleTests(unittest.TestCase):
setlocale(LC_CTYPE, loc)
except Error:
continue
+ formatting = localeconv()
for lc in ("decimal_point",
"thousands_sep"):
- self.numeric_tester('localeconv', localeconv()[lc], lc, loc)
+ self.numeric_tester('localeconv', formatting[lc], lc, loc)
@unittest.skipUnless(nl_langinfo, "nl_langinfo is not available")
def test_lc_numeric_basic(self):
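
The Solaris workaround above boils down to probing each candidate locale once with localeconv() before trusting it. The same probe in isolation, with the locale name as a placeholder:

import locale

saved = locale.setlocale(locale.LC_ALL)
try:
    locale.setlocale(locale.LC_ALL, 'hu_HU')    # hypothetical locale
    print(locale.localeconv()['thousands_sep'])
except locale.Error:
    print('locale not installed, skipping')
finally:
    locale.setlocale(locale.LC_ALL, saved)
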
diff --git a/Lib/test/test_abc.py b/Lib/test/test_abc.py
index d86f97c..653c957 100644
--- a/Lib/test/test_abc.py
+++ b/Lib/test/test_abc.py
@@ -10,14 +10,7 @@ import abc
from inspect import isabstract
-class TestABC(unittest.TestCase):
-
- def test_abstractmethod_basics(self):
- @abc.abstractmethod
- def foo(self): pass
- self.assertTrue(foo.__isabstractmethod__)
- def bar(self): pass
- self.assertFalse(hasattr(bar, "__isabstractmethod__"))
+class TestLegacyAPI(unittest.TestCase):
def test_abstractproperty_basics(self):
@abc.abstractproperty
@@ -29,10 +22,12 @@ class TestABC(unittest.TestCase):
class C(metaclass=abc.ABCMeta):
@abc.abstractproperty
def foo(self): return 3
+ self.assertRaises(TypeError, C)
class D(C):
@property
def foo(self): return super().foo
self.assertEqual(D().foo, 3)
+ self.assertFalse(getattr(D.foo, "__isabstractmethod__", False))
def test_abstractclassmethod_basics(self):
@abc.abstractclassmethod
@@ -40,7 +35,7 @@ class TestABC(unittest.TestCase):
self.assertTrue(foo.__isabstractmethod__)
@classmethod
def bar(cls): pass
- self.assertFalse(hasattr(bar, "__isabstractmethod__"))
+ self.assertFalse(getattr(bar, "__isabstractmethod__", False))
class C(metaclass=abc.ABCMeta):
@abc.abstractclassmethod
@@ -58,7 +53,7 @@ class TestABC(unittest.TestCase):
self.assertTrue(foo.__isabstractmethod__)
@staticmethod
def bar(): pass
- self.assertFalse(hasattr(bar, "__isabstractmethod__"))
+ self.assertFalse(getattr(bar, "__isabstractmethod__", False))
class C(metaclass=abc.ABCMeta):
@abc.abstractstaticmethod
@@ -98,6 +93,163 @@ class TestABC(unittest.TestCase):
self.assertRaises(TypeError, F) # because bar is abstract now
self.assertTrue(isabstract(F))
+
+class TestABC(unittest.TestCase):
+
+ def test_abstractmethod_basics(self):
+ @abc.abstractmethod
+ def foo(self): pass
+ self.assertTrue(foo.__isabstractmethod__)
+ def bar(self): pass
+ self.assertFalse(hasattr(bar, "__isabstractmethod__"))
+
+ def test_abstractproperty_basics(self):
+ @property
+ @abc.abstractmethod
+ def foo(self): pass
+ self.assertTrue(foo.__isabstractmethod__)
+ def bar(self): pass
+ self.assertFalse(getattr(bar, "__isabstractmethod__", False))
+
+ class C(metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def foo(self): return 3
+ self.assertRaises(TypeError, C)
+ class D(C):
+ @C.foo.getter
+ def foo(self): return super().foo
+ self.assertEqual(D().foo, 3)
+
+ def test_abstractclassmethod_basics(self):
+ @classmethod
+ @abc.abstractmethod
+ def foo(cls): pass
+ self.assertTrue(foo.__isabstractmethod__)
+ @classmethod
+ def bar(cls): pass
+ self.assertFalse(getattr(bar, "__isabstractmethod__", False))
+
+ class C(metaclass=abc.ABCMeta):
+ @classmethod
+ @abc.abstractmethod
+ def foo(cls): return cls.__name__
+ self.assertRaises(TypeError, C)
+ class D(C):
+ @classmethod
+ def foo(cls): return super().foo()
+ self.assertEqual(D.foo(), 'D')
+ self.assertEqual(D().foo(), 'D')
+
+ def test_abstractstaticmethod_basics(self):
+ @staticmethod
+ @abc.abstractmethod
+ def foo(): pass
+ self.assertTrue(foo.__isabstractmethod__)
+ @staticmethod
+ def bar(): pass
+ self.assertFalse(getattr(bar, "__isabstractmethod__", False))
+
+ class C(metaclass=abc.ABCMeta):
+ @staticmethod
+ @abc.abstractmethod
+ def foo(): return 3
+ self.assertRaises(TypeError, C)
+ class D(C):
+ @staticmethod
+ def foo(): return 4
+ self.assertEqual(D.foo(), 4)
+ self.assertEqual(D().foo(), 4)
+
+ def test_abstractmethod_integration(self):
+ for abstractthing in [abc.abstractmethod, abc.abstractproperty,
+ abc.abstractclassmethod,
+ abc.abstractstaticmethod]:
+ class C(metaclass=abc.ABCMeta):
+ @abstractthing
+ def foo(self): pass # abstract
+ def bar(self): pass # concrete
+ self.assertEqual(C.__abstractmethods__, {"foo"})
+ self.assertRaises(TypeError, C) # because foo is abstract
+ self.assertTrue(isabstract(C))
+ class D(C):
+ def bar(self): pass # concrete override of concrete
+ self.assertEqual(D.__abstractmethods__, {"foo"})
+ self.assertRaises(TypeError, D) # because foo is still abstract
+ self.assertTrue(isabstract(D))
+ class E(D):
+ def foo(self): pass
+ self.assertEqual(E.__abstractmethods__, set())
+ E() # now foo is concrete, too
+ self.assertFalse(isabstract(E))
+ class F(E):
+ @abstractthing
+ def bar(self): pass # abstract override of concrete
+ self.assertEqual(F.__abstractmethods__, {"bar"})
+ self.assertRaises(TypeError, F) # because bar is abstract now
+ self.assertTrue(isabstract(F))
+
+ def test_descriptors_with_abstractmethod(self):
+ class C(metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def foo(self): return 3
+ @foo.setter
+ @abc.abstractmethod
+ def foo(self, val): pass
+ self.assertRaises(TypeError, C)
+ class D(C):
+ @C.foo.getter
+ def foo(self): return super().foo
+ self.assertRaises(TypeError, D)
+ class E(D):
+ @D.foo.setter
+ def foo(self, val): pass
+ self.assertEqual(E().foo, 3)
+ # check that the property's __isabstractmethod__ descriptor does the
+ # right thing when presented with a value that fails truth testing:
+ class NotBool(object):
+ def __bool__(self):
+ raise ValueError()
+ __len__ = __bool__
+ with self.assertRaises(ValueError):
+ class F(C):
+ def bar(self):
+ pass
+ bar.__isabstractmethod__ = NotBool()
+ foo = property(bar)
+
+
+ def test_customdescriptors_with_abstractmethod(self):
+ class Descriptor:
+ def __init__(self, fget, fset=None):
+ self._fget = fget
+ self._fset = fset
+ def getter(self, callable):
+ return Descriptor(callable, self._fget)
+ def setter(self, callable):
+ return Descriptor(self._fget, callable)
+ @property
+ def __isabstractmethod__(self):
+ return (getattr(self._fget, '__isabstractmethod__', False)
+ or getattr(self._fset, '__isabstractmethod__', False))
+ class C(metaclass=abc.ABCMeta):
+ @Descriptor
+ @abc.abstractmethod
+ def foo(self): return 3
+ @foo.setter
+ @abc.abstractmethod
+ def foo(self, val): pass
+ self.assertRaises(TypeError, C)
+ class D(C):
+ @C.foo.getter
+ def foo(self): return super().foo
+ self.assertRaises(TypeError, D)
+ class E(D):
+ @D.foo.setter
+ def foo(self, val): pass
+ self.assertFalse(E.foo.__isabstractmethod__)
+
def test_metaclass_abc(self):
# Metaclasses can be ABCs, too.
class A(metaclass=abc.ABCMeta):
@@ -121,11 +273,32 @@ class TestABC(unittest.TestCase):
self.assertFalse(issubclass(B, (A,)))
self.assertNotIsInstance(b, A)
self.assertNotIsInstance(b, (A,))
- A.register(B)
+ B1 = A.register(B)
+ self.assertTrue(issubclass(B, A))
+ self.assertTrue(issubclass(B, (A,)))
+ self.assertIsInstance(b, A)
+ self.assertIsInstance(b, (A,))
+ self.assertIs(B1, B)
+ class C(B):
+ pass
+ c = C()
+ self.assertTrue(issubclass(C, A))
+ self.assertTrue(issubclass(C, (A,)))
+ self.assertIsInstance(c, A)
+ self.assertIsInstance(c, (A,))
+
+ def test_register_as_class_deco(self):
+ class A(metaclass=abc.ABCMeta):
+ pass
+ @A.register
+ class B(object):
+ pass
+ b = B()
self.assertTrue(issubclass(B, A))
self.assertTrue(issubclass(B, (A,)))
self.assertIsInstance(b, A)
self.assertIsInstance(b, (A,))
+ @A.register
class C(B):
pass
c = C()
@@ -133,6 +306,7 @@ class TestABC(unittest.TestCase):
self.assertTrue(issubclass(C, (A,)))
self.assertIsInstance(c, A)
self.assertIsInstance(c, (A,))
+ self.assertIs(C, A.register(C))
def test_isinstance_invalidation(self):
class A(metaclass=abc.ABCMeta):
diff --git a/Lib/test/test_abstract_numbers.py b/Lib/test/test_abstract_numbers.py
index 2a396cd..253e6f0 100644
--- a/Lib/test/test_abstract_numbers.py
+++ b/Lib/test/test_abstract_numbers.py
@@ -14,6 +14,7 @@ class TestNumbers(unittest.TestCase):
self.assertEqual(7, int(7).real)
self.assertEqual(0, int(7).imag)
self.assertEqual(7, int(7).conjugate())
+ self.assertEqual(-7, int(-7).conjugate())
self.assertEqual(7, int(7).numerator)
self.assertEqual(1, int(7).denominator)
@@ -24,6 +25,7 @@ class TestNumbers(unittest.TestCase):
self.assertEqual(7.3, float(7.3).real)
self.assertEqual(0, float(7.3).imag)
self.assertEqual(7.3, float(7.3).conjugate())
+ self.assertEqual(-7.3, float(-7.3).conjugate())
def test_complex(self):
self.assertFalse(issubclass(complex, Real))
diff --git a/Lib/test/test_aifc.py b/Lib/test/test_aifc.py
index ee4ad6b..ad6f610 100644
--- a/Lib/test/test_aifc.py
+++ b/Lib/test/test_aifc.py
@@ -1,4 +1,4 @@
-from test.support import findfile, run_unittest, TESTFN, captured_stdout, unlink
+from test.support import findfile, run_unittest, TESTFN, unlink
import unittest
import os
import io
@@ -207,11 +207,8 @@ class AIFCLowLevelTest(unittest.TestCase):
b += b'COMM' + struct.pack('>LhlhhLL', 18, 0, 0, 0, 0, 0, 0)
b += b'SSND' + struct.pack('>L', 8) + b'\x00' * 8
b += b'MARK' + struct.pack('>LhB', 3, 1, 1)
- with captured_stdout() as s:
+ with self.assertWarns(UserWarning):
f = aifc.open(io.BytesIO(b))
- self.assertEqual(
- s.getvalue(),
- 'Warning: MARK chunk contains only 0 markers instead of 1\n')
self.assertEqual(f.getmarkers(), None)
def test_read_comm_kludge_compname_even(self):
@@ -219,9 +216,8 @@ class AIFCLowLevelTest(unittest.TestCase):
b += b'COMM' + struct.pack('>LhlhhLL', 18, 0, 0, 0, 0, 0, 0)
b += b'NONE' + struct.pack('B', 4) + b'even' + b'\x00'
b += b'SSND' + struct.pack('>L', 8) + b'\x00' * 8
- with captured_stdout() as s:
+ with self.assertWarns(UserWarning):
f = aifc.open(io.BytesIO(b))
- self.assertEqual(s.getvalue(), 'Warning: bad COMM chunk size\n')
self.assertEqual(f.getcompname(), b'even')
def test_read_comm_kludge_compname_odd(self):
@@ -229,9 +225,8 @@ class AIFCLowLevelTest(unittest.TestCase):
b += b'COMM' + struct.pack('>LhlhhLL', 18, 0, 0, 0, 0, 0, 0)
b += b'NONE' + struct.pack('B', 3) + b'odd'
b += b'SSND' + struct.pack('>L', 8) + b'\x00' * 8
- with captured_stdout() as s:
+ with self.assertWarns(UserWarning):
f = aifc.open(io.BytesIO(b))
- self.assertEqual(s.getvalue(), 'Warning: bad COMM chunk size\n')
self.assertEqual(f.getcompname(), b'odd')
def test_write_params_raises(self):
diff --git a/Lib/test/test_argparse.py b/Lib/test/test_argparse.py
index 852991c..f456709 100644
--- a/Lib/test/test_argparse.py
+++ b/Lib/test/test_argparse.py
@@ -4021,6 +4021,37 @@ class TestHelpSubparsersWithHelpOrdering(HelpTestCase):
'''
+
+class TestHelpMetavarTypeFormatter(HelpTestCase):
+ """"""
+
+ def custom_type(string):
+ return string
+
+ parser_signature = Sig(prog='PROG', description='description',
+ formatter_class=argparse.MetavarTypeHelpFormatter)
+ argument_signatures = [Sig('a', type=int),
+ Sig('-b', type=custom_type),
+ Sig('-c', type=float, metavar='SOME FLOAT')]
+ argument_group_signatures = []
+ usage = '''\
+ usage: PROG [-h] [-b custom_type] [-c SOME FLOAT] int
+ '''
+ help = usage + '''\
+
+ description
+
+ positional arguments:
+ int
+
+ optional arguments:
+ -h, --help show this help message and exit
+ -b custom_type
+ -c SOME FLOAT
+ '''
+ version = ''
+
+
# =====================================
# Optional/Positional constructor tests
# =====================================
@@ -4411,7 +4442,7 @@ class TestEncoding(TestCase):
def _test_module_encoding(self, path):
path, _ = os.path.splitext(path)
path += ".py"
- with codecs.open(path, 'r', 'utf8') as f:
+ with codecs.open(path, 'r', 'utf-8') as f:
f.read()
def test_argparse_module_encoding(self):
@@ -4453,6 +4484,67 @@ class TestArgumentTypeError(TestCase):
else:
self.fail()
+# =========================
+# MessageContentError tests
+# =========================
+
+class TestMessageContentError(TestCase):
+
+ def test_missing_argument_name_in_message(self):
+ parser = ErrorRaisingArgumentParser(prog='PROG', usage='')
+ parser.add_argument('req_pos', type=str)
+ parser.add_argument('-req_opt', type=int, required=True)
+ parser.add_argument('need_one', type=str, nargs='+')
+
+ with self.assertRaises(ArgumentParserError) as cm:
+ parser.parse_args([])
+ msg = str(cm.exception)
+ self.assertRegex(msg, 'req_pos')
+ self.assertRegex(msg, 'req_opt')
+ self.assertRegex(msg, 'need_one')
+ with self.assertRaises(ArgumentParserError) as cm:
+ parser.parse_args(['myXargument'])
+ msg = str(cm.exception)
+ self.assertNotIn('req_pos', msg)
+ self.assertRegex(msg, 'req_opt')
+ self.assertRegex(msg, 'need_one')
+ with self.assertRaises(ArgumentParserError) as cm:
+ parser.parse_args(['myXargument', '-req_opt=1'])
+ msg = str(cm.exception)
+ self.assertNotIn('req_pos', msg)
+ self.assertNotIn('req_opt', msg)
+ self.assertRegex(msg, 'need_one')
+
+ def test_optional_optional_not_in_message(self):
+ parser = ErrorRaisingArgumentParser(prog='PROG', usage='')
+ parser.add_argument('req_pos', type=str)
+ parser.add_argument('--req_opt', type=int, required=True)
+ parser.add_argument('--opt_opt', type=bool, nargs='?',
+ default=True)
+ with self.assertRaises(ArgumentParserError) as cm:
+ parser.parse_args([])
+ msg = str(cm.exception)
+ self.assertRegex(msg, 'req_pos')
+ self.assertRegex(msg, 'req_opt')
+ self.assertNotIn('opt_opt', msg)
+ with self.assertRaises(ArgumentParserError) as cm:
+ parser.parse_args(['--req_opt=1'])
+ msg = str(cm.exception)
+ self.assertRegex(msg, 'req_pos')
+ self.assertNotIn('req_opt', msg)
+ self.assertNotIn('opt_opt', msg)
+
+ def test_optional_positional_not_in_message(self):
+ parser = ErrorRaisingArgumentParser(prog='PROG', usage='')
+ parser.add_argument('req_pos')
+ parser.add_argument('optional_positional', nargs='?', default='eggs')
+ with self.assertRaises(ArgumentParserError) as cm:
+ parser.parse_args([])
+ msg = str(cm.exception)
+ self.assertRegex(msg, 'req_pos')
+ self.assertNotIn('optional_positional', msg)
+
+
# ======================
# parse_known_args tests
# ======================
diff --git a/Lib/test/test_array.py b/Lib/test/test_array.py
index 5190c35..434e495 100755
--- a/Lib/test/test_array.py
+++ b/Lib/test/test_array.py
@@ -16,6 +16,13 @@ import warnings
import array
from array import _array_reconstructor as array_reconstructor
+try:
+ # Try to determine availability of long long independently
+ # of the array module under test
+ struct.calcsize('@q')
+ have_long_long = True
+except struct.error:
+ have_long_long = False
class ArraySubclass(array.array):
pass
@@ -26,6 +33,8 @@ class ArraySubclassWithKwargs(array.array):
tests = [] # list to accumulate all tests
typecodes = "ubBhHiIlLfd"
+if have_long_long:
+ typecodes += 'qQ'
class BadConstructorTest(unittest.TestCase):
@@ -209,10 +218,14 @@ class BaseTest(unittest.TestCase):
self.assertEqual(bi[1], len(a))
def test_byteswap(self):
- a = array.array(self.typecode, self.example)
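+ # 'u' items are 4-byte code units on this build (see minitemsize below);
+ # presumably the default example is replaced with a non-BMP character so
+ # the byteswapped value is still a valid code point.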
+ if self.typecode == 'u':
+ example = '\U00100100'
+ else:
+ example = self.example
+ a = array.array(self.typecode, example)
self.assertRaises(TypeError, a.byteswap, 42)
if a.itemsize in (1, 2, 4, 8):
- b = array.array(self.typecode, self.example)
+ b = array.array(self.typecode, example)
b.byteswap()
if a.itemsize==1:
self.assertEqual(a, b)
@@ -1002,7 +1015,7 @@ class UnicodeTest(StringTest):
smallerexample = '\x01\u263a\x00\ufefe'
biggerexample = '\x01\u263a\x01\ufeff'
outside = str('\x33')
- minitemsize = 2
+ minitemsize = 4
def test_unicode(self):
self.assertRaises(TypeError, array.array, 'b', 'foo')
@@ -1014,6 +1027,7 @@ class UnicodeTest(StringTest):
a.fromunicode('\x11abc\xff\u1234')
s = a.tounicode()
self.assertEqual(s, '\xa0\xc2\u1234 \x11abc\xff\u1234')
+ self.assertEqual(a.itemsize, 4)
s = '\x00="\'a\\b\x80\xff\u0000\u0001\u1234'
a = array.array('u', s)
@@ -1205,6 +1219,18 @@ class UnsignedLongTest(UnsignedNumberTest):
minitemsize = 4
tests.append(UnsignedLongTest)
+@unittest.skipIf(not have_long_long, 'need long long support')
+class LongLongTest(SignedNumberTest):
+ typecode = 'q'
+ minitemsize = 8
+tests.append(LongLongTest)
+
+@unittest.skipIf(not have_long_long, 'need long long support')
+class UnsignedLongLongTest(UnsignedNumberTest):
+ typecode = 'Q'
+ minitemsize = 8
+tests.append(UnsignedLongLongTest)
+
class FPTest(NumberTest):
example = [-42.0, 0, 42, 1e5, -1e10]
smallerexample = [-42.0, 0, 42, 1e5, -2e10]
diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast.py
index f4ce615..064c669 100644
--- a/Lib/test/test_ast.py
+++ b/Lib/test/test_ast.py
@@ -1,4 +1,6 @@
-import sys, unittest
+import os
+import sys
+import unittest
from test import support
import ast
@@ -52,6 +54,9 @@ exec_tests = [
"while v:pass",
# If
"if v:pass",
+ # With
+ "with x as y: pass",
+ "with x as y, z as q: pass",
# Raise
"raise Exception('string')",
# TryExcept
@@ -191,6 +196,9 @@ class AST_Tests(unittest.TestCase):
def test_AST_objects(self):
x = ast.AST()
self.assertEqual(x._fields, ())
+ x.foobar = 42
+ self.assertEqual(x.foobar, 42)
+ self.assertEqual(x.__dict__["foobar"], 42)
with self.assertRaises(AttributeError):
x.vararg
@@ -492,8 +500,413 @@ class ASTHelpers_Test(unittest.TestCase):
self.assertIn("invalid integer value: None", str(cm.exception))
+class ASTValidatorTests(unittest.TestCase):
+
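+ # Helpers: build a synthetic AST node, then assert that compile() rejects
+ # it with the expected error message.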
+ def mod(self, mod, msg=None, mode="exec", *, exc=ValueError):
+ mod.lineno = mod.col_offset = 0
+ ast.fix_missing_locations(mod)
+ with self.assertRaises(exc) as cm:
+ compile(mod, "<test>", mode)
+ if msg is not None:
+ self.assertIn(msg, str(cm.exception))
+
+ def expr(self, node, msg=None, *, exc=ValueError):
+ mod = ast.Module([ast.Expr(node)])
+ self.mod(mod, msg, exc=exc)
+
+ def stmt(self, stmt, msg=None):
+ mod = ast.Module([stmt])
+ self.mod(mod, msg)
+
+ def test_module(self):
+ m = ast.Interactive([ast.Expr(ast.Name("x", ast.Store()))])
+ self.mod(m, "must have Load context", "single")
+ m = ast.Expression(ast.Name("x", ast.Store()))
+ self.mod(m, "must have Load context", "eval")
+
+ def _check_arguments(self, fac, check):
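+ # Build an ast.arguments from the given pieces, wrap it via 'fac', and
+ # let 'check' assert the validator error.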
+ def arguments(args=None, vararg=None, varargannotation=None,
+ kwonlyargs=None, kwarg=None, kwargannotation=None,
+ defaults=None, kw_defaults=None):
+ if args is None:
+ args = []
+ if kwonlyargs is None:
+ kwonlyargs = []
+ if defaults is None:
+ defaults = []
+ if kw_defaults is None:
+ kw_defaults = []
+ args = ast.arguments(args, vararg, varargannotation, kwonlyargs,
+ kwarg, kwargannotation, defaults, kw_defaults)
+ return fac(args)
+ args = [ast.arg("x", ast.Name("x", ast.Store()))]
+ check(arguments(args=args), "must have Load context")
+ check(arguments(varargannotation=ast.Num(3)),
+ "varargannotation but no vararg")
+ check(arguments(varargannotation=ast.Name("x", ast.Store()), vararg="x"),
+ "must have Load context")
+ check(arguments(kwonlyargs=args), "must have Load context")
+ check(arguments(kwargannotation=ast.Num(42)),
+ "kwargannotation but no kwarg")
+ check(arguments(kwargannotation=ast.Name("x", ast.Store()),
+ kwarg="x"), "must have Load context")
+ check(arguments(defaults=[ast.Num(3)]),
+ "more positional defaults than args")
+ check(arguments(kw_defaults=[ast.Num(4)]),
+ "length of kwonlyargs is not the same as kw_defaults")
+ args = [ast.arg("x", ast.Name("x", ast.Load()))]
+ check(arguments(args=args, defaults=[ast.Name("x", ast.Store())]),
+ "must have Load context")
+ args = [ast.arg("a", ast.Name("x", ast.Load())),
+ ast.arg("b", ast.Name("y", ast.Load()))]
+ check(arguments(kwonlyargs=args,
+ kw_defaults=[None, ast.Name("x", ast.Store())]),
+ "must have Load context")
+
+ def test_funcdef(self):
+ a = ast.arguments([], None, None, [], None, None, [], [])
+ f = ast.FunctionDef("x", a, [], [], None)
+ self.stmt(f, "empty body on FunctionDef")
+ f = ast.FunctionDef("x", a, [ast.Pass()], [ast.Name("x", ast.Store())],
+ None)
+ self.stmt(f, "must have Load context")
+ f = ast.FunctionDef("x", a, [ast.Pass()], [],
+ ast.Name("x", ast.Store()))
+ self.stmt(f, "must have Load context")
+ def fac(args):
+ return ast.FunctionDef("x", args, [ast.Pass()], [], None)
+ self._check_arguments(fac, self.stmt)
+
+ def test_classdef(self):
+ def cls(bases=None, keywords=None, starargs=None, kwargs=None,
+ body=None, decorator_list=None):
+ if bases is None:
+ bases = []
+ if keywords is None:
+ keywords = []
+ if body is None:
+ body = [ast.Pass()]
+ if decorator_list is None:
+ decorator_list = []
+ return ast.ClassDef("myclass", bases, keywords, starargs,
+ kwargs, body, decorator_list)
+ self.stmt(cls(bases=[ast.Name("x", ast.Store())]),
+ "must have Load context")
+ self.stmt(cls(keywords=[ast.keyword("x", ast.Name("x", ast.Store()))]),
+ "must have Load context")
+ self.stmt(cls(starargs=ast.Name("x", ast.Store())),
+ "must have Load context")
+ self.stmt(cls(kwargs=ast.Name("x", ast.Store())),
+ "must have Load context")
+ self.stmt(cls(body=[]), "empty body on ClassDef")
+ self.stmt(cls(body=[None]), "None disallowed")
+ self.stmt(cls(decorator_list=[ast.Name("x", ast.Store())]),
+ "must have Load context")
+
+ def test_delete(self):
+ self.stmt(ast.Delete([]), "empty targets on Delete")
+ self.stmt(ast.Delete([None]), "None disallowed")
+ self.stmt(ast.Delete([ast.Name("x", ast.Load())]),
+ "must have Del context")
+
+ def test_assign(self):
+ self.stmt(ast.Assign([], ast.Num(3)), "empty targets on Assign")
+ self.stmt(ast.Assign([None], ast.Num(3)), "None disallowed")
+ self.stmt(ast.Assign([ast.Name("x", ast.Load())], ast.Num(3)),
+ "must have Store context")
+ self.stmt(ast.Assign([ast.Name("x", ast.Store())],
+ ast.Name("y", ast.Store())),
+ "must have Load context")
+
+ def test_augassign(self):
+ aug = ast.AugAssign(ast.Name("x", ast.Load()), ast.Add(),
+ ast.Name("y", ast.Load()))
+ self.stmt(aug, "must have Store context")
+ aug = ast.AugAssign(ast.Name("x", ast.Store()), ast.Add(),
+ ast.Name("y", ast.Store()))
+ self.stmt(aug, "must have Load context")
+
+ def test_for(self):
+ x = ast.Name("x", ast.Store())
+ y = ast.Name("y", ast.Load())
+ p = ast.Pass()
+ self.stmt(ast.For(x, y, [], []), "empty body on For")
+ self.stmt(ast.For(ast.Name("x", ast.Load()), y, [p], []),
+ "must have Store context")
+ self.stmt(ast.For(x, ast.Name("y", ast.Store()), [p], []),
+ "must have Load context")
+ e = ast.Expr(ast.Name("x", ast.Store()))
+ self.stmt(ast.For(x, y, [e], []), "must have Load context")
+ self.stmt(ast.For(x, y, [p], [e]), "must have Load context")
+
+ def test_while(self):
+ self.stmt(ast.While(ast.Num(3), [], []), "empty body on While")
+ self.stmt(ast.While(ast.Name("x", ast.Store()), [ast.Pass()], []),
+ "must have Load context")
+ self.stmt(ast.While(ast.Num(3), [ast.Pass()],
+ [ast.Expr(ast.Name("x", ast.Store()))]),
+ "must have Load context")
+
+ def test_if(self):
+ self.stmt(ast.If(ast.Num(3), [], []), "empty body on If")
+ i = ast.If(ast.Name("x", ast.Store()), [ast.Pass()], [])
+ self.stmt(i, "must have Load context")
+ i = ast.If(ast.Num(3), [ast.Expr(ast.Name("x", ast.Store()))], [])
+ self.stmt(i, "must have Load context")
+ i = ast.If(ast.Num(3), [ast.Pass()],
+ [ast.Expr(ast.Name("x", ast.Store()))])
+ self.stmt(i, "must have Load context")
+
+ def test_with(self):
+ p = ast.Pass()
+ self.stmt(ast.With([], [p]), "empty items on With")
+ i = ast.withitem(ast.Num(3), None)
+ self.stmt(ast.With([i], []), "empty body on With")
+ i = ast.withitem(ast.Name("x", ast.Store()), None)
+ self.stmt(ast.With([i], [p]), "must have Load context")
+ i = ast.withitem(ast.Num(3), ast.Name("x", ast.Load()))
+ self.stmt(ast.With([i], [p]), "must have Store context")
+
+ def test_raise(self):
+ r = ast.Raise(None, ast.Num(3))
+ self.stmt(r, "Raise with cause but no exception")
+ r = ast.Raise(ast.Name("x", ast.Store()), None)
+ self.stmt(r, "must have Load context")
+ r = ast.Raise(ast.Num(4), ast.Name("x", ast.Store()))
+ self.stmt(r, "must have Load context")
+
+ def test_try(self):
+ p = ast.Pass()
+ t = ast.Try([], [], [], [p])
+ self.stmt(t, "empty body on Try")
+ t = ast.Try([ast.Expr(ast.Name("x", ast.Store()))], [], [], [p])
+ self.stmt(t, "must have Load context")
+ t = ast.Try([p], [], [], [])
+ self.stmt(t, "Try has neither except handlers nor finalbody")
+ t = ast.Try([p], [], [p], [p])
+ self.stmt(t, "Try has orelse but no except handlers")
+ t = ast.Try([p], [ast.ExceptHandler(None, "x", [])], [], [])
+ self.stmt(t, "empty body on ExceptHandler")
+ e = [ast.ExceptHandler(ast.Name("x", ast.Store()), "y", [p])]
+ self.stmt(ast.Try([p], e, [], []), "must have Load context")
+ e = [ast.ExceptHandler(None, "x", [p])]
+ t = ast.Try([p], e, [ast.Expr(ast.Name("x", ast.Store()))], [p])
+ self.stmt(t, "must have Load context")
+ t = ast.Try([p], e, [p], [ast.Expr(ast.Name("x", ast.Store()))])
+ self.stmt(t, "must have Load context")
+
+ def test_assert(self):
+ self.stmt(ast.Assert(ast.Name("x", ast.Store()), None),
+ "must have Load context")
+ assrt = ast.Assert(ast.Name("x", ast.Load()),
+ ast.Name("y", ast.Store()))
+ self.stmt(assrt, "must have Load context")
+
+ def test_import(self):
+ self.stmt(ast.Import([]), "empty names on Import")
+
+ def test_importfrom(self):
+ imp = ast.ImportFrom(None, [ast.alias("x", None)], -42)
+ self.stmt(imp, "level less than -1")
+ self.stmt(ast.ImportFrom(None, [], 0), "empty names on ImportFrom")
+
+ def test_global(self):
+ self.stmt(ast.Global([]), "empty names on Global")
+
+ def test_nonlocal(self):
+ self.stmt(ast.Nonlocal([]), "empty names on Nonlocal")
+
+ def test_expr(self):
+ e = ast.Expr(ast.Name("x", ast.Store()))
+ self.stmt(e, "must have Load context")
+
+ def test_boolop(self):
+ b = ast.BoolOp(ast.And(), [])
+ self.expr(b, "less than 2 values")
+ b = ast.BoolOp(ast.And(), [ast.Num(3)])
+ self.expr(b, "less than 2 values")
+ b = ast.BoolOp(ast.And(), [ast.Num(4), None])
+ self.expr(b, "None disallowed")
+ b = ast.BoolOp(ast.And(), [ast.Num(4), ast.Name("x", ast.Store())])
+ self.expr(b, "must have Load context")
+
+ def test_unaryop(self):
+ u = ast.UnaryOp(ast.Not(), ast.Name("x", ast.Store()))
+ self.expr(u, "must have Load context")
+
+ def test_lambda(self):
+ a = ast.arguments([], None, None, [], None, None, [], [])
+ self.expr(ast.Lambda(a, ast.Name("x", ast.Store())),
+ "must have Load context")
+ def fac(args):
+ return ast.Lambda(args, ast.Name("x", ast.Load()))
+ self._check_arguments(fac, self.expr)
+
+ def test_ifexp(self):
+ l = ast.Name("x", ast.Load())
+ s = ast.Name("y", ast.Store())
+ for args in (s, l, l), (l, s, l), (l, l, s):
+ self.expr(ast.IfExp(*args), "must have Load context")
+
+ def test_dict(self):
+ d = ast.Dict([], [ast.Name("x", ast.Load())])
+ self.expr(d, "same number of keys as values")
+ d = ast.Dict([None], [ast.Name("x", ast.Load())])
+ self.expr(d, "None disallowed")
+ d = ast.Dict([ast.Name("x", ast.Load())], [None])
+ self.expr(d, "None disallowed")
+
+ def test_set(self):
+ self.expr(ast.Set([None]), "None disallowed")
+ s = ast.Set([ast.Name("x", ast.Store())])
+ self.expr(s, "must have Load context")
+
+ def _check_comprehension(self, fac):
+ self.expr(fac([]), "comprehension with no generators")
+ g = ast.comprehension(ast.Name("x", ast.Load()),
+ ast.Name("x", ast.Load()), [])
+ self.expr(fac([g]), "must have Store context")
+ g = ast.comprehension(ast.Name("x", ast.Store()),
+ ast.Name("x", ast.Store()), [])
+ self.expr(fac([g]), "must have Load context")
+ x = ast.Name("x", ast.Store())
+ y = ast.Name("y", ast.Load())
+ g = ast.comprehension(x, y, [None])
+ self.expr(fac([g]), "None disallowed")
+ g = ast.comprehension(x, y, [ast.Name("x", ast.Store())])
+ self.expr(fac([g]), "must have Load context")
+
+ def _simple_comp(self, fac):
+ g = ast.comprehension(ast.Name("x", ast.Store()),
+ ast.Name("x", ast.Load()), [])
+ self.expr(fac(ast.Name("x", ast.Store()), [g]),
+ "must have Load context")
+ def wrap(gens):
+ return fac(ast.Name("x", ast.Store()), gens)
+ self._check_comprehension(wrap)
+
+ def test_listcomp(self):
+ self._simple_comp(ast.ListComp)
+
+ def test_setcomp(self):
+ self._simple_comp(ast.SetComp)
+
+ def test_generatorexp(self):
+ self._simple_comp(ast.GeneratorExp)
+
+ def test_dictcomp(self):
+ g = ast.comprehension(ast.Name("y", ast.Store()),
+ ast.Name("p", ast.Load()), [])
+ c = ast.DictComp(ast.Name("x", ast.Store()),
+ ast.Name("y", ast.Load()), [g])
+ self.expr(c, "must have Load context")
+ c = ast.DictComp(ast.Name("x", ast.Load()),
+ ast.Name("y", ast.Store()), [g])
+ self.expr(c, "must have Load context")
+ def factory(comps):
+ k = ast.Name("x", ast.Load())
+ v = ast.Name("y", ast.Load())
+ return ast.DictComp(k, v, comps)
+ self._check_comprehension(factory)
+
+ def test_yield(self):
+ self.expr(ast.Yield(ast.Name("x", ast.Store())), "must have Load")
+ self.expr(ast.YieldFrom(ast.Name("x", ast.Store())), "must have Load")
+
+ def test_compare(self):
+ left = ast.Name("x", ast.Load())
+ comp = ast.Compare(left, [ast.In()], [])
+ self.expr(comp, "no comparators")
+ comp = ast.Compare(left, [ast.In()], [ast.Num(4), ast.Num(5)])
+ self.expr(comp, "different number of comparators and operands")
+ comp = ast.Compare(ast.Num("blah"), [ast.In()], [left])
+ self.expr(comp, "non-numeric", exc=TypeError)
+ comp = ast.Compare(left, [ast.In()], [ast.Num("blah")])
+ self.expr(comp, "non-numeric", exc=TypeError)
+
+ def test_call(self):
+ func = ast.Name("x", ast.Load())
+ args = [ast.Name("y", ast.Load())]
+ keywords = [ast.keyword("w", ast.Name("z", ast.Load()))]
+ stararg = ast.Name("p", ast.Load())
+ kwarg = ast.Name("q", ast.Load())
+ call = ast.Call(ast.Name("x", ast.Store()), args, keywords, stararg,
+ kwarg)
+ self.expr(call, "must have Load context")
+ call = ast.Call(func, [None], keywords, stararg, kwarg)
+ self.expr(call, "None disallowed")
+ bad_keywords = [ast.keyword("w", ast.Name("z", ast.Store()))]
+ call = ast.Call(func, args, bad_keywords, stararg, kwarg)
+ self.expr(call, "must have Load context")
+ call = ast.Call(func, args, keywords, ast.Name("z", ast.Store()), kwarg)
+ self.expr(call, "must have Load context")
+ call = ast.Call(func, args, keywords, stararg,
+ ast.Name("w", ast.Store()))
+ self.expr(call, "must have Load context")
+
+ def test_num(self):
+ class subint(int):
+ pass
+ class subfloat(float):
+ pass
+ class subcomplex(complex):
+ pass
+ for obj in "0", "hello", subint(), subfloat(), subcomplex():
+ self.expr(ast.Num(obj), "non-numeric", exc=TypeError)
+
+ def test_attribute(self):
+ attr = ast.Attribute(ast.Name("x", ast.Store()), "y", ast.Load())
+ self.expr(attr, "must have Load context")
+
+ def test_subscript(self):
+ sub = ast.Subscript(ast.Name("x", ast.Store()), ast.Index(ast.Num(3)),
+ ast.Load())
+ self.expr(sub, "must have Load context")
+ x = ast.Name("x", ast.Load())
+ sub = ast.Subscript(x, ast.Index(ast.Name("y", ast.Store())),
+ ast.Load())
+ self.expr(sub, "must have Load context")
+ s = ast.Name("x", ast.Store())
+ for args in (s, None, None), (None, s, None), (None, None, s):
+ sl = ast.Slice(*args)
+ self.expr(ast.Subscript(x, sl, ast.Load()),
+ "must have Load context")
+ sl = ast.ExtSlice([])
+ self.expr(ast.Subscript(x, sl, ast.Load()), "empty dims on ExtSlice")
+ sl = ast.ExtSlice([ast.Index(s)])
+ self.expr(ast.Subscript(x, sl, ast.Load()), "must have Load context")
+
+ def test_starred(self):
+ left = ast.List([ast.Starred(ast.Name("x", ast.Load()), ast.Store())],
+ ast.Store())
+ assign = ast.Assign([left], ast.Num(4))
+ self.stmt(assign, "must have Store context")
+
+ def _sequence(self, fac):
+ self.expr(fac([None], ast.Load()), "None disallowed")
+ self.expr(fac([ast.Name("x", ast.Store())], ast.Load()),
+ "must have Load context")
+
+ def test_list(self):
+ self._sequence(ast.List)
+
+ def test_tuple(self):
+ self._sequence(ast.Tuple)
+
+ def test_stdlib_validates(self):
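+ # Smoke test: every module alongside ast.py (plus two grammar-heavy test
+ # files) must parse and compile cleanly through the validator.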
+ stdlib = os.path.dirname(ast.__file__)
+ tests = [fn for fn in os.listdir(stdlib) if fn.endswith(".py")]
+ tests.extend(["test/test_grammar.py", "test/test_unpack_ex.py"])
+ for module in tests:
+ fn = os.path.join(stdlib, module)
+ with open(fn, "r", encoding="utf-8") as fp:
+ source = fp.read()
+ mod = ast.parse(source)
+ compile(mod, fn, "exec")
+
+
def test_main():
- support.run_unittest(AST_Tests, ASTHelpers_Test)
+ support.run_unittest(AST_Tests, ASTHelpers_Test, ASTValidatorTests)
def main():
if __name__ != '__main__':
@@ -527,9 +940,11 @@ exec_results = [
('Module', [('For', (1, 0), ('Name', (1, 4), 'v', ('Store',)), ('Name', (1, 9), 'v', ('Load',)), [('Pass', (1, 11))], [])]),
('Module', [('While', (1, 0), ('Name', (1, 6), 'v', ('Load',)), [('Pass', (1, 8))], [])]),
('Module', [('If', (1, 0), ('Name', (1, 3), 'v', ('Load',)), [('Pass', (1, 5))], [])]),
+('Module', [('With', (1, 0), [('withitem', ('Name', (1, 5), 'x', ('Load',)), ('Name', (1, 10), 'y', ('Store',)))], [('Pass', (1, 13))])]),
+('Module', [('With', (1, 0), [('withitem', ('Name', (1, 5), 'x', ('Load',)), ('Name', (1, 10), 'y', ('Store',))), ('withitem', ('Name', (1, 13), 'z', ('Load',)), ('Name', (1, 18), 'q', ('Store',)))], [('Pass', (1, 21))])]),
('Module', [('Raise', (1, 0), ('Call', (1, 6), ('Name', (1, 6), 'Exception', ('Load',)), [('Str', (1, 16), 'string')], [], None, None), None)]),
-('Module', [('TryExcept', (1, 0), [('Pass', (2, 2))], [('ExceptHandler', (3, 0), ('Name', (3, 7), 'Exception', ('Load',)), None, [('Pass', (4, 2))])], [])]),
-('Module', [('TryFinally', (1, 0), [('Pass', (2, 2))], [('Pass', (4, 2))])]),
+('Module', [('Try', (1, 0), [('Pass', (2, 2))], [('ExceptHandler', (3, 0), ('Name', (3, 7), 'Exception', ('Load',)), None, [('Pass', (4, 2))])], [], [])]),
+('Module', [('Try', (1, 0), [('Pass', (2, 2))], [], [], [('Pass', (4, 2))])]),
('Module', [('Assert', (1, 0), ('Name', (1, 7), 'v', ('Load',)), None)]),
('Module', [('Import', (1, 0), [('alias', 'sys', None)])]),
('Module', [('ImportFrom', (1, 0), 'sys', [('alias', 'v', None)], 0)]),
diff --git a/Lib/test/test_asyncore.py b/Lib/test/test_asyncore.py
index 53c49a8..52dff0f 100644
--- a/Lib/test/test_asyncore.py
+++ b/Lib/test/test_asyncore.py
@@ -20,6 +20,8 @@ except ImportError:
HOST = support.HOST
+HAS_UNIX_SOCKETS = hasattr(socket, 'AF_UNIX')
+
class dummysocket:
def __init__(self):
self.closed = False
@@ -87,6 +89,13 @@ def capture_server(evt, buf, serv):
serv.close()
evt.set()
+def bind_af_aware(sock, addr):
+ """Helper function to bind a socket according to its family."""
+ if HAS_UNIX_SOCKETS and sock.family == socket.AF_UNIX:
+ # Make sure the path doesn't exist.
+ unlink(addr)
+ sock.bind(addr)
+
class HelperFunctionTests(unittest.TestCase):
def test_readwriteexc(self):
@@ -352,7 +361,7 @@ class DispatcherWithSendTests(unittest.TestCase):
@support.reap_threads
def test_send(self):
evt = threading.Event()
- sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ sock = socket.socket()
sock.settimeout(3)
port = support.bind_port(sock)
@@ -367,7 +376,7 @@ class DispatcherWithSendTests(unittest.TestCase):
data = b"Suppose there isn't a 16-ton weight?"
d = dispatcherwithsend_noread()
- d.create_socket(socket.AF_INET, socket.SOCK_STREAM)
+ d.create_socket()
d.connect((HOST, port))
# give time for socket to connect
@@ -467,22 +476,22 @@ class BaseTestHandler(asyncore.dispatcher):
raise
-class TCPServer(asyncore.dispatcher):
+class BaseServer(asyncore.dispatcher):
"""A server which listens on an address and dispatches the
connection to a handler.
"""
- def __init__(self, handler=BaseTestHandler, host=HOST, port=0):
+ def __init__(self, family, addr, handler=BaseTestHandler):
asyncore.dispatcher.__init__(self)
- self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
+ self.create_socket(family)
self.set_reuse_addr()
- self.bind((host, port))
+ bind_af_aware(self.socket, addr)
self.listen(5)
self.handler = handler
@property
def address(self):
- return self.socket.getsockname()[:2]
+ return self.socket.getsockname()
def handle_accepted(self, sock, addr):
self.handler(sock)
@@ -493,9 +502,9 @@ class TCPServer(asyncore.dispatcher):
class BaseClient(BaseTestHandler):
- def __init__(self, address):
+ def __init__(self, family, address):
BaseTestHandler.__init__(self)
- self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
+ self.create_socket(family)
self.connect(address)
def handle_connect(self):
@@ -525,8 +534,8 @@ class BaseTestAPI(unittest.TestCase):
def handle_connect(self):
self.flag = True
- server = TCPServer()
- client = TestClient(server.address)
+ server = BaseServer(self.family, self.addr)
+ client = TestClient(self.family, server.address)
self.loop_waiting_for_flag(client)
def test_handle_accept(self):
@@ -534,18 +543,18 @@ class BaseTestAPI(unittest.TestCase):
class TestListener(BaseTestHandler):
- def __init__(self):
+ def __init__(self, family, addr):
BaseTestHandler.__init__(self)
- self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
- self.bind((HOST, 0))
+ self.create_socket(family)
+ bind_af_aware(self.socket, addr)
self.listen(5)
- self.address = self.socket.getsockname()[:2]
+ self.address = self.socket.getsockname()
def handle_accept(self):
self.flag = True
- server = TestListener()
- client = BaseClient(server.address)
+ server = TestListener(self.family, self.addr)
+ client = BaseClient(self.family, server.address)
self.loop_waiting_for_flag(server)
def test_handle_accepted(self):
@@ -553,12 +562,12 @@ class BaseTestAPI(unittest.TestCase):
class TestListener(BaseTestHandler):
- def __init__(self):
+ def __init__(self, family, addr):
BaseTestHandler.__init__(self)
- self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
- self.bind((HOST, 0))
+ self.create_socket(family)
+ bind_af_aware(self.socket, addr)
self.listen(5)
- self.address = self.socket.getsockname()[:2]
+ self.address = self.socket.getsockname()
def handle_accept(self):
asyncore.dispatcher.handle_accept(self)
@@ -567,8 +576,8 @@ class BaseTestAPI(unittest.TestCase):
sock.close()
self.flag = True
- server = TestListener()
- client = BaseClient(server.address)
+ server = TestListener(self.family, self.addr)
+ client = BaseClient(self.family, server.address)
self.loop_waiting_for_flag(server)
@@ -584,8 +593,8 @@ class BaseTestAPI(unittest.TestCase):
BaseTestHandler.__init__(self, conn)
self.send(b'x' * 1024)
- server = TCPServer(TestHandler)
- client = TestClient(server.address)
+ server = BaseServer(self.family, self.addr, TestHandler)
+ client = TestClient(self.family, server.address)
self.loop_waiting_for_flag(client)
def test_handle_write(self):
@@ -595,8 +604,8 @@ class BaseTestAPI(unittest.TestCase):
def handle_write(self):
self.flag = True
- server = TCPServer()
- client = TestClient(server.address)
+ server = BaseServer(self.family, self.addr)
+ client = TestClient(self.family, server.address)
self.loop_waiting_for_flag(client)
def test_handle_close(self):
@@ -619,8 +628,40 @@ class BaseTestAPI(unittest.TestCase):
BaseTestHandler.__init__(self, conn)
self.close()
- server = TCPServer(TestHandler)
- client = TestClient(server.address)
+ server = BaseServer(self.family, self.addr, TestHandler)
+ client = TestClient(self.family, server.address)
+ self.loop_waiting_for_flag(client)
+
+ def test_handle_close_after_conn_broken(self):
+ # Check that ECONNRESET/EPIPE is correctly handled (issues #5661 and
+ # #11265).
+
+ data = b'\0' * 128
+
+ class TestClient(BaseClient):
+
+ def handle_write(self):
+ self.send(data)
+
+ def handle_close(self):
+ self.flag = True
+ self.close()
+
+ def handle_expt(self):
+ self.flag = True
+ self.close()
+
+ class TestHandler(BaseTestHandler):
+
+ def handle_read(self):
+ self.recv(len(data))
+ self.close()
+
+ def writable(self):
+ return False
+
+ server = BaseServer(self.family, self.addr, TestHandler)
+ client = TestClient(self.family, server.address)
self.loop_waiting_for_flag(client)
@unittest.skipIf(sys.platform.startswith("sunos"),
@@ -629,9 +670,12 @@ class BaseTestAPI(unittest.TestCase):
# Make sure handle_expt is called on OOB data received.
# Note: this might fail on some platforms as OOB data is
# tenuously supported and rarely used.
+ if HAS_UNIX_SOCKETS and self.family == socket.AF_UNIX:
+ self.skipTest("Not applicable to AF_UNIX sockets.")
class TestClient(BaseClient):
def handle_expt(self):
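+ # Consume the pending OOB byte so the exceptional condition is
+ # cleared before setting the flag.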
+ self.socket.recv(1024, socket.MSG_OOB)
self.flag = True
class TestHandler(BaseTestHandler):
@@ -639,8 +683,8 @@ class BaseTestAPI(unittest.TestCase):
BaseTestHandler.__init__(self, conn)
self.socket.send(bytes(chr(244), 'latin-1'), socket.MSG_OOB)
- server = TCPServer(TestHandler)
- client = TestClient(server.address)
+ server = BaseServer(self.family, self.addr, TestHandler)
+ client = TestClient(self.family, server.address)
self.loop_waiting_for_flag(client)
def test_handle_error(self):
@@ -657,13 +701,13 @@ class BaseTestAPI(unittest.TestCase):
else:
raise Exception("exception not raised")
- server = TCPServer()
- client = TestClient(server.address)
+ server = BaseServer(self.family, self.addr)
+ client = TestClient(self.family, server.address)
self.loop_waiting_for_flag(client)
def test_connection_attributes(self):
- server = TCPServer()
- client = BaseClient(server.address)
+ server = BaseServer(self.family, self.addr)
+ client = BaseClient(self.family, server.address)
# we start disconnected
self.assertFalse(server.connected)
@@ -693,25 +737,29 @@ class BaseTestAPI(unittest.TestCase):
def test_create_socket(self):
s = asyncore.dispatcher()
- s.create_socket(socket.AF_INET, socket.SOCK_STREAM)
- self.assertEqual(s.socket.family, socket.AF_INET)
+ s.create_socket(self.family)
+ self.assertEqual(s.socket.family, self.family)
SOCK_NONBLOCK = getattr(socket, 'SOCK_NONBLOCK', 0)
self.assertEqual(s.socket.type, socket.SOCK_STREAM | SOCK_NONBLOCK)
def test_bind(self):
+ if HAS_UNIX_SOCKETS and self.family == socket.AF_UNIX:
+ self.skipTest("Not applicable to AF_UNIX sockets.")
s1 = asyncore.dispatcher()
- s1.create_socket(socket.AF_INET, socket.SOCK_STREAM)
- s1.bind((HOST, 0))
+ s1.create_socket(self.family)
+ s1.bind(self.addr)
s1.listen(5)
port = s1.socket.getsockname()[1]
s2 = asyncore.dispatcher()
- s2.create_socket(socket.AF_INET, socket.SOCK_STREAM)
+ s2.create_socket(self.family)
# EADDRINUSE indicates the socket was correctly bound
- self.assertRaises(socket.error, s2.bind, (HOST, port))
+ self.assertRaises(socket.error, s2.bind, (self.addr[0], port))
def test_set_reuse_addr(self):
- sock = socket.socket()
+ if HAS_UNIX_SOCKETS and self.family == socket.AF_UNIX:
+ self.skipTest("Not applicable to AF_UNIX sockets.")
+ sock = socket.socket(self.family)
try:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
except socket.error:
@@ -719,11 +767,11 @@ class BaseTestAPI(unittest.TestCase):
else:
# if SO_REUSEADDR succeeded for sock we expect asyncore
# to do the same
- s = asyncore.dispatcher(socket.socket())
+ s = asyncore.dispatcher(socket.socket(self.family))
self.assertFalse(s.socket.getsockopt(socket.SOL_SOCKET,
socket.SO_REUSEADDR))
s.socket.close()
- s.create_socket(socket.AF_INET, socket.SOCK_STREAM)
+ s.create_socket(self.family)
s.set_reuse_addr()
self.assertTrue(s.socket.getsockopt(socket.SOL_SOCKET,
socket.SO_REUSEADDR))
@@ -731,18 +779,52 @@ class BaseTestAPI(unittest.TestCase):
sock.close()
-class TestAPI_UseSelect(BaseTestAPI):
+class TestAPI_UseIPv4Sockets(BaseTestAPI):
+ family = socket.AF_INET
+ addr = (HOST, 0)
+
+@unittest.skipUnless(support.IPV6_ENABLED, 'IPv6 support required')
+class TestAPI_UseIPv6Sockets(BaseTestAPI):
+ family = socket.AF_INET6
+ addr = ('::1', 0)
+
+@unittest.skipUnless(HAS_UNIX_SOCKETS, 'Unix sockets required')
+class TestAPI_UseUnixSockets(BaseTestAPI):
+ if HAS_UNIX_SOCKETS:
+ family = socket.AF_UNIX
+ addr = support.TESTFN
+
+ def tearDown(self):
+ unlink(self.addr)
+ BaseTestAPI.tearDown(self)
+
+class TestAPI_UseIPv4Select(TestAPI_UseIPv4Sockets):
+ use_poll = False
+
+@unittest.skipUnless(hasattr(select, 'poll'), 'select.poll required')
+class TestAPI_UseIPv4Poll(TestAPI_UseIPv4Sockets):
+ use_poll = True
+
+class TestAPI_UseIPv6Select(TestAPI_UseIPv6Sockets):
use_poll = False
@unittest.skipUnless(hasattr(select, 'poll'), 'select.poll required')
-class TestAPI_UsePoll(BaseTestAPI):
+class TestAPI_UseIPv6Poll(TestAPI_UseIPv6Sockets):
use_poll = True
+class TestAPI_UseUnixSocketsSelect(TestAPI_UseUnixSockets):
+ use_poll = False
+
+@unittest.skipUnless(hasattr(select, 'poll'), 'select.poll required')
+class TestAPI_UseUnixSocketsPoll(TestAPI_UseUnixSockets):
+ use_poll = True
def test_main():
tests = [HelperFunctionTests, DispatcherTests, DispatcherWithSendTests,
- DispatcherWithSendTests_UsePoll, TestAPI_UseSelect,
- TestAPI_UsePoll, FileWrapperTest]
+ DispatcherWithSendTests_UsePoll, FileWrapperTest,
+ TestAPI_UseIPv4Select, TestAPI_UseIPv4Poll, TestAPI_UseIPv6Select,
+ TestAPI_UseIPv6Poll, TestAPI_UseUnixSocketsSelect,
+ TestAPI_UseUnixSocketsPoll]
run_unittest(*tests)
if __name__ == "__main__":
diff --git a/Lib/test/test_base64.py b/Lib/test/test_base64.py
index ca94504..2569476 100644
--- a/Lib/test/test_base64.py
+++ b/Lib/test/test_base64.py
@@ -103,44 +103,53 @@ class BaseXYTestCase(unittest.TestCase):
def test_b64decode(self):
eq = self.assertEqual
- eq(base64.b64decode(b"d3d3LnB5dGhvbi5vcmc="), b"www.python.org")
- eq(base64.b64decode(b'AA=='), b'\x00')
- eq(base64.b64decode(b"YQ=="), b"a")
- eq(base64.b64decode(b"YWI="), b"ab")
- eq(base64.b64decode(b"YWJj"), b"abc")
- eq(base64.b64decode(b"YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE"
- b"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT"
- b"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ=="),
- b"abcdefghijklmnopqrstuvwxyz"
- b"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
- b"0123456789!@#0^&*();:<>,. []{}")
- eq(base64.b64decode(b''), b'')
+
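+ # Table-driven: each case is exercised with both bytes and ASCII-str input.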
+ tests = {b"d3d3LnB5dGhvbi5vcmc=": b"www.python.org",
+ b'AA==': b'\x00',
+ b"YQ==": b"a",
+ b"YWI=": b"ab",
+ b"YWJj": b"abc",
+ b"YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE"
+ b"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT"
+ b"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==":
+
+ b"abcdefghijklmnopqrstuvwxyz"
+ b"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
+ b"0123456789!@#0^&*();:<>,. []{}",
+ b'': b'',
+ }
+ for data, res in tests.items():
+ eq(base64.b64decode(data), res)
+ eq(base64.b64decode(data.decode('ascii')), res)
+
# Test with arbitrary alternative characters
- eq(base64.b64decode(b'01a*b$cd', altchars=b'*$'), b'\xd3V\xbeo\xf7\x1d')
- # Check if passing a str object raises an error
- self.assertRaises(TypeError, base64.b64decode, "")
- self.assertRaises(TypeError, base64.b64decode, b"", altchars="")
+ tests_altchars = {(b'01a*b$cd', b'*$'): b'\xd3V\xbeo\xf7\x1d',
+ }
+ for (data, altchars), res in tests_altchars.items():
+ data_str = data.decode('ascii')
+ altchars_str = altchars.decode('ascii')
+
+ eq(base64.b64decode(data, altchars=altchars), res)
+ eq(base64.b64decode(data_str, altchars=altchars), res)
+ eq(base64.b64decode(data, altchars=altchars_str), res)
+ eq(base64.b64decode(data_str, altchars=altchars_str), res)
+
# Test standard alphabet
- eq(base64.standard_b64decode(b"d3d3LnB5dGhvbi5vcmc="), b"www.python.org")
- eq(base64.standard_b64decode(b"YQ=="), b"a")
- eq(base64.standard_b64decode(b"YWI="), b"ab")
- eq(base64.standard_b64decode(b"YWJj"), b"abc")
- eq(base64.standard_b64decode(b""), b"")
- eq(base64.standard_b64decode(b"YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE"
- b"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0NT"
- b"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ=="),
- b"abcdefghijklmnopqrstuvwxyz"
- b"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
- b"0123456789!@#0^&*();:<>,. []{}")
- # Check if passing a str object raises an error
- self.assertRaises(TypeError, base64.standard_b64decode, "")
- self.assertRaises(TypeError, base64.standard_b64decode, b"", altchars="")
+ for data, res in tests.items():
+ eq(base64.standard_b64decode(data), res)
+ eq(base64.standard_b64decode(data.decode('ascii')), res)
+
# Test with 'URL safe' alternative characters
- eq(base64.urlsafe_b64decode(b'01a-b_cd'), b'\xd3V\xbeo\xf7\x1d')
- self.assertRaises(TypeError, base64.urlsafe_b64decode, "")
+ tests_urlsafe = {b'01a-b_cd': b'\xd3V\xbeo\xf7\x1d',
+ b'': b'',
+ }
+ for data, res in tests_urlsafe.items():
+ eq(base64.urlsafe_b64decode(data), res)
+ eq(base64.urlsafe_b64decode(data.decode('ascii')), res)
def test_b64decode_padding_error(self):
self.assertRaises(binascii.Error, base64.b64decode, b'abc')
+ self.assertRaises(binascii.Error, base64.b64decode, 'abc')
def test_b64decode_invalid_chars(self):
# issue 1466065: Test some invalid characters.
@@ -155,8 +164,11 @@ class BaseXYTestCase(unittest.TestCase):
(b'YWJj\nYWI=', b'abcab'))
for bstr, res in tests:
self.assertEqual(base64.b64decode(bstr), res)
+ self.assertEqual(base64.b64decode(bstr.decode('ascii')), res)
with self.assertRaises(binascii.Error):
base64.b64decode(bstr, validate=True)
+ with self.assertRaises(binascii.Error):
+ base64.b64decode(bstr.decode('ascii'), validate=True)
def test_b32encode(self):
eq = self.assertEqual
@@ -171,40 +183,63 @@ class BaseXYTestCase(unittest.TestCase):
def test_b32decode(self):
eq = self.assertEqual
- eq(base64.b32decode(b''), b'')
- eq(base64.b32decode(b'AA======'), b'\x00')
- eq(base64.b32decode(b'ME======'), b'a')
- eq(base64.b32decode(b'MFRA===='), b'ab')
- eq(base64.b32decode(b'MFRGG==='), b'abc')
- eq(base64.b32decode(b'MFRGGZA='), b'abcd')
- eq(base64.b32decode(b'MFRGGZDF'), b'abcde')
- self.assertRaises(TypeError, base64.b32decode, "")
+ tests = {b'': b'',
+ b'AA======': b'\x00',
+ b'ME======': b'a',
+ b'MFRA====': b'ab',
+ b'MFRGG===': b'abc',
+ b'MFRGGZA=': b'abcd',
+ b'MFRGGZDF': b'abcde',
+ }
+ for data, res in tests.items():
+ eq(base64.b32decode(data), res)
+ eq(base64.b32decode(data.decode('ascii')), res)
def test_b32decode_casefold(self):
eq = self.assertEqual
- eq(base64.b32decode(b'', True), b'')
- eq(base64.b32decode(b'ME======', True), b'a')
- eq(base64.b32decode(b'MFRA====', True), b'ab')
- eq(base64.b32decode(b'MFRGG===', True), b'abc')
- eq(base64.b32decode(b'MFRGGZA=', True), b'abcd')
- eq(base64.b32decode(b'MFRGGZDF', True), b'abcde')
- # Lower cases
- eq(base64.b32decode(b'me======', True), b'a')
- eq(base64.b32decode(b'mfra====', True), b'ab')
- eq(base64.b32decode(b'mfrgg===', True), b'abc')
- eq(base64.b32decode(b'mfrggza=', True), b'abcd')
- eq(base64.b32decode(b'mfrggzdf', True), b'abcde')
- # Expected exceptions
+ tests = {b'': b'',
+ b'ME======': b'a',
+ b'MFRA====': b'ab',
+ b'MFRGG===': b'abc',
+ b'MFRGGZA=': b'abcd',
+ b'MFRGGZDF': b'abcde',
+ # Lower cases
+ b'me======': b'a',
+ b'mfra====': b'ab',
+ b'mfrgg===': b'abc',
+ b'mfrggza=': b'abcd',
+ b'mfrggzdf': b'abcde',
+ }
+
+ for data, res in tests.items():
+ eq(base64.b32decode(data, True), res)
+ eq(base64.b32decode(data.decode('ascii'), True), res)
+
self.assertRaises(TypeError, base64.b32decode, b'me======')
+ self.assertRaises(TypeError, base64.b32decode, 'me======')
+
# Mapping zero and one
eq(base64.b32decode(b'MLO23456'), b'b\xdd\xad\xf3\xbe')
- eq(base64.b32decode(b'M1023456', map01=b'L'), b'b\xdd\xad\xf3\xbe')
- eq(base64.b32decode(b'M1023456', map01=b'I'), b'b\x1d\xad\xf3\xbe')
- self.assertRaises(TypeError, base64.b32decode, b"", map01="")
+ eq(base64.b32decode('MLO23456'), b'b\xdd\xad\xf3\xbe')
+
+ map_tests = {(b'M1023456', b'L'): b'b\xdd\xad\xf3\xbe',
+ (b'M1023456', b'I'): b'b\x1d\xad\xf3\xbe',
+ }
+ for (data, map01), res in map_tests.items():
+ data_str = data.decode('ascii')
+ map01_str = map01.decode('ascii')
+
+ eq(base64.b32decode(data, map01=map01), res)
+ eq(base64.b32decode(data_str, map01=map01), res)
+ eq(base64.b32decode(data, map01=map01_str), res)
+ eq(base64.b32decode(data_str, map01=map01_str), res)
def test_b32decode_error(self):
- self.assertRaises(binascii.Error, base64.b32decode, b'abc')
- self.assertRaises(binascii.Error, base64.b32decode, b'ABCDEF==')
+ for data in [b'abc', b'ABCDEF==']:
+ with self.assertRaises(binascii.Error):
+ base64.b32decode(data)
+ with self.assertRaises(binascii.Error):
+ base64.b32decode(data.decode('ascii'))
def test_b16encode(self):
eq = self.assertEqual
@@ -215,12 +250,24 @@ class BaseXYTestCase(unittest.TestCase):
def test_b16decode(self):
eq = self.assertEqual
eq(base64.b16decode(b'0102ABCDEF'), b'\x01\x02\xab\xcd\xef')
+ eq(base64.b16decode('0102ABCDEF'), b'\x01\x02\xab\xcd\xef')
eq(base64.b16decode(b'00'), b'\x00')
+ eq(base64.b16decode('00'), b'\x00')
# Lower case is not allowed without a flag
self.assertRaises(binascii.Error, base64.b16decode, b'0102abcdef')
+ self.assertRaises(binascii.Error, base64.b16decode, '0102abcdef')
# Case fold
eq(base64.b16decode(b'0102abcdef', True), b'\x01\x02\xab\xcd\xef')
- self.assertRaises(TypeError, base64.b16decode, "")
+ eq(base64.b16decode('0102abcdef', True), b'\x01\x02\xab\xcd\xef')
+
+ def test_decode_nonascii_str(self):
+ decode_funcs = (base64.b64decode,
+ base64.standard_b64decode,
+ base64.urlsafe_b64decode,
+ base64.b32decode,
+ base64.b16decode)
+ for f in decode_funcs:
+ self.assertRaises(ValueError, f, 'with non-ascii \xcb')
def test_ErrorHeritage(self):
self.assertTrue(issubclass(binascii.Error, ValueError))
diff --git a/Lib/test/test_bigmem.py b/Lib/test/test_bigmem.py
index f3c6ebb..0e54595 100644
--- a/Lib/test/test_bigmem.py
+++ b/Lib/test/test_bigmem.py
@@ -1,3 +1,13 @@
+"""Bigmem tests - tests for the 32-bit boundary in containers.
+
+These tests try to exercise the 32-bit boundary that is sometimes, if
+rarely, exceeded in practice, but almost never tested. They are really only
+meaningful on 64-bit builds on machines with a *lot* of memory, but the
+tests are always run, usually with very low memory limits to make sure the
+tests themselves don't suffer from bitrot. To run them for real, pass a
+high memory limit to regrtest, with the -M option.
+"""
+
from test import support
from test.support import bigmemtest, _1G, _2G, _4G
@@ -6,20 +16,35 @@ import operator
import sys
import functools
+# These tests all use one of the bigmemtest decorators to indicate how much
+# memory they use and how much memory they need to be even meaningful. The
+# decorators take two arguments: a 'memuse' indicator declaring
+# (approximate) bytes per size-unit the test will use (at peak usage), and a
+# 'minsize' indicator declaring a minimum *useful* size. A test that
+# allocates a bytestring to test various operations near the end will have a
+# minsize of at least 2 GB (or it wouldn't reach the 32-bit limit, so the
+# test wouldn't be very useful) and a memuse of 1 (one byte per size-unit,
+# if it allocates only one big string at a time.)
+#
+# When run with a memory limit set, both decorators skip tests that need
+# more memory than available to be meaningful. The precisionbigmemtest will
+# always pass minsize as size, even if there is much more memory available.
+# The bigmemtest decorator will scale size upward to fill available memory.
+#
# Bigmem testing houserules:
#
# - Try not to allocate too many large objects. It's okay to rely on
-# refcounting semantics, but don't forget that 's = create_largestring()'
+# refcounting semantics, and don't forget that 's = create_largestring()'
# doesn't release the old 's' (if it exists) until well after its new
# value has been created. Use 'del s' before the create_largestring call.
#
-# - Do *not* compare large objects using assertEqual or similar. It's a
-# lengthy operation and the errormessage will be utterly useless due to
-# its size. To make sure whether a result has the right contents, better
-# to use the strip or count methods, or compare meaningful slices.
+# - Do *not* compare large objects using assertEqual, assertIn or similar.
+# It's a lengthy operation and the error message will be utterly useless
+# due to its size. To make sure whether a result has the right contents,
+# better to use the strip or count methods, or compare meaningful slices.
#
# - Don't forget to test for large indices, offsets and results and such,
-# in addition to large sizes.
+# in addition to large sizes. Anything that probes the 32-bit boundary.
#
# - When repeating an object (say, a substring, or a small list) to create
# a large object, make the subobject of a length that is not a power of
@@ -37,13 +62,14 @@ import functools
# fail as well. I do not know whether it is due to memory fragmentation
# issues, or other specifics of the platform malloc() routine.
-character_size = 4 if sys.maxunicode > 0xFFFF else 2
+ascii_char_size = 1
+ucs2_char_size = 2
+ucs4_char_size = 4
class BaseStrTest:
- @bigmemtest(size=_2G, memuse=2)
- def test_capitalize(self, size):
+ def _test_capitalize(self, size):
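+ # Shared body; concrete subclasses wrap it with a bigmemtest decorator
+ # carrying the build-specific memuse (see StrTest below).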
_ = self.from_latin1
SUBSTR = self.from_latin1(' abc def ghi')
s = _('-') * size + SUBSTR
@@ -92,7 +118,7 @@ class BaseStrTest:
_ = self.from_latin1
s = _('-') * size
tabsize = 8
- self.assertEqual(s.expandtabs(), s)
+ self.assertTrue(s.expandtabs() == s)
del s
slen, remainder = divmod(size, tabsize)
s = _(' \t') * slen
@@ -347,7 +373,7 @@ class BaseStrTest:
# suffer for the list size. (Otherwise, it'd cost another 48 times
# size in bytes!) Nevertheless, a list of size takes
# 8*size bytes.
- @bigmemtest(size=_2G + 5, memuse=10)
+ @bigmemtest(size=_2G + 5, memuse=2 * ascii_char_size + 8)
def test_split_large(self, size):
_ = self.from_latin1
s = _(' a') * size + _(' ')
@@ -366,9 +392,9 @@ class BaseStrTest:
# take up an inordinate amount of memory
chunksize = int(size ** 0.5 + 2) // 2
SUBSTR = _(' ') * chunksize + _('\n') + _(' ') * chunksize + _('\r\n')
- s = SUBSTR * chunksize
+ s = SUBSTR * (chunksize * 2)
l = s.splitlines()
- self.assertEqual(len(l), chunksize * 2)
+ self.assertEqual(len(l), chunksize * 4)
expected = _(' ') * chunksize
for item in l:
self.assertEqual(item, expected)
@@ -394,8 +420,7 @@ class BaseStrTest:
self.assertEqual(len(s), size)
self.assertEqual(s.strip(), SUBSTR.strip())
- @bigmemtest(size=_2G, memuse=2)
- def test_swapcase(self, size):
+ def _test_swapcase(self, size):
_ = self.from_latin1
SUBSTR = _("aBcDeFG12.'\xa9\x00")
sublen = len(SUBSTR)
@@ -406,8 +431,7 @@ class BaseStrTest:
self.assertEqual(s[:sublen * 3], SUBSTR.swapcase() * 3)
self.assertEqual(s[-sublen * 3:], SUBSTR.swapcase() * 3)
- @bigmemtest(size=_2G, memuse=2)
- def test_title(self, size):
+ def _test_title(self, size):
_ = self.from_latin1
SUBSTR = _('SpaaHAaaAaham')
s = SUBSTR * (size // len(SUBSTR) + 2)
@@ -419,14 +443,7 @@ class BaseStrTest:
def test_translate(self, size):
_ = self.from_latin1
SUBSTR = _('aZz.z.Aaz.')
- if isinstance(SUBSTR, str):
- trans = {
- ord(_('.')): _('-'),
- ord(_('a')): _('!'),
- ord(_('Z')): _('$'),
- }
- else:
- trans = bytes.maketrans(b'.aZ', b'-!$')
+ trans = bytes.maketrans(b'.aZ', b'-!$')
sublen = len(SUBSTR)
repeats = size // sublen + 2
s = SUBSTR * repeats
@@ -519,19 +536,19 @@ class BaseStrTest:
edge = _('-') * (size // 2)
s = _('').join([edge, SUBSTR, edge])
del edge
- self.assertIn(SUBSTR, s)
- self.assertNotIn(SUBSTR * 2, s)
- self.assertIn(_('-'), s)
- self.assertNotIn(_('a'), s)
+ self.assertTrue(SUBSTR in s)
+ self.assertFalse(SUBSTR * 2 in s)
+ self.assertTrue(_('-') in s)
+ self.assertFalse(_('a') in s)
s += _('a')
- self.assertIn(_('a'), s)
+ self.assertTrue(_('a') in s)
@bigmemtest(size=_2G + 10, memuse=2)
def test_compare(self, size):
_ = self.from_latin1
s1 = _('-') * size
s2 = _('-') * size
- self.assertEqual(s1, s2)
+ self.assertTrue(s1 == s2)
del s2
s2 = s1 + _('a')
self.assertFalse(s1 == s2)
@@ -552,7 +569,7 @@ class BaseStrTest:
h1 = hash(s)
del s
s = _('\x00') * (size + 1)
- self.assertFalse(h1 == hash(s))
+ self.assertNotEqual(h1, hash(s))
class StrTest(unittest.TestCase, BaseStrTest):
@@ -563,7 +580,6 @@ class StrTest(unittest.TestCase, BaseStrTest):
def basic_encode_test(self, size, enc, c='.', expectedsize=None):
if expectedsize is None:
expectedsize = size
-
try:
s = c * size
self.assertEqual(len(s.encode(enc)), expectedsize)
@@ -582,48 +598,64 @@ class StrTest(unittest.TestCase, BaseStrTest):
memuse = meth.memuse
except AttributeError:
continue
- meth.memuse = character_size * memuse
+ meth.memuse = ascii_char_size * memuse
self._adjusted[name] = memuse
def tearDown(self):
for name, memuse in self._adjusted.items():
getattr(type(self), name).memuse = memuse
- # the utf8 encoder preallocates big time (4x the number of characters)
- @bigmemtest(size=_2G + 2, memuse=character_size + 4)
+ @bigmemtest(size=_2G, memuse=ucs4_char_size * 3)
+ def test_capitalize(self, size):
+ self._test_capitalize(size)
+
+ @bigmemtest(size=_2G, memuse=ucs4_char_size * 3)
+ def test_title(self, size):
+ self._test_title(size)
+
+ @bigmemtest(size=_2G, memuse=ucs4_char_size * 3)
+ def test_swapcase(self, size):
+ self._test_swapcase(size)
+
+ # Many codecs convert to the legacy representation first, explaining
+ # why we add 'ucs4_char_size' to the 'memuse' below.
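+ # For instance, the raw_unicode_escape case budgets
+ # ascii_char_size + ucs4_char_size + 1 = 1 + 4 + 1 = 6 bytes per
+ # character: the ASCII source, the legacy copy, and one encoded output
+ # byte (a rough upper bound, not an exact figure).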
+
+ @bigmemtest(size=_2G + 2, memuse=ascii_char_size + 1)
def test_encode(self, size):
return self.basic_encode_test(size, 'utf-8')
- @bigmemtest(size=_4G // 6 + 2, memuse=character_size + 1)
+ @bigmemtest(size=_4G // 6 + 2, memuse=ascii_char_size + ucs4_char_size + 1)
def test_encode_raw_unicode_escape(self, size):
try:
return self.basic_encode_test(size, 'raw_unicode_escape')
except MemoryError:
pass # acceptable on 32-bit
- @bigmemtest(size=_4G // 5 + 70, memuse=character_size + 1)
+ @bigmemtest(size=_4G // 5 + 70, memuse=ascii_char_size + ucs4_char_size + 1)
def test_encode_utf7(self, size):
try:
return self.basic_encode_test(size, 'utf7')
except MemoryError:
pass # acceptable on 32-bit
- @bigmemtest(size=_4G // 4 + 5, memuse=character_size + 4)
+ @bigmemtest(size=_4G // 4 + 5, memuse=ascii_char_size + ucs4_char_size + 4)
def test_encode_utf32(self, size):
try:
- return self.basic_encode_test(size, 'utf32', expectedsize=4*size+4)
+ return self.basic_encode_test(size, 'utf32', expectedsize=4 * size + 4)
except MemoryError:
pass # acceptable on 32-bit
- @bigmemtest(size=_2G - 1, memuse=character_size + 1)
+ @bigmemtest(size=_2G - 1, memuse=ascii_char_size + 1)
def test_encode_ascii(self, size):
return self.basic_encode_test(size, 'ascii', c='A')
- @bigmemtest(size=_2G + 10, memuse=character_size * 2)
+ # str % (...) uses a Py_UCS4 intermediate representation
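+ # Hence memuse = ascii_char_size * 2 + ucs4_char_size = 1*2 + 4 = 6:
+ # the ASCII source and result plus the UCS-4 scratch copy.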
+
+ @bigmemtest(size=_2G + 10, memuse=ascii_char_size * 2 + ucs4_char_size)
def test_format(self, size):
s = '-' * size
sf = '%s' % (s,)
- self.assertEqual(s, sf)
+ self.assertTrue(s == sf)
del sf
sf = '..%s..' % (s,)
self.assertEqual(len(sf), len(s) + 4)
@@ -640,7 +672,7 @@ class StrTest(unittest.TestCase, BaseStrTest):
self.assertEqual(s.count('.'), 3)
self.assertEqual(s.count('-'), size * 2)
- @bigmemtest(size=_2G + 10, memuse=character_size * 2)
+ @bigmemtest(size=_2G + 10, memuse=ascii_char_size * 2)
def test_repr_small(self, size):
s = '-' * size
s = repr(s)
@@ -661,7 +693,7 @@ class StrTest(unittest.TestCase, BaseStrTest):
self.assertEqual(s.count('\\'), size)
self.assertEqual(s.count('0'), size * 2)
- @bigmemtest(size=_2G + 10, memuse=character_size * 5)
+ @bigmemtest(size=_2G + 10, memuse=ascii_char_size * 5)
def test_repr_large(self, size):
s = '\x00' * size
s = repr(s)
@@ -671,7 +703,13 @@ class StrTest(unittest.TestCase, BaseStrTest):
self.assertEqual(s.count('\\'), size)
self.assertEqual(s.count('0'), size * 2)
- @bigmemtest(size=_2G // 5 + 1, memuse=character_size * 7)
+ # ascii() calls encode('ascii', 'backslashreplace'), which itself
+ # creates a temporary Py_UNICODE representation in addition to the
+ # original (Py_UCS2) one.
+ # There's also some overallocation when resizing the ascii() result
+ # that isn't taken into account here.
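+ # In numbers: ucs2_char_size + ucs4_char_size + ascii_char_size * 6
+ # = 2 + 4 + 6 = 12 bytes per character, the six ASCII bytes covering
+ # the '\uXXXX' escape that each code point expands to.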
+ @bigmemtest(size=_2G // 5 + 1, memuse=ucs2_char_size +
+ ucs4_char_size + ascii_char_size * 6)
def test_unicode_repr(self, size):
# Use an assigned, but not printable code point.
# It is in the range of the low surrogates \uDC00-\uDFFF.
@@ -686,9 +724,7 @@ class StrTest(unittest.TestCase, BaseStrTest):
finally:
r = s = None
- # The character takes 4 bytes even in UCS-2 builds because it will
- # be decomposed into surrogates.
- @bigmemtest(size=_2G // 5 + 1, memuse=4 + character_size * 9)
+ @bigmemtest(size=_2G // 5 + 1, memuse=ucs4_char_size * 2 + ascii_char_size * 10)
def test_unicode_repr_wide(self, size):
char = "\U0001DCBA"
s = char * size
@@ -701,39 +737,76 @@ class StrTest(unittest.TestCase, BaseStrTest):
finally:
r = s = None
- @bigmemtest(size=_4G // 5, memuse=character_size * (6 + 1))
- def _test_unicode_repr_overflow(self, size):
- # XXX not sure what this test is about
- char = "\uDCBA"
- s = char * size
- try:
- r = repr(s)
- self.assertTrue(s == eval(r))
- finally:
- r = s = None
+ # The original test_translate is overridden here, so as to get the
+ # correct size estimate: str.translate() uses an intermediate Py_UCS4
+ # representation.
+
+ @bigmemtest(size=_2G, memuse=ascii_char_size * 2 + ucs4_char_size)
+ def test_translate(self, size):
+ _ = self.from_latin1
+ SUBSTR = _('aZz.z.Aaz.')
+ trans = {
+ ord(_('.')): _('-'),
+ ord(_('a')): _('!'),
+ ord(_('Z')): _('$'),
+ }
+ sublen = len(SUBSTR)
+ repeats = size // sublen + 2
+ s = SUBSTR * repeats
+ s = s.translate(trans)
+ self.assertEqual(len(s), repeats * sublen)
+ self.assertEqual(s[:sublen], SUBSTR.translate(trans))
+ self.assertEqual(s[-sublen:], SUBSTR.translate(trans))
+ self.assertEqual(s.count(_('.')), 0)
+ self.assertEqual(s.count(_('!')), repeats * 2)
+ self.assertEqual(s.count(_('z')), repeats * 3)
class BytesTest(unittest.TestCase, BaseStrTest):
def from_latin1(self, s):
- return s.encode("latin1")
+ return s.encode("latin-1")
- @bigmemtest(size=_2G + 2, memuse=1 + character_size)
+ @bigmemtest(size=_2G + 2, memuse=1 + ascii_char_size)
def test_decode(self, size):
s = self.from_latin1('.') * size
self.assertEqual(len(s.decode('utf-8')), size)
+ @bigmemtest(size=_2G, memuse=2)
+ def test_capitalize(self, size):
+ self._test_capitalize(size)
+
+ @bigmemtest(size=_2G, memuse=2)
+ def test_title(self, size):
+ self._test_title(size)
+
+ @bigmemtest(size=_2G, memuse=2)
+ def test_swapcase(self, size):
+ self._test_swapcase(size)
+
class BytearrayTest(unittest.TestCase, BaseStrTest):
def from_latin1(self, s):
- return bytearray(s.encode("latin1"))
+ return bytearray(s.encode("latin-1"))
- @bigmemtest(size=_2G + 2, memuse=1 + character_size)
+ @bigmemtest(size=_2G + 2, memuse=1 + ascii_char_size)
def test_decode(self, size):
s = self.from_latin1('.') * size
self.assertEqual(len(s.decode('utf-8')), size)
+ @bigmemtest(size=_2G, memuse=2)
+ def test_capitalize(self, size):
+ self._test_capitalize(size)
+
+ @bigmemtest(size=_2G, memuse=2)
+ def test_title(self, size):
+ self._test_title(size)
+
+ @bigmemtest(size=_2G, memuse=2)
+ def test_swapcase(self, size):
+ self._test_swapcase(size)
+
test_hash = None
test_split_large = None
@@ -752,7 +825,7 @@ class TupleTest(unittest.TestCase):
def test_compare(self, size):
t1 = ('',) * size
t2 = ('',) * size
- self.assertEqual(t1, t2)
+ self.assertTrue(t1 == t2)
del t2
t2 = ('',) * (size + 1)
self.assertFalse(t1 == t2)
@@ -783,9 +856,9 @@ class TupleTest(unittest.TestCase):
def test_contains(self, size):
t = (1, 2, 3, 4, 5) * size
self.assertEqual(len(t), size * 5)
- self.assertIn(5, t)
- self.assertNotIn((1, 2, 3, 4, 5), t)
- self.assertNotIn(0, t)
+ self.assertTrue(5 in t)
+ self.assertFalse((1, 2, 3, 4, 5) in t)
+ self.assertFalse(0 in t)
@bigmemtest(size=_2G + 10, memuse=8)
def test_hash(self, size):
@@ -869,11 +942,11 @@ class TupleTest(unittest.TestCase):
self.assertEqual(s[-5:], '0, 0)')
self.assertEqual(s.count('0'), size)
- @bigmemtest(size=_2G // 3 + 2, memuse=8 + 3 * character_size)
+ @bigmemtest(size=_2G // 3 + 2, memuse=8 + 3 * ascii_char_size)
def test_repr_small(self, size):
return self.basic_test_repr(size)
- @bigmemtest(size=_2G + 2, memuse=8 + 3 * character_size)
+ @bigmemtest(size=_2G + 2, memuse=8 + 3 * ascii_char_size)
def test_repr_large(self, size):
return self.basic_test_repr(size)
@@ -888,7 +961,7 @@ class ListTest(unittest.TestCase):
def test_compare(self, size):
l1 = [''] * size
l2 = [''] * size
- self.assertEqual(l1, l2)
+ self.assertTrue(l1 == l2)
del l2
l2 = [''] * (size + 1)
self.assertFalse(l1 == l2)
@@ -934,9 +1007,9 @@ class ListTest(unittest.TestCase):
def test_contains(self, size):
l = [1, 2, 3, 4, 5] * size
self.assertEqual(len(l), size * 5)
- self.assertIn(5, l)
- self.assertNotIn([1, 2, 3, 4, 5], l)
- self.assertNotIn(0, l)
+ self.assertTrue(5 in l)
+ self.assertFalse([1, 2, 3, 4, 5] in l)
+ self.assertFalse(0 in l)
@bigmemtest(size=_2G + 10, memuse=8)
def test_hash(self, size):
@@ -1044,11 +1117,11 @@ class ListTest(unittest.TestCase):
self.assertEqual(s[-5:], '0, 0]')
self.assertEqual(s.count('0'), size)
- @bigmemtest(size=_2G // 3 + 2, memuse=8 + 3 * character_size)
+ @bigmemtest(size=_2G // 3 + 2, memuse=8 + 3 * ascii_char_size)
def test_repr_small(self, size):
return self.basic_test_repr(size)
- @bigmemtest(size=_2G + 2, memuse=8 + 3 * character_size)
+ @bigmemtest(size=_2G + 2, memuse=8 + 3 * ascii_char_size)
def test_repr_large(self, size):
return self.basic_test_repr(size)
diff --git a/Lib/test/test_binascii.py b/Lib/test/test_binascii.py
index 1e9e888..04d8f9d 100644
--- a/Lib/test/test_binascii.py
+++ b/Lib/test/test_binascii.py
@@ -208,9 +208,9 @@ class BinASCIITest(unittest.TestCase):
except Exception as err:
self.fail("{}({!r}) raises {!r}".format(func, empty, err))
- def test_unicode_strings(self):
- # Unicode strings are not accepted.
- for func in all_functions:
+ def test_unicode_b2a(self):
+ # Unicode strings are not accepted by b2a_* functions.
+ for func in (set(all_functions) - set(a2b_functions)) | {'rledecode_hqx'}:
try:
self.assertRaises(TypeError, getattr(binascii, func), "test")
except Exception as err:
@@ -218,6 +218,34 @@ class BinASCIITest(unittest.TestCase):
# crc_hqx needs 2 arguments
self.assertRaises(TypeError, binascii.crc_hqx, "test", 0)
+ def test_unicode_a2b(self):
+ # Unicode strings are accepted by a2b_* functions.
+ MAX_ALL = 45
+ raw = self.rawdata[:MAX_ALL]
+ for fa, fb in zip(a2b_functions, b2a_functions):
+ if fa == 'rledecode_hqx':
+ # Takes non-ASCII data
+ continue
+ a2b = getattr(binascii, fa)
+ b2a = getattr(binascii, fb)
+ try:
+ a = b2a(self.type2test(raw))
+ binary_res = a2b(a)
+ a = a.decode('ascii')
+ res = a2b(a)
+ except Exception as err:
+ self.fail("{}/{} conversion raises {!r}".format(fb, fa, err))
+ if fb == 'b2a_hqx':
+ # b2a_hqx returns a tuple
+ res, _ = res
+ binary_res, _ = binary_res
+ self.assertEqual(res, raw, "{}/{} conversion: "
+ "{!r} != {!r}".format(fb, fa, res, raw))
+ self.assertEqual(res, binary_res)
+ self.assertIsInstance(res, bytes)
+ # non-ASCII string
+ self.assertRaises(ValueError, a2b, "\x80")
+
class ArrayBinASCIITest(BinASCIITest):
def type2test(self, s):
diff --git a/Lib/test/test_bool.py b/Lib/test/test_bool.py
index b296870..4bab28b 100644
--- a/Lib/test/test_bool.py
+++ b/Lib/test/test_bool.py
@@ -330,6 +330,16 @@ class BoolTest(unittest.TestCase):
except (Exception) as e_len:
self.assertEqual(str(e_bool), str(e_len))
+ def test_real_and_imag(self):
+ self.assertEqual(True.real, 1)
+ self.assertEqual(True.imag, 0)
+ self.assertIs(type(True.real), int)
+ self.assertIs(type(True.imag), int)
+ self.assertEqual(False.real, 0)
+ self.assertEqual(False.imag, 0)
+ self.assertIs(type(False.real), int)
+ self.assertIs(type(False.imag), int)
+
def test_main():
support.run_unittest(BoolTest)
diff --git a/Lib/test/test_buffer.py b/Lib/test/test_buffer.py
new file mode 100644
index 0000000..e0006f2
--- /dev/null
+++ b/Lib/test/test_buffer.py
@@ -0,0 +1,3628 @@
+#
+# The ndarray object from _testbuffer.c is a complete implementation of
+# a PEP-3118 buffer provider. It is independent from NumPy's ndarray
+# and the tests don't require NumPy.
+#
+# If NumPy is present, some tests check both ndarray implementations
+# against each other.
+#
+# Most ndarray tests also check that memoryview(ndarray) behaves in
+# the same way as the original. Thus, a substantial part of the
+# memoryview tests is now in this module.
+#
+
+import unittest
+from test import support
+from itertools import permutations, product
+from random import randrange, sample, choice
+from sysconfig import get_config_var
+from platform import architecture
+import warnings
+import sys, array, io
+from decimal import Decimal
+from fractions import Fraction
+
+try:
+ from _testbuffer import *
+except ImportError:
+ ndarray = None
+
+try:
+ import struct
+except ImportError:
+ struct = None
+
+try:
+ with warnings.catch_warnings():
+ from numpy import ndarray as numpy_array
+except ImportError:
+ numpy_array = None
+
+
+SHORT_TEST = True
+
+
+# ======================================================================
+# Random lists by format specifier
+# ======================================================================
+
+# Native format chars and their ranges.
+NATIVE = {
+ '?':0, 'c':0, 'b':0, 'B':0,
+ 'h':0, 'H':0, 'i':0, 'I':0,
+ 'l':0, 'L':0, 'n':0, 'N':0,
+ 'f':0, 'd':0, 'P':0
+}
+
+if struct:
+ try:
+ # Add "qQ" if present in native mode.
+ struct.pack('Q', 2**64-1)
+ NATIVE['q'] = 0
+ NATIVE['Q'] = 0
+ except struct.error:
+ pass
+
+# Standard format chars and their ranges.
+STANDARD = {
+ '?':(0, 2), 'c':(0, 1<<8),
+ 'b':(-(1<<7), 1<<7), 'B':(0, 1<<8),
+ 'h':(-(1<<15), 1<<15), 'H':(0, 1<<16),
+ 'i':(-(1<<31), 1<<31), 'I':(0, 1<<32),
+ 'l':(-(1<<31), 1<<31), 'L':(0, 1<<32),
+ 'q':(-(1<<63), 1<<63), 'Q':(0, 1<<64),
+ 'f':(-(1<<63), 1<<63), 'd':(-(1<<1023), 1<<1023)
+}
+
+def native_type_range(fmt):
+ """Return range of a native type."""
+ if fmt == 'c':
+ lh = (0, 256)
+ elif fmt == '?':
+ lh = (0, 2)
+ elif fmt == 'f':
+ lh = (-(1<<63), 1<<63)
+ elif fmt == 'd':
+ lh = (-(1<<1023), 1<<1023)
+ else:
+ for exp in (128, 127, 64, 63, 32, 31, 16, 15, 8, 7):
+ try:
+ struct.pack(fmt, (1<<exp)-1)
+ break
+ except struct.error:
+ pass
+ lh = (-(1<<exp), 1<<exp) if exp & 1 else (0, 1<<exp)
+ return lh
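+
+# Illustrative results for the common one-byte integer formats:
+#
+#     >>> native_type_range('b')
+#     (-128, 128)
+#     >>> native_type_range('B')
+#     (0, 256)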
+
+fmtdict = {
+ '':NATIVE,
+ '@':NATIVE,
+ '<':STANDARD,
+ '>':STANDARD,
+ '=':STANDARD,
+ '!':STANDARD
+}
+
+if struct:
+ for fmt in fmtdict['@']:
+ fmtdict['@'][fmt] = native_type_range(fmt)
+
+MEMORYVIEW = NATIVE.copy()
+ARRAY = NATIVE.copy()
+for k in NATIVE:
+ if not k in "bBhHiIlLfd":
+ del ARRAY[k]
+
+BYTEFMT = NATIVE.copy()
+for k in NATIVE:
+ if not k in "Bbc":
+ del BYTEFMT[k]
+
+fmtdict['m'] = MEMORYVIEW
+fmtdict['@m'] = MEMORYVIEW
+fmtdict['a'] = ARRAY
+fmtdict['b'] = BYTEFMT
+fmtdict['@b'] = BYTEFMT
+
+# Capabilities of the test objects:
+MODE = 0
+MULT = 1
+cap = { # format chars # multiplier
+ 'ndarray': (['', '@', '<', '>', '=', '!'], ['', '1', '2', '3']),
+ 'array': (['a'], ['']),
+ 'numpy': ([''], ['']),
+ 'memoryview': (['@m', 'm'], ['']),
+ 'bytefmt': (['@b', 'b'], ['']),
+}
+
+def randrange_fmt(mode, char, obj):
+ """Return random item for a type specified by a mode and a single
+ format character."""
+ x = randrange(*fmtdict[mode][char])
+ if char == 'c':
+ x = bytes(chr(x), 'latin1')
+ if char == '?':
+ x = bool(x)
+ if char == 'f' or char == 'd':
+ x = struct.pack(char, x)
+ x = struct.unpack(char, x)[0]
+ if obj == 'numpy' and x == b'\x00':
+ # http://projects.scipy.org/numpy/ticket/1925
+ x = b'\x01'
+ return x
+
+def gen_item(fmt, obj):
+ """Return single random item."""
+ mode, chars = fmt.split('#')
+ x = []
+ for c in chars:
+ x.append(randrange_fmt(mode, c, obj))
+ return x[0] if len(x) == 1 else tuple(x)
+
+def gen_items(n, fmt, obj):
+ """Return a list of random items (or a scalar)."""
+ if n == 0:
+ return gen_item(fmt, obj)
+ lst = [0] * n
+ for i in range(n):
+ lst[i] = gen_item(fmt, obj)
+ return lst
+
+def struct_items(n, obj):
+ mode = choice(cap[obj][MODE])
+ xfmt = mode + '#'
+ fmt = mode.strip('amb')
+ nmemb = randrange(2, 10) # number of struct members
+ for _ in range(nmemb):
+ char = choice(tuple(fmtdict[mode]))
+ multiplier = choice(cap[obj][MULT])
+ xfmt += (char * int(multiplier if multiplier else 1))
+ fmt += (multiplier + char)
+ items = gen_items(n, xfmt, obj)
+ item = gen_item(xfmt, obj)
+ return fmt, items, item
+
+def randitems(n, obj='ndarray', mode=None, char=None):
+ """Return random format, items, item."""
+ if mode is None:
+ mode = choice(cap[obj][MODE])
+ if char is None:
+ char = choice(tuple(fmtdict[mode]))
+ multiplier = choice(cap[obj][MULT])
+ fmt = mode + '#' + char * int(multiplier if multiplier else 1)
+ items = gen_items(n, fmt, obj)
+ item = gen_item(fmt, obj)
+ fmt = mode.strip('amb') + multiplier + char
+ return fmt, items, item
+
+def iter_mode(n, obj='ndarray'):
+ """Iterate through supported mode/char combinations."""
+ for mode in cap[obj][MODE]:
+ for char in fmtdict[mode]:
+ yield randitems(n, obj, mode, char)
+
+def iter_format(nitems, testobj='ndarray'):
+ """Yield (format, items, item) for all possible modes and format
+ characters plus one random compound format string."""
+ for t in iter_mode(nitems, testobj):
+ yield t
+ if testobj != 'ndarray':
+ return  # end the generator; raising StopIteration here is an error under PEP 479
+ yield struct_items(nitems, testobj)
+
+
+def is_byte_format(fmt):
+ return 'c' in fmt or 'b' in fmt or 'B' in fmt
+
+def is_memoryview_format(fmt):
+ """format suitable for memoryview"""
+ x = len(fmt)
+ return ((x == 1 or (x == 2 and fmt[0] == '@')) and
+ fmt[x-1] in MEMORYVIEW)
+
+NON_BYTE_FORMAT = [c for c in fmtdict['@'] if not is_byte_format(c)]
+
+
+# ======================================================================
+# Multi-dimensional tolist(), slicing and slice assignments
+# ======================================================================
+
+def atomp(lst):
+ """Tuple items (representing structs) are regarded as atoms."""
+ return not isinstance(lst, list)
+
+def listp(lst):
+ return isinstance(lst, list)
+
+def prod(lst):
+ """Product of list elements."""
+ if len(lst) == 0:
+ return 0
+ x = lst[0]
+ for v in lst[1:]:
+ x *= v
+ return x
+
+def strides_from_shape(ndim, shape, itemsize, layout):
+ """Calculate strides of a contiguous array. Layout is 'C' or
+ 'F' (Fortran)."""
+ if ndim == 0:
+ return ()
+ if layout == 'C':
+ strides = list(shape[1:]) + [itemsize]
+ for i in range(ndim-2, -1, -1):
+ strides[i] *= strides[i+1]
+ else:
+ strides = [itemsize] + list(shape[:-1])
+ for i in range(1, ndim):
+ strides[i] *= strides[i-1]
+ return strides
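+
+# For example, a 3x4 array of one-byte items (illustrative):
+#
+#     >>> strides_from_shape(2, [3, 4], 1, 'C')
+#     [4, 1]
+#     >>> strides_from_shape(2, [3, 4], 1, 'F')
+#     [1, 3]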
+
+def _ca(items, s):
+ """Convert flat item list to the nested list representation of a
+ multidimensional C array with shape 's'."""
+ if atomp(items):
+ return items
+ if len(s) == 0:
+ return items[0]
+ lst = [0] * s[0]
+ stride = len(items) // s[0] if s[0] else 0
+ for i in range(s[0]):
+ start = i*stride
+ lst[i] = _ca(items[start:start+stride], s[1:])
+ return lst
+
+def _fa(items, s):
+ """Convert flat item list to the nested list representation of a
+ multidimensional Fortran array with shape 's'."""
+ if atomp(items):
+ return items
+ if len(s) == 0:
+ return items[0]
+ lst = [0] * s[0]
+ stride = s[0]
+ for i in range(s[0]):
+ lst[i] = _fa(items[i::stride], s[1:])
+ return lst
+
+def carray(items, shape):
+ if listp(items) and not 0 in shape and prod(shape) != len(items):
+ raise ValueError("prod(shape) != len(items)")
+ return _ca(items, shape)
+
+def farray(items, shape):
+ if listp(items) and not 0 in shape and prod(shape) != len(items):
+ raise ValueError("prod(shape) != len(items)")
+ return _fa(items, shape)
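+
+# For example (illustrative):
+#
+#     >>> carray([1, 2, 3, 4, 5, 6], [2, 3])
+#     [[1, 2, 3], [4, 5, 6]]
+#     >>> farray([1, 2, 3, 4, 5, 6], [2, 3])
+#     [[1, 3, 5], [2, 4, 6]]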
+
+def indices(shape):
+ """Generate all possible tuples of indices."""
+ iterables = [range(v) for v in shape]
+ return product(*iterables)
+
+def getindex(ndim, ind, strides):
+ """Convert multi-dimensional index to the position in the flat list."""
+ ret = 0
+ for i in range(ndim):
+ ret += strides[i] * ind[i]
+ return ret
+
+def transpose(src, shape):
+ """Transpose flat item list that is regarded as a multi-dimensional
+ matrix defined by shape: dest...[k][j][i] = src[i][j][k]... """
+ if not shape:
+ return src
+ ndim = len(shape)
+ sstrides = strides_from_shape(ndim, shape, 1, 'C')
+ dstrides = strides_from_shape(ndim, shape[::-1], 1, 'C')
+ dest = [0] * len(src)
+ for ind in indices(shape):
+ fr = getindex(ndim, ind, sstrides)
+ to = getindex(ndim, ind[::-1], dstrides)
+ dest[to] = src[fr]
+ return dest
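+
+# For example, transposing a 2x3 C array to 3x2 (illustrative):
+#
+#     >>> transpose([1, 2, 3, 4, 5, 6], [2, 3])
+#     [1, 4, 2, 5, 3, 6]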
+
+def _flatten(lst):
+ """flatten list"""
+ if lst == []:
+ return lst
+ if atomp(lst):
+ return [lst]
+ return _flatten(lst[0]) + _flatten(lst[1:])
+
+def flatten(lst):
+ """flatten list or return scalar"""
+ if atomp(lst): # scalar
+ return lst
+ return _flatten(lst)
+
+def slice_shape(lst, slices):
+ """Get the shape of lst after slicing: slices is a list of slice
+ objects."""
+ if atomp(lst):
+ return []
+ return [len(lst[slices[0]])] + slice_shape(lst[0], slices[1:])
+
+def multislice(lst, slices):
+ """Multi-dimensional slicing: slices is a list of slice objects."""
+ if atomp(lst):
+ return lst
+ return [multislice(sublst, slices[1:]) for sublst in lst[slices[0]]]
+
+def m_assign(llst, rlst, lslices, rslices):
+ """Multi-dimensional slice assignment: llst and rlst are the operands,
+ lslices and rslices are lists of slice objects. llst and rlst must
+ have the same structure.
+
+ For a two-dimensional example, this is not implemented in Python:
+
+ llst[0:3:2, 0:3:2] = rlst[1:3:1, 1:3:1]
+
+ Instead we write:
+
+ lslices = [slice(0,3,2), slice(0,3,2)]
+ rslices = [slice(1,3,1), slice(1,3,1)]
+ multislice_assign(llst, rlst, lslices, rslices)
+ """
+ if atomp(rlst):
+ return rlst
+ rlst = [m_assign(l, r, lslices[1:], rslices[1:])
+ for l, r in zip(llst[lslices[0]], rlst[rslices[0]])]
+ llst[lslices[0]] = rlst
+ return llst
+
+def cmp_structure(llst, rlst, lslices, rslices):
+ """Compare the structure of llst[lslices] and rlst[rslices]."""
+ lshape = slice_shape(llst, lslices)
+ rshape = slice_shape(rlst, rslices)
+ if (len(lshape) != len(rshape)):
+ return -1
+ for i in range(len(lshape)):
+ if lshape[i] != rshape[i]:
+ return -1
+ if lshape[i] == 0:
+ return 0
+ return 0
+
+def multislice_assign(llst, rlst, lslices, rslices):
+ """Return llst after assigning: llst[lslices] = rlst[rslices]"""
+ if cmp_structure(llst, rlst, lslices, rslices) < 0:
+ raise ValueError("lvalue and rvalue have different structures")
+ return m_assign(llst, rlst, lslices, rslices)
+
+
+# ======================================================================
+# Random structures
+# ======================================================================
+
+#
+# PEP-3118 is very permissive with respect to the contents of a
+# Py_buffer. In particular:
+#
+# - shape can be zero
+# - strides can be any integer, including zero
+# - offset can point to any location in the underlying
+# memory block, provided that it is a multiple of
+# itemsize.
+#
+# The functions in this section test and verify random structures
+# in full generality. A structure is valid iff it fits in the
+# underlying memory block.
+#
+# The structure 't' (short for 'tuple') is fully defined by:
+#
+# t = (memlen, itemsize, ndim, shape, strides, offset)
+#
+
+def verify_structure(memlen, itemsize, ndim, shape, strides, offset):
+ """Verify that the parameters represent a valid array within
+ the bounds of the allocated memory:
+ char *mem: start of the physical memory block
+ memlen: length of the physical memory block
+ offset: (char *)buf - mem
+ """
+ if offset % itemsize:
+ return False
+ if offset < 0 or offset+itemsize > memlen:
+ return False
+ if any(v % itemsize for v in strides):
+ return False
+
+ if ndim <= 0:
+ return ndim == 0 and not shape and not strides
+ if 0 in shape:
+ return True
+
+ imin = sum(strides[j]*(shape[j]-1) for j in range(ndim)
+ if strides[j] <= 0)
+ imax = sum(strides[j]*(shape[j]-1) for j in range(ndim)
+ if strides[j] > 0)
+
+ return 0 <= offset+imin and offset+imax+itemsize <= memlen
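+
+# For example (illustrative): three 2-byte items packed at the start of
+# an 8-byte block are valid; shifting the offset to 4 pushes the last
+# item past the end:
+#
+#     >>> verify_structure(8, 2, 1, (3,), (2,), 0)
+#     True
+#     >>> verify_structure(8, 2, 1, (3,), (2,), 4)
+#     False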
+
+def get_item(lst, indices):
+ for i in indices:
+ lst = lst[i]
+ return lst
+
+def memory_index(indices, t):
+ """Location of an item in the underlying memory."""
+ memlen, itemsize, ndim, shape, strides, offset = t
+ p = offset
+ for i in range(ndim):
+ p += strides[i]*indices[i]
+ return p
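+
+# E.g. with itemsize 2, strides (2,) and offset 4, the second item lives
+# at byte 4 + 2*1 = 6 (illustrative):
+#
+#     >>> memory_index((1,), (16, 2, 1, (3,), (2,), 4))
+#     6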
+
+def is_overlapping(t):
+ """The structure 't' is overlapping if at least one memory location
+ is visited twice while iterating through all possible tuples of
+ indices."""
+ memlen, itemsize, ndim, shape, strides, offset = t
+ visited = 1<<memlen
+ for ind in indices(shape):
+ i = memory_index(ind, t)
+ bit = 1<<i
+ if visited & bit:
+ return True
+ visited |= bit
+ return False
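+
+# For example, a zero stride makes both items alias byte 0 of the
+# memory block (illustrative):
+#
+#     >>> is_overlapping((4, 2, 1, (2,), (0,), 0))
+#     True
+#     >>> is_overlapping((4, 2, 1, (2,), (2,), 0))
+#     False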
+
+def rand_structure(itemsize, valid, maxdim=5, maxshape=16, shape=()):
+ """Return random structure:
+ (memlen, itemsize, ndim, shape, strides, offset)
+ If 'valid' is true, the returned structure is valid, otherwise invalid.
+ If 'shape' is given, use that instead of creating a random shape.
+ """
+ if not shape:
+ ndim = randrange(maxdim+1)
+ if (ndim == 0):
+ if valid:
+ return itemsize, itemsize, ndim, (), (), 0
+ else:
+ nitems = randrange(1, 16+1)
+ memlen = nitems * itemsize
+ offset = -itemsize if randrange(2) == 0 else memlen
+ return memlen, itemsize, ndim, (), (), offset
+
+ minshape = 2
+ n = randrange(100)
+ if n >= 95 and valid:
+ minshape = 0
+ elif n >= 90:
+ minshape = 1
+ shape = [0] * ndim
+
+ for i in range(ndim):
+ shape[i] = randrange(minshape, maxshape+1)
+ else:
+ ndim = len(shape)
+
+ maxstride = 5
+ n = randrange(100)
+ zero_stride = True if n >= 95 and n & 1 else False
+
+ strides = [0] * ndim
+ strides[ndim-1] = itemsize * randrange(-maxstride, maxstride+1)
+ if not zero_stride and strides[ndim-1] == 0:
+ strides[ndim-1] = itemsize
+
+ for i in range(ndim-2, -1, -1):
+ maxstride *= shape[i+1] if shape[i+1] else 1
+ if zero_stride:
+ strides[i] = itemsize * randrange(-maxstride, maxstride+1)
+ else:
+ strides[i] = ((1,-1)[randrange(2)] *
+ itemsize * randrange(1, maxstride+1))
+
+ imin = imax = 0
+ if not 0 in shape:
+ imin = sum(strides[j]*(shape[j]-1) for j in range(ndim)
+ if strides[j] <= 0)
+ imax = sum(strides[j]*(shape[j]-1) for j in range(ndim)
+ if strides[j] > 0)
+
+ nitems = imax - imin
+ if valid:
+ offset = -imin * itemsize
+ memlen = offset + (imax+1) * itemsize
+ else:
+ memlen = (-imin + imax) * itemsize
+ offset = -imin-itemsize if randrange(2) == 0 else memlen
+ return memlen, itemsize, ndim, shape, strides, offset
+
+def randslice_from_slicelen(slicelen, listlen):
+ """Create a random slice of len slicelen that fits into listlen."""
+ maxstart = listlen - slicelen
+ start = randrange(maxstart+1)
+ maxstep = (listlen - start) // slicelen if slicelen else 1
+ step = randrange(1, maxstep+1)
+ stop = start + slicelen * step
+ s = slice(start, stop, step)
+ _, _, _, control = slice_indices(s, listlen)
+ if control != slicelen:
+ raise RuntimeError
+ return s
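+
+# For example, randslice_from_slicelen(2, 6) might return slice(1, 5, 2),
+# which selects exactly the two indices 1 and 3 from a list of length 6.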
+
+def randslice_from_shape(ndim, shape):
+ """Create two sets of slices for an array x with shape 'shape'
+ such that shapeof(x[lslices]) == shapeof(x[rslices])."""
+ lslices = [0] * ndim
+ rslices = [0] * ndim
+ for n in range(ndim):
+ l = shape[n]
+ slicelen = randrange(1, l+1) if l > 0 else 0
+ lslices[n] = randslice_from_slicelen(slicelen, l)
+ rslices[n] = randslice_from_slicelen(slicelen, l)
+ return tuple(lslices), tuple(rslices)
+
+def rand_aligned_slices(maxdim=5, maxshape=16):
+ """Create (lshape, rshape, tuple(lslices), tuple(rslices)) such that
+ shapeof(x[lslices]) == shapeof(y[rslices]), where x is an array
+ with shape 'lshape' and y is an array with shape 'rshape'."""
+ ndim = randrange(1, maxdim+1)
+ minshape = 2
+ n = randrange(100)
+ if n >= 95:
+ minshape = 0
+ elif n >= 90:
+ minshape = 1
+ all_random = True if randrange(100) >= 80 else False
+ lshape = [0]*ndim; rshape = [0]*ndim
+ lslices = [0]*ndim; rslices = [0]*ndim
+
+ for n in range(ndim):
+ small = randrange(minshape, maxshape+1)
+ big = randrange(minshape, maxshape+1)
+ if big < small:
+ big, small = small, big
+
+ # Create a slice that fits the smaller value.
+ if all_random:
+ start = randrange(-small, small+1)
+ stop = randrange(-small, small+1)
+ step = (1,-1)[randrange(2)] * randrange(1, small+2)
+ s_small = slice(start, stop, step)
+ _, _, _, slicelen = slice_indices(s_small, small)
+ else:
+ slicelen = randrange(1, small+1) if small > 0 else 0
+ s_small = randslice_from_slicelen(slicelen, small)
+
+ # Create a slice of the same length for the bigger value.
+ s_big = randslice_from_slicelen(slicelen, big)
+ if randrange(2) == 0:
+ rshape[n], lshape[n] = big, small
+ rslices[n], lslices[n] = s_big, s_small
+ else:
+ rshape[n], lshape[n] = small, big
+ rslices[n], lslices[n] = s_small, s_big
+
+ return lshape, rshape, tuple(lslices), tuple(rslices)
+
+def randitems_from_structure(fmt, t):
+ """Return a list of random items for structure 't' with format
+ 'fmt'."""
+ memlen, itemsize, _, _, _, _ = t
+ return gen_items(memlen//itemsize, '#'+fmt, 'numpy')
+
+def ndarray_from_structure(items, fmt, t, flags=0):
+ """Return ndarray from the tuple returned by rand_structure()"""
+ memlen, itemsize, ndim, shape, strides, offset = t
+ return ndarray(items, shape=shape, strides=strides, format=fmt,
+ offset=offset, flags=ND_WRITABLE|flags)
+
+def numpy_array_from_structure(items, fmt, t):
+ """Return numpy_array from the tuple returned by rand_structure()"""
+ memlen, itemsize, ndim, shape, strides, offset = t
+ buf = bytearray(memlen)
+ for j, v in enumerate(items):
+ struct.pack_into(fmt, buf, j*itemsize, v)
+ return numpy_array(buffer=buf, shape=shape, strides=strides,
+ dtype=fmt, offset=offset)
+
+
+# ======================================================================
+# memoryview casts
+# ======================================================================
+
+def cast_items(exporter, fmt, itemsize, shape=None):
+ """Interpret the raw memory of 'exporter' as a list of items with
+ size 'itemsize'. If shape=None, the new structure is assumed to
+ be 1-D with n * itemsize = bytelen. If shape is given, the usual
+ constraint for contiguous arrays prod(shape) * itemsize = bytelen
+ applies. On success, return (items, shape). If the constraints
+ cannot be met, return (None, shape). If a chunk of bytes is interpreted
+ as NaN as a result of float conversion, return ('nan', shape)."""
+ bytelen = exporter.nbytes
+ if shape:
+ if prod(shape) * itemsize != bytelen:
+ return None, shape
+ elif shape == []:
+ if exporter.ndim == 0 or itemsize != bytelen:
+ return None, shape
+ else:
+ n, r = divmod(bytelen, itemsize)
+ shape = [n]
+ if r != 0:
+ return None, shape
+
+ mem = exporter.tobytes()
+ byteitems = [mem[i:i+itemsize] for i in range(0, len(mem), itemsize)]
+
+ items = []
+ for v in byteitems:
+ item = struct.unpack(fmt, v)[0]
+ if item != item:
+ return 'nan', shape
+ items.append(item)
+
+ return (items, shape) if shape != [] else (items[0], shape)
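+
+# For example (illustrative), reinterpreting four unsigned bytes as two
+# little-endian shorts:
+#
+#     >>> ex = ndarray([1, 2, 3, 4], shape=[4], format='B')
+#     >>> cast_items(ex, '<H', 2)
+#     ([513, 1027], [2])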
+
+def gencastshapes():
+ """Generate shapes to test casting."""
+ for n in range(32):
+ yield [n]
+ ndim = randrange(4, 6)
+ minshape = 1 if randrange(100) > 80 else 2
+ yield [randrange(minshape, 5) for _ in range(ndim)]
+ ndim = randrange(2, 4)
+ minshape = 1 if randrange(100) > 80 else 2
+ yield [randrange(minshape, 5) for _ in range(ndim)]
+
+
+# ======================================================================
+# Actual tests
+# ======================================================================
+
+def genslices(n):
+ """Generate all possible slices for a single dimension."""
+ return product(range(-n, n+1), range(-n, n+1), range(-n, n+1))
+
+def genslices_ndim(ndim, shape):
+ """Generate all possible slice tuples for 'shape'."""
+ iterables = [genslices(shape[n]) for n in range(ndim)]
+ return product(*iterables)
+
+def rslice(n, allow_empty=False):
+ """Generate random slice for a single dimension of length n.
+ If zero=True, the slices may be empty, otherwise they will
+ be non-empty."""
+ minlen = 0 if allow_empty or n == 0 else 1
+ slicelen = randrange(minlen, n+1)
+ return randslice_from_slicelen(slicelen, n)
+
+def rslices(n, allow_empty=False):
+ """Generate random slices for a single dimension."""
+ for _ in range(5):
+ yield rslice(n, allow_empty)
+
+def rslices_ndim(ndim, shape, iterations=5):
+ """Generate random slice tuples for 'shape'."""
+ # non-empty slices
+ for _ in range(iterations):
+ yield tuple(rslice(shape[n]) for n in range(ndim))
+ # possibly empty slices
+ for _ in range(iterations):
+ yield tuple(rslice(shape[n], allow_empty=True) for n in range(ndim))
+ # invalid slices
+ yield tuple(slice(0,1,0) for _ in range(ndim))
+
+def rpermutation(iterable, r=None):
+ pool = tuple(iterable)
+ r = len(pool) if r is None else r
+ yield tuple(sample(pool, r))
+
+def ndarray_print(nd):
+ """Print ndarray for debugging."""
+ try:
+ x = nd.tolist()
+ except (TypeError, NotImplementedError):
+ x = nd.tobytes()
+ if isinstance(nd, ndarray):
+ offset = nd.offset
+ flags = nd.flags
+ else:
+ offset = 'unknown'
+ flags = 'unknown'
+ print("ndarray(%s, shape=%s, strides=%s, suboffsets=%s, offset=%s, "
+ "format='%s', itemsize=%s, flags=%s)" %
+ (x, nd.shape, nd.strides, nd.suboffsets, offset,
+ nd.format, nd.itemsize, flags))
+ sys.stdout.flush()
+
+
+ITERATIONS = 100
+MAXDIM = 5
+MAXSHAPE = 10
+
+if SHORT_TEST:
+ ITERATIONS = 10
+ MAXDIM = 3
+ MAXSHAPE = 4
+ genslices = rslices
+ genslices_ndim = rslices_ndim
+ permutations = rpermutation
+
+
+@unittest.skipUnless(struct, 'struct module required for this test.')
+@unittest.skipUnless(ndarray, 'ndarray object required for this test')
+class TestBufferProtocol(unittest.TestCase):
+
+ def setUp(self):
+ self.sizeof_void_p = get_config_var('SIZEOF_VOID_P')
+ if not self.sizeof_void_p:
+ self.sizeof_void_p = 8 if architecture()[0] == '64bit' else 4
+
+ def verify(self, result, obj=-1,
+ itemsize={1}, fmt=-1, readonly={1},
+ ndim={1}, shape=-1, strides=-1,
+ lst=-1, sliced=False, cast=False):
+ # Verify buffer contents against expected values. Default values
+ # are deliberately initialized to invalid types.
+ if shape:
+ expected_len = prod(shape)*itemsize
+ else:
+ if not fmt: # array has been implicitly cast to unsigned bytes
+ expected_len = len(lst)
+ else: # ndim = 0
+ expected_len = itemsize
+
+ # Reconstruct suboffsets from strides. Support for slicing
+ # could be added, but is currently only needed for test_getbuf().
+ suboffsets = ()
+ if result.suboffsets:
+ self.assertGreater(ndim, 0)
+
+ suboffset0 = 0
+ for n in range(1, ndim):
+ if shape[n] == 0:
+ break
+ if strides[n] <= 0:
+ suboffset0 += -strides[n] * (shape[n]-1)
+
+ suboffsets = [suboffset0] + [-1 for v in range(ndim-1)]
+
+ # Not correct if slicing has occurred in the first dimension.
+ stride0 = self.sizeof_void_p
+ if strides[0] < 0:
+ stride0 = -stride0
+ strides = [stride0] + list(strides[1:])
+
+ self.assertIs(result.obj, obj)
+ self.assertEqual(result.nbytes, expected_len)
+ self.assertEqual(result.itemsize, itemsize)
+ self.assertEqual(result.format, fmt)
+ self.assertEqual(result.readonly, readonly)
+ self.assertEqual(result.ndim, ndim)
+ self.assertEqual(result.shape, tuple(shape))
+ if not (sliced and suboffsets):
+ self.assertEqual(result.strides, tuple(strides))
+ self.assertEqual(result.suboffsets, tuple(suboffsets))
+
+ if isinstance(result, ndarray) or is_memoryview_format(fmt):
+ rep = result.tolist() if fmt else result.tobytes()
+ self.assertEqual(rep, lst)
+
+ if not fmt: # array has been cast to unsigned bytes,
+ return # the remaining tests won't work.
+
+ # PyBuffer_GetPointer() is the definition how to access an item.
+ # If PyBuffer_GetPointer(indices) is correct for all possible
+ # combinations of indices, the buffer is correct.
+ #
+ # Also test tobytes() against the flattened 'lst', with all items
+ # packed to bytes.
+ if not cast: # casts chop up 'lst' in different ways
+ b = bytearray()
+ buf_err = None
+ for ind in indices(shape):
+ try:
+ item1 = get_pointer(result, ind)
+ item2 = get_item(lst, ind)
+ if isinstance(item2, tuple):
+ x = struct.pack(fmt, *item2)
+ else:
+ x = struct.pack(fmt, item2)
+ b.extend(x)
+ except BufferError:
+ buf_err = True # re-exporter does not provide full buffer
+ break
+ self.assertEqual(item1, item2)
+
+ if not buf_err:
+ # test tobytes()
+ self.assertEqual(result.tobytes(), b)
+
+ if not buf_err and is_memoryview_format(fmt):
+
+ # lst := expected multi-dimensional logical representation
+ # flatten(lst) := elements in C-order
+ ff = fmt if fmt else 'B'
+ flattened = flatten(lst)
+
+ # Rules for 'A': if the array is already contiguous, return
+ # the array unaltered. Otherwise, return a contiguous 'C'
+ # representation.
+ for order in ['C', 'F', 'A']:
+ expected = result
+ if order == 'F':
+ if not is_contiguous(result, 'A') or \
+ is_contiguous(result, 'C'):
+ # For constructing the ndarray, convert the
+ # flattened logical representation to Fortran order.
+ trans = transpose(flattened, shape)
+ expected = ndarray(trans, shape=shape, format=ff,
+ flags=ND_FORTRAN)
+ else: # 'C', 'A'
+ if not is_contiguous(result, 'A') or \
+ is_contiguous(result, 'F') and order == 'C':
+ # The flattened list is already in C-order.
+ expected = ndarray(flattened, shape=shape, format=ff)
+ contig = get_contiguous(result, PyBUF_READ, order)
+ self.assertEqual(contig.tobytes(), b)
+ self.assertTrue(cmp_contig(contig, expected))
+
+ if is_memoryview_format(fmt):
+ try:
+ m = memoryview(result)
+ except BufferError: # re-exporter does not provide full information
+ return
+ ex = result.obj if isinstance(result, memoryview) else result
+ self.assertIs(m.obj, ex)
+ self.assertEqual(m.nbytes, expected_len)
+ self.assertEqual(m.itemsize, itemsize)
+ self.assertEqual(m.format, fmt)
+ self.assertEqual(m.readonly, readonly)
+ self.assertEqual(m.ndim, ndim)
+ self.assertEqual(m.shape, tuple(shape))
+ if not (sliced and suboffsets):
+ self.assertEqual(m.strides, tuple(strides))
+ self.assertEqual(m.suboffsets, tuple(suboffsets))
+
+ n = 1 if ndim == 0 else len(lst)
+ self.assertEqual(len(m), n)
+
+ rep = result.tolist() if fmt else result.tobytes()
+ self.assertEqual(rep, lst)
+ self.assertEqual(m, result)
+
+ def verify_getbuf(self, orig_ex, ex, req, sliced=False):
+ def simple_fmt(ex):
+ return ex.format == '' or ex.format == 'B'
+ def match(req, flag):
+ return ((req&flag) == flag)
+
+ if (# writable request to read-only exporter
+ (ex.readonly and match(req, PyBUF_WRITABLE)) or
+ # cannot match explicit contiguity request
+ (match(req, PyBUF_C_CONTIGUOUS) and not ex.c_contiguous) or
+ (match(req, PyBUF_F_CONTIGUOUS) and not ex.f_contiguous) or
+ (match(req, PyBUF_ANY_CONTIGUOUS) and not ex.contiguous) or
+ # buffer needs suboffsets
+ (not match(req, PyBUF_INDIRECT) and ex.suboffsets) or
+ # buffer without strides must be C-contiguous
+ (not match(req, PyBUF_STRIDES) and not ex.c_contiguous) or
+ # PyBUF_SIMPLE|PyBUF_FORMAT and PyBUF_WRITABLE|PyBUF_FORMAT
+ (not match(req, PyBUF_ND) and match(req, PyBUF_FORMAT))):
+
+ self.assertRaises(BufferError, ndarray, ex, getbuf=req)
+ return
+
+ if isinstance(ex, ndarray) or is_memoryview_format(ex.format):
+ lst = ex.tolist()
+ else:
+ nd = ndarray(ex, getbuf=PyBUF_FULL_RO)
+ lst = nd.tolist()
+
+ # The consumer may have requested default values or a NULL format.
+ ro = 0 if match(req, PyBUF_WRITABLE) else ex.readonly
+ fmt = ex.format
+ itemsize = ex.itemsize
+ ndim = ex.ndim
+ if not match(req, PyBUF_FORMAT):
+ # itemsize refers to the original itemsize before the cast.
+ # The equality product(shape) * itemsize = len still holds.
+ # The equality calcsize(format) = itemsize does _not_ hold.
+ fmt = ''
+ lst = orig_ex.tobytes() # Issue 12834
+ if not match(req, PyBUF_ND):
+ ndim = 1
+ shape = orig_ex.shape if match(req, PyBUF_ND) else ()
+ strides = orig_ex.strides if match(req, PyBUF_STRIDES) else ()
+
+ nd = ndarray(ex, getbuf=req)
+ self.verify(nd, obj=ex,
+ itemsize=itemsize, fmt=fmt, readonly=ro,
+ ndim=ndim, shape=shape, strides=strides,
+ lst=lst, sliced=sliced)
+
+ def test_ndarray_getbuf(self):
+ requests = (
+ # distinct flags
+ PyBUF_INDIRECT, PyBUF_STRIDES, PyBUF_ND, PyBUF_SIMPLE,
+ PyBUF_C_CONTIGUOUS, PyBUF_F_CONTIGUOUS, PyBUF_ANY_CONTIGUOUS,
+ # compound requests
+ PyBUF_FULL, PyBUF_FULL_RO,
+ PyBUF_RECORDS, PyBUF_RECORDS_RO,
+ PyBUF_STRIDED, PyBUF_STRIDED_RO,
+ PyBUF_CONTIG, PyBUF_CONTIG_RO,
+ )
+ # items and format
+ items_fmt = (
+ ([True if x % 2 else False for x in range(12)], '?'),
+ ([1,2,3,4,5,6,7,8,9,10,11,12], 'b'),
+ ([1,2,3,4,5,6,7,8,9,10,11,12], 'B'),
+ ([(2**31-x) if x % 2 else (-2**31+x) for x in range(12)], 'l')
+ )
+ # shape, strides, offset
+ structure = (
+ ([], [], 0),
+ ([12], [], 0),
+ ([12], [-1], 11),
+ ([6], [2], 0),
+ ([6], [-2], 11),
+ ([3, 4], [], 0),
+ ([3, 4], [-4, -1], 11),
+ ([2, 2], [4, 1], 4),
+ ([2, 2], [-4, -1], 8)
+ )
+ # ndarray creation flags
+ ndflags = (
+ 0, ND_WRITABLE, ND_FORTRAN, ND_FORTRAN|ND_WRITABLE,
+ ND_PIL, ND_PIL|ND_WRITABLE
+ )
+ # flags that can actually be used as flags
+ real_flags = (0, PyBUF_WRITABLE, PyBUF_FORMAT,
+ PyBUF_WRITABLE|PyBUF_FORMAT)
+
+ for items, fmt in items_fmt:
+ itemsize = struct.calcsize(fmt)
+ for shape, strides, offset in structure:
+ strides = [v * itemsize for v in strides]
+ offset *= itemsize
+ for flags in ndflags:
+
+ if strides and (flags&ND_FORTRAN):
+ continue
+ if not shape and (flags&ND_PIL):
+ continue
+
+ _items = items if shape else items[0]
+ ex1 = ndarray(_items, format=fmt, flags=flags,
+ shape=shape, strides=strides, offset=offset)
+ ex2 = ex1[::-2] if shape else None
+
+ m1 = memoryview(ex1)
+ if ex2:
+ m2 = memoryview(ex2)
+ if ex1.ndim == 0 or (ex1.ndim == 1 and shape and strides):
+ self.assertEqual(m1, ex1)
+ if ex2 and ex2.ndim == 1 and shape and strides:
+ self.assertEqual(m2, ex2)
+
+ for req in requests:
+ for bits in real_flags:
+ self.verify_getbuf(ex1, ex1, req|bits)
+ self.verify_getbuf(ex1, m1, req|bits)
+ if ex2:
+ self.verify_getbuf(ex2, ex2, req|bits,
+ sliced=True)
+ self.verify_getbuf(ex2, m2, req|bits,
+ sliced=True)
+
+ items = [1,2,3,4,5,6,7,8,9,10,11,12]
+
+ # ND_GETBUF_FAIL
+ ex = ndarray(items, shape=[12], flags=ND_GETBUF_FAIL)
+ self.assertRaises(BufferError, ndarray, ex)
+
+ # Request complex structure from a simple exporter. In this
+ # particular case the test object is not PEP-3118 compliant.
+ base = ndarray([9], [1])
+ ex = ndarray(base, getbuf=PyBUF_SIMPLE)
+ self.assertRaises(BufferError, ndarray, ex, getbuf=PyBUF_WRITABLE)
+ self.assertRaises(BufferError, ndarray, ex, getbuf=PyBUF_ND)
+ self.assertRaises(BufferError, ndarray, ex, getbuf=PyBUF_STRIDES)
+ self.assertRaises(BufferError, ndarray, ex, getbuf=PyBUF_C_CONTIGUOUS)
+ self.assertRaises(BufferError, ndarray, ex, getbuf=PyBUF_F_CONTIGUOUS)
+ self.assertRaises(BufferError, ndarray, ex, getbuf=PyBUF_ANY_CONTIGUOUS)
+ nd = ndarray(ex, getbuf=PyBUF_SIMPLE)
+
+ def test_ndarray_exceptions(self):
+ nd = ndarray([9], [1])
+ ndm = ndarray([9], [1], flags=ND_VAREXPORT)
+
+ # Initialization of a new ndarray or mutation of an existing array.
+ for c in (ndarray, nd.push, ndm.push):
+ # Invalid types.
+ self.assertRaises(TypeError, c, {1,2,3})
+ self.assertRaises(TypeError, c, [1,2,'3'])
+ self.assertRaises(TypeError, c, [1,2,(3,4)])
+ self.assertRaises(TypeError, c, [1,2,3], shape={3})
+ self.assertRaises(TypeError, c, [1,2,3], shape=[3], strides={1})
+ self.assertRaises(TypeError, c, [1,2,3], shape=[3], offset=[])
+ self.assertRaises(TypeError, c, [1], shape=[1], format={})
+ self.assertRaises(TypeError, c, [1], shape=[1], flags={})
+ self.assertRaises(TypeError, c, [1], shape=[1], getbuf={})
+
+ # ND_FORTRAN flag is only valid without strides.
+ self.assertRaises(TypeError, c, [1], shape=[1], strides=[1],
+ flags=ND_FORTRAN)
+
+ # ND_PIL flag is only valid with ndim > 0.
+ self.assertRaises(TypeError, c, [1], shape=[], flags=ND_PIL)
+
+ # Invalid items.
+ self.assertRaises(ValueError, c, [], shape=[1])
+ self.assertRaises(ValueError, c, ['XXX'], shape=[1], format="L")
+ # Invalid combination of items and format.
+ self.assertRaises(struct.error, c, [1000], shape=[1], format="B")
+ self.assertRaises(ValueError, c, [1,(2,3)], shape=[2], format="B")
+ self.assertRaises(ValueError, c, [1,2,3], shape=[3], format="QL")
+
+ # Invalid ndim.
+ n = ND_MAX_NDIM+1
+ self.assertRaises(ValueError, c, [1]*n, shape=[1]*n)
+
+ # Invalid shape.
+ self.assertRaises(ValueError, c, [1], shape=[-1])
+ self.assertRaises(ValueError, c, [1,2,3], shape=['3'])
+ self.assertRaises(OverflowError, c, [1], shape=[2**128])
+ # prod(shape) * itemsize != len(items)
+ self.assertRaises(ValueError, c, [1,2,3,4,5], shape=[2,2], offset=3)
+
+ # Invalid strides.
+ self.assertRaises(ValueError, c, [1,2,3], shape=[3], strides=['1'])
+ self.assertRaises(OverflowError, c, [1], shape=[1],
+ strides=[2**128])
+
+ # Invalid combination of strides and shape.
+ self.assertRaises(ValueError, c, [1,2], shape=[2,1], strides=[1])
+ # Invalid combination of strides and format.
+ self.assertRaises(ValueError, c, [1,2,3,4], shape=[2], strides=[3],
+ format="L")
+
+ # Invalid offset.
+ self.assertRaises(ValueError, c, [1,2,3], shape=[3], offset=4)
+ self.assertRaises(ValueError, c, [1,2,3], shape=[1], offset=3,
+ format="L")
+
+ # Invalid format.
+ self.assertRaises(ValueError, c, [1,2,3], shape=[3], format="")
+ self.assertRaises(struct.error, c, [(1,2,3)], shape=[1],
+ format="@#$")
+
+ # Striding out of the memory bounds.
+ items = [1,2,3,4,5,6,7,8,9,10]
+ self.assertRaises(ValueError, c, items, shape=[2,3],
+ strides=[-3, -2], offset=5)
+
+ # Constructing consumer: format argument invalid.
+ self.assertRaises(TypeError, c, bytearray(), format="Q")
+
+ # Constructing original base object: getbuf argument invalid.
+ self.assertRaises(TypeError, c, [1], shape=[1], getbuf=PyBUF_FULL)
+
+ # Shape argument is mandatory for original base objects.
+ self.assertRaises(TypeError, c, [1])
+
+
+ # PyBUF_WRITABLE request to read-only provider.
+ self.assertRaises(BufferError, ndarray, b'123', getbuf=PyBUF_WRITABLE)
+
+ # ND_VAREXPORT can only be specified during construction.
+ nd = ndarray([9], [1], flags=ND_VAREXPORT)
+ self.assertRaises(ValueError, nd.push, [1], [1], flags=ND_VAREXPORT)
+
+ # Invalid operation for consumers: push/pop
+ nd = ndarray(b'123')
+ self.assertRaises(BufferError, nd.push, [1], [1])
+ self.assertRaises(BufferError, nd.pop)
+
+ # ND_VAREXPORT not set: push/pop fail with exported buffers
+ nd = ndarray([9], [1])
+ nd.push([1], [1])
+ m = memoryview(nd)
+ self.assertRaises(BufferError, nd.push, [1], [1])
+ self.assertRaises(BufferError, nd.pop)
+ m.release()
+ nd.pop()
+
+ # Single remaining buffer: pop fails
+ self.assertRaises(BufferError, nd.pop)
+ del nd
+
+ # get_pointer()
+ self.assertRaises(TypeError, get_pointer, {}, [1,2,3])
+ self.assertRaises(TypeError, get_pointer, b'123', {})
+
+ nd = ndarray(list(range(100)), shape=[1]*100)
+ self.assertRaises(ValueError, get_pointer, nd, [5])
+
+ nd = ndarray(list(range(12)), shape=[3,4])
+ self.assertRaises(ValueError, get_pointer, nd, [2,3,4])
+ self.assertRaises(ValueError, get_pointer, nd, [3,3])
+ self.assertRaises(ValueError, get_pointer, nd, [-3,3])
+ self.assertRaises(OverflowError, get_pointer, nd, [1<<64,3])
+
+ # tolist() needs format
+ ex = ndarray([1,2,3], shape=[3], format='L')
+ nd = ndarray(ex, getbuf=PyBUF_SIMPLE)
+ self.assertRaises(ValueError, nd.tolist)
+
+ # memoryview_from_buffer()
+ ex1 = ndarray([1,2,3], shape=[3], format='L')
+ ex2 = ndarray(ex1)
+ nd = ndarray(ex2)
+ self.assertRaises(TypeError, nd.memoryview_from_buffer)
+
+ nd = ndarray([(1,)*200], shape=[1], format='L'*200)
+ self.assertRaises(TypeError, nd.memoryview_from_buffer)
+
+ n = ND_MAX_NDIM
+ nd = ndarray(list(range(n)), shape=[1]*n)
+ self.assertRaises(ValueError, nd.memoryview_from_buffer)
+
+ # get_contiguous()
+ nd = ndarray([1], shape=[1])
+ self.assertRaises(TypeError, get_contiguous, 1, 2, 3, 4, 5)
+ self.assertRaises(TypeError, get_contiguous, nd, "xyz", 'C')
+ self.assertRaises(OverflowError, get_contiguous, nd, 2**64, 'C')
+ self.assertRaises(TypeError, get_contiguous, nd, PyBUF_READ, 961)
+ self.assertRaises(UnicodeEncodeError, get_contiguous, nd, PyBUF_READ,
+ '\u2007')
+
+ # cmp_contig()
+ nd = ndarray([1], shape=[1])
+ self.assertRaises(TypeError, cmp_contig, 1, 2, 3, 4, 5)
+ self.assertRaises(TypeError, cmp_contig, {}, nd)
+ self.assertRaises(TypeError, cmp_contig, nd, {})
+
+ # is_contiguous()
+ nd = ndarray([1], shape=[1])
+ self.assertRaises(TypeError, is_contiguous, 1, 2, 3, 4, 5)
+ self.assertRaises(TypeError, is_contiguous, {}, 'A')
+ self.assertRaises(TypeError, is_contiguous, nd, 201)
+
+ def test_ndarray_linked_list(self):
+ for perm in permutations(range(5)):
+ m = [0]*5
+ nd = ndarray([1,2,3], shape=[3], flags=ND_VAREXPORT)
+ m[0] = memoryview(nd)
+
+ for i in range(1, 5):
+ nd.push([1,2,3], shape=[3])
+ m[i] = memoryview(nd)
+
+ for i in range(5):
+ m[perm[i]].release()
+
+ self.assertRaises(BufferError, nd.pop)
+ del nd
+
+ def test_ndarray_format_scalar(self):
+ # ndim = 0: scalar
+ for fmt, scalar, _ in iter_format(0):
+ itemsize = struct.calcsize(fmt)
+ nd = ndarray(scalar, shape=(), format=fmt)
+ self.verify(nd, obj=None,
+ itemsize=itemsize, fmt=fmt, readonly=1,
+ ndim=0, shape=(), strides=(),
+ lst=scalar)
+
+ def test_ndarray_format_shape(self):
+ # ndim = 1, shape = [n]
+ nitems = randrange(1, 10)
+ for fmt, items, _ in iter_format(nitems):
+ itemsize = struct.calcsize(fmt)
+ for flags in (0, ND_PIL):
+ nd = ndarray(items, shape=[nitems], format=fmt, flags=flags)
+ self.verify(nd, obj=None,
+ itemsize=itemsize, fmt=fmt, readonly=1,
+ ndim=1, shape=(nitems,), strides=(itemsize,),
+ lst=items)
+
+ def test_ndarray_format_strides(self):
+ # ndim = 1, strides
+ nitems = randrange(1, 30)
+ for fmt, items, _ in iter_format(nitems):
+ itemsize = struct.calcsize(fmt)
+ for step in range(-5, 5):
+ if step == 0:
+ continue
+
+ shape = [len(items[::step])]
+ strides = [step*itemsize]
+ offset = itemsize*(nitems-1) if step < 0 else 0
+
+ for flags in (0, ND_PIL):
+ nd = ndarray(items, shape=shape, strides=strides,
+ format=fmt, offset=offset, flags=flags)
+ self.verify(nd, obj=None,
+ itemsize=itemsize, fmt=fmt, readonly=1,
+ ndim=1, shape=shape, strides=strides,
+ lst=items[::step])
+
+ def test_ndarray_fortran(self):
+ items = [1,2,3,4,5,6,7,8,9,10,11,12]
+ ex = ndarray(items, shape=(3, 4), strides=(1, 3))
+ nd = ndarray(ex, getbuf=PyBUF_F_CONTIGUOUS|PyBUF_FORMAT)
+ self.assertEqual(nd.tolist(), farray(items, (3, 4)))
+
+ def test_ndarray_multidim(self):
+ for ndim in range(5):
+ shape_t = [randrange(2, 10) for _ in range(ndim)]
+ nitems = prod(shape_t)
+ for shape in permutations(shape_t):
+
+ fmt, items, _ = randitems(nitems)
+ itemsize = struct.calcsize(fmt)
+
+ for flags in (0, ND_PIL):
+ if ndim == 0 and flags == ND_PIL:
+ continue
+
+ # C array
+ nd = ndarray(items, shape=shape, format=fmt, flags=flags)
+
+ strides = strides_from_shape(ndim, shape, itemsize, 'C')
+ lst = carray(items, shape)
+ self.verify(nd, obj=None,
+ itemsize=itemsize, fmt=fmt, readonly=1,
+ ndim=ndim, shape=shape, strides=strides,
+ lst=lst)
+
+ if is_memoryview_format(fmt):
+ # memoryview: reconstruct strides
+ ex = ndarray(items, shape=shape, format=fmt)
+ nd = ndarray(ex, getbuf=PyBUF_CONTIG_RO|PyBUF_FORMAT)
+ self.assertTrue(nd.strides == ())
+ mv = nd.memoryview_from_buffer()
+ self.verify(mv, obj=None,
+ itemsize=itemsize, fmt=fmt, readonly=1,
+ ndim=ndim, shape=shape, strides=strides,
+ lst=lst)
+
+ # Fortran array
+ nd = ndarray(items, shape=shape, format=fmt,
+ flags=flags|ND_FORTRAN)
+
+ strides = strides_from_shape(ndim, shape, itemsize, 'F')
+ lst = farray(items, shape)
+ self.verify(nd, obj=None,
+ itemsize=itemsize, fmt=fmt, readonly=1,
+ ndim=ndim, shape=shape, strides=strides,
+ lst=lst)
+
+ def test_ndarray_index_invalid(self):
+ # not writable
+ nd = ndarray([1], shape=[1])
+ self.assertRaises(TypeError, nd.__setitem__, 1, 8)
+ mv = memoryview(nd)
+ self.assertEqual(mv, nd)
+ self.assertRaises(TypeError, mv.__setitem__, 1, 8)
+
+ # cannot be deleted
+ nd = ndarray([1], shape=[1], flags=ND_WRITABLE)
+ self.assertRaises(TypeError, nd.__delitem__, 1)
+ mv = memoryview(nd)
+ self.assertEqual(mv, nd)
+ self.assertRaises(TypeError, mv.__delitem__, 1)
+
+ # overflow
+ nd = ndarray([1], shape=[1], flags=ND_WRITABLE)
+ self.assertRaises(OverflowError, nd.__getitem__, 1<<64)
+ self.assertRaises(OverflowError, nd.__setitem__, 1<<64, 8)
+ mv = memoryview(nd)
+ self.assertEqual(mv, nd)
+ self.assertRaises(IndexError, mv.__getitem__, 1<<64)
+ self.assertRaises(IndexError, mv.__setitem__, 1<<64, 8)
+
+ # format
+ items = [1,2,3,4,5,6,7,8]
+ nd = ndarray(items, shape=[len(items)], format="B", flags=ND_WRITABLE)
+ self.assertRaises(struct.error, nd.__setitem__, 2, 300)
+ self.assertRaises(ValueError, nd.__setitem__, 1, (100, 200))
+ mv = memoryview(nd)
+ self.assertEqual(mv, nd)
+ self.assertRaises(ValueError, mv.__setitem__, 2, 300)
+ self.assertRaises(TypeError, mv.__setitem__, 1, (100, 200))
+
+ items = [(1,2), (3,4), (5,6)]
+ nd = ndarray(items, shape=[len(items)], format="LQ", flags=ND_WRITABLE)
+ self.assertRaises(ValueError, nd.__setitem__, 2, 300)
+ self.assertRaises(struct.error, nd.__setitem__, 1, (b'\x001', 200))
+
+ def test_ndarray_index_scalar(self):
+ # scalar
+ nd = ndarray(1, shape=(), flags=ND_WRITABLE)
+ mv = memoryview(nd)
+ self.assertEqual(mv, nd)
+
+ x = nd[()]; self.assertEqual(x, 1)
+ x = nd[...]; self.assertEqual(x.tolist(), nd.tolist())
+
+ x = mv[()]; self.assertEqual(x, 1)
+ x = mv[...]; self.assertEqual(x.tolist(), nd.tolist())
+
+ self.assertRaises(TypeError, nd.__getitem__, 0)
+ self.assertRaises(TypeError, mv.__getitem__, 0)
+ self.assertRaises(TypeError, nd.__setitem__, 0, 8)
+ self.assertRaises(TypeError, mv.__setitem__, 0, 8)
+
+ self.assertEqual(nd.tolist(), 1)
+ self.assertEqual(mv.tolist(), 1)
+
+ nd[()] = 9; self.assertEqual(nd.tolist(), 9)
+ mv[()] = 9; self.assertEqual(mv.tolist(), 9)
+
+ nd[...] = 5; self.assertEqual(nd.tolist(), 5)
+ mv[...] = 5; self.assertEqual(mv.tolist(), 5)
+
+ def test_ndarray_index_null_strides(self):
+ ex = ndarray(list(range(2*4)), shape=[2, 4], flags=ND_WRITABLE)
+ nd = ndarray(ex, getbuf=PyBUF_CONTIG)
+
+ # Sub-views are only possible for full exporters.
+ self.assertRaises(BufferError, nd.__getitem__, 1)
+ # Same for slices.
+ self.assertRaises(BufferError, nd.__getitem__, slice(3,5,1))
+
+ def test_ndarray_index_getitem_single(self):
+ # getitem
+ for fmt, items, _ in iter_format(5):
+ nd = ndarray(items, shape=[5], format=fmt)
+ for i in range(-5, 5):
+ self.assertEqual(nd[i], items[i])
+
+ self.assertRaises(IndexError, nd.__getitem__, -6)
+ self.assertRaises(IndexError, nd.__getitem__, 5)
+
+ if is_memoryview_format(fmt):
+ mv = memoryview(nd)
+ self.assertEqual(mv, nd)
+ for i in range(-5, 5):
+ self.assertEqual(mv[i], items[i])
+
+ self.assertRaises(IndexError, mv.__getitem__, -6)
+ self.assertRaises(IndexError, mv.__getitem__, 5)
+
+ # getitem with null strides
+ for fmt, items, _ in iter_format(5):
+ ex = ndarray(items, shape=[5], flags=ND_WRITABLE, format=fmt)
+ nd = ndarray(ex, getbuf=PyBUF_CONTIG|PyBUF_FORMAT)
+
+ for i in range(-5, 5):
+ self.assertEqual(nd[i], items[i])
+
+ if is_memoryview_format(fmt):
+ mv = nd.memoryview_from_buffer()
+ self.assertIs(mv.__eq__(nd), NotImplemented)
+ for i in range(-5, 5):
+ self.assertEqual(mv[i], items[i])
+
+ # getitem with null format
+ items = [1,2,3,4,5]
+ ex = ndarray(items, shape=[5])
+ nd = ndarray(ex, getbuf=PyBUF_CONTIG_RO)
+ for i in range(-5, 5):
+ self.assertEqual(nd[i], items[i])
+
+ # getitem with null shape/strides/format
+ items = [1,2,3,4,5]
+ ex = ndarray(items, shape=[5])
+ nd = ndarray(ex, getbuf=PyBUF_SIMPLE)
+
+ for i in range(-5, 5):
+ self.assertEqual(nd[i], items[i])
+
+ def test_ndarray_index_setitem_single(self):
+ # assign single value
+ for fmt, items, single_item in iter_format(5):
+ nd = ndarray(items, shape=[5], format=fmt, flags=ND_WRITABLE)
+ for i in range(5):
+ items[i] = single_item
+ nd[i] = single_item
+ self.assertEqual(nd.tolist(), items)
+
+ self.assertRaises(IndexError, nd.__setitem__, -6, single_item)
+ self.assertRaises(IndexError, nd.__setitem__, 5, single_item)
+
+ if not is_memoryview_format(fmt):
+ continue
+
+ nd = ndarray(items, shape=[5], format=fmt, flags=ND_WRITABLE)
+ mv = memoryview(nd)
+ self.assertEqual(mv, nd)
+ for i in range(5):
+ items[i] = single_item
+ mv[i] = single_item
+ self.assertEqual(mv.tolist(), items)
+
+ self.assertRaises(IndexError, mv.__setitem__, -6, single_item)
+ self.assertRaises(IndexError, mv.__setitem__, 5, single_item)
+
+
+ # assign single value: lobject = robject
+ for fmt, items, single_item in iter_format(5):
+ nd = ndarray(items, shape=[5], format=fmt, flags=ND_WRITABLE)
+ for i in range(-5, 4):
+ items[i] = items[i+1]
+ nd[i] = nd[i+1]
+ self.assertEqual(nd.tolist(), items)
+
+ if not is_memoryview_format(fmt):
+ continue
+
+ nd = ndarray(items, shape=[5], format=fmt, flags=ND_WRITABLE)
+ mv = memoryview(nd)
+ self.assertEqual(mv, nd)
+ for i in range(-5, 4):
+ items[i] = items[i+1]
+ mv[i] = mv[i+1]
+ self.assertEqual(mv.tolist(), items)
+
+ def test_ndarray_index_getitem_multidim(self):
+ shape_t = (2, 3, 5)
+ nitems = prod(shape_t)
+ for shape in permutations(shape_t):
+
+ fmt, items, _ = randitems(nitems)
+
+ for flags in (0, ND_PIL):
+ # C array
+ nd = ndarray(items, shape=shape, format=fmt, flags=flags)
+ lst = carray(items, shape)
+
+ for i in range(-shape[0], shape[0]):
+ self.assertEqual(lst[i], nd[i].tolist())
+ for j in range(-shape[1], shape[1]):
+ self.assertEqual(lst[i][j], nd[i][j].tolist())
+ for k in range(-shape[2], shape[2]):
+ self.assertEqual(lst[i][j][k], nd[i][j][k])
+
+ # Fortran array
+ nd = ndarray(items, shape=shape, format=fmt,
+ flags=flags|ND_FORTRAN)
+ lst = farray(items, shape)
+
+ for i in range(-shape[0], shape[0]):
+ self.assertEqual(lst[i], nd[i].tolist())
+ for j in range(-shape[1], shape[1]):
+ self.assertEqual(lst[i][j], nd[i][j].tolist())
+ for k in range(-shape[2], shape[2]):
+ self.assertEqual(lst[i][j][k], nd[i][j][k])
+
+ def test_ndarray_sequence(self):
+ nd = ndarray(1, shape=())
+ self.assertRaises(TypeError, eval, "1 in nd", locals())
+ mv = memoryview(nd)
+ self.assertEqual(mv, nd)
+ self.assertRaises(TypeError, eval, "1 in mv", locals())
+
+ for fmt, items, _ in iter_format(5):
+ nd = ndarray(items, shape=[5], format=fmt)
+ for i, v in enumerate(nd):
+ self.assertEqual(v, items[i])
+ self.assertTrue(v in nd)
+
+ if is_memoryview_format(fmt):
+ mv = memoryview(nd)
+ for i, v in enumerate(mv):
+ self.assertEqual(v, items[i])
+ self.assertTrue(v in mv)
+
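+ # Editorial sketch (illustrative, not part of the original patch):
+ # membership testing on a 1-d stdlib view uses the same sequence
+ # protocol exercised above. The helper name is ours.
+ def _demo_membership():
+     m = memoryview(b'\x01\x02\x03')
+     assert 2 in m
+     assert 9 not in m
+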
+ def test_ndarray_slice_invalid(self):
+ items = [1,2,3,4,5,6,7,8]
+
+ # rvalue is not an exporter
+ xl = ndarray(items, shape=[8], flags=ND_WRITABLE)
+ ml = memoryview(xl)
+ self.assertRaises(TypeError, xl.__setitem__, slice(0,8,1), items)
+ self.assertRaises(TypeError, ml.__setitem__, slice(0,8,1), items)
+
+ # rvalue is not a full exporter
+ xl = ndarray(items, shape=[8], flags=ND_WRITABLE)
+ ex = ndarray(items, shape=[8], flags=ND_WRITABLE)
+ xr = ndarray(ex, getbuf=PyBUF_ND)
+ self.assertRaises(BufferError, xl.__setitem__, slice(0,8,1), xr)
+
+ # zero step
+ nd = ndarray(items, shape=[8], format="L", flags=ND_WRITABLE)
+ mv = memoryview(nd)
+ self.assertRaises(ValueError, nd.__getitem__, slice(0,1,0))
+ self.assertRaises(ValueError, mv.__getitem__, slice(0,1,0))
+
+ nd = ndarray(items, shape=[2,4], format="L", flags=ND_WRITABLE)
+ mv = memoryview(nd)
+
+ self.assertRaises(ValueError, nd.__getitem__,
+ (slice(0,1,1), slice(0,1,0)))
+ self.assertRaises(ValueError, nd.__getitem__,
+ (slice(0,1,0), slice(0,1,1)))
+ self.assertRaises(TypeError, nd.__getitem__, "@%$")
+ self.assertRaises(TypeError, nd.__getitem__, ("@%$", slice(0,1,1)))
+ self.assertRaises(TypeError, nd.__getitem__, (slice(0,1,1), {}))
+
+ # memoryview: not implemented
+ self.assertRaises(NotImplementedError, mv.__getitem__,
+ (slice(0,1,1), slice(0,1,0)))
+ self.assertRaises(TypeError, mv.__getitem__, "@%$")
+
+ # differing format
+ xl = ndarray(items, shape=[8], format="B", flags=ND_WRITABLE)
+ xr = ndarray(items, shape=[8], format="b")
+ ml = memoryview(xl)
+ mr = memoryview(xr)
+ self.assertRaises(ValueError, xl.__setitem__, slice(0,1,1), xr[7:8])
+ self.assertEqual(xl.tolist(), items)
+ self.assertRaises(ValueError, ml.__setitem__, slice(0,1,1), mr[7:8])
+ self.assertEqual(ml.tolist(), items)
+
+ # differing itemsize
+ xl = ndarray(items, shape=[8], format="B", flags=ND_WRITABLE)
+ xr = ndarray(items, shape=[8], format="L")
+ ml = memoryview(xl)
+ mr = memoryview(xr)
+ self.assertRaises(ValueError, xl.__setitem__, slice(0,1,1), xr[7:8])
+ self.assertEqual(xl.tolist(), items)
+ self.assertRaises(ValueError, ml.__setitem__, slice(0,1,1), mr[7:8])
+ self.assertEqual(ml.tolist(), items)
+
+ # differing ndim
+ xl = ndarray(items, shape=[2, 4], format="b", flags=ND_WRITABLE)
+ xr = ndarray(items, shape=[8], format="b")
+ ml = memoryview(xl)
+ mr = memoryview(xr)
+ self.assertRaises(ValueError, xl.__setitem__, slice(0,1,1), xr[7:8])
+ self.assertEqual(xl.tolist(), [[1,2,3,4], [5,6,7,8]])
+ self.assertRaises(NotImplementedError, ml.__setitem__, slice(0,1,1),
+ mr[7:8])
+
+ # differing shape
+ xl = ndarray(items, shape=[8], format="b", flags=ND_WRITABLE)
+ xr = ndarray(items, shape=[8], format="b")
+ ml = memoryview(xl)
+ mr = memoryview(xr)
+ self.assertRaises(ValueError, xl.__setitem__, slice(0,2,1), xr[7:8])
+ self.assertEqual(xl.tolist(), items)
+ self.assertRaises(ValueError, ml.__setitem__, slice(0,2,1), mr[7:8])
+ self.assertEqual(ml.tolist(), items)
+
+ # _testbuffer.c module functions
+ self.assertRaises(TypeError, slice_indices, slice(0,1,2), {})
+ self.assertRaises(TypeError, slice_indices, "###########", 1)
+ self.assertRaises(ValueError, slice_indices, slice(0,1,0), 4)
+
+ x = ndarray(items, shape=[8], format="b", flags=ND_PIL)
+ self.assertRaises(TypeError, x.add_suboffsets)
+
+ ex = ndarray(items, shape=[8], format="B")
+ x = ndarray(ex, getbuf=PyBUF_SIMPLE)
+ self.assertRaises(TypeError, x.add_suboffsets)
+
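+ # Editorial sketch (illustrative, not part of the original patch): two
+ # of the invalid-slice cases above, shown on a plain bytearray-backed
+ # view. The helper name is ours.
+ def _demo_invalid_slices():
+     m = memoryview(bytearray(b'abcdefgh'))
+     try:
+         m[::0]                     # zero step
+     except ValueError:
+         pass
+     try:
+         m[0:1] = b'xy'             # differing shape (1 vs. 2 items)
+     except ValueError:
+         pass
+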
+ def test_ndarray_slice_zero_shape(self):
+ items = [1,2,3,4,5,6,7,8,9,10,11,12]
+
+ x = ndarray(items, shape=[12], format="L", flags=ND_WRITABLE)
+ y = ndarray(items, shape=[12], format="L")
+ x[4:4] = y[9:9]
+ self.assertEqual(x.tolist(), items)
+
+ ml = memoryview(x)
+ mr = memoryview(y)
+ self.assertEqual(ml, x)
+ self.assertEqual(ml, y)
+ ml[4:4] = mr[9:9]
+ self.assertEqual(ml.tolist(), items)
+
+ x = ndarray(items, shape=[3, 4], format="L", flags=ND_WRITABLE)
+ y = ndarray(items, shape=[4, 3], format="L")
+ x[1:2, 2:2] = y[1:2, 3:3]
+ self.assertEqual(x.tolist(), carray(items, [3, 4]))
+
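+ # Editorial sketch (illustrative, not part of the original patch):
+ # empty slices are also a no-op for stdlib views. The helper name is
+ # ours.
+ def _demo_zero_shape_slice():
+     m = memoryview(bytearray(b'abc'))
+     m[1:1] = m[2:2]                # zero-length on both sides
+     assert bytes(m) == b'abc'
+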
+ def test_ndarray_slice_multidim(self):
+ shape_t = (2, 3, 5)
+ ndim = len(shape_t)
+ nitems = prod(shape_t)
+ for shape in permutations(shape_t):
+
+ fmt, items, _ = randitems(nitems)
+ itemsize = struct.calcsize(fmt)
+
+ for flags in (0, ND_PIL):
+ nd = ndarray(items, shape=shape, format=fmt, flags=flags)
+ lst = carray(items, shape)
+
+ for slices in rslices_ndim(ndim, shape):
+
+ listerr = None
+ try:
+ sliced = multislice(lst, slices)
+ except Exception as e:
+ listerr = e.__class__
+
+ nderr = None
+ try:
+ ndsliced = nd[slices]
+ except Exception as e:
+ nderr = e.__class__
+
+ if nderr or listerr:
+ self.assertIs(nderr, listerr)
+ else:
+ self.assertEqual(ndsliced.tolist(), sliced)
+
+ def test_ndarray_slice_redundant_suboffsets(self):
+ shape_t = (2, 3, 5, 2)
+ ndim = len(shape_t)
+ nitems = prod(shape_t)
+ for shape in permutations(shape_t):
+
+ fmt, items, _ = randitems(nitems)
+ itemsize = struct.calcsize(fmt)
+
+ nd = ndarray(items, shape=shape, format=fmt)
+ nd.add_suboffsets()
+ ex = ndarray(items, shape=shape, format=fmt)
+ ex.add_suboffsets()
+ mv = memoryview(ex)
+ lst = carray(items, shape)
+
+ for slices in rslices_ndim(ndim, shape):
+
+ listerr = None
+ try:
+ sliced = multislice(lst, slices)
+ except Exception as e:
+ listerr = e.__class__
+
+ nderr = None
+ try:
+ ndsliced = nd[slices]
+ except Exception as e:
+ nderr = e.__class__
+
+ if nderr or listerr:
+ self.assertIs(nderr, listerr)
+ else:
+ self.assertEqual(ndsliced.tolist(), sliced)
+
+ def test_ndarray_slice_assign_single(self):
+ for fmt, items, _ in iter_format(5):
+ for lslice in genslices(5):
+ for rslice in genslices(5):
+ for flags in (0, ND_PIL):
+
+ f = flags|ND_WRITABLE
+ nd = ndarray(items, shape=[5], format=fmt, flags=f)
+ ex = ndarray(items, shape=[5], format=fmt, flags=f)
+ mv = memoryview(ex)
+
+ lsterr = None
+ diff_structure = None
+ lst = items[:]
+ try:
+ lval = lst[lslice]
+ rval = lst[rslice]
+ lst[lslice] = lst[rslice]
+ diff_structure = len(lval) != len(rval)
+ except Exception as e:
+ lsterr = e.__class__
+
+ nderr = None
+ try:
+ nd[lslice] = nd[rslice]
+ except Exception as e:
+ nderr = e.__class__
+
+ if diff_structure: # ndarray cannot change shape
+ self.assertIs(nderr, ValueError)
+ else:
+ self.assertEqual(nd.tolist(), lst)
+ self.assertIs(nderr, lsterr)
+
+ if not is_memoryview_format(fmt):
+ continue
+
+ mverr = None
+ try:
+ mv[lslice] = mv[rslice]
+ except Exception as e:
+ mverr = e.__class__
+
+ if diff_structure: # memoryview cannot change shape
+ self.assertIs(mverr, ValueError)
+ else:
+ self.assertEqual(mv.tolist(), lst)
+ self.assertEqual(mv, nd)
+ self.assertIs(mverr, lsterr)
+ self.verify(mv, obj=ex,
+ itemsize=nd.itemsize, fmt=fmt, readonly=0,
+ ndim=nd.ndim, shape=nd.shape, strides=nd.strides,
+ lst=nd.tolist())
+
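+ # Editorial sketch (illustrative, not part of the original patch): the
+ # "cannot change shape" rule checked above, on a plain bytearray view.
+ # The helper name is ours.
+ def _demo_fixed_shape_assign():
+     m = memoryview(bytearray(b'abcde'))
+     try:
+         m[0:2] = b'xyz'            # 3 items into a 2-item slot
+     except ValueError:
+         pass
+     m[0:2] = b'xy'                 # equal length is fine
+     assert bytes(m) == b'xycde'
+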
+ def test_ndarray_slice_assign_multidim(self):
+ shape_t = (2, 3, 5)
+ ndim = len(shape_t)
+ nitems = prod(shape_t)
+ for shape in permutations(shape_t):
+
+ fmt, items, _ = randitems(nitems)
+
+ for flags in (0, ND_PIL):
+ for _ in range(ITERATIONS):
+ lslices, rslices = randslice_from_shape(ndim, shape)
+
+ nd = ndarray(items, shape=shape, format=fmt,
+ flags=flags|ND_WRITABLE)
+ lst = carray(items, shape)
+
+ listerr = None
+ try:
+ result = multislice_assign(lst, lst, lslices, rslices)
+ except Exception as e:
+ listerr = e.__class__
+
+ nderr = None
+ try:
+ nd[lslices] = nd[rslices]
+ except Exception as e:
+ nderr = e.__class__
+
+ if nderr or listerr:
+ self.assertIs(nderr, listerr)
+ else:
+ self.assertEqual(nd.tolist(), result)
+
+ def test_ndarray_random(self):
+ # construction of valid arrays
+ for _ in range(ITERATIONS):
+ for fmt in fmtdict['@']:
+ itemsize = struct.calcsize(fmt)
+
+ t = rand_structure(itemsize, True, maxdim=MAXDIM,
+ maxshape=MAXSHAPE)
+ self.assertTrue(verify_structure(*t))
+ items = randitems_from_structure(fmt, t)
+
+ x = ndarray_from_structure(items, fmt, t)
+ xlist = x.tolist()
+
+ mv = memoryview(x)
+ if is_memoryview_format(fmt):
+ mvlist = mv.tolist()
+ self.assertEqual(mvlist, xlist)
+
+ if t[2] > 0:
+ # ndim > 0: test against suboffsets representation.
+ y = ndarray_from_structure(items, fmt, t, flags=ND_PIL)
+ ylist = y.tolist()
+ self.assertEqual(xlist, ylist)
+
+ mv = memoryview(y)
+ if is_memoryview_format(fmt):
+ self.assertEqual(mv, y)
+ mvlist = mv.tolist()
+ self.assertEqual(mvlist, ylist)
+
+ if numpy_array:
+ shape = t[3]
+ if 0 in shape:
+ continue # http://projects.scipy.org/numpy/ticket/1910
+ z = numpy_array_from_structure(items, fmt, t)
+ self.verify(x, obj=None,
+ itemsize=z.itemsize, fmt=fmt, readonly=0,
+ ndim=z.ndim, shape=z.shape, strides=z.strides,
+ lst=z.tolist())
+
+ def test_ndarray_random_invalid(self):
+ # exceptions during construction of invalid arrays
+ for _ in range(ITERATIONS):
+ for fmt in fmtdict['@']:
+ itemsize = struct.calcsize(fmt)
+
+ t = rand_structure(itemsize, False, maxdim=MAXDIM,
+ maxshape=MAXSHAPE)
+ self.assertFalse(verify_structure(*t))
+ items = randitems_from_structure(fmt, t)
+
+ nderr = False
+ try:
+ x = ndarray_from_structure(items, fmt, t)
+ except Exception as e:
+ nderr = e.__class__
+ self.assertTrue(nderr)
+
+ if numpy_array:
+ numpy_err = False
+ try:
+ y = numpy_array_from_structure(items, fmt, t)
+ except Exception as e:
+ numpy_err = e.__class__
+
+ if 0: # http://projects.scipy.org/numpy/ticket/1910
+ self.assertTrue(numpy_err)
+
+ def test_ndarray_random_slice_assign(self):
+ # valid slice assignments
+ for _ in range(ITERATIONS):
+ for fmt in fmtdict['@']:
+ itemsize = struct.calcsize(fmt)
+
+ lshape, rshape, lslices, rslices = \
+ rand_aligned_slices(maxdim=MAXDIM, maxshape=MAXSHAPE)
+ tl = rand_structure(itemsize, True, shape=lshape)
+ tr = rand_structure(itemsize, True, shape=rshape)
+ self.assertTrue(verify_structure(*tl))
+ self.assertTrue(verify_structure(*tr))
+ litems = randitems_from_structure(fmt, tl)
+ ritems = randitems_from_structure(fmt, tr)
+
+ xl = ndarray_from_structure(litems, fmt, tl)
+ xr = ndarray_from_structure(ritems, fmt, tr)
+ xl[lslices] = xr[rslices]
+ xllist = xl.tolist()
+ xrlist = xr.tolist()
+
+ ml = memoryview(xl)
+ mr = memoryview(xr)
+ self.assertEqual(ml.tolist(), xllist)
+ self.assertEqual(mr.tolist(), xrlist)
+
+ if tl[2] > 0 and tr[2] > 0:
+ # ndim > 0: test against suboffsets representation.
+ yl = ndarray_from_structure(litems, fmt, tl, flags=ND_PIL)
+ yr = ndarray_from_structure(ritems, fmt, tr, flags=ND_PIL)
+ yl[lslices] = yr[rslices]
+ yllist = yl.tolist()
+ yrlist = yr.tolist()
+ self.assertEqual(xllist, yllist)
+ self.assertEqual(xrlist, yrlist)
+
+ ml = memoryview(yl)
+ mr = memoryview(yr)
+ self.assertEqual(ml.tolist(), yllist)
+ self.assertEqual(mr.tolist(), yrlist)
+
+ if numpy_array:
+ if 0 in lshape or 0 in rshape:
+ continue # http://projects.scipy.org/numpy/ticket/1910
+
+ zl = numpy_array_from_structure(litems, fmt, tl)
+ zr = numpy_array_from_structure(ritems, fmt, tr)
+ zl[lslices] = zr[rslices]
+
+ if not is_overlapping(tl) and not is_overlapping(tr):
+ # Slice assignment of overlapping structures
+ # is undefined in NumPy.
+ self.verify(xl, obj=None,
+ itemsize=zl.itemsize, fmt=fmt, readonly=0,
+ ndim=zl.ndim, shape=zl.shape,
+ strides=zl.strides, lst=zl.tolist())
+
+ self.verify(xr, obj=None,
+ itemsize=zr.itemsize, fmt=fmt, readonly=0,
+ ndim=zr.ndim, shape=zr.shape,
+ strides=zr.strides, lst=zr.tolist())
+
+ def test_ndarray_re_export(self):
+ items = [1,2,3,4,5,6,7,8,9,10,11,12]
+
+ nd = ndarray(items, shape=[3,4], flags=ND_PIL)
+ ex = ndarray(nd)
+
+ self.assertTrue(ex.flags & ND_PIL)
+ self.assertIs(ex.obj, nd)
+ self.assertEqual(ex.suboffsets, (0, -1))
+ self.assertFalse(ex.c_contiguous)
+ self.assertFalse(ex.f_contiguous)
+ self.assertFalse(ex.contiguous)
+
+ def test_ndarray_zero_shape(self):
+ # zeros in shape
+ for flags in (0, ND_PIL):
+ nd = ndarray([1,2,3], shape=[0], flags=flags)
+ mv = memoryview(nd)
+ self.assertEqual(mv, nd)
+ self.assertEqual(nd.tolist(), [])
+ self.assertEqual(mv.tolist(), [])
+
+ nd = ndarray([1,2,3], shape=[0,3,3], flags=flags)
+ self.assertEqual(nd.tolist(), [])
+
+ nd = ndarray([1,2,3], shape=[3,0,3], flags=flags)
+ self.assertEqual(nd.tolist(), [[], [], []])
+
+ nd = ndarray([1,2,3], shape=[3,3,0], flags=flags)
+ self.assertEqual(nd.tolist(),
+ [[[], [], []], [[], [], []], [[], [], []]])
+
+ def test_ndarray_zero_strides(self):
+ # zero strides
+ for flags in (0, ND_PIL):
+ nd = ndarray([1], shape=[5], strides=[0], flags=flags)
+ mv = memoryview(nd)
+ self.assertEqual(mv, nd)
+ self.assertEqual(nd.tolist(), [1, 1, 1, 1, 1])
+ self.assertEqual(mv.tolist(), [1, 1, 1, 1, 1])
+
+ def test_ndarray_offset(self):
+ nd = ndarray(list(range(20)), shape=[3], offset=7)
+ self.assertEqual(nd.offset, 7)
+ self.assertEqual(nd.tolist(), [7,8,9])
+
+ def test_ndarray_memoryview_from_buffer(self):
+ for flags in (0, ND_PIL):
+ nd = ndarray(list(range(3)), shape=[3], flags=flags)
+ m = nd.memoryview_from_buffer()
+ self.assertEqual(m, nd)
+
+ def test_ndarray_get_pointer(self):
+ for flags in (0, ND_PIL):
+ nd = ndarray(list(range(3)), shape=[3], flags=flags)
+ for i in range(3):
+ self.assertEqual(nd[i], get_pointer(nd, [i]))
+
+ def test_ndarray_tolist_null_strides(self):
+ ex = ndarray(list(range(20)), shape=[2,2,5])
+
+ nd = ndarray(ex, getbuf=PyBUF_ND|PyBUF_FORMAT)
+ self.assertEqual(nd.tolist(), ex.tolist())
+
+ m = memoryview(ex)
+ self.assertEqual(m.tolist(), ex.tolist())
+
+ def test_ndarray_cmp_contig(self):
+
+ self.assertFalse(cmp_contig(b"123", b"456"))
+
+ x = ndarray(list(range(12)), shape=[3,4])
+ y = ndarray(list(range(12)), shape=[4,3])
+ self.assertFalse(cmp_contig(x, y))
+
+ x = ndarray([1], shape=[1], format="B")
+ self.assertTrue(cmp_contig(x, b'\x01'))
+ self.assertTrue(cmp_contig(b'\x01', x))
+
+ def test_ndarray_hash(self):
+
+ a = array.array('L', [1,2,3])
+ nd = ndarray(a)
+ self.assertRaises(ValueError, hash, nd)
+
+ # one-dimensional
+ b = bytes(list(range(12)))
+
+ nd = ndarray(list(range(12)), shape=[12])
+ self.assertEqual(hash(nd), hash(b))
+
+ # C-contiguous
+ nd = ndarray(list(range(12)), shape=[3,4])
+ self.assertEqual(hash(nd), hash(b))
+
+ nd = ndarray(list(range(12)), shape=[3,2,2])
+ self.assertEqual(hash(nd), hash(b))
+
+ # Fortran contiguous
+ b = bytes(transpose(list(range(12)), shape=[4,3]))
+ nd = ndarray(list(range(12)), shape=[3,4], flags=ND_FORTRAN)
+ self.assertEqual(hash(nd), hash(b))
+
+ b = bytes(transpose(list(range(12)), shape=[2,3,2]))
+ nd = ndarray(list(range(12)), shape=[2,3,2], flags=ND_FORTRAN)
+ self.assertEqual(hash(nd), hash(b))
+
+ # suboffsets
+ b = bytes(list(range(12)))
+ nd = ndarray(list(range(12)), shape=[2,2,3], flags=ND_PIL)
+ self.assertEqual(hash(nd), hash(b))
+
+ # non-byte formats
+ nd = ndarray(list(range(12)), shape=[2,2,3], format='L')
+ self.assertEqual(hash(nd), hash(nd.tobytes()))
+
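+ # Editorial sketch (illustrative, not part of the original patch): the
+ # hashing rules above for stdlib objects -- read-only contiguous views
+ # hash like their bytes, writable buffers are unhashable. The helper
+ # name is ours.
+ def _demo_hash():
+     b = bytes(range(12))
+     assert hash(memoryview(b)) == hash(b)
+     try:
+         hash(memoryview(bytearray(b)))
+     except ValueError:
+         pass
+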
+ def test_memoryview_construction(self):
+
+ items_shape = [(9, []), ([1,2,3], [3]), (list(range(2*3*5)), [2,3,5])]
+
+ # NumPy style, C-contiguous:
+ for items, shape in items_shape:
+
+ # From PEP-3118 compliant exporter:
+ ex = ndarray(items, shape=shape)
+ m = memoryview(ex)
+ self.assertTrue(m.c_contiguous)
+ self.assertTrue(m.contiguous)
+
+ ndim = len(shape)
+ strides = strides_from_shape(ndim, shape, 1, 'C')
+ lst = carray(items, shape)
+
+ self.verify(m, obj=ex,
+ itemsize=1, fmt='B', readonly=1,
+ ndim=ndim, shape=shape, strides=strides,
+ lst=lst)
+
+ # From memoryview:
+ m2 = memoryview(m)
+ self.verify(m2, obj=ex,
+ itemsize=1, fmt='B', readonly=1,
+ ndim=ndim, shape=shape, strides=strides,
+ lst=lst)
+
+ # PyMemoryView_FromBuffer(): no strides
+ nd = ndarray(ex, getbuf=PyBUF_CONTIG_RO|PyBUF_FORMAT)
+ self.assertEqual(nd.strides, ())
+ m = nd.memoryview_from_buffer()
+ self.verify(m, obj=None,
+ itemsize=1, fmt='B', readonly=1,
+ ndim=ndim, shape=shape, strides=strides,
+ lst=lst)
+
+ # PyMemoryView_FromBuffer(): no format, shape, strides
+ nd = ndarray(ex, getbuf=PyBUF_SIMPLE)
+ self.assertEqual(nd.format, '')
+ self.assertEqual(nd.shape, ())
+ self.assertEqual(nd.strides, ())
+ m = nd.memoryview_from_buffer()
+
+ lst = [items] if ndim == 0 else items
+ self.verify(m, obj=None,
+ itemsize=1, fmt='B', readonly=1,
+ ndim=1, shape=[ex.nbytes], strides=(1,),
+ lst=lst)
+
+ # NumPy style, Fortran contiguous:
+ for items, shape in items_shape:
+
+ # From PEP-3118 compliant exporter:
+ ex = ndarray(items, shape=shape, flags=ND_FORTRAN)
+ m = memoryview(ex)
+ self.assertTrue(m.f_contiguous)
+ self.assertTrue(m.contiguous)
+
+ ndim = len(shape)
+ strides = strides_from_shape(ndim, shape, 1, 'F')
+ lst = farray(items, shape)
+
+ self.verify(m, obj=ex,
+ itemsize=1, fmt='B', readonly=1,
+ ndim=ndim, shape=shape, strides=strides,
+ lst=lst)
+
+ # From memoryview:
+ m2 = memoryview(m)
+ self.verify(m2, obj=ex,
+ itemsize=1, fmt='B', readonly=1,
+ ndim=ndim, shape=shape, strides=strides,
+ lst=lst)
+
+ # PIL style:
+ for items, shape in items_shape[1:]:
+
+ # From PEP-3118 compliant exporter:
+ ex = ndarray(items, shape=shape, flags=ND_PIL)
+ m = memoryview(ex)
+
+ ndim = len(shape)
+ lst = carray(items, shape)
+
+ self.verify(m, obj=ex,
+ itemsize=1, fmt='B', readonly=1,
+ ndim=ndim, shape=shape, strides=ex.strides,
+ lst=lst)
+
+ # From memoryview:
+ m2 = memoryview(m)
+ self.verify(m2, obj=ex,
+ itemsize=1, fmt='B', readonly=1,
+ ndim=ndim, shape=shape, strides=ex.strides,
+ lst=lst)
+
+ # Invalid number of arguments:
+ self.assertRaises(TypeError, memoryview, b'9', 'x')
+ # Not a buffer provider:
+ self.assertRaises(TypeError, memoryview, {})
+ # Non-compliant buffer provider:
+ ex = ndarray([1,2,3], shape=[3])
+ nd = ndarray(ex, getbuf=PyBUF_SIMPLE)
+ self.assertRaises(BufferError, memoryview, nd)
+ nd = ndarray(ex, getbuf=PyBUF_CONTIG_RO|PyBUF_FORMAT)
+ self.assertRaises(BufferError, memoryview, nd)
+
+ # ndim > 64
+ nd = ndarray([1]*128, shape=[1]*128, format='L')
+ self.assertRaises(ValueError, memoryview, nd)
+ self.assertRaises(ValueError, nd.memoryview_from_buffer)
+ self.assertRaises(ValueError, get_contiguous, nd, PyBUF_READ, 'C')
+ self.assertRaises(ValueError, get_contiguous, nd, PyBUF_READ, 'F')
+ self.assertRaises(ValueError, get_contiguous, nd[::-1], PyBUF_READ, 'C')
+
+ def test_memoryview_cast_zero_shape(self):
+ # Casts are undefined if shape contains zeros. These arrays are
+ # regarded as C-contiguous by NumPy and PyBuffer_GetContiguous(),
+ # so they are not caught by the test for C-contiguity in memory_cast().
+ items = [1,2,3]
+ for shape in ([0,3,3], [3,0,3], [3,3,0]):
+ ex = ndarray(items, shape=shape)
+ self.assertTrue(ex.c_contiguous)
+ msrc = memoryview(ex)
+ self.assertRaises(TypeError, msrc.cast, 'c')
+
+ def test_memoryview_struct_module(self):
+
+ class INT(object):
+ def __init__(self, val):
+ self.val = val
+ def __int__(self):
+ return self.val
+
+ class IDX(object):
+ def __init__(self, val):
+ self.val = val
+ def __index__(self):
+ return self.val
+
+ def f(): return 7
+
+ values = [INT(9), IDX(9),
+ 2.2+3j, Decimal("-21.1"), 12.2, Fraction(5, 2),
+ [1,2,3], {4,5,6}, {7:8}, (), (9,),
+ True, False, None, NotImplemented,
+ b'a', b'abc', bytearray(b'a'), bytearray(b'abc'),
+ 'a', 'abc', r'a', r'abc',
+ f, lambda x: x]
+
+ for fmt, items, item in iter_format(10, 'memoryview'):
+ ex = ndarray(items, shape=[10], format=fmt, flags=ND_WRITABLE)
+ nd = ndarray(items, shape=[10], format=fmt, flags=ND_WRITABLE)
+ m = memoryview(ex)
+
+ struct.pack_into(fmt, nd, 0, item)
+ m[0] = item
+ self.assertEqual(m[0], nd[0])
+
+ itemsize = struct.calcsize(fmt)
+ if 'P' in fmt:
+ continue
+
+ for v in values:
+ struct_err = None
+ try:
+ struct.pack_into(fmt, nd, itemsize, v)
+ except struct.error:
+ struct_err = struct.error
+
+ mv_err = None
+ try:
+ m[1] = v
+ except (TypeError, ValueError) as e:
+ mv_err = e.__class__
+
+ if struct_err or mv_err:
+ self.assertIsNot(struct_err, None)
+ self.assertIsNot(mv_err, None)
+ else:
+ self.assertEqual(m[1], nd[1])
+
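+ # Editorial sketch (illustrative, not part of the original patch): the
+ # struct/memoryview parity tested above, with stdlib objects only. The
+ # helper name is ours.
+ def _demo_struct_parity():
+     import struct
+     size = struct.calcsize('I')
+     buf = bytearray(2 * size)
+     m = memoryview(buf).cast('I')
+     struct.pack_into('I', buf, 0, 1000)   # write item 0 via struct
+     m[1] = 1000                           # write item 1 via the view
+     assert m[0] == m[1] == 1000
+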
+ def test_memoryview_cast_zero_strides(self):
+ # Casts are undefined if strides contains zeros. These arrays are
+ # (sometimes!) regarded as C-contiguous by NumPy, but not by
+ # PyBuffer_GetContiguous().
+ ex = ndarray([1,2,3], shape=[3], strides=[0])
+ self.assertFalse(ex.c_contiguous)
+ msrc = memoryview(ex)
+ self.assertRaises(TypeError, msrc.cast, 'c')
+
+ def test_memoryview_cast_invalid(self):
+ # invalid format
+ for sfmt in NON_BYTE_FORMAT:
+ sformat = '@' + sfmt if randrange(2) else sfmt
+ ssize = struct.calcsize(sformat)
+ for dfmt in NON_BYTE_FORMAT:
+ dformat = '@' + dfmt if randrange(2) else dfmt
+ dsize = struct.calcsize(dformat)
+ ex = ndarray(list(range(32)), shape=[32//ssize], format=sformat)
+ msrc = memoryview(ex)
+ self.assertRaises(TypeError, msrc.cast, dfmt, [32//dsize])
+
+ for sfmt, sitems, _ in iter_format(1):
+ ex = ndarray(sitems, shape=[1], format=sfmt)
+ msrc = memoryview(ex)
+ for dfmt, _, _ in iter_format(1):
+ if (not is_memoryview_format(sfmt) or
+ not is_memoryview_format(dfmt)):
+ self.assertRaises(ValueError, msrc.cast, dfmt,
+ [32//dsize])
+ else:
+ if not is_byte_format(sfmt) and not is_byte_format(dfmt):
+ self.assertRaises(TypeError, msrc.cast, dfmt,
+ [32//dsize])
+
+ # invalid shape
+ size_h = struct.calcsize('h')
+ size_d = struct.calcsize('d')
+ ex = ndarray(list(range(2*2*size_d)), shape=[2,2,size_d], format='h')
+ msrc = memoryview(ex)
+ self.assertRaises(TypeError, msrc.cast, shape=[2,2,size_h], format='d')
+
+ ex = ndarray(list(range(120)), shape=[1,2,3,4,5])
+ m = memoryview(ex)
+
+ # incorrect number of args
+ self.assertRaises(TypeError, m.cast)
+ self.assertRaises(TypeError, m.cast, 1, 2, 3)
+
+ # incorrect dest format type
+ self.assertRaises(TypeError, m.cast, {})
+
+ # incorrect dest format
+ self.assertRaises(ValueError, m.cast, "X")
+ self.assertRaises(ValueError, m.cast, "@X")
+ self.assertRaises(ValueError, m.cast, "@XY")
+
+ # dest format not implemented
+ self.assertRaises(ValueError, m.cast, "=B")
+ self.assertRaises(ValueError, m.cast, "!L")
+ self.assertRaises(ValueError, m.cast, "<P")
+ self.assertRaises(ValueError, m.cast, ">l")
+ self.assertRaises(ValueError, m.cast, "BI")
+ self.assertRaises(ValueError, m.cast, "xBI")
+
+ # src format not implemented
+ ex = ndarray([(1,2), (3,4)], shape=[2], format="II")
+ m = memoryview(ex)
+ self.assertRaises(NotImplementedError, m.__getitem__, 0)
+ self.assertRaises(NotImplementedError, m.__setitem__, 0, 8)
+ self.assertRaises(NotImplementedError, m.tolist)
+
+ # incorrect shape type
+ ex = ndarray(list(range(120)), shape=[1,2,3,4,5])
+ m = memoryview(ex)
+ self.assertRaises(TypeError, m.cast, "B", shape={})
+
+ # incorrect shape elements
+ ex = ndarray(list(range(120)), shape=[2*3*4*5])
+ m = memoryview(ex)
+ self.assertRaises(OverflowError, m.cast, "B", shape=[2**64])
+ self.assertRaises(ValueError, m.cast, "B", shape=[-1])
+ self.assertRaises(ValueError, m.cast, "B", shape=[2,3,4,5,6,7,-1])
+ self.assertRaises(ValueError, m.cast, "B", shape=[2,3,4,5,6,7,0])
+ self.assertRaises(TypeError, m.cast, "B", shape=[2,3,4,5,6,7,'x'])
+
+ # N-D -> N-D cast
+ ex = ndarray(list([9 for _ in range(3*5*7*11)]), shape=[3,5,7,11])
+ m = memoryview(ex)
+ self.assertRaises(TypeError, m.cast, "I", shape=[2,3,4,5])
+
+ # cast with ndim > 64
+ nd = ndarray(list(range(128)), shape=[128], format='I')
+ m = memoryview(nd)
+ self.assertRaises(ValueError, m.cast, 'I', [1]*128)
+
+ # view->len not a multiple of itemsize
+ ex = ndarray(list([9 for _ in range(3*5*7*11)]), shape=[3*5*7*11])
+ m = memoryview(ex)
+ self.assertRaises(TypeError, m.cast, "I", shape=[2,3,4,5])
+
+ # product(shape) * itemsize != buffer size
+ ex = ndarray(list([9 for _ in range(3*5*7*11)]), shape=[3*5*7*11])
+ m = memoryview(ex)
+ self.assertRaises(TypeError, m.cast, "B", shape=[2,3,4,5])
+
+ # product(shape) * itemsize overflow
+ nd = ndarray(list(range(128)), shape=[128], format='I')
+ m1 = memoryview(nd)
+ nd = ndarray(list(range(128)), shape=[128], format='B')
+ m2 = memoryview(nd)
+ if sys.maxsize == 2**63-1:
+ self.assertRaises(TypeError, m1.cast, 'B',
+ [7, 7, 73, 127, 337, 92737, 649657])
+ self.assertRaises(ValueError, m1.cast, 'B',
+ [2**20, 2**20, 2**10, 2**10, 2**3])
+ self.assertRaises(ValueError, m2.cast, 'I',
+ [2**20, 2**20, 2**10, 2**10, 2**1])
+ else:
+ self.assertRaises(TypeError, m1.cast, 'B',
+ [1, 2147483647])
+ self.assertRaises(ValueError, m1.cast, 'B',
+ [2**10, 2**10, 2**5, 2**5, 2**1])
+ self.assertRaises(ValueError, m2.cast, 'I',
+ [2**10, 2**10, 2**5, 2**3, 2**1])
+
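+ # Editorial sketch (illustrative, not part of the original patch): two
+ # of the invalid-cast rules above, on stdlib views. The helper name is
+ # ours.
+ def _demo_invalid_cast():
+     import struct
+     m = memoryview(bytes(struct.calcsize('i')))
+     mi = m.cast('i')               # byte format -> 'i' is allowed
+     try:
+         mi.cast('h')               # neither side is a byte format
+     except TypeError:
+         pass
+     try:
+         m.cast('X')                # not a valid format character
+     except ValueError:
+         pass
+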
+ def test_memoryview_cast(self):
+ bytespec = (
+ ('B', lambda ex: list(ex.tobytes())),
+ ('b', lambda ex: [x-256 if x > 127 else x for x in list(ex.tobytes())]),
+ ('c', lambda ex: [bytes(chr(x), 'latin-1') for x in list(ex.tobytes())]),
+ )
+
+ def iter_roundtrip(ex, m, items, fmt):
+ srcsize = struct.calcsize(fmt)
+ for bytefmt, to_bytelist in bytespec:
+
+ m2 = m.cast(bytefmt)
+ lst = to_bytelist(ex)
+ self.verify(m2, obj=ex,
+ itemsize=1, fmt=bytefmt, readonly=0,
+ ndim=1, shape=[31*srcsize], strides=(1,),
+ lst=lst, cast=True)
+
+ m3 = m2.cast(fmt)
+ self.assertEqual(m3, ex)
+ lst = ex.tolist()
+ self.verify(m3, obj=ex,
+ itemsize=srcsize, fmt=fmt, readonly=0,
+ ndim=1, shape=[31], strides=(srcsize,),
+ lst=lst, cast=True)
+
+ # cast from ndim = 0 to ndim = 1
+ srcsize = struct.calcsize('I')
+ ex = ndarray(9, shape=[], format='I')
+ destitems, destshape = cast_items(ex, 'B', 1)
+ m = memoryview(ex)
+ m2 = m.cast('B')
+ self.verify(m2, obj=ex,
+ itemsize=1, fmt='B', readonly=1,
+ ndim=1, shape=destshape, strides=(1,),
+ lst=destitems, cast=True)
+
+ # cast from ndim = 1 to ndim = 0
+ destsize = struct.calcsize('I')
+ ex = ndarray([9]*destsize, shape=[destsize], format='B')
+ destitems, destshape = cast_items(ex, 'I', destsize, shape=[])
+ m = memoryview(ex)
+ m2 = m.cast('I', shape=[])
+ self.verify(m2, obj=ex,
+ itemsize=destsize, fmt='I', readonly=1,
+ ndim=0, shape=(), strides=(),
+ lst=destitems, cast=True)
+
+ # array.array: roundtrip to/from bytes
+ for fmt, items, _ in iter_format(31, 'array'):
+ ex = array.array(fmt, items)
+ m = memoryview(ex)
+ iter_roundtrip(ex, m, items, fmt)
+
+ # ndarray: roundtrip to/from bytes
+ for fmt, items, _ in iter_format(31, 'memoryview'):
+ ex = ndarray(items, shape=[31], format=fmt, flags=ND_WRITABLE)
+ m = memoryview(ex)
+ iter_roundtrip(ex, m, items, fmt)
+
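+ # Editorial sketch (illustrative, not part of the original patch): the
+ # same to/from-bytes roundtrip with a stdlib array.array exporter. The
+ # helper name is ours.
+ def _demo_cast_roundtrip():
+     import array
+     a = array.array('i', [1, 2, 3])
+     mb = memoryview(a).cast('B')   # to a byte view
+     assert mb.tolist() == list(a.tobytes())
+     assert mb.cast('i').tolist() == [1, 2, 3]   # and back
+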
+ def test_memoryview_cast_1D_ND(self):
+ # Cast between C-contiguous buffers. At least one buffer must
+ # be 1D, at least one format must be 'c', 'b' or 'B'.
+ for _tshape in gencastshapes():
+ for char in fmtdict['@']:
+ tfmt = ('', '@')[randrange(2)] + char
+ tsize = struct.calcsize(tfmt)
+ n = prod(_tshape) * tsize
+ obj = 'memoryview' if is_byte_format(tfmt) else 'bytefmt'
+ for fmt, items, _ in iter_format(n, obj):
+ size = struct.calcsize(fmt)
+ shape = [n] if n > 0 else []
+ tshape = _tshape + [size]
+
+ ex = ndarray(items, shape=shape, format=fmt)
+ m = memoryview(ex)
+
+ titems, tshape = cast_items(ex, tfmt, tsize, shape=tshape)
+
+ if titems is None:
+ self.assertRaises(TypeError, m.cast, tfmt, tshape)
+ continue
+ if titems == 'nan':
+ continue # NaNs in lists are a recipe for trouble.
+
+ # 1D -> ND
+ nd = ndarray(titems, shape=tshape, format=tfmt)
+
+ m2 = m.cast(tfmt, shape=tshape)
+ ndim = len(tshape)
+ strides = nd.strides
+ lst = nd.tolist()
+ self.verify(m2, obj=ex,
+ itemsize=tsize, fmt=tfmt, readonly=1,
+ ndim=ndim, shape=tshape, strides=strides,
+ lst=lst, cast=True)
+
+ # ND -> 1D
+ m3 = m2.cast(fmt)
+ m4 = m2.cast(fmt, shape=shape)
+ ndim = len(shape)
+ strides = ex.strides
+ lst = ex.tolist()
+
+ self.verify(m3, obj=ex,
+ itemsize=size, fmt=fmt, readonly=1,
+ ndim=ndim, shape=shape, strides=strides,
+ lst=lst, cast=True)
+
+ self.verify(m4, obj=ex,
+ itemsize=size, fmt=fmt, readonly=1,
+ ndim=ndim, shape=shape, strides=strides,
+ lst=lst, cast=True)
+
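+ # Editorial sketch (illustrative, not part of the original patch): a
+ # 1D <-> ND cast on a plain bytes view. The helper name is ours.
+ def _demo_cast_1d_nd():
+     m = memoryview(bytes(range(12)))
+     m2 = m.cast('B', shape=[3, 4])             # 1D -> 2D
+     assert m2.tolist()[1] == [4, 5, 6, 7]
+     assert m2.cast('B').tolist() == list(range(12))   # 2D -> 1D
+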
+ def test_memoryview_tolist(self):
+
+ # Most tolist() tests are in self.verify() etc.
+
+ a = array.array('h', list(range(-6, 6)))
+ m = memoryview(a)
+ self.assertEqual(m, a)
+ self.assertEqual(m.tolist(), a.tolist())
+
+ a = a[2::3]
+ m = m[2::3]
+ self.assertEqual(m, a)
+ self.assertEqual(m.tolist(), a.tolist())
+
+ ex = ndarray(list(range(2*3*5*7*11)), shape=[11,2,7,3,5], format='L')
+ m = memoryview(ex)
+ self.assertEqual(m.tolist(), ex.tolist())
+
+ ex = ndarray([(2, 5), (7, 11)], shape=[2], format='lh')
+ m = memoryview(ex)
+ self.assertRaises(NotImplementedError, m.tolist)
+
+ ex = ndarray([b'12345'], shape=[1], format="s")
+ m = memoryview(ex)
+ self.assertRaises(NotImplementedError, m.tolist)
+
+ ex = ndarray([b"a",b"b",b"c",b"d",b"e",b"f"], shape=[2,3], format='s')
+ m = memoryview(ex)
+ self.assertRaises(NotImplementedError, m.tolist)
+
+ def test_memoryview_repr(self):
+ m = memoryview(bytearray(9))
+ r = m.__repr__()
+ self.assertTrue(r.startswith("<memory"))
+
+ m.release()
+ r = m.__repr__()
+ self.assertTrue(r.startswith("<released"))
+
+ def test_memoryview_sequence(self):
+
+ for fmt in ('d', 'f'):
+ inf = float(3e400)
+ ex = array.array(fmt, [1.0, inf, 3.0])
+ m = memoryview(ex)
+ self.assertIn(1.0, m)
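+ # Note: the literal 5e700 overflows to inf when parsed, matching the
+ # inf stored above.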
+ self.assertIn(5e700, m)
+ self.assertIn(3.0, m)
+
+ ex = ndarray(9.0, [], format='f')
+ m = memoryview(ex)
+ self.assertRaises(TypeError, eval, "9.0 in m", locals())
+
+ def test_memoryview_index(self):
+
+ # ndim = 0
+ ex = ndarray(12.5, shape=[], format='d')
+ m = memoryview(ex)
+ self.assertEqual(m[()], 12.5)
+ self.assertEqual(m[...], m)
+ self.assertEqual(m[...], ex)
+ self.assertRaises(TypeError, m.__getitem__, 0)
+
+ ex = ndarray((1,2,3), shape=[], format='iii')
+ m = memoryview(ex)
+ self.assertRaises(NotImplementedError, m.__getitem__, ())
+
+ # range
+ ex = ndarray(list(range(7)), shape=[7], flags=ND_WRITABLE)
+ m = memoryview(ex)
+
+ self.assertRaises(IndexError, m.__getitem__, 2**64)
+ self.assertRaises(TypeError, m.__getitem__, 2.0)
+ self.assertRaises(TypeError, m.__getitem__, 0.0)
+
+ # out of bounds
+ self.assertRaises(IndexError, m.__getitem__, -8)
+ self.assertRaises(IndexError, m.__getitem__, 8)
+
+ # Not implemented: multidimensional sub-views
+ ex = ndarray(list(range(12)), shape=[3,4], flags=ND_WRITABLE)
+ m = memoryview(ex)
+
+ self.assertRaises(NotImplementedError, m.__getitem__, 0)
+ self.assertRaises(NotImplementedError, m.__setitem__, 0, 9)
+
+ def test_memoryview_assign(self):
+
+ # ndim = 0
+ ex = ndarray(12.5, shape=[], format='f', flags=ND_WRITABLE)
+ m = memoryview(ex)
+ m[()] = 22.5
+ self.assertEqual(m[()], 22.5)
+ m[...] = 23.5
+ self.assertEqual(m[()], 23.5)
+ self.assertRaises(TypeError, m.__setitem__, 0, 24.7)
+
+ # read-only
+ ex = ndarray(list(range(7)), shape=[7])
+ m = memoryview(ex)
+ self.assertRaises(TypeError, m.__setitem__, 2, 10)
+
+ # range
+ ex = ndarray(list(range(7)), shape=[7], flags=ND_WRITABLE)
+ m = memoryview(ex)
+
+ self.assertRaises(IndexError, m.__setitem__, 2**64, 9)
+ self.assertRaises(TypeError, m.__setitem__, 2.0, 10)
+ self.assertRaises(TypeError, m.__setitem__, 0.0, 11)
+
+ # out of bounds
+ self.assertRaises(IndexError, m.__setitem__, -8, 20)
+ self.assertRaises(IndexError, m.__setitem__, 8, 25)
+
+ # pack_single() success:
+ for fmt in fmtdict['@']:
+ if fmt == 'c' or fmt == '?':
+ continue
+ ex = ndarray([1,2,3], shape=[3], format=fmt, flags=ND_WRITABLE)
+ m = memoryview(ex)
+ i = randrange(-3, 3)
+ m[i] = 8
+ self.assertEqual(m[i], 8)
+ self.assertEqual(m[i], ex[i])
+
+ ex = ndarray([b'1', b'2', b'3'], shape=[3], format='c',
+ flags=ND_WRITABLE)
+ m = memoryview(ex)
+ m[2] = b'9'
+ self.assertEqual(m[2], b'9')
+
+ ex = ndarray([True, False, True], shape=[3], format='?',
+ flags=ND_WRITABLE)
+ m = memoryview(ex)
+ m[1] = True
+ self.assertEqual(m[1], True)
+
+ # pack_single() exceptions:
+ nd = ndarray([b'x'], shape=[1], format='c', flags=ND_WRITABLE)
+ m = memoryview(nd)
+ self.assertRaises(TypeError, m.__setitem__, 0, 100)
+
+ ex = ndarray(list(range(120)), shape=[1,2,3,4,5], flags=ND_WRITABLE)
+ m1 = memoryview(ex)
+
+ for fmt, _range in fmtdict['@'].items():
+ if (fmt == '?'): # PyObject_IsTrue() accepts anything
+ continue
+ if fmt == 'c': # special case tested above
+ continue
+ m2 = m1.cast(fmt)
+ lo, hi = _range
+ if fmt == 'd' or fmt == 'f':
+ lo, hi = -2**1024, 2**1024
+ if fmt != 'P': # PyLong_AsVoidPtr() accepts negative numbers
+ self.assertRaises(ValueError, m2.__setitem__, 0, lo-1)
+ self.assertRaises(TypeError, m2.__setitem__, 0, "xyz")
+ self.assertRaises(ValueError, m2.__setitem__, 0, hi)
+
+ # invalid item
+ m2 = m1.cast('c')
+ self.assertRaises(ValueError, m2.__setitem__, 0, b'\xff\xff')
+
+ # format not implemented
+ ex = ndarray(list(range(1)), shape=[1], format="xL", flags=ND_WRITABLE)
+ m = memoryview(ex)
+ self.assertRaises(NotImplementedError, m.__setitem__, 0, 1)
+
+ ex = ndarray([b'12345'], shape=[1], format="s", flags=ND_WRITABLE)
+ m = memoryview(ex)
+ self.assertRaises(NotImplementedError, m.__setitem__, 0, 1)
+
+ # Not implemented: multidimensional sub-views
+ ex = ndarray(list(range(12)), shape=[3,4], flags=ND_WRITABLE)
+ m = memoryview(ex)
+
+ self.assertRaises(NotImplementedError, m.__setitem__, 0, [2, 3])
+
+ def test_memoryview_slice(self):
+
+ ex = ndarray(list(range(12)), shape=[12], flags=ND_WRITABLE)
+ m = memoryview(ex)
+
+ # zero step
+ self.assertRaises(ValueError, m.__getitem__, slice(0,2,0))
+ self.assertRaises(ValueError, m.__setitem__, slice(0,2,0),
+ bytearray([1,2]))
+
+ # invalid slice key
+ self.assertRaises(TypeError, m.__getitem__, ())
+
+ # multidimensional slices
+ ex = ndarray(list(range(12)), shape=[12], flags=ND_WRITABLE)
+ m = memoryview(ex)
+
+ self.assertRaises(NotImplementedError, m.__getitem__,
+ (slice(0,2,1), slice(0,2,1)))
+ self.assertRaises(NotImplementedError, m.__setitem__,
+ (slice(0,2,1), slice(0,2,1)), bytearray([1,2]))
+
+ # invalid slice tuple
+ self.assertRaises(TypeError, m.__getitem__, (slice(0,2,1), {}))
+ self.assertRaises(TypeError, m.__setitem__, (slice(0,2,1), {}),
+ bytearray([1,2]))
+
+ # rvalue is not an exporter
+ self.assertRaises(TypeError, m.__setitem__, slice(0,1,1), [1])
+
+ # non-contiguous slice assignment
+ for flags in (0, ND_PIL):
+ ex1 = ndarray(list(range(12)), shape=[12], strides=[-1], offset=11,
+ flags=ND_WRITABLE|flags)
+ ex2 = ndarray(list(range(24)), shape=[12], strides=[2], flags=flags)
+ m1 = memoryview(ex1)
+ m2 = memoryview(ex2)
+
+ ex1[2:5] = ex1[2:5]
+ m1[2:5] = m2[2:5]
+
+ self.assertEqual(m1, ex1)
+ self.assertEqual(m2, ex2)
+
+ ex1[1:3][::-1] = ex2[0:2][::1]
+ m1[1:3][::-1] = m2[0:2][::1]
+
+ self.assertEqual(m1, ex1)
+ self.assertEqual(m2, ex2)
+
+ ex1[4:1:-2][::-1] = ex1[1:4:2][::1]
+ m1[4:1:-2][::-1] = m1[1:4:2][::1]
+
+ self.assertEqual(m1, ex1)
+ self.assertEqual(m2, ex2)
+
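+ # Editorial sketch (illustrative, not part of the original patch):
+ # non-contiguous (strided) slice assignment on stdlib views. The
+ # helper name is ours.
+ def _demo_strided_assign():
+     src = memoryview(bytearray(b'abcdef'))
+     dst = memoryview(bytearray(b'......'))
+     dst[::2] = src[:3]             # strided lvalue, equal shape/format
+     assert bytes(dst) == b'a.b.c.'
+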
+ def test_memoryview_array(self):
+
+ def cmptest(testcase, a, b, m, singleitem):
+ for i, _ in enumerate(a):
+ ai = a[i]
+ mi = m[i]
+ testcase.assertEqual(ai, mi)
+ a[i] = singleitem
+ if singleitem != ai:
+ testcase.assertNotEqual(a, m)
+ testcase.assertNotEqual(a, b)
+ else:
+ testcase.assertEqual(a, m)
+ testcase.assertEqual(a, b)
+ m[i] = singleitem
+ testcase.assertEqual(a, m)
+ testcase.assertEqual(b, m)
+ a[i] = ai
+ m[i] = mi
+
+ for n in range(1, 5):
+ for fmt, items, singleitem in iter_format(n, 'array'):
+ for lslice in genslices(n):
+ for rslice in genslices(n):
+
+ a = array.array(fmt, items)
+ b = array.array(fmt, items)
+ m = memoryview(b)
+
+ self.assertEqual(m, a)
+ self.assertEqual(m.tolist(), a.tolist())
+ self.assertEqual(m.tobytes(), a.tobytes())
+ self.assertEqual(len(m), len(a))
+
+ cmptest(self, a, b, m, singleitem)
+
+ array_err = None
+ have_resize = None
+ try:
+ al = a[lslice]
+ ar = a[rslice]
+ a[lslice] = a[rslice]
+ have_resize = len(al) != len(ar)
+ except Exception as e:
+ array_err = e.__class__
+
+ m_err = None
+ try:
+ m[lslice] = m[rslice]
+ except Exception as e:
+ m_err = e.__class__
+
+ if have_resize: # memoryview cannot change shape
+ self.assertIs(m_err, ValueError)
+ elif m_err or array_err:
+ self.assertIs(m_err, array_err)
+ else:
+ self.assertEqual(m, a)
+ self.assertEqual(m.tolist(), a.tolist())
+ self.assertEqual(m.tobytes(), a.tobytes())
+ cmptest(self, a, b, m, singleitem)
+
+ def test_memoryview_compare(self):
+
+ a = array.array('L', [1, 2, 3])
+ b = array.array('L', [1, 2, 7])
+
+ # Ordering comparisons raise:
+ v = memoryview(a)
+ w = memoryview(b)
+ for attr in ('__lt__', '__le__', '__gt__', '__ge__'):
+ self.assertIs(getattr(v, attr)(w), NotImplemented)
+ self.assertIs(getattr(a, attr)(v), NotImplemented)
+
+ # Released views compare equal to themselves:
+ v = memoryview(a)
+ v.release()
+ self.assertEqual(v, v)
+ self.assertNotEqual(v, a)
+ self.assertNotEqual(a, v)
+
+ v = memoryview(a)
+ w = memoryview(a)
+ w.release()
+ self.assertNotEqual(v, w)
+ self.assertNotEqual(w, v)
+
+ # Operand does not implement the buffer protocol:
+ v = memoryview(a)
+ self.assertNotEqual(v, [1, 2, 3])
+
+ # Different formats:
+ c = array.array('l', [1, 2, 3])
+ v = memoryview(a)
+ self.assertNotEqual(v, c)
+ self.assertNotEqual(c, v)
+
+ # Not implemented formats. Ugly, but inevitable. This is the same as
+ # issue #2531: equality is also used for membership testing and must
+ # return a result.
+ a = ndarray([(1, 1.5), (2, 2.7)], shape=[2], format='ld')
+ v = memoryview(a)
+ self.assertNotEqual(v, a)
+ self.assertNotEqual(a, v)
+
+ a = ndarray([b'12345'], shape=[1], format="s")
+ v = memoryview(a)
+ self.assertNotEqual(v, a)
+ self.assertNotEqual(a, v)
+
+ nd = ndarray([(1,1,1), (2,2,2), (3,3,3)], shape=[3], format='iii')
+ v = memoryview(nd)
+ self.assertNotEqual(v, nd)
+ self.assertNotEqual(nd, v)
+
+ # '@' prefix can be dropped:
+ nd1 = ndarray([1,2,3], shape=[3], format='@i')
+ nd2 = ndarray([1,2,3], shape=[3], format='i')
+ v = memoryview(nd1)
+ w = memoryview(nd2)
+ self.assertEqual(v, w)
+ self.assertEqual(w, v)
+ self.assertEqual(v, nd2)
+ self.assertEqual(nd2, v)
+ self.assertEqual(w, nd1)
+ self.assertEqual(nd1, w)
+
+ # ndim = 0
+ nd1 = ndarray(1729, shape=[], format='@L')
+ nd2 = ndarray(1729, shape=[], format='L', flags=ND_WRITABLE)
+ v = memoryview(nd1)
+ w = memoryview(nd2)
+ self.assertEqual(v, w)
+ self.assertEqual(w, v)
+ self.assertEqual(v, nd2)
+ self.assertEqual(nd2, v)
+ self.assertEqual(w, nd1)
+ self.assertEqual(nd1, w)
+
+ self.assertFalse(v.__ne__(w))
+ self.assertFalse(w.__ne__(v))
+
+ w[()] = 1728
+ self.assertNotEqual(v, w)
+ self.assertNotEqual(w, v)
+ self.assertNotEqual(v, nd2)
+ self.assertNotEqual(nd2, v)
+ self.assertNotEqual(w, nd1)
+ self.assertNotEqual(nd1, w)
+
+ self.assertFalse(v.__eq__(w))
+ self.assertFalse(w.__eq__(v))
+
+ nd = ndarray(list(range(12)), shape=[12], flags=ND_WRITABLE|ND_PIL)
+ ex = ndarray(list(range(12)), shape=[12], flags=ND_WRITABLE|ND_PIL)
+ m = memoryview(ex)
+
+ self.assertEqual(m, nd)
+ m[9] = 100
+ self.assertNotEqual(m, nd)
+
+ # ndim = 1: contiguous
+ nd1 = ndarray([-529, 576, -625, 676, -729], shape=[5], format='@h')
+ nd2 = ndarray([-529, 576, -625, 676, 729], shape=[5], format='@h')
+ v = memoryview(nd1)
+ w = memoryview(nd2)
+
+ self.assertEqual(v, nd1)
+ self.assertEqual(w, nd2)
+ self.assertNotEqual(v, nd2)
+ self.assertNotEqual(w, nd1)
+ self.assertNotEqual(v, w)
+
+ # ndim = 1: non-contiguous
+ nd1 = ndarray([-529, -625, -729], shape=[3], format='@h')
+ nd2 = ndarray([-529, 576, -625, 676, -729], shape=[5], format='@h')
+ v = memoryview(nd1)
+ w = memoryview(nd2)
+
+ self.assertEqual(v, nd2[::2])
+ self.assertEqual(w[::2], nd1)
+ self.assertEqual(v, w[::2])
+ self.assertEqual(v[::-1], w[::-2])
+
+ # ndim = 1: non-contiguous, suboffsets
+ nd1 = ndarray([-529, -625, -729], shape=[3], format='@h')
+ nd2 = ndarray([-529, 576, -625, 676, -729], shape=[5], format='@h',
+ flags=ND_PIL)
+ v = memoryview(nd1)
+ w = memoryview(nd2)
+
+ self.assertEqual(v, nd2[::2])
+ self.assertEqual(w[::2], nd1)
+ self.assertEqual(v, w[::2])
+ self.assertEqual(v[::-1], w[::-2])
+
+ # ndim = 1: zeros in shape
+ nd1 = ndarray([900, 961], shape=[0], format='@h')
+ nd2 = ndarray([-900, -961], shape=[0], format='@h')
+ v = memoryview(nd1)
+ w = memoryview(nd2)
+
+ self.assertEqual(v, nd1)
+ self.assertEqual(w, nd2)
+ self.assertEqual(v, nd2)
+ self.assertEqual(w, nd1)
+ self.assertEqual(v, w)
+
+ # ndim = 1: zero strides
+ nd1 = ndarray([900, 900, 900, 900], shape=[4], format='@L')
+ nd2 = ndarray([900], shape=[4], strides=[0], format='L')
+ v = memoryview(nd1)
+ w = memoryview(nd2)
+
+ self.assertEqual(v, nd1)
+ self.assertEqual(w, nd2)
+ self.assertEqual(v, nd2)
+ self.assertEqual(w, nd1)
+ self.assertEqual(v, w)
+
+ n = 10
+ for char in fmtdict['@m']:
+ fmt, items, singleitem = randitems(n, 'memoryview', '@', char)
+ for flags in (0, ND_PIL):
+ nd = ndarray(items, shape=[n], format=fmt, flags=flags)
+ m = memoryview(nd)
+ self.assertEqual(m, nd)
+
+ nd = nd[::-3]
+ m = memoryview(nd)
+ self.assertEqual(m, nd)
+
+ ##### ndim > 1: C-contiguous
+ # different values
+ nd1 = ndarray(list(range(-15, 15)), shape=[3, 2, 5], format='@h')
+ nd2 = ndarray(list(range(0, 30)), shape=[3, 2, 5], format='@h')
+ v = memoryview(nd1)
+ w = memoryview(nd2)
+
+ self.assertEqual(v, nd1)
+ self.assertEqual(w, nd2)
+ self.assertNotEqual(v, nd2)
+ self.assertNotEqual(w, nd1)
+ self.assertNotEqual(v, w)
+
+ # different shape
+ nd1 = ndarray(list(range(30)), shape=[2, 3, 5], format='L')
+ nd2 = ndarray(list(range(30)), shape=[3, 2, 5], format='L')
+ v = memoryview(nd1)
+ w = memoryview(nd2)
+
+ self.assertEqual(v, nd1)
+ self.assertEqual(w, nd2)
+ self.assertNotEqual(v, nd2)
+ self.assertNotEqual(w, nd1)
+ self.assertNotEqual(v, w)
+
+ # different format
+ nd1 = ndarray(list(range(30)), shape=[2, 3, 5], format='L')
+ nd2 = ndarray(list(range(30)), shape=[2, 3, 5], format='l')
+ v = memoryview(nd1)
+ w = memoryview(nd2)
+
+ self.assertEqual(v, nd1)
+ self.assertEqual(w, nd2)
+ self.assertNotEqual(v, nd2)
+ self.assertNotEqual(w, nd1)
+ self.assertNotEqual(v, w)
+
+ ##### ndim > 1: Fortran contiguous
+ # different values
+ nd1 = ndarray(list(range(-15, 15)), shape=[5, 2, 3], format='@h',
+ flags=ND_FORTRAN)
+ nd2 = ndarray(list(range(0, 30)), shape=[5, 2, 3], format='@h',
+ flags=ND_FORTRAN)
+ v = memoryview(nd1)
+ w = memoryview(nd2)
+
+ self.assertEqual(v, nd1)
+ self.assertEqual(w, nd2)
+ self.assertNotEqual(v, nd2)
+ self.assertNotEqual(w, nd1)
+ self.assertNotEqual(v, w)
+
+ # different shape
+ nd1 = ndarray(list(range(-15, 15)), shape=[2, 3, 5], format='l',
+ flags=ND_FORTRAN)
+ nd2 = ndarray(list(range(-15, 15)), shape=[3, 2, 5], format='l',
+ flags=ND_FORTRAN)
+ v = memoryview(nd1)
+ w = memoryview(nd2)
+
+ self.assertEqual(v, nd1)
+ self.assertEqual(w, nd2)
+ self.assertNotEqual(v, nd2)
+ self.assertNotEqual(w, nd1)
+ self.assertNotEqual(v, w)
+
+ # different format
+ nd1 = ndarray(list(range(30)), shape=[5, 2, 3], format='@h',
+ flags=ND_FORTRAN)
+ nd2 = ndarray(list(range(30)), shape=[5, 2, 3], format='@b',
+ flags=ND_FORTRAN)
+ v = memoryview(nd1)
+ w = memoryview(nd2)
+
+ self.assertEqual(v, nd1)
+ self.assertEqual(w, nd2)
+ self.assertNotEqual(v, nd2)
+ self.assertNotEqual(w, nd1)
+ self.assertNotEqual(v, w)
+
+ ##### ndim > 1: mixed C/Fortran contiguous
+ lst1 = list(range(-15, 15))
+ lst2 = transpose(lst1, [3, 2, 5])
+ nd1 = ndarray(lst1, shape=[3, 2, 5], format='@l')
+ nd2 = ndarray(lst2, shape=[3, 2, 5], format='l', flags=ND_FORTRAN)
+ v = memoryview(nd1)
+ w = memoryview(nd2)
+
+ self.assertEqual(v, nd1)
+ self.assertEqual(w, nd2)
+ self.assertEqual(v, w)
+
+ ##### ndim > 1: non-contiguous
+ # different values
+ ex1 = ndarray(list(range(40)), shape=[5, 8], format='@I')
+ nd1 = ex1[3:1:-1, ::-2]
+ ex2 = ndarray(list(range(40)), shape=[5, 8], format='I')
+ nd2 = ex2[1:3:1, ::-2]
+ v = memoryview(nd1)
+ w = memoryview(nd2)
+
+ self.assertEqual(v, nd1)
+ self.assertEqual(w, nd2)
+ self.assertNotEqual(v, nd2)
+ self.assertNotEqual(w, nd1)
+ self.assertNotEqual(v, w)
+
+ # different shape
+ ex1 = ndarray(list(range(30)), shape=[2, 3, 5], format='b')
+ nd1 = ex1[1:3:, ::-2]
+ ex2 = ndarray(list(range(30)), shape=[3, 2, 5], format='b')
+ nd2 = ex2[1:3:, ::-2]
+ v = memoryview(nd1)
+ w = memoryview(nd2)
+
+ self.assertEqual(v, nd1)
+ self.assertEqual(w, nd2)
+ self.assertNotEqual(v, nd2)
+ self.assertNotEqual(w, nd1)
+ self.assertNotEqual(v, w)
+
+ # different format
+ ex1 = ndarray(list(range(30)), shape=[5, 3, 2], format='i')
+ nd1 = ex1[1:3:, ::-2]
+ ex2 = ndarray(list(range(30)), shape=[5, 3, 2], format='@I')
+ nd2 = ex2[1:3:, ::-2]
+ v = memoryview(nd1)
+ w = memoryview(nd2)
+
+ self.assertEqual(v, nd1)
+ self.assertEqual(w, nd2)
+ self.assertNotEqual(v, nd2)
+ self.assertNotEqual(w, nd1)
+ self.assertNotEqual(v, w)
+
+ ##### ndim > 1: zeros in shape
+ nd1 = ndarray(list(range(30)), shape=[0, 3, 2], format='i')
+ nd2 = ndarray(list(range(30)), shape=[5, 0, 2], format='@i')
+ v = memoryview(nd1)
+ w = memoryview(nd2)
+
+ self.assertEqual(v, nd1)
+ self.assertEqual(w, nd2)
+ self.assertNotEqual(v, nd2)
+ self.assertNotEqual(w, nd1)
+ self.assertNotEqual(v, w)
+
+ # ndim > 1: zero strides
+ nd1 = ndarray([900]*80, shape=[4, 5, 4], format='@L')
+ nd2 = ndarray([900], shape=[4, 5, 4], strides=[0, 0, 0], format='L')
+ v = memoryview(nd1)
+ w = memoryview(nd2)
+
+ self.assertEqual(v, nd1)
+ self.assertEqual(w, nd2)
+ self.assertEqual(v, nd2)
+ self.assertEqual(w, nd1)
+ self.assertEqual(v, w)
+ self.assertEqual(v.tolist(), w.tolist())
+
+ ##### ndim > 1: suboffsets
+ ex1 = ndarray(list(range(40)), shape=[5, 8], format='@I')
+ nd1 = ex1[3:1:-1, ::-2]
+ ex2 = ndarray(list(range(40)), shape=[5, 8], format='I', flags=ND_PIL)
+ nd2 = ex2[1:3:1, ::-2]
+ v = memoryview(nd1)
+ w = memoryview(nd2)
+
+ self.assertEqual(v, nd1)
+ self.assertEqual(w, nd2)
+ self.assertNotEqual(v, nd2)
+ self.assertNotEqual(w, nd1)
+ self.assertNotEqual(v, w)
+
+ # different shape
+ ex1 = ndarray(list(range(30)), shape=[2, 3, 5], format='b', flags=ND_PIL)
+ nd1 = ex1[1:3:, ::-2]
+ ex2 = ndarray(list(range(30)), shape=[3, 2, 5], format='b')
+ nd2 = ex2[1:3:, ::-2]
+ v = memoryview(nd1)
+ w = memoryview(nd2)
+
+ self.assertEqual(v, nd1)
+ self.assertEqual(w, nd2)
+ self.assertNotEqual(v, nd2)
+ self.assertNotEqual(w, nd1)
+ self.assertNotEqual(v, w)
+
+ # different format
+ ex1 = ndarray(list(range(30)), shape=[5, 3, 2], format='i', flags=ND_PIL)
+ nd1 = ex1[1:3:, ::-2]
+ ex2 = ndarray(list(range(30)), shape=[5, 3, 2], format='@I', flags=ND_PIL)
+ nd2 = ex2[1:3:, ::-2]
+ v = memoryview(nd1)
+ w = memoryview(nd2)
+
+ self.assertEqual(v, nd1)
+ self.assertEqual(w, nd2)
+ self.assertNotEqual(v, nd2)
+ self.assertNotEqual(w, nd1)
+ self.assertNotEqual(v, w)
+
+ # initialize mixed C/Fortran + suboffsets
+ lst1 = list(range(-15, 15))
+ lst2 = transpose(lst1, [3, 2, 5])
+ nd1 = ndarray(lst1, shape=[3, 2, 5], format='@l', flags=ND_PIL)
+ nd2 = ndarray(lst2, shape=[3, 2, 5], format='l', flags=ND_FORTRAN|ND_PIL)
+ v = memoryview(nd1)
+ w = memoryview(nd2)
+
+ self.assertEqual(v, nd1)
+ self.assertEqual(w, nd2)
+ self.assertEqual(v, w)
+
+ def test_memoryview_check_released(self):
+
+ a = array.array('d', [1.1, 2.2, 3.3])
+
+ m = memoryview(a)
+ m.release()
+
+ # PyMemoryView_FromObject()
+ self.assertRaises(ValueError, memoryview, m)
+ # memoryview.cast()
+ self.assertRaises(ValueError, m.cast, 'c')
+ # getbuffer()
+ self.assertRaises(ValueError, ndarray, m)
+ # memoryview.tolist()
+ self.assertRaises(ValueError, m.tolist)
+ # memoryview.tobytes()
+ self.assertRaises(ValueError, m.tobytes)
+ # sequence
+ self.assertRaises(ValueError, eval, "1.0 in m", locals())
+ # subscript
+ self.assertRaises(ValueError, m.__getitem__, 0)
+ # assignment
+ self.assertRaises(ValueError, m.__setitem__, 0, 1)
+
+ for attr in ('obj', 'nbytes', 'readonly', 'itemsize', 'format', 'ndim',
+ 'shape', 'strides', 'suboffsets', 'c_contiguous',
+ 'f_contiguous', 'contiguous'):
+ self.assertRaises(ValueError, m.__getattribute__, attr)
+
+ # richcompare
+ b = array.array('d', [1.1, 2.2, 3.3])
+ m1 = memoryview(a)
+ m2 = memoryview(b)
+
+ self.assertEqual(m1, m2)
+ m1.release()
+ self.assertNotEqual(m1, m2)
+ self.assertNotEqual(m1, a)
+ self.assertEqual(m1, m1)
+
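+ # Editorial sketch (illustrative, not part of the original patch): the
+ # released-view behavior above in its simplest stdlib form. The helper
+ # name is ours.
+ def _demo_released():
+     m = memoryview(b'abc')
+     m.release()
+     try:
+         len(m)                     # any buffer access now raises
+     except ValueError:
+         pass
+     assert m == m                  # a released view still equals itself
+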
+ def test_memoryview_tobytes(self):
+ # Many implicit tests are already in self.verify().
+
+ nd = ndarray([-529, 576, -625, 676, -729], shape=[5], format='@h')
+
+ m = memoryview(nd)
+ self.assertEqual(m.tobytes(), nd.tobytes())
+
+ def test_memoryview_get_contiguous(self):
+ # Many implicit tests are already in self.verify().
+
+ # no buffer interface
+ self.assertRaises(TypeError, get_contiguous, {}, PyBUF_READ, 'F')
+
+ # writable request to read-only object
+ self.assertRaises(BufferError, get_contiguous, b'x', PyBUF_WRITE, 'C')
+
+ # writable request to non-contiguous object
+ nd = ndarray([1, 2, 3], shape=[2], strides=[2])
+ self.assertRaises(BufferError, get_contiguous, nd, PyBUF_WRITE, 'A')
+
+ # scalar, read-only request from read-only exporter
+ nd = ndarray(9, shape=(), format="L")
+ for order in ['C', 'F', 'A']:
+ m = get_contiguous(nd, PyBUF_READ, order)
+ self.assertEqual(m, nd)
+ self.assertEqual(m[()], 9)
+
+ # scalar, read-only request from writable exporter
+ nd = ndarray(9, shape=(), format="L", flags=ND_WRITABLE)
+ for order in ['C', 'F', 'A']:
+ m = get_contiguous(nd, PyBUF_READ, order)
+ self.assertEqual(m, nd)
+ self.assertEqual(m[()], 9)
+
+ # scalar, writable request
+ for order in ['C', 'F', 'A']:
+ nd[()] = 9
+ m = get_contiguous(nd, PyBUF_WRITE, order)
+ self.assertEqual(m, nd)
+ self.assertEqual(m[()], 9)
+
+ m[()] = 10
+ self.assertEqual(m[()], 10)
+ self.assertEqual(nd[()], 10)
+
+ # zeros in shape
+ nd = ndarray([1], shape=[0], format="L", flags=ND_WRITABLE)
+ for order in ['C', 'F', 'A']:
+ m = get_contiguous(nd, PyBUF_READ, order)
+ self.assertRaises(IndexError, m.__getitem__, 0)
+ self.assertEqual(m, nd)
+ self.assertEqual(m.tolist(), [])
+
+ nd = ndarray(list(range(8)), shape=[2, 0, 7], format="L",
+ flags=ND_WRITABLE)
+ for order in ['C', 'F', 'A']:
+ m = get_contiguous(nd, PyBUF_READ, order)
+ self.assertEqual(ndarray(m).tolist(), [[], []])
+
+ # one-dimensional
+ nd = ndarray([1], shape=[1], format="h", flags=ND_WRITABLE)
+ for order in ['C', 'F', 'A']:
+ m = get_contiguous(nd, PyBUF_WRITE, order)
+ self.assertEqual(m, nd)
+ self.assertEqual(m.tolist(), nd.tolist())
+
+ nd = ndarray([1, 2, 3], shape=[3], format="b", flags=ND_WRITABLE)
+ for order in ['C', 'F', 'A']:
+ m = get_contiguous(nd, PyBUF_WRITE, order)
+ self.assertEqual(m, nd)
+ self.assertEqual(m.tolist(), nd.tolist())
+
+ # one-dimensional, non-contiguous
+ nd = ndarray([1, 2, 3], shape=[2], strides=[2], flags=ND_WRITABLE)
+ for order in ['C', 'F', 'A']:
+ m = get_contiguous(nd, PyBUF_READ, order)
+ self.assertEqual(m, nd)
+ self.assertEqual(m.tolist(), nd.tolist())
+ self.assertRaises(TypeError, m.__setitem__, 1, 20)
+ self.assertEqual(m[1], 3)
+ self.assertEqual(nd[1], 3)
+
+ nd = nd[::-1]
+ for order in ['C', 'F', 'A']:
+ m = get_contiguous(nd, PyBUF_READ, order)
+ self.assertEqual(m, nd)
+ self.assertEqual(m.tolist(), nd.tolist())
+ self.assertRaises(TypeError, m.__setitem__, 1, 20)
+ self.assertEqual(m[1], 1)
+ self.assertEqual(nd[1], 1)
+
+ # multi-dimensional, contiguous input
+ nd = ndarray(list(range(12)), shape=[3, 4], flags=ND_WRITABLE)
+ for order in ['C', 'A']:
+ m = get_contiguous(nd, PyBUF_WRITE, order)
+ self.assertEqual(ndarray(m).tolist(), nd.tolist())
+
+ self.assertRaises(BufferError, get_contiguous, nd, PyBUF_WRITE, 'F')
+ m = get_contiguous(nd, PyBUF_READ, order)
+ self.assertEqual(ndarray(m).tolist(), nd.tolist())
+
+ nd = ndarray(list(range(12)), shape=[3, 4],
+ flags=ND_WRITABLE|ND_FORTRAN)
+ for order in ['F', 'A']:
+ m = get_contiguous(nd, PyBUF_WRITE, order)
+ self.assertEqual(ndarray(m).tolist(), nd.tolist())
+
+ self.assertRaises(BufferError, get_contiguous, nd, PyBUF_WRITE, 'C')
+ m = get_contiguous(nd, PyBUF_READ, order)
+ self.assertEqual(ndarray(m).tolist(), nd.tolist())
+
+ # multi-dimensional, non-contiguous input
+ nd = ndarray(list(range(12)), shape=[3, 4], flags=ND_WRITABLE|ND_PIL)
+ for order in ['C', 'F', 'A']:
+ self.assertRaises(BufferError, get_contiguous, nd, PyBUF_WRITE,
+ order)
+ m = get_contiguous(nd, PyBUF_READ, order)
+ self.assertEqual(ndarray(m).tolist(), nd.tolist())
+
+ # flags
+ nd = ndarray([1,2,3,4,5], shape=[3], strides=[2])
+ m = get_contiguous(nd, PyBUF_READ, 'C')
+ self.assertTrue(m.c_contiguous)
+
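+ # Editorial note (ours): get_contiguous() mirrors the C-level
+ # PyMemoryView_GetContiguous(), which has no direct stdlib wrapper;
+ # tobytes() is the usual way to obtain a C-contiguous copy of a
+ # non-contiguous view. The helper name below is ours.
+ def _demo_contiguous_copy():
+     import array
+     a = array.array('h', [1, 2, 3, 4, 5, 6])
+     m = memoryview(a)[::2]         # non-contiguous view
+     assert not m.c_contiguous
+     assert m.tobytes() == array.array('h', [1, 3, 5]).tobytes()
+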
+ def test_memoryview_serializing(self):
+
+ # C-contiguous
+ size = struct.calcsize('i')
+ a = array.array('i', [1,2,3,4,5])
+ m = memoryview(a)
+ buf = io.BytesIO(m)
+ b = bytearray(5*size)
+ buf.readinto(b)
+ self.assertEqual(m.tobytes(), b)
+
+ # C-contiguous, multi-dimensional
+ size = struct.calcsize('L')
+ nd = ndarray(list(range(12)), shape=[2,3,2], format="L")
+ m = memoryview(nd)
+ buf = io.BytesIO(m)
+ b = bytearray(2*3*2*size)
+ buf.readinto(b)
+ self.assertEqual(m.tobytes(), b)
+
+ # Fortran contiguous, multi-dimensional
+ #size = struct.calcsize('L')
+ #nd = ndarray(list(range(12)), shape=[2,3,2], format="L",
+ # flags=ND_FORTRAN)
+ #m = memoryview(nd)
+ #buf = io.BytesIO(m)
+ #b = bytearray(2*3*2*size)
+ #buf.readinto(b)
+ #self.assertEqual(m.tobytes(), b)
+
+ def test_memoryview_hash(self):
+
+ # bytes exporter
+ b = bytes(list(range(12)))
+ m = memoryview(b)
+ self.assertEqual(hash(b), hash(m))
+
+ # C-contiguous
+ mc = m.cast('c', shape=[3,4])
+ self.assertEqual(hash(mc), hash(b))
+
+ # non-contiguous
+ mx = m[::-2]
+ b = bytes(list(range(12))[::-2])
+ self.assertEqual(hash(mx), hash(b))
+
+ # Fortran contiguous
+ nd = ndarray(list(range(30)), shape=[3,2,5], flags=ND_FORTRAN)
+ m = memoryview(nd)
+ self.assertEqual(hash(m), hash(nd))
+
+ # multi-dimensional slice
+ nd = ndarray(list(range(30)), shape=[3,2,5])
+ x = nd[::2, ::, ::-1]
+ m = memoryview(x)
+ self.assertEqual(hash(m), hash(x))
+
+ # multi-dimensional slice with suboffsets
+ nd = ndarray(list(range(30)), shape=[2,5,3], flags=ND_PIL)
+ x = nd[::2, ::, ::-1]
+ m = memoryview(x)
+ self.assertEqual(hash(m), hash(x))
+
+ # non-byte formats
+ nd = ndarray(list(range(12)), shape=[2,2,3], format='L')
+ m = memoryview(nd)
+ self.assertEqual(hash(m), hash(nd.tobytes()))
+
+ nd = ndarray(list(range(-6, 6)), shape=[2,2,3], format='h')
+ m = memoryview(nd)
+ self.assertEqual(hash(m), hash(nd.tobytes()))
+
+ def test_memoryview_release(self):
+
+ # Create re-exporter from getbuffer(memoryview), then release the view.
+ a = bytearray([1,2,3])
+ m = memoryview(a)
+ nd = ndarray(m) # re-exporter
+ self.assertRaises(BufferError, m.release)
+ del nd
+ m.release()
+
+ a = bytearray([1,2,3])
+ m = memoryview(a)
+ nd1 = ndarray(m, getbuf=PyBUF_FULL_RO, flags=ND_REDIRECT)
+ nd2 = ndarray(nd1, getbuf=PyBUF_FULL_RO, flags=ND_REDIRECT)
+ self.assertIs(nd2.obj, m)
+ self.assertRaises(BufferError, m.release)
+ del nd1, nd2
+ m.release()
+
+ # chained views
+ a = bytearray([1,2,3])
+ m1 = memoryview(a)
+ m2 = memoryview(m1)
+ nd = ndarray(m2) # re-exporter
+ m1.release()
+ self.assertRaises(BufferError, m2.release)
+ del nd
+ m2.release()
+
+ a = bytearray([1,2,3])
+ m1 = memoryview(a)
+ m2 = memoryview(m1)
+ nd1 = ndarray(m2, getbuf=PyBUF_FULL_RO, flags=ND_REDIRECT)
+ nd2 = ndarray(nd1, getbuf=PyBUF_FULL_RO, flags=ND_REDIRECT)
+ self.assertIs(nd2.obj, m2)
+ m1.release()
+ self.assertRaises(BufferError, m2.release)
+ del nd1, nd2
+ m2.release()
+
+ # Allow changing layout while buffers are exported.
+ nd = ndarray([1,2,3], shape=[3], flags=ND_VAREXPORT)
+ m1 = memoryview(nd)
+
+ nd.push([4,5,6,7,8], shape=[5]) # mutate nd
+ m2 = memoryview(nd)
+
+ x = memoryview(m1)
+ self.assertEqual(x.tolist(), m1.tolist())
+
+ y = memoryview(m2)
+ self.assertEqual(y.tolist(), m2.tolist())
+ self.assertEqual(y.tolist(), nd.tolist())
+ m2.release()
+ y.release()
+
+ nd.pop() # pop the current view
+ self.assertEqual(x.tolist(), nd.tolist())
+
+ del nd
+ m1.release()
+ x.release()
+
+ # If multiple memoryviews share the same managed buffer, implicit
+ # release() in the context manager's __exit__() method should still
+ # work.
+ def catch22(b):
+ with memoryview(b) as m2:
+ pass
+
+ x = bytearray(b'123')
+ with memoryview(x) as m1:
+ catch22(m1)
+ self.assertEqual(m1[0], ord(b'1'))
+
+ x = ndarray(list(range(12)), shape=[2,2,3], format='l')
+ y = ndarray(x, getbuf=PyBUF_FULL_RO, flags=ND_REDIRECT)
+ z = ndarray(y, getbuf=PyBUF_FULL_RO, flags=ND_REDIRECT)
+ self.assertIs(z.obj, x)
+ with memoryview(z) as m:
+ catch22(m)
+ self.assertEqual(m[0:1].tolist(), [[[0, 1, 2], [3, 4, 5]]])
+
+ # Test garbage collection.
+ for flags in (0, ND_REDIRECT):
+ x = bytearray(b'123')
+ with memoryview(x) as m1:
+ del x
+ y = ndarray(m1, getbuf=PyBUF_FULL_RO, flags=flags)
+ with memoryview(y) as m2:
+ del y
+ z = ndarray(m2, getbuf=PyBUF_FULL_RO, flags=flags)
+ with memoryview(z) as m3:
+ del z
+ catch22(m3)
+ catch22(m2)
+ catch22(m1)
+ self.assertEqual(m1[0], ord(b'1'))
+ self.assertEqual(m2[1], ord(b'2'))
+ self.assertEqual(m3[2], ord(b'3'))
+ del m3
+ del m2
+ del m1
+
+ x = bytearray(b'123')
+ with memoryview(x) as m1:
+ del x
+ y = ndarray(m1, getbuf=PyBUF_FULL_RO, flags=flags)
+ with memoryview(y) as m2:
+ del y
+ z = ndarray(m2, getbuf=PyBUF_FULL_RO, flags=flags)
+ with memoryview(z) as m3:
+ del z
+ catch22(m1)
+ catch22(m2)
+ catch22(m3)
+ self.assertEqual(m1[0], ord(b'1'))
+ self.assertEqual(m2[1], ord(b'2'))
+ self.assertEqual(m3[2], ord(b'3'))
+ del m1, m2, m3
+
+ # memoryview.release() fails if the view has exported buffers.
+ x = bytearray(b'123')
+ with self.assertRaises(BufferError):
+ with memoryview(x) as m:
+ ex = ndarray(m)
+ m[0] == ord(b'1')
+
+ def test_memoryview_redirect(self):
+
+ nd = ndarray([1.0 * x for x in range(12)], shape=[12], format='d')
+ a = array.array('d', [1.0 * x for x in range(12)])
+
+ for x in (nd, a):
+ y = ndarray(x, getbuf=PyBUF_FULL_RO, flags=ND_REDIRECT)
+ z = ndarray(y, getbuf=PyBUF_FULL_RO, flags=ND_REDIRECT)
+ m = memoryview(z)
+
+ self.assertIs(y.obj, x)
+ self.assertIs(z.obj, x)
+ self.assertIs(m.obj, x)
+
+ self.assertEqual(m, x)
+ self.assertEqual(m, y)
+ self.assertEqual(m, z)
+
+ self.assertEqual(m[1:3], x[1:3])
+ self.assertEqual(m[1:3], y[1:3])
+ self.assertEqual(m[1:3], z[1:3])
+ del y, z
+ self.assertEqual(m[1:3], x[1:3])
+
+ def test_memoryview_from_static_exporter(self):
+
+ fmt = 'B'
+ lst = [0,1,2,3,4,5,6,7,8,9,10,11]
+
+ # exceptions
+ self.assertRaises(TypeError, staticarray, 1, 2, 3)
+
+ # view.obj==x
+ x = staticarray()
+ y = memoryview(x)
+ self.verify(y, obj=x,
+ itemsize=1, fmt=fmt, readonly=1,
+ ndim=1, shape=[12], strides=[1],
+ lst=lst)
+ for i in range(12):
+ self.assertEqual(y[i], i)
+ del x
+ del y
+
+ x = staticarray()
+ y = memoryview(x)
+ del y
+ del x
+
+ x = staticarray()
+ y = ndarray(x, getbuf=PyBUF_FULL_RO)
+ z = ndarray(y, getbuf=PyBUF_FULL_RO)
+ m = memoryview(z)
+ self.assertIs(y.obj, x)
+ self.assertIs(m.obj, z)
+ self.verify(m, obj=z,
+ itemsize=1, fmt=fmt, readonly=1,
+ ndim=1, shape=[12], strides=[1],
+ lst=lst)
+ del x, y, z, m
+
+ x = staticarray()
+ y = ndarray(x, getbuf=PyBUF_FULL_RO, flags=ND_REDIRECT)
+ z = ndarray(y, getbuf=PyBUF_FULL_RO, flags=ND_REDIRECT)
+ m = memoryview(z)
+ self.assertIs(y.obj, x)
+ self.assertIs(z.obj, x)
+ self.assertIs(m.obj, x)
+ self.verify(m, obj=x,
+ itemsize=1, fmt=fmt, readonly=1,
+ ndim=1, shape=[12], strides=[1],
+ lst=lst)
+ del x, y, z, m
+
+ # view.obj==NULL
+ x = staticarray(legacy_mode=True)
+ y = memoryview(x)
+ self.verify(y, obj=None,
+ itemsize=1, fmt=fmt, readonly=1,
+ ndim=1, shape=[12], strides=[1],
+ lst=lst)
+ for i in range(12):
+ self.assertEqual(y[i], i)
+ del x
+ del y
+
+ x = staticarray(legacy_mode=True)
+ y = memoryview(x)
+ del y
+ del x
+
+ x = staticarray(legacy_mode=True)
+ y = ndarray(x, getbuf=PyBUF_FULL_RO)
+ z = ndarray(y, getbuf=PyBUF_FULL_RO)
+ m = memoryview(z)
+ self.assertIs(y.obj, None)
+ self.assertIs(m.obj, z)
+ self.verify(m, obj=z,
+ itemsize=1, fmt=fmt, readonly=1,
+ ndim=1, shape=[12], strides=[1],
+ lst=lst)
+ del x, y, z, m
+
+ x = staticarray(legacy_mode=True)
+ y = ndarray(x, getbuf=PyBUF_FULL_RO, flags=ND_REDIRECT)
+ z = ndarray(y, getbuf=PyBUF_FULL_RO, flags=ND_REDIRECT)
+ m = memoryview(z)
+ # Clearly setting view.obj==NULL is inferior, since it
+ # messes up the redirection chain:
+ self.assertIs(y.obj, None)
+ self.assertIs(z.obj, y)
+ self.assertIs(m.obj, y)
+ self.verify(m, obj=y,
+ itemsize=1, fmt=fmt, readonly=1,
+ ndim=1, shape=[12], strides=[1],
+ lst=lst)
+ del x, y, z, m
+
+ def test_memoryview_getbuffer_undefined(self):
+
+ # getbufferproc does not adhere to the new documentation
+ nd = ndarray([1,2,3], [3], flags=ND_GETBUF_FAIL|ND_GETBUF_UNDEFINED)
+ self.assertRaises(BufferError, memoryview, nd)
+
+ def test_issue_7385(self):
+ x = ndarray([1,2,3], shape=[3], flags=ND_GETBUF_FAIL)
+ self.assertRaises(BufferError, memoryview, x)
+
+
+def test_main():
+ support.run_unittest(TestBufferProtocol)
+
+
+if __name__ == "__main__":
+ test_main()
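
The release and export rules exercised above come from the buffer protocol itself, not from the private _testbuffer helpers (ndarray, staticarray, get_contiguous). A minimal sketch of the same chained-view behaviour, assuming only builtins:

    a = bytearray(b'abc')
    m1 = memoryview(a)
    m2 = memoryview(m1)   # shares the managed buffer; no new export on m1
    m1.release()          # allowed: m1 itself has no exported buffers
    try:
        a.pop()           # resizing is refused while any view is alive
    except BufferError as exc:
        print(exc)        # "Existing exports of data: object cannot be re-sized"
    m2.release()
    a.pop()               # allowed once the last view is released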
diff --git a/Lib/test/test_builtin.py b/Lib/test/test_builtin.py
index 55fb63a..bfa5ee7 100644
--- a/Lib/test/test_builtin.py
+++ b/Lib/test/test_builtin.py
@@ -12,7 +12,7 @@ import types
import builtins
import random
import traceback
-from test.support import fcmp, TESTFN, unlink, run_unittest, check_warnings
+from test.support import TESTFN, unlink, run_unittest, check_warnings
from operator import neg
try:
import pty, signal
@@ -255,8 +255,7 @@ class BuiltinTest(unittest.TestCase):
self.assertEqual(chr(0xff), '\xff')
self.assertRaises(ValueError, chr, 1<<24)
self.assertEqual(chr(sys.maxunicode),
- str(('\\U%08x' % (sys.maxunicode)).encode("ascii"),
- 'unicode-escape'))
+ str('\\U0010ffff'.encode("ascii"), 'unicode-escape'))
self.assertRaises(TypeError, chr)
self.assertEqual(chr(0x0000FFFF), "\U0000FFFF")
self.assertEqual(chr(0x00010000), "\U00010000")
@@ -378,7 +377,15 @@ class BuiltinTest(unittest.TestCase):
f = Foo()
self.assertTrue(dir(f) == ["ga", "kan", "roo"])
- # dir(obj__dir__not_list)
+ # dir(obj__dir__tuple)
+ class Foo(object):
+ def __dir__(self):
+ return ("b", "c", "a")
+ res = dir(Foo())
+ self.assertIsInstance(res, list)
+ self.assertTrue(res == ["a", "b", "c"])
+
+ # dir(obj__dir__not_sequence)
class Foo(object):
def __dir__(self):
return 7
@@ -391,6 +398,8 @@ class BuiltinTest(unittest.TestCase):
except:
self.assertEqual(len(dir(sys.exc_info()[2])), 4)
+ # test that object has a __dir__()
+ self.assertEqual(sorted([].__dir__()), dir([]))
def test_divmod(self):
self.assertEqual(divmod(12, 7), (1, 5))
@@ -400,10 +409,13 @@ class BuiltinTest(unittest.TestCase):
self.assertEqual(divmod(-sys.maxsize-1, -1), (sys.maxsize+1, 0))
- self.assertTrue(not fcmp(divmod(3.25, 1.0), (3.0, 0.25)))
- self.assertTrue(not fcmp(divmod(-3.25, 1.0), (-4.0, 0.75)))
- self.assertTrue(not fcmp(divmod(3.25, -1.0), (-4.0, -0.75)))
- self.assertTrue(not fcmp(divmod(-3.25, -1.0), (3.0, -0.25)))
+ for num, denom, exp_result in [ (3.25, 1.0, (3.0, 0.25)),
+ (-3.25, 1.0, (-4.0, 0.75)),
+ (3.25, -1.0, (-4.0, -0.75)),
+ (-3.25, -1.0, (3.0, -0.25))]:
+ result = divmod(num, denom)
+ self.assertAlmostEqual(result[0], exp_result[0])
+ self.assertAlmostEqual(result[1], exp_result[1])
self.assertRaises(TypeError, divmod)
@@ -1197,6 +1209,9 @@ class BuiltinTest(unittest.TestCase):
self.assertRaises(TypeError, sum, 42)
self.assertRaises(TypeError, sum, ['a', 'b', 'c'])
self.assertRaises(TypeError, sum, ['a', 'b', 'c'], '')
+ self.assertRaises(TypeError, sum, [b'a', b'c'], b'')
+ values = [bytearray(b'a'), bytearray(b'b')]
+ self.assertRaises(TypeError, sum, values, bytearray(b''))
self.assertRaises(TypeError, sum, [[1], [2], [3]])
self.assertRaises(TypeError, sum, [{2:3}])
self.assertRaises(TypeError, sum, [{2:3}]*2, {2:3})
@@ -1358,14 +1373,14 @@ class BuiltinTest(unittest.TestCase):
# --------------------------------------------------------------------
# Issue #7994: object.__format__ with a non-empty format string is
- # pending deprecated
+ # deprecated
def test_deprecated_format_string(obj, fmt_str, should_raise_warning):
with warnings.catch_warnings(record=True) as w:
- warnings.simplefilter("always", PendingDeprecationWarning)
+ warnings.simplefilter("always", DeprecationWarning)
format(obj, fmt_str)
if should_raise_warning:
self.assertEqual(len(w), 1)
- self.assertIsInstance(w[0].message, PendingDeprecationWarning)
+ self.assertIsInstance(w[0].message, DeprecationWarning)
self.assertIn('object.__format__ with a non-empty format '
'string', str(w[0].message))
else:
@@ -1409,6 +1424,13 @@ class BuiltinTest(unittest.TestCase):
self.assertRaises(ValueError, x.translate, b"1", 1)
self.assertRaises(TypeError, x.translate, b"1"*256, 1)
+ def test_construct_singletons(self):
+ for const in None, Ellipsis, NotImplemented:
+ tp = type(const)
+ self.assertIs(tp(), const)
+ self.assertRaises(TypeError, tp, 1, 2)
+ self.assertRaises(TypeError, tp, a=1, b=2)
+
class TestSorted(unittest.TestCase):
def test_basic(self):
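
The divmod() rewrite above replaces the removed test.support.fcmp() helper with per-component assertAlmostEqual, since exact float comparison is fragile in general. The invariant being checked is ordinary floored division; a small sketch (these particular values happen to be exactly representable):

    q, r = divmod(-3.25, 1.0)
    assert (q, r) == (-4.0, 0.75)              # quotient floored toward -inf
    assert abs(q * 1.0 + r - (-3.25)) < 1e-12  # q*divisor + r == dividend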
diff --git a/Lib/test/test_bytes.py b/Lib/test/test_bytes.py
index 5eab8f5..203fc5c 100644
--- a/Lib/test/test_bytes.py
+++ b/Lib/test/test_bytes.py
@@ -188,24 +188,26 @@ class BaseBytesTest(unittest.TestCase):
def test_encoding(self):
sample = "Hello world\n\u1234\u5678\u9abc"
- for enc in ("utf8", "utf16"):
+ for enc in ("utf-8", "utf-16"):
b = self.type2test(sample, enc)
self.assertEqual(b, self.type2test(sample.encode(enc)))
- self.assertRaises(UnicodeEncodeError, self.type2test, sample, "latin1")
- b = self.type2test(sample, "latin1", "ignore")
+ self.assertRaises(UnicodeEncodeError, self.type2test, sample, "latin-1")
+ b = self.type2test(sample, "latin-1", "ignore")
self.assertEqual(b, self.type2test(sample[:-3], "utf-8"))
def test_decode(self):
sample = "Hello world\n\u1234\u5678\u9abc\def0\def0"
- for enc in ("utf8", "utf16"):
+ for enc in ("utf-8", "utf-16"):
b = self.type2test(sample, enc)
self.assertEqual(b.decode(enc), sample)
sample = "Hello world\n\x80\x81\xfe\xff"
- b = self.type2test(sample, "latin1")
- self.assertRaises(UnicodeDecodeError, b.decode, "utf8")
- self.assertEqual(b.decode("utf8", "ignore"), "Hello world\n")
- self.assertEqual(b.decode(errors="ignore", encoding="utf8"),
+ b = self.type2test(sample, "latin-1")
+ self.assertRaises(UnicodeDecodeError, b.decode, "utf-8")
+ self.assertEqual(b.decode("utf-8", "ignore"), "Hello world\n")
+ self.assertEqual(b.decode(errors="ignore", encoding="utf-8"),
"Hello world\n")
+ # Default encoding is utf-8
+ self.assertEqual(self.type2test(b'\xe2\x98\x83').decode(), '\u2603')
def test_from_int(self):
b = self.type2test(0)
@@ -291,10 +293,27 @@ class BaseBytesTest(unittest.TestCase):
def test_count(self):
b = self.type2test(b'mississippi')
+ i = 105
+ p = 112
+ w = 119
+
self.assertEqual(b.count(b'i'), 4)
self.assertEqual(b.count(b'ss'), 2)
self.assertEqual(b.count(b'w'), 0)
+ self.assertEqual(b.count(i), 4)
+ self.assertEqual(b.count(w), 0)
+
+ self.assertEqual(b.count(b'i', 6), 2)
+ self.assertEqual(b.count(b'p', 6), 2)
+ self.assertEqual(b.count(b'i', 1, 3), 1)
+ self.assertEqual(b.count(b'p', 7, 9), 1)
+
+ self.assertEqual(b.count(i, 6), 2)
+ self.assertEqual(b.count(p, 6), 2)
+ self.assertEqual(b.count(i, 1, 3), 1)
+ self.assertEqual(b.count(p, 7, 9), 1)
+
def test_startswith(self):
b = self.type2test(b'hello')
self.assertFalse(self.type2test().startswith(b"anything"))
@@ -325,35 +344,86 @@ class BaseBytesTest(unittest.TestCase):
def test_find(self):
b = self.type2test(b'mississippi')
+ i = 105
+ w = 119
+
self.assertEqual(b.find(b'ss'), 2)
+ self.assertEqual(b.find(b'w'), -1)
+ self.assertEqual(b.find(b'mississippian'), -1)
+
+ self.assertEqual(b.find(i), 1)
+ self.assertEqual(b.find(w), -1)
+
self.assertEqual(b.find(b'ss', 3), 5)
self.assertEqual(b.find(b'ss', 1, 7), 2)
self.assertEqual(b.find(b'ss', 1, 3), -1)
- self.assertEqual(b.find(b'w'), -1)
- self.assertEqual(b.find(b'mississippian'), -1)
+
+ self.assertEqual(b.find(i, 6), 7)
+ self.assertEqual(b.find(i, 1, 3), 1)
+ self.assertEqual(b.find(w, 1, 3), -1)
+
+ for index in (-1, 256, sys.maxsize + 1):
+ self.assertRaisesRegex(
+ ValueError, r'byte must be in range\(0, 256\)',
+ b.find, index)
def test_rfind(self):
b = self.type2test(b'mississippi')
+ i = 105
+ w = 119
+
self.assertEqual(b.rfind(b'ss'), 5)
- self.assertEqual(b.rfind(b'ss', 3), 5)
- self.assertEqual(b.rfind(b'ss', 0, 6), 2)
self.assertEqual(b.rfind(b'w'), -1)
self.assertEqual(b.rfind(b'mississippian'), -1)
+ self.assertEqual(b.rfind(i), 10)
+ self.assertEqual(b.rfind(w), -1)
+
+ self.assertEqual(b.rfind(b'ss', 3), 5)
+ self.assertEqual(b.rfind(b'ss', 0, 6), 2)
+
+ self.assertEqual(b.rfind(i, 1, 3), 1)
+ self.assertEqual(b.rfind(i, 3, 9), 7)
+ self.assertEqual(b.rfind(w, 1, 3), -1)
+
def test_index(self):
- b = self.type2test(b'world')
- self.assertEqual(b.index(b'w'), 0)
- self.assertEqual(b.index(b'orl'), 1)
- self.assertRaises(ValueError, b.index, b'worm')
- self.assertRaises(ValueError, b.index, b'ldo')
+ b = self.type2test(b'mississippi')
+ i = 105
+ w = 119
+
+ self.assertEqual(b.index(b'ss'), 2)
+ self.assertRaises(ValueError, b.index, b'w')
+ self.assertRaises(ValueError, b.index, b'mississippian')
+
+ self.assertEqual(b.index(i), 1)
+ self.assertRaises(ValueError, b.index, w)
+
+ self.assertEqual(b.index(b'ss', 3), 5)
+ self.assertEqual(b.index(b'ss', 1, 7), 2)
+ self.assertRaises(ValueError, b.index, b'ss', 1, 3)
+
+ self.assertEqual(b.index(i, 6), 7)
+ self.assertEqual(b.index(i, 1, 3), 1)
+ self.assertRaises(ValueError, b.index, w, 1, 3)
def test_rindex(self):
- # XXX could be more rigorous
- b = self.type2test(b'world')
- self.assertEqual(b.rindex(b'w'), 0)
- self.assertEqual(b.rindex(b'orl'), 1)
- self.assertRaises(ValueError, b.rindex, b'worm')
- self.assertRaises(ValueError, b.rindex, b'ldo')
+ b = self.type2test(b'mississippi')
+ i = 105
+ w = 119
+
+ self.assertEqual(b.rindex(b'ss'), 5)
+ self.assertRaises(ValueError, b.rindex, b'w')
+ self.assertRaises(ValueError, b.rindex, b'mississippian')
+
+ self.assertEqual(b.rindex(i), 10)
+ self.assertRaises(ValueError, b.rindex, w)
+
+ self.assertEqual(b.rindex(b'ss', 3), 5)
+ self.assertEqual(b.rindex(b'ss', 0, 6), 2)
+
+ self.assertEqual(b.rindex(i, 1, 3), 1)
+ self.assertEqual(b.rindex(i, 3, 9), 7)
+ self.assertRaises(ValueError, b.rindex, w, 1, 3)
def test_replace(self):
b = self.type2test(b'mississippi')
@@ -365,6 +435,14 @@ class BaseBytesTest(unittest.TestCase):
self.assertEqual(b.split(b'i'), [b'm', b'ss', b'ss', b'pp', b''])
self.assertEqual(b.split(b'ss'), [b'mi', b'i', b'ippi'])
self.assertEqual(b.split(b'w'), [b])
+ # with keyword args
+ b = self.type2test(b'a|b|c|d')
+ self.assertEqual(b.split(sep=b'|'), [b'a', b'b', b'c', b'd'])
+ self.assertEqual(b.split(b'|', maxsplit=1), [b'a', b'b|c|d'])
+ self.assertEqual(b.split(sep=b'|', maxsplit=1), [b'a', b'b|c|d'])
+ self.assertEqual(b.split(maxsplit=1, sep=b'|'), [b'a', b'b|c|d'])
+ b = self.type2test(b'a b c d')
+ self.assertEqual(b.split(maxsplit=1), [b'a', b'b c d'])
def test_split_whitespace(self):
for b in (b' arf barf ', b'arf\tbarf', b'arf\nbarf', b'arf\rbarf',
@@ -393,6 +471,14 @@ class BaseBytesTest(unittest.TestCase):
self.assertEqual(b.rsplit(b'i'), [b'm', b'ss', b'ss', b'pp', b''])
self.assertEqual(b.rsplit(b'ss'), [b'mi', b'i', b'ippi'])
self.assertEqual(b.rsplit(b'w'), [b])
+ # with keyword args
+ b = self.type2test(b'a|b|c|d')
+ self.assertEqual(b.rsplit(sep=b'|'), [b'a', b'b', b'c', b'd'])
+ self.assertEqual(b.rsplit(b'|', maxsplit=1), [b'a|b|c', b'd'])
+ self.assertEqual(b.rsplit(sep=b'|', maxsplit=1), [b'a|b|c', b'd'])
+ self.assertEqual(b.rsplit(maxsplit=1, sep=b'|'), [b'a|b|c', b'd'])
+ b = self.type2test(b'a b c d')
+ self.assertEqual(b.rsplit(maxsplit=1), [b'a b c', b'd'])
def test_rsplit_whitespace(self):
for b in (b' arf barf ', b'arf\tbarf', b'arf\nbarf', b'arf\rbarf',
@@ -473,6 +559,27 @@ class BaseBytesTest(unittest.TestCase):
self.assertRaises(TypeError, self.type2test(b'abc').lstrip, 'b')
self.assertRaises(TypeError, self.type2test(b'abc').rstrip, 'b')
+ def test_center(self):
+ # Fill character can be either bytes or bytearray (issue 12380)
+ b = self.type2test(b'abc')
+ for fill_type in (bytes, bytearray):
+ self.assertEqual(b.center(7, fill_type(b'-')),
+ self.type2test(b'--abc--'))
+
+ def test_ljust(self):
+ # Fill character can be either bytes or bytearray (issue 12380)
+ b = self.type2test(b'abc')
+ for fill_type in (bytes, bytearray):
+ self.assertEqual(b.ljust(7, fill_type(b'-')),
+ self.type2test(b'abc----'))
+
+ def test_rjust(self):
+ # Fill character can be either bytes or bytearray (issue 12380)
+ b = self.type2test(b'abc')
+ for fill_type in (bytes, bytearray):
+ self.assertEqual(b.rjust(7, fill_type(b'-')),
+ self.type2test(b'----abc'))
+
def test_ord(self):
b = self.type2test(b'\0A\x7f\x80\xff')
self.assertEqual([ord(b[i:i+1]) for i in range(len(b))],
@@ -529,6 +636,14 @@ class BaseBytesTest(unittest.TestCase):
self.assertEqual(True, b.startswith(h, None, -2))
self.assertEqual(False, b.startswith(x, None, None))
+ def test_integer_arguments_out_of_byte_range(self):
+ b = self.type2test(b'hello')
+
+ for method in (b.count, b.find, b.index, b.rfind, b.rindex):
+ self.assertRaises(ValueError, method, -1)
+ self.assertRaises(ValueError, method, 256)
+ self.assertRaises(ValueError, method, 9999)
+
def test_find_etc_raise_correct_error_messages(self):
# issue 11828
b = self.type2test(b'hello')
@@ -634,6 +749,39 @@ class ByteArrayTest(BaseBytesTest):
b.reverse()
self.assertFalse(b)
+ def test_clear(self):
+ b = bytearray(b'python')
+ b.clear()
+ self.assertEqual(b, b'')
+
+ b = bytearray(b'')
+ b.clear()
+ self.assertEqual(b, b'')
+
+ b = bytearray(b'')
+ b.append(ord('r'))
+ b.clear()
+ b.append(ord('p'))
+ self.assertEqual(b, b'p')
+
+ def test_copy(self):
+ b = bytearray(b'abc')
+ bb = b.copy()
+ self.assertEqual(bb, b'abc')
+
+ b = bytearray(b'')
+ bb = b.copy()
+ self.assertEqual(bb, b'')
+
+ # test that it's indeed a copy and not a reference
+ b = bytearray(b'abc')
+ bb = b.copy()
+ self.assertEqual(b, bb)
+ self.assertIsNot(b, bb)
+ bb.append(ord('d'))
+ self.assertEqual(bb, b'abcd')
+ self.assertEqual(b, b'abc')
+
def test_regexps(self):
def by(s):
return bytearray(map(ord, s))
@@ -1105,9 +1253,11 @@ class FixedStringTest(test.string_tests.BaseTest):
class ByteArrayAsStringTest(FixedStringTest):
type2test = bytearray
+ contains_bytes = True
class BytesAsStringTest(FixedStringTest):
type2test = bytes
+ contains_bytes = True
class SubclassTest(unittest.TestCase):
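
Many of the new bytes/bytearray tests above lean on count(), find(), rfind(), index() and rindex() accepting a bare integer as the needle, meaning a single byte value, with anything outside range(0, 256) rejected. A short sketch of that protocol:

    b = b'mississippi'
    assert b.find(ord('i')) == b.find(b'i') == 1   # int needle == 1-byte needle
    assert b.count(112) == b.count(b'p') == 2
    try:
        b.find(256)
    except ValueError as exc:
        print(exc)   # byte must be in range(0, 256)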
diff --git a/Lib/test/test_bz2.py b/Lib/test/test_bz2.py
index be35580..cc416ed 100644
--- a/Lib/test/test_bz2.py
+++ b/Lib/test/test_bz2.py
@@ -1,10 +1,11 @@
#!/usr/bin/env python3
from test import support
-from test.support import TESTFN
+from test.support import TESTFN, bigmemtest, _4G
import unittest
from io import BytesIO
import os
+import random
import subprocess
import sys
@@ -21,9 +22,31 @@ has_cmdline_bunzip2 = sys.platform not in ("win32", "os2emx")
class BaseTest(unittest.TestCase):
"Base for other testcases."
- TEXT = b'root:x:0:0:root:/root:/bin/bash\nbin:x:1:1:bin:/bin:\ndaemon:x:2:2:daemon:/sbin:\nadm:x:3:4:adm:/var/adm:\nlp:x:4:7:lp:/var/spool/lpd:\nsync:x:5:0:sync:/sbin:/bin/sync\nshutdown:x:6:0:shutdown:/sbin:/sbin/shutdown\nhalt:x:7:0:halt:/sbin:/sbin/halt\nmail:x:8:12:mail:/var/spool/mail:\nnews:x:9:13:news:/var/spool/news:\nuucp:x:10:14:uucp:/var/spool/uucp:\noperator:x:11:0:operator:/root:\ngames:x:12:100:games:/usr/games:\ngopher:x:13:30:gopher:/usr/lib/gopher-data:\nftp:x:14:50:FTP User:/var/ftp:/bin/bash\nnobody:x:65534:65534:Nobody:/home:\npostfix:x:100:101:postfix:/var/spool/postfix:\nniemeyer:x:500:500::/home/niemeyer:/bin/bash\npostgres:x:101:102:PostgreSQL Server:/var/lib/pgsql:/bin/bash\nmysql:x:102:103:MySQL server:/var/lib/mysql:/bin/bash\nwww:x:103:104::/var/www:/bin/false\n'
+ TEXT_LINES = [
+ b'root:x:0:0:root:/root:/bin/bash\n',
+ b'bin:x:1:1:bin:/bin:\n',
+ b'daemon:x:2:2:daemon:/sbin:\n',
+ b'adm:x:3:4:adm:/var/adm:\n',
+ b'lp:x:4:7:lp:/var/spool/lpd:\n',
+ b'sync:x:5:0:sync:/sbin:/bin/sync\n',
+ b'shutdown:x:6:0:shutdown:/sbin:/sbin/shutdown\n',
+ b'halt:x:7:0:halt:/sbin:/sbin/halt\n',
+ b'mail:x:8:12:mail:/var/spool/mail:\n',
+ b'news:x:9:13:news:/var/spool/news:\n',
+ b'uucp:x:10:14:uucp:/var/spool/uucp:\n',
+ b'operator:x:11:0:operator:/root:\n',
+ b'games:x:12:100:games:/usr/games:\n',
+ b'gopher:x:13:30:gopher:/usr/lib/gopher-data:\n',
+ b'ftp:x:14:50:FTP User:/var/ftp:/bin/bash\n',
+ b'nobody:x:65534:65534:Nobody:/home:\n',
+ b'postfix:x:100:101:postfix:/var/spool/postfix:\n',
+ b'niemeyer:x:500:500::/home/niemeyer:/bin/bash\n',
+ b'postgres:x:101:102:PostgreSQL Server:/var/lib/pgsql:/bin/bash\n',
+ b'mysql:x:102:103:MySQL server:/var/lib/mysql:/bin/bash\n',
+ b'www:x:103:104::/var/www:/bin/false\n',
+ ]
+ TEXT = b''.join(TEXT_LINES)
DATA = b'BZh91AY&SY.\xc8N\x18\x00\x01>_\x80\x00\x10@\x02\xff\xf0\x01\x07n\x00?\xe7\xff\xe00\x01\x99\xaa\x00\xc0\x03F\x86\x8c#&\x83F\x9a\x03\x06\xa6\xd0\xa6\x93M\x0fQ\xa7\xa8\x06\x804hh\x12$\x11\xa4i4\xf14S\xd2<Q\xb5\x0fH\xd3\xd4\xdd\xd5\x87\xbb\xf8\x94\r\x8f\xafI\x12\xe1\xc9\xf8/E\x00pu\x89\x12]\xc9\xbbDL\nQ\x0e\t1\x12\xdf\xa0\xc0\x97\xac2O9\x89\x13\x94\x0e\x1c7\x0ed\x95I\x0c\xaaJ\xa4\x18L\x10\x05#\x9c\xaf\xba\xbc/\x97\x8a#C\xc8\xe1\x8cW\xf9\xe2\xd0\xd6M\xa7\x8bXa<e\x84t\xcbL\xb3\xa7\xd9\xcd\xd1\xcb\x84.\xaf\xb3\xab\xab\xad`n}\xa0lh\tE,\x8eZ\x15\x17VH>\x88\xe5\xcd9gd6\x0b\n\xe9\x9b\xd5\x8a\x99\xf7\x08.K\x8ev\xfb\xf7xw\xbb\xdf\xa1\x92\xf1\xdd|/";\xa2\xba\x9f\xd5\xb1#A\xb6\xf6\xb3o\xc9\xc5y\\\xebO\xe7\x85\x9a\xbc\xb6f8\x952\xd5\xd7"%\x89>V,\xf7\xa6z\xe2\x9f\xa3\xdf\x11\x11"\xd6E)I\xa9\x13^\xca\xf3r\xd0\x03U\x922\xf26\xec\xb6\xed\x8b\xc3U\x13\x9d\xc5\x170\xa4\xfa^\x92\xacDF\x8a\x97\xd6\x19\xfe\xdd\xb8\xbd\x1a\x9a\x19\xa3\x80ankR\x8b\xe5\xd83]\xa9\xc6\x08\x82f\xf6\xb9"6l$\xb8j@\xc0\x8a\xb0l1..\xbak\x83ls\x15\xbc\xf4\xc1\x13\xbe\xf8E\xb8\x9d\r\xa8\x9dk\x84\xd3n\xfa\xacQ\x07\xb1%y\xaav\xb4\x08\xe0z\x1b\x16\xf5\x04\xe9\xcc\xb9\x08z\x1en7.G\xfc]\xc9\x14\xe1B@\xbb!8`'
- DATA_CRLF = b'BZh91AY&SY\xaez\xbbN\x00\x01H\xdf\x80\x00\x12@\x02\xff\xf0\x01\x07n\x00?\xe7\xff\xe0@\x01\xbc\xc6`\x86*\x8d=M\xa9\x9a\x86\xd0L@\x0fI\xa6!\xa1\x13\xc8\x88jdi\x8d@\x03@\x1a\x1a\x0c\x0c\x83 \x00\xc4h2\x19\x01\x82D\x84e\t\xe8\x99\x89\x19\x1ah\x00\r\x1a\x11\xaf\x9b\x0fG\xf5(\x1b\x1f?\t\x12\xcf\xb5\xfc\x95E\x00ps\x89\x12^\xa4\xdd\xa2&\x05(\x87\x04\x98\x89u\xe40%\xb6\x19\'\x8c\xc4\x89\xca\x07\x0e\x1b!\x91UIFU%C\x994!DI\xd2\xfa\xf0\xf1N8W\xde\x13A\xf5\x9cr%?\x9f3;I45A\xd1\x8bT\xb1<l\xba\xcb_\xc00xY\x17r\x17\x88\x08\x08@\xa0\ry@\x10\x04$)`\xf2\xce\x89z\xb0s\xec\x9b.iW\x9d\x81\xb5-+t\x9f\x1a\'\x97dB\xf5x\xb5\xbe.[.\xd7\x0e\x81\xe7\x08\x1cN`\x88\x10\xca\x87\xc3!"\x80\x92R\xa1/\xd1\xc0\xe6mf\xac\xbd\x99\xcca\xb3\x8780>\xa4\xc7\x8d\x1a\\"\xad\xa1\xabyBg\x15\xb9l\x88\x88\x91k"\x94\xa4\xd4\x89\xae*\xa6\x0b\x10\x0c\xd6\xd4m\xe86\xec\xb5j\x8a\x86j\';\xca.\x01I\xf2\xaaJ\xe8\x88\x8cU+t3\xfb\x0c\n\xa33\x13r2\r\x16\xe0\xb3(\xbf\x1d\x83r\xe7M\xf0D\x1365\xd8\x88\xd3\xa4\x92\xcb2\x06\x04\\\xc1\xb0\xea//\xbek&\xd8\xe6+t\xe5\xa1\x13\xada\x16\xder5"w]\xa2i\xb7[\x97R \xe2IT\xcd;Z\x04dk4\xad\x8a\t\xd3\x81z\x10\xf1:^`\xab\x1f\xc5\xdc\x91N\x14$+\x9e\xae\xd3\x80'
if has_cmdline_bunzip2:
def decompress(self, data):
@@ -54,83 +77,135 @@ class BZ2FileTest(BaseTest):
if os.path.isfile(self.filename):
os.unlink(self.filename)
- def createTempFile(self, crlf=0):
+ def createTempFile(self, streams=1):
with open(self.filename, "wb") as f:
- if crlf:
- data = self.DATA_CRLF
- else:
- data = self.DATA
- f.write(data)
+ f.write(self.DATA * streams)
def testRead(self):
- # "Test BZ2File.read()"
self.createTempFile()
with BZ2File(self.filename) as bz2f:
self.assertRaises(TypeError, bz2f.read, None)
self.assertEqual(bz2f.read(), self.TEXT)
+ def testReadMultiStream(self):
+ self.createTempFile(streams=5)
+ with BZ2File(self.filename) as bz2f:
+ self.assertRaises(TypeError, bz2f.read, None)
+ self.assertEqual(bz2f.read(), self.TEXT * 5)
+
+ def testReadMonkeyMultiStream(self):
+ # Test BZ2File.read() on a multi-stream archive where a stream
+ # boundary coincides with the end of the raw read buffer.
+ buffer_size = bz2._BUFFER_SIZE
+ bz2._BUFFER_SIZE = len(self.DATA)
+ try:
+ self.createTempFile(streams=5)
+ with BZ2File(self.filename) as bz2f:
+ self.assertRaises(TypeError, bz2f.read, None)
+ self.assertEqual(bz2f.read(), self.TEXT * 5)
+ finally:
+ bz2._BUFFER_SIZE = buffer_size
+
def testRead0(self):
- # Test BBZ2File.read(0)"
self.createTempFile()
with BZ2File(self.filename) as bz2f:
self.assertRaises(TypeError, bz2f.read, None)
self.assertEqual(bz2f.read(0), b"")
def testReadChunk10(self):
- # "Test BZ2File.read() in chunks of 10 bytes"
self.createTempFile()
with BZ2File(self.filename) as bz2f:
text = b''
- while 1:
+ while True:
str = bz2f.read(10)
if not str:
break
text += str
self.assertEqual(text, self.TEXT)
+ def testReadChunk10MultiStream(self):
+ self.createTempFile(streams=5)
+ with BZ2File(self.filename) as bz2f:
+ text = b''
+ while True:
+ str = bz2f.read(10)
+ if not str:
+ break
+ text += str
+ self.assertEqual(text, self.TEXT * 5)
+
def testRead100(self):
- # "Test BZ2File.read(100)"
self.createTempFile()
with BZ2File(self.filename) as bz2f:
self.assertEqual(bz2f.read(100), self.TEXT[:100])
+ def testPeek(self):
+ self.createTempFile()
+ with BZ2File(self.filename) as bz2f:
+ pdata = bz2f.peek()
+ self.assertNotEqual(len(pdata), 0)
+ self.assertTrue(self.TEXT.startswith(pdata))
+ self.assertEqual(bz2f.read(), self.TEXT)
+
+ def testReadInto(self):
+ self.createTempFile()
+ with BZ2File(self.filename) as bz2f:
+ n = 128
+ b = bytearray(n)
+ self.assertEqual(bz2f.readinto(b), n)
+ self.assertEqual(b, self.TEXT[:n])
+ n = len(self.TEXT) - n
+ b = bytearray(len(self.TEXT))
+ self.assertEqual(bz2f.readinto(b), n)
+ self.assertEqual(b[:n], self.TEXT[-n:])
+
def testReadLine(self):
- # "Test BZ2File.readline()"
self.createTempFile()
with BZ2File(self.filename) as bz2f:
self.assertRaises(TypeError, bz2f.readline, None)
- sio = BytesIO(self.TEXT)
- for line in sio.readlines():
+ for line in self.TEXT_LINES:
+ self.assertEqual(bz2f.readline(), line)
+
+ def testReadLineMultiStream(self):
+ self.createTempFile(streams=5)
+ with BZ2File(self.filename) as bz2f:
+ self.assertRaises(TypeError, bz2f.readline, None)
+ for line in self.TEXT_LINES * 5:
self.assertEqual(bz2f.readline(), line)
def testReadLines(self):
- # "Test BZ2File.readlines()"
self.createTempFile()
with BZ2File(self.filename) as bz2f:
self.assertRaises(TypeError, bz2f.readlines, None)
- sio = BytesIO(self.TEXT)
- self.assertEqual(bz2f.readlines(), sio.readlines())
+ self.assertEqual(bz2f.readlines(), self.TEXT_LINES)
+
+ def testReadLinesMultiStream(self):
+ self.createTempFile(streams=5)
+ with BZ2File(self.filename) as bz2f:
+ self.assertRaises(TypeError, bz2f.readlines, None)
+ self.assertEqual(bz2f.readlines(), self.TEXT_LINES * 5)
def testIterator(self):
- # "Test iter(BZ2File)"
self.createTempFile()
with BZ2File(self.filename) as bz2f:
- sio = BytesIO(self.TEXT)
- self.assertEqual(list(iter(bz2f)), sio.readlines())
+ self.assertEqual(list(iter(bz2f)), self.TEXT_LINES)
+
+ def testIteratorMultiStream(self):
+ self.createTempFile(streams=5)
+ with BZ2File(self.filename) as bz2f:
+ self.assertEqual(list(iter(bz2f)), self.TEXT_LINES * 5)
def testClosedIteratorDeadlock(self):
- # "Test that iteration on a closed bz2file releases the lock."
- # http://bugs.python.org/issue3309
+ # Issue #3309: Iteration on a closed BZ2File should release the lock.
self.createTempFile()
bz2f = BZ2File(self.filename)
bz2f.close()
self.assertRaises(ValueError, bz2f.__next__)
- # This call will deadlock of the above .__next__ call failed to
+ # This call will deadlock if the above .__next__ call failed to
# release the lock.
self.assertRaises(ValueError, bz2f.readlines)
def testWrite(self):
- # "Test BZ2File.write()"
with BZ2File(self.filename, "w") as bz2f:
self.assertRaises(TypeError, bz2f.write)
bz2f.write(self.TEXT)
@@ -138,10 +213,9 @@ class BZ2FileTest(BaseTest):
self.assertEqual(self.decompress(f.read()), self.TEXT)
def testWriteChunks10(self):
- # "Test BZ2File.write() with chunks of 10 bytes"
with BZ2File(self.filename, "w") as bz2f:
n = 0
- while 1:
+ while True:
str = self.TEXT[n*10:(n+1)*10]
if not str:
break
@@ -150,13 +224,19 @@ class BZ2FileTest(BaseTest):
with open(self.filename, 'rb') as f:
self.assertEqual(self.decompress(f.read()), self.TEXT)
+ def testWriteNonDefaultCompressLevel(self):
+ expected = bz2.compress(self.TEXT, compresslevel=5)
+ with BZ2File(self.filename, "w", compresslevel=5) as bz2f:
+ bz2f.write(self.TEXT)
+ with open(self.filename, "rb") as f:
+ self.assertEqual(f.read(), expected)
+
def testWriteLines(self):
- # "Test BZ2File.writelines()"
with BZ2File(self.filename, "w") as bz2f:
self.assertRaises(TypeError, bz2f.writelines)
- sio = BytesIO(self.TEXT)
- bz2f.writelines(sio.readlines())
- # patch #1535500
+ bz2f.writelines(self.TEXT_LINES)
+ # Issue #1535500: Calling writelines() on a closed BZ2File
+ # should raise an exception.
self.assertRaises(ValueError, bz2f.writelines, ["a"])
with open(self.filename, 'rb') as f:
self.assertEqual(self.decompress(f.read()), self.TEXT)
@@ -169,39 +249,73 @@ class BZ2FileTest(BaseTest):
self.assertRaises(IOError, bz2f.write, b"a")
self.assertRaises(IOError, bz2f.writelines, [b"a"])
+ def testAppend(self):
+ with BZ2File(self.filename, "w") as bz2f:
+ self.assertRaises(TypeError, bz2f.write)
+ bz2f.write(self.TEXT)
+ with BZ2File(self.filename, "a") as bz2f:
+ self.assertRaises(TypeError, bz2f.write)
+ bz2f.write(self.TEXT)
+ with open(self.filename, 'rb') as f:
+ self.assertEqual(self.decompress(f.read()), self.TEXT * 2)
+
def testSeekForward(self):
- # "Test BZ2File.seek(150, 0)"
self.createTempFile()
with BZ2File(self.filename) as bz2f:
self.assertRaises(TypeError, bz2f.seek)
bz2f.seek(150)
self.assertEqual(bz2f.read(), self.TEXT[150:])
+ def testSeekForwardAcrossStreams(self):
+ self.createTempFile(streams=2)
+ with BZ2File(self.filename) as bz2f:
+ self.assertRaises(TypeError, bz2f.seek)
+ bz2f.seek(len(self.TEXT) + 150)
+ self.assertEqual(bz2f.read(), self.TEXT[150:])
+
def testSeekBackwards(self):
- # "Test BZ2File.seek(-150, 1)"
self.createTempFile()
with BZ2File(self.filename) as bz2f:
bz2f.read(500)
bz2f.seek(-150, 1)
self.assertEqual(bz2f.read(), self.TEXT[500-150:])
+ def testSeekBackwardsAcrossStreams(self):
+ self.createTempFile(streams=2)
+ with BZ2File(self.filename) as bz2f:
+ readto = len(self.TEXT) + 100
+ while readto > 0:
+ readto -= len(bz2f.read(readto))
+ bz2f.seek(-150, 1)
+ self.assertEqual(bz2f.read(), self.TEXT[100-150:] + self.TEXT)
+
def testSeekBackwardsFromEnd(self):
- # "Test BZ2File.seek(-150, 2)"
self.createTempFile()
with BZ2File(self.filename) as bz2f:
bz2f.seek(-150, 2)
self.assertEqual(bz2f.read(), self.TEXT[len(self.TEXT)-150:])
+ def testSeekBackwardsFromEndAcrossStreams(self):
+ self.createTempFile(streams=2)
+ with BZ2File(self.filename) as bz2f:
+ bz2f.seek(-1000, 2)
+ self.assertEqual(bz2f.read(), (self.TEXT * 2)[-1000:])
+
def testSeekPostEnd(self):
- # "Test BZ2File.seek(150000)"
self.createTempFile()
with BZ2File(self.filename) as bz2f:
bz2f.seek(150000)
self.assertEqual(bz2f.tell(), len(self.TEXT))
self.assertEqual(bz2f.read(), b"")
+ def testSeekPostEndMultiStream(self):
+ self.createTempFile(streams=5)
+ with BZ2File(self.filename) as bz2f:
+ bz2f.seek(150000)
+ self.assertEqual(bz2f.tell(), len(self.TEXT) * 5)
+ self.assertEqual(bz2f.read(), b"")
+
def testSeekPostEndTwice(self):
- # "Test BZ2File.seek(150000) twice"
self.createTempFile()
with BZ2File(self.filename) as bz2f:
bz2f.seek(150000)
@@ -209,27 +323,109 @@ class BZ2FileTest(BaseTest):
self.assertEqual(bz2f.tell(), len(self.TEXT))
self.assertEqual(bz2f.read(), b"")
+ def testSeekPostEndTwiceMultiStream(self):
+ self.createTempFile(streams=5)
+ with BZ2File(self.filename) as bz2f:
+ bz2f.seek(150000)
+ bz2f.seek(150000)
+ self.assertEqual(bz2f.tell(), len(self.TEXT) * 5)
+ self.assertEqual(bz2f.read(), b"")
+
def testSeekPreStart(self):
- # "Test BZ2File.seek(-150, 0)"
self.createTempFile()
with BZ2File(self.filename) as bz2f:
bz2f.seek(-150)
self.assertEqual(bz2f.tell(), 0)
self.assertEqual(bz2f.read(), self.TEXT)
+ def testSeekPreStartMultiStream(self):
+ self.createTempFile(streams=2)
+ with BZ2File(self.filename) as bz2f:
+ bz2f.seek(-150)
+ self.assertEqual(bz2f.tell(), 0)
+ self.assertEqual(bz2f.read(), self.TEXT * 2)
+
+ def testFileno(self):
+ self.createTempFile()
+ with open(self.filename, 'rb') as rawf:
+ bz2f = BZ2File(fileobj=rawf)
+ try:
+ self.assertEqual(bz2f.fileno(), rawf.fileno())
+ finally:
+ bz2f.close()
+ self.assertRaises(ValueError, bz2f.fileno)
+
+ def testSeekable(self):
+ bz2f = BZ2File(fileobj=BytesIO(self.DATA))
+ try:
+ self.assertTrue(bz2f.seekable())
+ bz2f.read()
+ self.assertTrue(bz2f.seekable())
+ finally:
+ bz2f.close()
+ self.assertRaises(ValueError, bz2f.seekable)
+
+ bz2f = BZ2File(fileobj=BytesIO(), mode="w")
+ try:
+ self.assertFalse(bz2f.seekable())
+ finally:
+ bz2f.close()
+ self.assertRaises(ValueError, bz2f.seekable)
+
+ src = BytesIO(self.DATA)
+ src.seekable = lambda: False
+ bz2f = BZ2File(fileobj=src)
+ try:
+ self.assertFalse(bz2f.seekable())
+ finally:
+ bz2f.close()
+ self.assertRaises(ValueError, bz2f.seekable)
+
+ def testReadable(self):
+ bz2f = BZ2File(fileobj=BytesIO(self.DATA))
+ try:
+ self.assertTrue(bz2f.readable())
+ bz2f.read()
+ self.assertTrue(bz2f.readable())
+ finally:
+ bz2f.close()
+ self.assertRaises(ValueError, bz2f.readable)
+
+ bz2f = BZ2File(fileobj=BytesIO(), mode="w")
+ try:
+ self.assertFalse(bz2f.readable())
+ finally:
+ bz2f.close()
+ self.assertRaises(ValueError, bz2f.readable)
+
+ def testWritable(self):
+ bz2f = BZ2File(fileobj=BytesIO(self.DATA))
+ try:
+ self.assertFalse(bz2f.writable())
+ bz2f.read()
+ self.assertFalse(bz2f.writable())
+ finally:
+ bz2f.close()
+ self.assertRaises(ValueError, bz2f.writable)
+
+ bz2f = BZ2File(fileobj=BytesIO(), mode="w")
+ try:
+ self.assertTrue(bz2f.writable())
+ finally:
+ bz2f.close()
+ self.assertRaises(ValueError, bz2f.writable)
+
def testOpenDel(self):
- # "Test opening and deleting a file many times"
self.createTempFile()
for i in range(10000):
o = BZ2File(self.filename)
del o
def testOpenNonexistent(self):
- # "Test opening a nonexistent file"
self.assertRaises(IOError, BZ2File, "/non/existent")
- def testBug1191043(self):
- # readlines() for files containing no newline
+ def testReadlinesNoNewline(self):
+ # Issue #1191043: readlines() fails on a file containing no newline.
data = b'BZh91AY&SY\xd9b\x89]\x00\x00\x00\x03\x80\x04\x00\x02\x00\x0c\x00 \x00!\x9ah3M\x13<]\xc9\x14\xe1BCe\x8a%t'
with open(self.filename, "wb") as f:
f.write(data)
@@ -241,7 +437,6 @@ class BZ2FileTest(BaseTest):
self.assertEqual(xlines, [b'Test'])
def testContextProtocol(self):
- # BZ2File supports the context management protocol
f = None
with BZ2File(self.filename, "wb") as f:
f.write(b"xxx")
@@ -264,7 +459,7 @@ class BZ2FileTest(BaseTest):
@unittest.skipUnless(threading, 'Threading required for this test.')
def testThreading(self):
- # Using a BZ2File from several threads doesn't deadlock (issue #7205).
+ # Issue #7205: Using a BZ2File from several threads shouldn't deadlock.
data = b"1" * 2**20
nthreads = 10
with bz2.BZ2File(self.filename, 'wb') as f:
@@ -277,22 +472,83 @@ class BZ2FileTest(BaseTest):
for t in threads:
t.join()
- def testMixedIterationReads(self):
- # Issue #8397: mixed iteration and reads should be forbidden.
- with bz2.BZ2File(self.filename, 'wb') as f:
- # The internal buffer size is hard-wired to 8192 bytes, we must
- # write out more than that for the test to stop half through
- # the buffer.
- f.write(self.TEXT * 100)
- with bz2.BZ2File(self.filename, 'rb') as f:
- next(f)
- self.assertRaises(ValueError, f.read)
- self.assertRaises(ValueError, f.readline)
- self.assertRaises(ValueError, f.readlines)
+ def testWithoutThreading(self):
+ bz2 = support.import_fresh_module("bz2", blocked=("threading",))
+ with bz2.BZ2File(self.filename, "wb") as f:
+ f.write(b"abc")
+ with bz2.BZ2File(self.filename, "rb") as f:
+ self.assertEqual(f.read(), b"abc")
+
+ def testMixedIterationAndReads(self):
+ self.createTempFile()
+ linelen = len(self.TEXT_LINES[0])
+ halflen = linelen // 2
+ with bz2.BZ2File(self.filename) as bz2f:
+ bz2f.read(halflen)
+ self.assertEqual(next(bz2f), self.TEXT_LINES[0][halflen:])
+ self.assertEqual(bz2f.read(), self.TEXT[linelen:])
+ with bz2.BZ2File(self.filename) as bz2f:
+ bz2f.readline()
+ self.assertEqual(next(bz2f), self.TEXT_LINES[1])
+ self.assertEqual(bz2f.readline(), self.TEXT_LINES[2])
+ with bz2.BZ2File(self.filename) as bz2f:
+ bz2f.readlines()
+ with self.assertRaises(StopIteration):
+ next(bz2f)
+ self.assertEqual(bz2f.readlines(), [])
+
+ def testMultiStreamOrdering(self):
+ # Test the ordering of streams when reading a multi-stream archive.
+ data1 = b"foo" * 1000
+ data2 = b"bar" * 1000
+ with BZ2File(self.filename, "w") as bz2f:
+ bz2f.write(data1)
+ with BZ2File(self.filename, "a") as bz2f:
+ bz2f.write(data2)
+ with BZ2File(self.filename) as bz2f:
+ self.assertEqual(bz2f.read(), data1 + data2)
+
+ # Tests for a BZ2File wrapping another file object:
+
+ def testReadBytesIO(self):
+ with BytesIO(self.DATA) as bio:
+ with BZ2File(fileobj=bio) as bz2f:
+ self.assertRaises(TypeError, bz2f.read, None)
+ self.assertEqual(bz2f.read(), self.TEXT)
+ self.assertFalse(bio.closed)
+
+ def testPeekBytesIO(self):
+ with BytesIO(self.DATA) as bio:
+ with BZ2File(fileobj=bio) as bz2f:
+ pdata = bz2f.peek()
+ self.assertNotEqual(len(pdata), 0)
+ self.assertTrue(self.TEXT.startswith(pdata))
+ self.assertEqual(bz2f.read(), self.TEXT)
+
+ def testWriteBytesIO(self):
+ with BytesIO() as bio:
+ with BZ2File(fileobj=bio, mode="w") as bz2f:
+ self.assertRaises(TypeError, bz2f.write)
+ bz2f.write(self.TEXT)
+ self.assertEqual(self.decompress(bio.getvalue()), self.TEXT)
+ self.assertFalse(bio.closed)
+
+ def testSeekForwardBytesIO(self):
+ with BytesIO(self.DATA) as bio:
+ with BZ2File(fileobj=bio) as bz2f:
+ self.assertRaises(TypeError, bz2f.seek)
+ bz2f.seek(150)
+ self.assertEqual(bz2f.read(), self.TEXT[150:])
+
+ def testSeekBackwardsBytesIO(self):
+ with BytesIO(self.DATA) as bio:
+ with BZ2File(fileobj=bio) as bz2f:
+ bz2f.read(500)
+ bz2f.seek(-150, 1)
+ self.assertEqual(bz2f.read(), self.TEXT[500-150:])
class BZ2CompressorTest(BaseTest):
def testCompress(self):
- # "Test BZ2Compressor.compress()/flush()"
bz2c = BZ2Compressor()
self.assertRaises(TypeError, bz2c.compress)
data = bz2c.compress(self.TEXT)
@@ -300,11 +556,10 @@ class BZ2CompressorTest(BaseTest):
self.assertEqual(self.decompress(data), self.TEXT)
def testCompressChunks10(self):
- # "Test BZ2Compressor.compress()/flush() with chunks of 10 bytes"
bz2c = BZ2Compressor()
n = 0
data = b''
- while 1:
+ while True:
str = self.TEXT[n*10:(n+1)*10]
if not str:
break
@@ -313,23 +568,38 @@ class BZ2CompressorTest(BaseTest):
data += bz2c.flush()
self.assertEqual(self.decompress(data), self.TEXT)
+ @bigmemtest(size=_4G + 100, memuse=2)
+ def testCompress4G(self, size):
+ # "Test BZ2Compressor.compress()/flush() with >4GiB input"
+ bz2c = BZ2Compressor()
+ data = b"x" * size
+ try:
+ compressed = bz2c.compress(data)
+ compressed += bz2c.flush()
+ finally:
+ data = None # Release memory
+ data = bz2.decompress(compressed)
+ try:
+ self.assertEqual(len(data), size)
+ self.assertEqual(len(data.strip(b"x")), 0)
+ finally:
+ data = None
+
class BZ2DecompressorTest(BaseTest):
def test_Constructor(self):
self.assertRaises(TypeError, BZ2Decompressor, 42)
def testDecompress(self):
- # "Test BZ2Decompressor.decompress()"
bz2d = BZ2Decompressor()
self.assertRaises(TypeError, bz2d.decompress)
text = bz2d.decompress(self.DATA)
self.assertEqual(text, self.TEXT)
def testDecompressChunks10(self):
- # "Test BZ2Decompressor.decompress() with chunks of 10 bytes"
bz2d = BZ2Decompressor()
text = b''
n = 0
- while 1:
+ while True:
str = self.DATA[n*10:(n+1)*10]
if not str:
break
@@ -338,7 +608,6 @@ class BZ2DecompressorTest(BaseTest):
self.assertEqual(text, self.TEXT)
def testDecompressUnusedData(self):
- # "Test BZ2Decompressor.decompress() with unused data"
bz2d = BZ2Decompressor()
unused_data = b"this is unused data"
text = bz2d.decompress(self.DATA+unused_data)
@@ -346,34 +615,49 @@ class BZ2DecompressorTest(BaseTest):
self.assertEqual(bz2d.unused_data, unused_data)
def testEOFError(self):
- # "Calling BZ2Decompressor.decompress() after EOS must raise EOFError"
bz2d = BZ2Decompressor()
text = bz2d.decompress(self.DATA)
self.assertRaises(EOFError, bz2d.decompress, b"anything")
+ @bigmemtest(size=_4G + 100, memuse=3)
+ def testDecompress4G(self, size):
+ # "Test BZ2Decompressor.decompress() with >4GiB input"
+ blocksize = 10 * 1024 * 1024
+ block = random.getrandbits(blocksize * 8).to_bytes(blocksize, 'little')
+ try:
+ data = block * (size // blocksize + 1)
+ compressed = bz2.compress(data)
+ bz2d = BZ2Decompressor()
+ decompressed = bz2d.decompress(compressed)
+ self.assertTrue(decompressed == data)
+ finally:
+ data = None
+ compressed = None
+ decompressed = None
+
class FuncTest(BaseTest):
"Test module functions"
def testCompress(self):
- # "Test compress() function"
data = bz2.compress(self.TEXT)
self.assertEqual(self.decompress(data), self.TEXT)
def testDecompress(self):
- # "Test decompress() function"
text = bz2.decompress(self.DATA)
self.assertEqual(text, self.TEXT)
def testDecompressEmpty(self):
- # "Test decompress() function with empty string"
text = bz2.decompress(b"")
self.assertEqual(text, b"")
def testDecompressIncomplete(self):
- # "Test decompress() function with incomplete data"
self.assertRaises(ValueError, bz2.decompress, self.DATA[:-10])
+ def testDecompressMultiStream(self):
+ text = bz2.decompress(self.DATA * 5)
+ self.assertEqual(text, self.TEXT * 5)
+
def test_main():
support.run_unittest(
BZ2FileTest,
@@ -385,5 +669,3 @@ def test_main():
if __name__ == '__main__':
test_main()
-
-# vim:ts=4:sw=4
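
Most of the new BZ2File tests concern multi-stream .bz2 files: several complete bzip2 streams concatenated back to back, which is exactly what append mode (or cat-ing two .bz2 files) produces. With the support tested here, both bz2.decompress() and BZ2File read through every stream rather than stopping after the first; a sketch:

    import bz2
    data = bz2.compress(b'foo') + bz2.compress(b'bar')  # two streams, one blob
    assert bz2.decompress(data) == b'foobar'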
diff --git a/Lib/test/test_calendar.py b/Lib/test/test_calendar.py
index d3093ac..7180afe 100644
--- a/Lib/test/test_calendar.py
+++ b/Lib/test/test_calendar.py
@@ -177,7 +177,7 @@ class OutputTestCase(unittest.TestCase):
return not c.isspace() and not c.isdigit()
lines = []
- for line in s.splitlines(False):
+ for line in s.splitlines(keepends=False):
# Drop texts, as they are locale dependent
if line and not filter(neitherspacenordigit, line):
lines.append(line)
diff --git a/Lib/test/test_capi.py b/Lib/test/test_capi.py
index c2c633f..2f94f90 100644
--- a/Lib/test/test_capi.py
+++ b/Lib/test/test_capi.py
@@ -210,18 +210,17 @@ def test_main():
idents = []
def callback():
- idents.append(_thread.get_ident())
+ idents.append(threading.get_ident())
_testcapi._test_thread_state(callback)
a = b = callback
time.sleep(1)
# Check our main thread is in the list exactly 3 times.
- if idents.count(_thread.get_ident()) != 3:
+ if idents.count(threading.get_ident()) != 3:
raise support.TestFailed(
"Couldn't find main thread correctly in the list")
if threading:
- import _thread
import time
TestThreadState()
t = threading.Thread(target=TestThreadState)
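
The test_capi hunk swaps _thread.get_ident() for threading.get_ident(), the public spelling of the same opaque thread identifier added alongside these changes:

    import threading
    print(threading.get_ident())   # same value the private
                                   # _thread.get_ident() returns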
diff --git a/Lib/test/test_cgi.py b/Lib/test/test_cgi.py
index 3031fb3..d2510a4 100644
--- a/Lib/test/test_cgi.py
+++ b/Lib/test/test_cgi.py
@@ -160,13 +160,7 @@ class CgiTests(unittest.TestCase):
cgi.logfp = None
cgi.logfile = "/dev/null"
cgi.initlog("%s", "Testing log 3")
- def log_cleanup():
- """Restore the global state of the log vars."""
- cgi.logfile = ''
- cgi.logfp.close()
- cgi.logfp = None
- cgi.log = cgi.initlog
- self.addCleanup(log_cleanup)
+ self.addCleanup(cgi.closelog)
cgi.log("Testing log 4")
def test_fieldstorage_readline(self):
diff --git a/Lib/test/test_cgitb.py b/Lib/test/test_cgitb.py
new file mode 100644
index 0000000..16a4b1a
--- /dev/null
+++ b/Lib/test/test_cgitb.py
@@ -0,0 +1,55 @@
+from test.support import run_unittest
+import unittest
+import sys
+import subprocess
+import cgitb
+
+class TestCgitb(unittest.TestCase):
+
+ def test_fonts(self):
+ text = "Hello Robbie!"
+ self.assertEqual(cgitb.small(text), "<small>{}</small>".format(text))
+ self.assertEqual(cgitb.strong(text), "<strong>{}</strong>".format(text))
+ self.assertEqual(cgitb.grey(text),
+ '<font color="#909090">{}</font>'.format(text))
+
+ def test_blanks(self):
+ self.assertEqual(cgitb.small(""), "")
+ self.assertEqual(cgitb.strong(""), "")
+ self.assertEqual(cgitb.grey(""), "")
+
+ def test_html(self):
+ try:
+ raise ValueError("Hello World")
+ except ValueError as err:
+ # If the html was templated we could do a bit more here.
+ # At least check that we get details on what we just raised.
+ html = cgitb.html(sys.exc_info())
+ self.assertIn("ValueError", html)
+ self.assertIn(str(err), html)
+
+ def test_text(self):
+ try:
+ raise ValueError("Hello World")
+ except ValueError as err:
+ text = cgitb.text(sys.exc_info())
+ self.assertIn("ValueError", text)
+ self.assertIn("Hello World", text)
+
+ def test_hook(self):
+ proc = subprocess.Popen([sys.executable, '-c',
+ ('import cgitb;'
+ 'cgitb.enable();'
+ 'raise ValueError("Hello World")')],
+ stdout=subprocess.PIPE)
+ out = proc.stdout.read().decode(sys.getfilesystemencoding())
+ self.addCleanup(proc.stdout.close)
+ self.assertIn("ValueError", out)
+ self.assertIn("Hello World", out)
+
+
+def test_main():
+ run_unittest(TestCgitb)
+
+if __name__ == "__main__":
+ test_main()
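
test_hook above drives cgitb through a subprocess because cgitb.enable() installs itself as sys.excepthook, which is awkward to verify in-process. For reference, typical usage looks like this (a sketch, not part of the patch):

    import cgitb
    cgitb.enable(format='text')   # replace sys.excepthook; 'text' gives plain
                                  # tracebacks, the default 'html' suits CGI
    1 / 0                         # this uncaught error is rendered by cgitb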
diff --git a/Lib/test/test_cmd_line.py b/Lib/test/test_cmd_line.py
index 1a21281..8c960b1 100644
--- a/Lib/test/test_cmd_line.py
+++ b/Lib/test/test_cmd_line.py
@@ -31,12 +31,6 @@ class CmdLineTest(unittest.TestCase):
self.verify_valid_flag('-O')
self.verify_valid_flag('-OO')
- def test_q(self):
- self.verify_valid_flag('-Qold')
- self.verify_valid_flag('-Qnew')
- self.verify_valid_flag('-Qwarn')
- self.verify_valid_flag('-Qwarnall')
-
def test_site_flag(self):
self.verify_valid_flag('-S')
@@ -151,7 +145,7 @@ class CmdLineTest(unittest.TestCase):
@unittest.skipUnless(sys.platform == 'darwin', 'test specific to Mac OS X')
def test_osx_utf8(self):
def check_output(text):
- decoded = text.decode('utf8', 'surrogateescape')
+ decoded = text.decode('utf-8', 'surrogateescape')
expected = ascii(decoded).encode('ascii') + b'\n'
env = os.environ.copy()
@@ -223,7 +217,7 @@ class CmdLineTest(unittest.TestCase):
self.assertIn(path2.encode('ascii'), out)
def test_displayhook_unencodable(self):
- for encoding in ('ascii', 'latin1', 'utf8'):
+ for encoding in ('ascii', 'latin-1', 'utf-8'):
env = os.environ.copy()
env['PYTHONIOENCODING'] = encoding
p = subprocess.Popen(
@@ -282,7 +276,7 @@ class CmdLineTest(unittest.TestCase):
rc, out, err = assert_python_ok('-c', code)
self.assertEqual(b'', out)
self.assertRegex(err.decode('ascii', 'ignore'),
- 'Exception IOError: .* ignored')
+ 'Exception OSError: .* ignored')
def test_closed_stdout(self):
# Issue #13444: if stdout has been explicitly closed, we should
@@ -336,14 +330,14 @@ class CmdLineTest(unittest.TestCase):
hashes = []
for i in range(2):
code = 'print(hash("spam"))'
- rc, out, err = assert_python_ok('-R', '-c', code)
+ rc, out, err = assert_python_ok('-c', code)
self.assertEqual(rc, 0)
hashes.append(out)
self.assertNotEqual(hashes[0], hashes[1])
# Verify that sys.flags contains hash_randomization
code = 'import sys; print("random is", sys.flags.hash_randomization)'
- rc, out, err = assert_python_ok('-R', '-c', code)
+ rc, out, err = assert_python_ok('-c', code)
self.assertEqual(rc, 0)
self.assertIn(b'random is 1', out)
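
The -R flags are dropped above because string hash randomization is now on by default, so sys.flags.hash_randomization is already 1. The same property, checked standalone (a sketch; it assumes PYTHONHASHSEED is not pinned in the environment):

    import subprocess, sys
    code = 'print(hash("spam"))'
    runs = [subprocess.check_output([sys.executable, '-c', code])
            for _ in range(2)]
    print(runs[0] != runs[1])   # almost always True: each interpreter
                                # starts from a fresh random hash seed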
diff --git a/Lib/test/test_code.py b/Lib/test/test_code.py
index e1c7a78..3377a7b 100644
--- a/Lib/test/test_code.py
+++ b/Lib/test/test_code.py
@@ -16,7 +16,7 @@ cellvars: ('x',)
freevars: ()
nlocals: 2
flags: 3
-consts: ('None', '<code object g>')
+consts: ('None', '<code object g>', "'f.<locals>.g'")
>>> dump(f(4).__code__)
name: g
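
The new "'f.<locals>.g'" entry in the expected consts comes from qualified names: the compiler of this era stores each inner function's __qualname__ as a constant in the enclosing code object (later interpreters moved it into the code object itself). Observable directly, as a sketch:

    def f():
        def g():
            pass
        return g

    print(f().__qualname__)      # f.<locals>.g
    print(f.__code__.co_consts)  # includes 'f.<locals>.g' on 3.3-era builds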
diff --git a/Lib/test/test_codeccallbacks.py b/Lib/test/test_codeccallbacks.py
index c5b1e25..81bf80d 100644
--- a/Lib/test/test_codeccallbacks.py
+++ b/Lib/test/test_codeccallbacks.py
@@ -1,5 +1,18 @@
-import test.support, unittest
-import sys, codecs, html.entities, unicodedata
+import codecs
+import html.entities
+import sys
+import test.support
+import unicodedata
+import unittest
+import warnings
+
+try:
+ import ctypes
+except ImportError:
+ ctypes = None
+ SIZEOF_WCHAR_T = -1
+else:
+ SIZEOF_WCHAR_T = ctypes.sizeof(ctypes.c_wchar)
class PosReturn:
# this can be used for configurable callbacks
@@ -135,22 +148,14 @@ class CodecCallbackTest(unittest.TestCase):
def test_backslashescape(self):
# Does the same as the "unicode-escape" encoding, but with different
# base encodings.
- sin = "a\xac\u1234\u20ac\u8000"
- if sys.maxunicode > 0xffff:
- sin += chr(sys.maxunicode)
- sout = b"a\\xac\\u1234\\u20ac\\u8000"
- if sys.maxunicode > 0xffff:
- sout += bytes("\\U%08x" % sys.maxunicode, "ascii")
+ sin = "a\xac\u1234\u20ac\u8000\U0010ffff"
+ sout = b"a\\xac\\u1234\\u20ac\\u8000\\U0010ffff"
self.assertEqual(sin.encode("ascii", "backslashreplace"), sout)
- sout = b"a\xac\\u1234\\u20ac\\u8000"
- if sys.maxunicode > 0xffff:
- sout += bytes("\\U%08x" % sys.maxunicode, "ascii")
+ sout = b"a\xac\\u1234\\u20ac\\u8000\\U0010ffff"
self.assertEqual(sin.encode("latin-1", "backslashreplace"), sout)
- sout = b"a\xac\\u1234\xa4\\u8000"
- if sys.maxunicode > 0xffff:
- sout += bytes("\\U%08x" % sys.maxunicode, "ascii")
+ sout = b"a\xac\\u1234\xa4\\u8000\\U0010ffff"
self.assertEqual(sin.encode("iso-8859-15", "backslashreplace"), sout)
def test_decoding_callbacks(self):
@@ -200,33 +205,37 @@ class CodecCallbackTest(unittest.TestCase):
self.assertRaises(TypeError, codecs.charmap_encode, sin, "replace", charmap)
def test_decodeunicodeinternal(self):
- self.assertRaises(
- UnicodeDecodeError,
- b"\x00\x00\x00\x00\x00".decode,
- "unicode-internal",
- )
- if sys.maxunicode > 0xffff:
+ with test.support.check_warnings(('unicode_internal codec has been '
+ 'deprecated', DeprecationWarning)):
+ self.assertRaises(
+ UnicodeDecodeError,
+ b"\x00\x00\x00\x00\x00".decode,
+ "unicode-internal",
+ )
+ if SIZEOF_WCHAR_T == 4:
def handler_unicodeinternal(exc):
if not isinstance(exc, UnicodeDecodeError):
raise TypeError("don't know how to handle %r" % exc)
return ("\x01", 1)
- self.assertEqual(
- b"\x00\x00\x00\x00\x00".decode("unicode-internal", "ignore"),
- "\u0000"
- )
+ with test.support.check_warnings(('unicode_internal codec has been '
+ 'deprecated', DeprecationWarning)):
+ self.assertEqual(
+ b"\x00\x00\x00\x00\x00".decode("unicode-internal", "ignore"),
+ "\u0000"
+ )
- self.assertEqual(
- b"\x00\x00\x00\x00\x00".decode("unicode-internal", "replace"),
- "\u0000\ufffd"
- )
+ self.assertEqual(
+ b"\x00\x00\x00\x00\x00".decode("unicode-internal", "replace"),
+ "\u0000\ufffd"
+ )
- codecs.register_error("test.hui", handler_unicodeinternal)
+ codecs.register_error("test.hui", handler_unicodeinternal)
- self.assertEqual(
- b"\x00\x00\x00\x00\x00".decode("unicode-internal", "test.hui"),
- "\u0000\u0001\u0000"
- )
+ self.assertEqual(
+ b"\x00\x00\x00\x00\x00".decode("unicode-internal", "test.hui"),
+ "\u0000\u0001\u0000"
+ )
def test_callbacks(self):
def handler1(exc):
@@ -355,7 +364,7 @@ class CodecCallbackTest(unittest.TestCase):
["ascii", "\uffffx", 0, 1, "ouch"],
"'ascii' codec can't encode character '\\uffff' in position 0: ouch"
)
- if sys.maxunicode > 0xffff:
+ if SIZEOF_WCHAR_T == 4:
self.check_exceptionobjectargs(
UnicodeEncodeError,
["ascii", "\U00010000x", 0, 1, "ouch"],
@@ -390,7 +399,7 @@ class CodecCallbackTest(unittest.TestCase):
["g\uffffrk", 1, 2, "ouch"],
"can't translate character '\\uffff' in position 1: ouch"
)
- if sys.maxunicode > 0xffff:
+ if SIZEOF_WCHAR_T == 4:
self.check_exceptionobjectargs(
UnicodeTranslateError,
["g\U00010000rk", 1, 2, "ouch"],
@@ -577,31 +586,30 @@ class CodecCallbackTest(unittest.TestCase):
UnicodeEncodeError("ascii", "\uffff", 0, 1, "ouch")),
("\\uffff", 1)
)
- # 1 on UCS-4 builds, 2 on UCS-2
- len_wide = len("\U00010000")
- self.assertEqual(
- codecs.backslashreplace_errors(
- UnicodeEncodeError("ascii", "\U00010000",
- 0, len_wide, "ouch")),
- ("\\U00010000", len_wide)
- )
- self.assertEqual(
- codecs.backslashreplace_errors(
- UnicodeEncodeError("ascii", "\U0010ffff",
- 0, len_wide, "ouch")),
- ("\\U0010ffff", len_wide)
- )
- # Lone surrogates (regardless of unicode width)
- self.assertEqual(
- codecs.backslashreplace_errors(
- UnicodeEncodeError("ascii", "\ud800", 0, 1, "ouch")),
- ("\\ud800", 1)
- )
- self.assertEqual(
- codecs.backslashreplace_errors(
- UnicodeEncodeError("ascii", "\udfff", 0, 1, "ouch")),
- ("\\udfff", 1)
- )
+ if SIZEOF_WCHAR_T > 0:
+ self.assertEqual(
+ codecs.backslashreplace_errors(
+ UnicodeEncodeError("ascii", "\U00010000",
+ 0, 1, "ouch")),
+ ("\\U00010000", 1)
+ )
+ self.assertEqual(
+ codecs.backslashreplace_errors(
+ UnicodeEncodeError("ascii", "\U0010ffff",
+ 0, 1, "ouch")),
+ ("\\U0010ffff", 1)
+ )
+ # Lone surrogates (regardless of unicode width)
+ self.assertEqual(
+ codecs.backslashreplace_errors(
+ UnicodeEncodeError("ascii", "\ud800", 0, 1, "ouch")),
+ ("\\ud800", 1)
+ )
+ self.assertEqual(
+ codecs.backslashreplace_errors(
+ UnicodeEncodeError("ascii", "\udfff", 0, 1, "ouch")),
+ ("\\udfff", 1)
+ )
def test_badhandlerresults(self):
results = ( 42, "foo", (1,2,3), ("foo", 1, 3), ("foo", None), ("foo",), ("foo", 1, 3), ("foo", None), ("foo",) )
@@ -622,12 +630,14 @@ class CodecCallbackTest(unittest.TestCase):
("utf-7", b"+x-"),
("unicode-internal", b"\x00"),
):
- self.assertRaises(
- TypeError,
- bytes.decode,
- enc,
- "test.badhandler"
- )
+ with test.support.check_warnings():
+ # unicode-internal has been deprecated
+ self.assertRaises(
+ TypeError,
+ bytes.decode,
+ enc,
+ "test.badhandler"
+ )
def test_lookup(self):
self.assertEqual(codecs.strict_errors, codecs.lookup_error("strict"))
@@ -679,7 +689,7 @@ class CodecCallbackTest(unittest.TestCase):
# Python/codecs.c::PyCodec_XMLCharRefReplaceErrors()
# and inline implementations
v = (1, 5, 10, 50, 100, 500, 1000, 5000, 10000, 50000)
- if sys.maxunicode>=100000:
+ if SIZEOF_WCHAR_T == 4:
v += (100000, 500000, 1000000)
s = "".join([chr(x) for x in v])
codecs.register_error("test.xmlcharrefreplace", codecs.xmlcharrefreplace_errors)
@@ -843,8 +853,12 @@ class CodecCallbackTest(unittest.TestCase):
else:
raise TypeError("don't know how to handle %r" % exc)
codecs.register_error("test.replacing", replacing)
- for (encoding, data) in baddata:
- self.assertRaises(TypeError, data.decode, encoding, "test.replacing")
+
+ with test.support.check_warnings():
+ # unicode-internal has been deprecated
+ for (encoding, data) in baddata:
+ with self.assertRaises(TypeError):
+ data.decode(encoding, "test.replacing")
def mutating(exc):
if isinstance(exc, UnicodeDecodeError):
@@ -855,8 +869,11 @@ class CodecCallbackTest(unittest.TestCase):
codecs.register_error("test.mutating", mutating)
# If the decoder doesn't pick up the modified input the following
# will lead to an endless loop
- for (encoding, data) in baddata:
- self.assertRaises(TypeError, data.decode, encoding, "test.replacing")
+ with test.support.check_warnings():
+ # unicode-internal has been deprecated
+ for (encoding, data) in baddata:
+ with self.assertRaises(TypeError):
+ data.decode(encoding, "test.replacing")
def test_main():
test.support.run_unittest(CodecCallbackTest)
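
For readers following the callback changes above: a codec error handler is any callable registered with codecs.register_error() that receives the Unicode*Error instance and returns a (replacement, resume_position) pair. A minimal sketch (the handler name is illustrative):

    import codecs

    def qmark(exc):
        if isinstance(exc, UnicodeEncodeError):
            return ('?', exc.end)   # substitute the bad span, resume after it
        raise exc

    codecs.register_error('example.qmark', qmark)
    print('a\u20acb'.encode('ascii', 'example.qmark'))   # b'a?b'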
diff --git a/Lib/test/test_codecencodings_cn.py b/Lib/test/test_codecencodings_cn.py
index dca9f10..ee3d165 100644
--- a/Lib/test/test_codecencodings_cn.py
+++ b/Lib/test/test_codecencodings_cn.py
@@ -15,8 +15,8 @@ class Test_GB2312(test_multibytecodec_support.TestBase, unittest.TestCase):
# invalid bytes
(b"abc\x81\x81\xc1\xc4", "strict", None),
(b"abc\xc8", "strict", None),
- (b"abc\x81\x81\xc1\xc4", "replace", "abc\ufffd\u804a"),
- (b"abc\x81\x81\xc1\xc4\xc8", "replace", "abc\ufffd\u804a\ufffd"),
+ (b"abc\x81\x81\xc1\xc4", "replace", "abc\ufffd\ufffd\u804a"),
+ (b"abc\x81\x81\xc1\xc4\xc8", "replace", "abc\ufffd\ufffd\u804a\ufffd"),
(b"abc\x81\x81\xc1\xc4", "ignore", "abc\u804a"),
(b"\xc1\x64", "strict", None),
)
@@ -28,8 +28,8 @@ class Test_GBK(test_multibytecodec_support.TestBase, unittest.TestCase):
# invalid bytes
(b"abc\x80\x80\xc1\xc4", "strict", None),
(b"abc\xc8", "strict", None),
- (b"abc\x80\x80\xc1\xc4", "replace", "abc\ufffd\u804a"),
- (b"abc\x80\x80\xc1\xc4\xc8", "replace", "abc\ufffd\u804a\ufffd"),
+ (b"abc\x80\x80\xc1\xc4", "replace", "abc\ufffd\ufffd\u804a"),
+ (b"abc\x80\x80\xc1\xc4\xc8", "replace", "abc\ufffd\ufffd\u804a\ufffd"),
(b"abc\x80\x80\xc1\xc4", "ignore", "abc\u804a"),
(b"\x83\x34\x83\x31", "strict", None),
("\u30fb", "strict", None),
@@ -42,11 +42,14 @@ class Test_GB18030(test_multibytecodec_support.TestBase, unittest.TestCase):
# invalid bytes
(b"abc\x80\x80\xc1\xc4", "strict", None),
(b"abc\xc8", "strict", None),
- (b"abc\x80\x80\xc1\xc4", "replace", "abc\ufffd\u804a"),
- (b"abc\x80\x80\xc1\xc4\xc8", "replace", "abc\ufffd\u804a\ufffd"),
+ (b"abc\x80\x80\xc1\xc4", "replace", "abc\ufffd\ufffd\u804a"),
+ (b"abc\x80\x80\xc1\xc4\xc8", "replace", "abc\ufffd\ufffd\u804a\ufffd"),
(b"abc\x80\x80\xc1\xc4", "ignore", "abc\u804a"),
- (b"abc\x84\x39\x84\x39\xc1\xc4", "replace", "abc\ufffd\u804a"),
+ (b"abc\x84\x39\x84\x39\xc1\xc4", "replace", "abc\ufffd9\ufffd9\u804a"),
("\u30fb", "strict", b"\x819\xa79"),
+ (b"abc\x84\x32\x80\x80def", "replace", 'abc\ufffd2\ufffd\ufffddef'),
+ (b"abc\x81\x30\x81\x30def", "strict", 'abc\x80def'),
+ (b"abc\x86\x30\x81\x30def", "replace", 'abc\ufffd0\ufffd0def'),
)
has_iso10646 = True
@@ -74,9 +77,11 @@ class Test_HZ(test_multibytecodec_support.TestBase, unittest.TestCase):
'\u5df1\u6240\u4e0d\u6b32\uff0c\u52ff\u65bd\u65bc\u4eba\u3002'
'Bye.\n'),
# invalid bytes
- (b'ab~cd', 'replace', 'ab\uFFFDd'),
+ (b'ab~cd', 'replace', 'ab\uFFFDcd'),
(b'ab\xffcd', 'replace', 'ab\uFFFDcd'),
(b'ab~{\x81\x81\x41\x44~}cd', 'replace', 'ab\uFFFD\uFFFD\u804Acd'),
+ (b'ab~{\x41\x44~}cd', 'replace', 'ab\u804Acd'),
+ (b"ab~{\x79\x79\x41\x44~}cd", "replace", "ab\ufffd\ufffd\u804acd"),
)
def test_main():
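As a hedged illustration of the behaviour change these vectors pin down (each undecodable byte now yields its own U+FFFD instead of one replacement for the whole run), on an interpreter that includes the fix:

print(b"abc\x81\x81\xc1\xc4".decode("gb2312", "replace"))
# 'abc\ufffd\ufffd\u804a' -- one replacement character per invalid byte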
diff --git a/Lib/test/test_codecencodings_hk.py b/Lib/test/test_codecencodings_hk.py
index ccdc0b4..520df43 100644
--- a/Lib/test/test_codecencodings_hk.py
+++ b/Lib/test/test_codecencodings_hk.py
@@ -15,8 +15,8 @@ class Test_Big5HKSCS(test_multibytecodec_support.TestBase, unittest.TestCase):
# invalid bytes
(b"abc\x80\x80\xc1\xc4", "strict", None),
(b"abc\xc8", "strict", None),
- (b"abc\x80\x80\xc1\xc4", "replace", "abc\ufffd\u8b10"),
- (b"abc\x80\x80\xc1\xc4\xc8", "replace", "abc\ufffd\u8b10\ufffd"),
+ (b"abc\x80\x80\xc1\xc4", "replace", "abc\ufffd\ufffd\u8b10"),
+ (b"abc\x80\x80\xc1\xc4\xc8", "replace", "abc\ufffd\ufffd\u8b10\ufffd"),
(b"abc\x80\x80\xc1\xc4", "ignore", "abc\u8b10"),
)
diff --git a/Lib/test/test_codecencodings_jp.py b/Lib/test/test_codecencodings_jp.py
index f56a373..87e4812 100644
--- a/Lib/test/test_codecencodings_jp.py
+++ b/Lib/test/test_codecencodings_jp.py
@@ -15,50 +15,57 @@ class Test_CP932(test_multibytecodec_support.TestBase, unittest.TestCase):
# invalid bytes
(b"abc\x81\x00\x81\x00\x82\x84", "strict", None),
(b"abc\xf8", "strict", None),
- (b"abc\x81\x00\x82\x84", "replace", "abc\ufffd\uff44"),
- (b"abc\x81\x00\x82\x84\x88", "replace", "abc\ufffd\uff44\ufffd"),
- (b"abc\x81\x00\x82\x84", "ignore", "abc\uff44"),
+ (b"abc\x81\x00\x82\x84", "replace", "abc\ufffd\x00\uff44"),
+ (b"abc\x81\x00\x82\x84\x88", "replace", "abc\ufffd\x00\uff44\ufffd"),
+ (b"abc\x81\x00\x82\x84", "ignore", "abc\x00\uff44"),
+ (b"ab\xEBxy", "replace", "ab\uFFFDxy"),
+ (b"ab\xF0\x39xy", "replace", "ab\uFFFD9xy"),
+ (b"ab\xEA\xF0xy", "replace", 'ab\ufffd\ue038y'),
# sjis vs cp932
(b"\\\x7e", "replace", "\\\x7e"),
(b"\x81\x5f\x81\x61\x81\x7c", "replace", "\uff3c\u2225\uff0d"),
)
+euc_commontests = (
+ # invalid bytes
+ (b"abc\x80\x80\xc1\xc4", "strict", None),
+ (b"abc\x80\x80\xc1\xc4", "replace", "abc\ufffd\ufffd\u7956"),
+ (b"abc\x80\x80\xc1\xc4\xc8", "replace", "abc\ufffd\ufffd\u7956\ufffd"),
+ (b"abc\x80\x80\xc1\xc4", "ignore", "abc\u7956"),
+ (b"abc\xc8", "strict", None),
+ (b"abc\x8f\x83\x83", "replace", "abc\ufffd\ufffd\ufffd"),
+ (b"\x82\xFCxy", "replace", "\ufffd\ufffdxy"),
+ (b"\xc1\x64", "strict", None),
+ (b"\xa1\xc0", "strict", "\uff3c"),
+ (b"\xa1\xc0\\", "strict", "\uff3c\\"),
+ (b"\x8eXY", "replace", "\ufffdXY"),
+)
+
+class Test_EUC_JIS_2004(test_multibytecodec_support.TestBase,
+ unittest.TestCase):
+ encoding = 'euc_jis_2004'
+ tstring = test_multibytecodec_support.load_teststring('euc_jisx0213')
+ codectests = euc_commontests
+ xmlcharnametest = (
+ "\xab\u211c\xbb = \u2329\u1234\u232a",
+ b"\xa9\xa8&real;\xa9\xb2 = &lang;&#4660;&rang;"
+ )
+
class Test_EUC_JISX0213(test_multibytecodec_support.TestBase,
unittest.TestCase):
encoding = 'euc_jisx0213'
tstring = test_multibytecodec_support.load_teststring('euc_jisx0213')
- codectests = (
- # invalid bytes
- (b"abc\x80\x80\xc1\xc4", "strict", None),
- (b"abc\xc8", "strict", None),
- (b"abc\x80\x80\xc1\xc4", "replace", "abc\ufffd\u7956"),
- (b"abc\x80\x80\xc1\xc4\xc8", "replace", "abc\ufffd\u7956\ufffd"),
- (b"abc\x80\x80\xc1\xc4", "ignore", "abc\u7956"),
- (b"abc\x8f\x83\x83", "replace", "abc\ufffd"),
- (b"\xc1\x64", "strict", None),
- (b"\xa1\xc0", "strict", "\uff3c"),
- )
+ codectests = euc_commontests
xmlcharnametest = (
"\xab\u211c\xbb = \u2329\u1234\u232a",
b"\xa9\xa8&real;\xa9\xb2 = &lang;&#4660;&rang;"
)
-eucjp_commontests = (
- (b"abc\x80\x80\xc1\xc4", "strict", None),
- (b"abc\xc8", "strict", None),
- (b"abc\x80\x80\xc1\xc4", "replace", "abc\ufffd\u7956"),
- (b"abc\x80\x80\xc1\xc4\xc8", "replace", "abc\ufffd\u7956\ufffd"),
- (b"abc\x80\x80\xc1\xc4", "ignore", "abc\u7956"),
- (b"abc\x8f\x83\x83", "replace", "abc\ufffd"),
- (b"\xc1\x64", "strict", None),
-)
-
class Test_EUC_JP_COMPAT(test_multibytecodec_support.TestBase,
unittest.TestCase):
encoding = 'euc_jp'
tstring = test_multibytecodec_support.load_teststring('euc_jp')
- codectests = eucjp_commontests + (
- (b"\xa1\xc0\\", "strict", "\uff3c\\"),
+ codectests = euc_commontests + (
("\xa5", "strict", b"\x5c"),
("\u203e", "strict", b"\x7e"),
)
@@ -66,8 +73,6 @@ class Test_EUC_JP_COMPAT(test_multibytecodec_support.TestBase,
shiftjis_commonenctests = (
(b"abc\x80\x80\x82\x84", "strict", None),
(b"abc\xf8", "strict", None),
- (b"abc\x80\x80\x82\x84", "replace", "abc\ufffd\uff44"),
- (b"abc\x80\x80\x82\x84\x88", "replace", "abc\ufffd\uff44\ufffd"),
(b"abc\x80\x80\x82\x84def", "ignore", "abc\uff44def"),
)
@@ -75,20 +80,41 @@ class Test_SJIS_COMPAT(test_multibytecodec_support.TestBase, unittest.TestCase):
encoding = 'shift_jis'
tstring = test_multibytecodec_support.load_teststring('shift_jis')
codectests = shiftjis_commonenctests + (
+ (b"abc\x80\x80\x82\x84", "replace", "abc\ufffd\ufffd\uff44"),
+ (b"abc\x80\x80\x82\x84\x88", "replace", "abc\ufffd\ufffd\uff44\ufffd"),
+
(b"\\\x7e", "strict", "\\\x7e"),
(b"\x81\x5f\x81\x61\x81\x7c", "strict", "\uff3c\u2016\u2212"),
+ (b"abc\x81\x39", "replace", "abc\ufffd9"),
+ (b"abc\xEA\xFC", "replace", "abc\ufffd\ufffd"),
+ (b"abc\xFF\x58", "replace", "abc\ufffdX"),
+ )
+
+class Test_SJIS_2004(test_multibytecodec_support.TestBase, unittest.TestCase):
+ encoding = 'shift_jis_2004'
+ tstring = test_multibytecodec_support.load_teststring('shift_jis')
+ codectests = shiftjis_commonenctests + (
+ (b"\\\x7e", "strict", "\xa5\u203e"),
+ (b"\x81\x5f\x81\x61\x81\x7c", "strict", "\\\u2016\u2212"),
+ (b"abc\xEA\xFC", "strict", "abc\u64bf"),
+ (b"\x81\x39xy", "replace", "\ufffd9xy"),
+ (b"\xFF\x58xy", "replace", "\ufffdXxy"),
+ (b"\x80\x80\x82\x84xy", "replace", "\ufffd\ufffd\uff44xy"),
+ (b"\x80\x80\x82\x84\x88xy", "replace", "\ufffd\ufffd\uff44\u5864y"),
+ (b"\xFC\xFBxy", "replace", '\ufffd\u95b4y'),
+ )
+ xmlcharnametest = (
+ "\xab\u211c\xbb = \u2329\u1234\u232a",
+ b"\x85G&real;\x85Q = &lang;&#4660;&rang;"
)
class Test_SJISX0213(test_multibytecodec_support.TestBase, unittest.TestCase):
encoding = 'shift_jisx0213'
tstring = test_multibytecodec_support.load_teststring('shift_jisx0213')
- codectests = (
- # invalid bytes
- (b"abc\x80\x80\x82\x84", "strict", None),
- (b"abc\xf8", "strict", None),
- (b"abc\x80\x80\x82\x84", "replace", "abc\ufffd\uff44"),
- (b"abc\x80\x80\x82\x84\x88", "replace", "abc\ufffd\uff44\ufffd"),
- (b"abc\x80\x80\x82\x84def", "ignore", "abc\uff44def"),
+ codectests = shiftjis_commonenctests + (
+ (b"abc\x80\x80\x82\x84", "replace", "abc\ufffd\ufffd\uff44"),
+ (b"abc\x80\x80\x82\x84\x88", "replace", "abc\ufffd\ufffd\uff44\ufffd"),
+
# sjis vs cp932
(b"\\\x7e", "replace", "\xa5\u203e"),
(b"\x81\x5f\x81\x61\x81\x7c", "replace", "\x5c\u2016\u2212"),
diff --git a/Lib/test/test_codecencodings_kr.py b/Lib/test/test_codecencodings_kr.py
index de4da7f..4997e83 100644
--- a/Lib/test/test_codecencodings_kr.py
+++ b/Lib/test/test_codecencodings_kr.py
@@ -15,8 +15,8 @@ class Test_CP949(test_multibytecodec_support.TestBase, unittest.TestCase):
# invalid bytes
(b"abc\x80\x80\xc1\xc4", "strict", None),
(b"abc\xc8", "strict", None),
- (b"abc\x80\x80\xc1\xc4", "replace", "abc\ufffd\uc894"),
- (b"abc\x80\x80\xc1\xc4\xc8", "replace", "abc\ufffd\uc894\ufffd"),
+ (b"abc\x80\x80\xc1\xc4", "replace", "abc\ufffd\ufffd\uc894"),
+ (b"abc\x80\x80\xc1\xc4\xc8", "replace", "abc\ufffd\ufffd\uc894\ufffd"),
(b"abc\x80\x80\xc1\xc4", "ignore", "abc\uc894"),
)
@@ -27,8 +27,8 @@ class Test_EUCKR(test_multibytecodec_support.TestBase, unittest.TestCase):
# invalid bytes
(b"abc\x80\x80\xc1\xc4", "strict", None),
(b"abc\xc8", "strict", None),
- (b"abc\x80\x80\xc1\xc4", "replace", "abc\ufffd\uc894"),
- (b"abc\x80\x80\xc1\xc4\xc8", "replace", "abc\ufffd\uc894\ufffd"),
+ (b"abc\x80\x80\xc1\xc4", "replace", 'abc\ufffd\ufffd\uc894'),
+ (b"abc\x80\x80\xc1\xc4\xc8", "replace", "abc\ufffd\ufffd\uc894\ufffd"),
(b"abc\x80\x80\xc1\xc4", "ignore", "abc\uc894"),
# composed make-up sequence errors
@@ -40,13 +40,14 @@ class Test_EUCKR(test_multibytecodec_support.TestBase, unittest.TestCase):
(b"\xa4\xd4\xa4\xb6\xa4\xd0\xa4", "strict", None),
(b"\xa4\xd4\xa4\xb6\xa4\xd0\xa4\xd4", "strict", "\uc4d4"),
(b"\xa4\xd4\xa4\xb6\xa4\xd0\xa4\xd4x", "strict", "\uc4d4x"),
- (b"a\xa4\xd4\xa4\xb6\xa4", "replace", "a\ufffd"),
+ (b"a\xa4\xd4\xa4\xb6\xa4", "replace", 'a\ufffd'),
(b"\xa4\xd4\xa3\xb6\xa4\xd0\xa4\xd4", "strict", None),
(b"\xa4\xd4\xa4\xb6\xa3\xd0\xa4\xd4", "strict", None),
(b"\xa4\xd4\xa4\xb6\xa4\xd0\xa3\xd4", "strict", None),
- (b"\xa4\xd4\xa4\xff\xa4\xd0\xa4\xd4", "replace", "\ufffd"),
- (b"\xa4\xd4\xa4\xb6\xa4\xff\xa4\xd4", "replace", "\ufffd"),
- (b"\xa4\xd4\xa4\xb6\xa4\xd0\xa4\xff", "replace", "\ufffd"),
+ (b"\xa4\xd4\xa4\xff\xa4\xd0\xa4\xd4", "replace", '\ufffd\u6e21\ufffd\u3160\ufffd'),
+ (b"\xa4\xd4\xa4\xb6\xa4\xff\xa4\xd4", "replace", '\ufffd\u6e21\ub544\ufffd\ufffd'),
+ (b"\xa4\xd4\xa4\xb6\xa4\xd0\xa4\xff", "replace", '\ufffd\u6e21\ub544\u572d\ufffd'),
+ (b"\xa4\xd4\xff\xa4\xd4\xa4\xb6\xa4\xd0\xa4\xd4", "replace", '\ufffd\ufffd\ufffd\uc4d4'),
(b"\xc1\xc4", "strict", "\uc894"),
)
@@ -57,9 +58,13 @@ class Test_JOHAB(test_multibytecodec_support.TestBase, unittest.TestCase):
# invalid bytes
(b"abc\x80\x80\xc1\xc4", "strict", None),
(b"abc\xc8", "strict", None),
- (b"abc\x80\x80\xc1\xc4", "replace", "abc\ufffd\ucd27"),
- (b"abc\x80\x80\xc1\xc4\xc8", "replace", "abc\ufffd\ucd27\ufffd"),
+ (b"abc\x80\x80\xc1\xc4", "replace", "abc\ufffd\ufffd\ucd27"),
+ (b"abc\x80\x80\xc1\xc4\xc8", "replace", "abc\ufffd\ufffd\ucd27\ufffd"),
(b"abc\x80\x80\xc1\xc4", "ignore", "abc\ucd27"),
+ (b"\xD8abc", "replace", "\uFFFDabc"),
+ (b"\xD8\xFFabc", "replace", "\uFFFD\uFFFDabc"),
+ (b"\x84bxy", "replace", "\uFFFDbxy"),
+ (b"\x8CBxy", "replace", "\uFFFDBxy"),
)
def test_main():
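For illustration, the new Johab cases behave like this on an interpreter with the fix: a lead byte followed by an invalid trail byte produces a single U+FFFD and decoding resumes at the next byte:

print(b"\x84bxy".decode("johab", "replace"))      # '\ufffdbxy'
print(b"\xd8\xffabc".decode("johab", "replace"))  # '\ufffd\ufffdabc'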
diff --git a/Lib/test/test_codecencodings_tw.py b/Lib/test/test_codecencodings_tw.py
index 12d3c9f..f2f3c18 100644
--- a/Lib/test/test_codecencodings_tw.py
+++ b/Lib/test/test_codecencodings_tw.py
@@ -15,8 +15,8 @@ class Test_Big5(test_multibytecodec_support.TestBase, unittest.TestCase):
# invalid bytes
(b"abc\x80\x80\xc1\xc4", "strict", None),
(b"abc\xc8", "strict", None),
- (b"abc\x80\x80\xc1\xc4", "replace", "abc\ufffd\u8b10"),
- (b"abc\x80\x80\xc1\xc4\xc8", "replace", "abc\ufffd\u8b10\ufffd"),
+ (b"abc\x80\x80\xc1\xc4", "replace", "abc\ufffd\ufffd\u8b10"),
+ (b"abc\x80\x80\xc1\xc4\xc8", "replace", "abc\ufffd\ufffd\u8b10\ufffd"),
(b"abc\x80\x80\xc1\xc4", "ignore", "abc\u8b10"),
)
diff --git a/Lib/test/test_codecmaps_tw.py b/Lib/test/test_codecmaps_tw.py
index 6db5091..412b9de 100644
--- a/Lib/test/test_codecmaps_tw.py
+++ b/Lib/test/test_codecmaps_tw.py
@@ -23,6 +23,9 @@ class TestCP950Map(test_multibytecodec_support.TestBase_Mapping,
(b'\xa2\xcc', '\u5341'),
(b'\xa2\xce', '\u5345'),
]
+ codectests = (
+ (b"\xFFxy", "replace", "\ufffdxy"),
+ )
def test_main():
support.run_unittest(__name__)
diff --git a/Lib/test/test_codecs.py b/Lib/test/test_codecs.py
index 4899a59..5daaa19 100644
--- a/Lib/test/test_codecs.py
+++ b/Lib/test/test_codecs.py
@@ -1,8 +1,25 @@
-from test import support
-import unittest
+import _testcapi
import codecs
+import io
import locale
-import sys, _testcapi, io
+import sys
+import unittest
+import warnings
+
+from test import support
+
+if sys.platform == 'win32':
+ VISTA_OR_LATER = (sys.getwindowsversion().major >= 6)
+else:
+ VISTA_OR_LATER = False
+
+try:
+ import ctypes
+except ImportError:
+ ctypes = None
+ SIZEOF_WCHAR_T = -1
+else:
+ SIZEOF_WCHAR_T = ctypes.sizeof(ctypes.c_wchar)
class Queue(object):
"""
@@ -622,8 +639,113 @@ class UTF8Test(ReadTest):
b"abc\xed\xa0\x80def")
self.assertEqual(b"abc\xed\xa0\x80def".decode("utf-8", "surrogatepass"),
"abc\ud800def")
+ self.assertEqual("\U00010fff\uD800".encode("utf-8", "surrogatepass"),
+ b"\xf0\x90\xbf\xbf\xed\xa0\x80")
+ self.assertEqual(b"\xf0\x90\xbf\xbf\xed\xa0\x80".decode("utf-8", "surrogatepass"),
+ "\U00010fff\uD800")
self.assertTrue(codecs.lookup_error("surrogatepass"))
+@unittest.skipUnless(sys.platform == 'win32',
+ 'cp65001 is a Windows-only codec')
+class CP65001Test(ReadTest):
+ encoding = "cp65001"
+
+ def test_encode(self):
+ tests = [
+ ('abc', 'strict', b'abc'),
+ ('\xe9\u20ac', 'strict', b'\xc3\xa9\xe2\x82\xac'),
+ ('\U0010ffff', 'strict', b'\xf4\x8f\xbf\xbf'),
+ ]
+ if VISTA_OR_LATER:
+ tests.extend((
+ ('\udc80', 'strict', None),
+ ('\udc80', 'ignore', b''),
+ ('\udc80', 'replace', b'?'),
+ ('\udc80', 'backslashreplace', b'\\udc80'),
+ ('\udc80', 'surrogatepass', b'\xed\xb2\x80'),
+ ))
+ else:
+ tests.append(('\udc80', 'strict', b'\xed\xb2\x80'))
+ for text, errors, expected in tests:
+ if expected is not None:
+ try:
+ encoded = text.encode('cp65001', errors)
+ except UnicodeEncodeError as err:
+ self.fail('Unable to encode %a to cp65001 with '
+ 'errors=%r: %s' % (text, errors, err))
+ self.assertEqual(encoded, expected,
+ '%a.encode("cp65001", %r)=%a != %a'
+ % (text, errors, encoded, expected))
+ else:
+ self.assertRaises(UnicodeEncodeError,
+ text.encode, "cp65001", errors)
+
+ def test_decode(self):
+ tests = [
+ (b'abc', 'strict', 'abc'),
+ (b'\xc3\xa9\xe2\x82\xac', 'strict', '\xe9\u20ac'),
+ (b'\xf4\x8f\xbf\xbf', 'strict', '\U0010ffff'),
+ (b'\xef\xbf\xbd', 'strict', '\ufffd'),
+ (b'[\xc3\xa9]', 'strict', '[\xe9]'),
+ # invalid bytes
+ (b'[\xff]', 'strict', None),
+ (b'[\xff]', 'ignore', '[]'),
+ (b'[\xff]', 'replace', '[\ufffd]'),
+ (b'[\xff]', 'surrogateescape', '[\udcff]'),
+ ]
+ if VISTA_OR_LATER:
+ tests.extend((
+ (b'[\xed\xb2\x80]', 'strict', None),
+ (b'[\xed\xb2\x80]', 'ignore', '[]'),
+ (b'[\xed\xb2\x80]', 'replace', '[\ufffd\ufffd\ufffd]'),
+ ))
+ else:
+ tests.extend((
+ (b'[\xed\xb2\x80]', 'strict', '[\udc80]'),
+ ))
+ for raw, errors, expected in tests:
+ if expected is not None:
+ try:
+ decoded = raw.decode('cp65001', errors)
+ except UnicodeDecodeError as err:
+ self.fail('Unable to decode %a from cp65001 with '
+ 'errors=%r: %s' % (raw, errors, err))
+ self.assertEqual(decoded, expected,
+ '%a.decode("cp65001", %r)=%a != %a'
+ % (raw, errors, decoded, expected))
+ else:
+ self.assertRaises(UnicodeDecodeError,
+ raw.decode, 'cp65001', errors)
+
+ @unittest.skipUnless(VISTA_OR_LATER, 'requires Windows Vista or later')
+ def test_lone_surrogates(self):
+ self.assertRaises(UnicodeEncodeError, "\ud800".encode, "cp65001")
+ self.assertRaises(UnicodeDecodeError, b"\xed\xa0\x80".decode, "cp65001")
+ self.assertEqual("[\uDC80]".encode("cp65001", "backslashreplace"),
+ b'[\\udc80]')
+ self.assertEqual("[\uDC80]".encode("cp65001", "xmlcharrefreplace"),
+ b'[&#56448;]')
+ self.assertEqual("[\uDC80]".encode("cp65001", "surrogateescape"),
+ b'[\x80]')
+ self.assertEqual("[\uDC80]".encode("cp65001", "ignore"),
+ b'[]')
+ self.assertEqual("[\uDC80]".encode("cp65001", "replace"),
+ b'[?]')
+
+ @unittest.skipUnless(VISTA_OR_LATER, 'requires Windows Vista or later')
+ def test_surrogatepass_handler(self):
+ self.assertEqual("abc\ud800def".encode("cp65001", "surrogatepass"),
+ b"abc\xed\xa0\x80def")
+ self.assertEqual(b"abc\xed\xa0\x80def".decode("cp65001", "surrogatepass"),
+ "abc\ud800def")
+ self.assertEqual("\U00010fff\uD800".encode("cp65001", "surrogatepass"),
+ b"\xf0\x90\xbf\xbf\xed\xa0\x80")
+ self.assertEqual(b"\xf0\x90\xbf\xbf\xed\xa0\x80".decode("cp65001", "surrogatepass"),
+ "\U00010fff\uD800")
+ self.assertTrue(codecs.lookup_error("surrogatepass"))
+
+
+
class UTF7Test(ReadTest):
encoding = "utf-7"
@@ -884,61 +1006,80 @@ class PunycodeTest(unittest.TestCase):
self.assertEqual(uni, puny.decode("punycode"))
class UnicodeInternalTest(unittest.TestCase):
+ @unittest.skipUnless(SIZEOF_WCHAR_T == 4, 'specific to 32-bit wchar_t')
def test_bug1251300(self):
# Decoding with unicode_internal used to not correctly handle "code
# points" above 0x10ffff on UCS-4 builds.
- if sys.maxunicode > 0xffff:
- ok = [
- (b"\x00\x10\xff\xff", "\U0010ffff"),
- (b"\x00\x00\x01\x01", "\U00000101"),
- (b"", ""),
- ]
- not_ok = [
- b"\x7f\xff\xff\xff",
- b"\x80\x00\x00\x00",
- b"\x81\x00\x00\x00",
- b"\x00",
- b"\x00\x00\x00\x00\x00",
- ]
- for internal, uni in ok:
- if sys.byteorder == "little":
- internal = bytes(reversed(internal))
+ ok = [
+ (b"\x00\x10\xff\xff", "\U0010ffff"),
+ (b"\x00\x00\x01\x01", "\U00000101"),
+ (b"", ""),
+ ]
+ not_ok = [
+ b"\x7f\xff\xff\xff",
+ b"\x80\x00\x00\x00",
+ b"\x81\x00\x00\x00",
+ b"\x00",
+ b"\x00\x00\x00\x00\x00",
+ ]
+ for internal, uni in ok:
+ if sys.byteorder == "little":
+ internal = bytes(reversed(internal))
+ with support.check_warnings():
self.assertEqual(uni, internal.decode("unicode_internal"))
- for internal in not_ok:
- if sys.byteorder == "little":
- internal = bytes(reversed(internal))
+ for internal in not_ok:
+ if sys.byteorder == "little":
+ internal = bytes(reversed(internal))
+ with support.check_warnings(('unicode_internal codec has been '
+ 'deprecated', DeprecationWarning)):
self.assertRaises(UnicodeDecodeError, internal.decode,
- "unicode_internal")
-
+ "unicode_internal")
+ if sys.byteorder == "little":
+ invalid = b"\x00\x00\x11\x00"
+ else:
+ invalid = b"\x00\x11\x00\x00"
+ with support.check_warnings():
+ self.assertRaises(UnicodeDecodeError,
+ invalid.decode, "unicode_internal")
+ with support.check_warnings():
+ self.assertEqual(invalid.decode("unicode_internal", "replace"),
+ '\ufffd')
+
+ @unittest.skipUnless(SIZEOF_WCHAR_T == 4, 'specific to 32-bit wchar_t')
def test_decode_error_attributes(self):
- if sys.maxunicode > 0xffff:
- try:
+ try:
+ with support.check_warnings(('unicode_internal codec has been '
+ 'deprecated', DeprecationWarning)):
b"\x00\x00\x00\x00\x00\x11\x11\x00".decode("unicode_internal")
- except UnicodeDecodeError as ex:
- self.assertEqual("unicode_internal", ex.encoding)
- self.assertEqual(b"\x00\x00\x00\x00\x00\x11\x11\x00", ex.object)
- self.assertEqual(4, ex.start)
- self.assertEqual(8, ex.end)
- else:
- self.fail()
+ except UnicodeDecodeError as ex:
+ self.assertEqual("unicode_internal", ex.encoding)
+ self.assertEqual(b"\x00\x00\x00\x00\x00\x11\x11\x00", ex.object)
+ self.assertEqual(4, ex.start)
+ self.assertEqual(8, ex.end)
+ else:
+ self.fail()
+ @unittest.skipUnless(SIZEOF_WCHAR_T == 4, 'specific to 32-bit wchar_t')
def test_decode_callback(self):
- if sys.maxunicode > 0xffff:
- codecs.register_error("UnicodeInternalTest", codecs.ignore_errors)
- decoder = codecs.getdecoder("unicode_internal")
+ codecs.register_error("UnicodeInternalTest", codecs.ignore_errors)
+ decoder = codecs.getdecoder("unicode_internal")
+ with support.check_warnings(('unicode_internal codec has been '
+ 'deprecated', DeprecationWarning)):
ab = "ab".encode("unicode_internal").decode()
ignored = decoder(bytes("%s\x22\x22\x22\x22%s" % (ab[:4], ab[4:]),
"ascii"),
"UnicodeInternalTest")
- self.assertEqual(("ab", 12), ignored)
+ self.assertEqual(("ab", 12), ignored)
def test_encode_length(self):
- # Issue 3739
- encoder = codecs.getencoder("unicode_internal")
- self.assertEqual(encoder("a")[1], 1)
- self.assertEqual(encoder("\xe9\u0142")[1], 2)
+ with support.check_warnings(('unicode_internal codec has been '
+ 'deprecated', DeprecationWarning)):
+ # Issue 3739
+ encoder = codecs.getencoder("unicode_internal")
+ self.assertEqual(encoder("a")[1], 1)
+ self.assertEqual(encoder("\xe9\u0142")[1], 2)
- self.assertEqual(codecs.escape_encode(br'\x00')[1], 4)
+ self.assertEqual(codecs.escape_encode(br'\x00')[1], 4)
# From http://www.gnu.org/software/libidn/draft-josefsson-idn-test-vectors.html
nameprep_tests = [
@@ -1262,7 +1403,7 @@ class EncodedFileTest(unittest.TestCase):
self.assertEqual(ef.read(), b'\\\xd5\n\x00\x00\xae')
f = io.BytesIO()
- ef = codecs.EncodedFile(f, 'utf-8', 'latin1')
+ ef = codecs.EncodedFile(f, 'utf-8', 'latin-1')
ef.write(b'\xc3\xbc')
self.assertEqual(f.getvalue(), b'\xfc')
@@ -1394,10 +1535,13 @@ class BasicUnicodeTest(unittest.TestCase, MixInCheckStateHandling):
elif encoding == "latin_1":
name = "latin_1"
self.assertEqual(encoding.replace("_", "-"), name.replace("_", "-"))
- (b, size) = codecs.getencoder(encoding)(s)
- self.assertEqual(size, len(s), "%r != %r (encoding=%r)" % (size, len(s), encoding))
- (chars, size) = codecs.getdecoder(encoding)(b)
- self.assertEqual(chars, s, "%r != %r (encoding=%r)" % (chars, s, encoding))
+
+ with support.check_warnings():
+ # unicode-internal has been deprecated
+ (b, size) = codecs.getencoder(encoding)(s)
+ self.assertEqual(size, len(s), "%r != %r (encoding=%r)" % (size, len(s), encoding))
+ (chars, size) = codecs.getdecoder(encoding)(b)
+ self.assertEqual(chars, s, "%r != %r (encoding=%r)" % (chars, s, encoding))
if encoding not in broken_unicode_with_streams:
# check stream reader/writer
@@ -1501,7 +1645,9 @@ class BasicUnicodeTest(unittest.TestCase, MixInCheckStateHandling):
def test_bad_encode_args(self):
for encoding in all_unicode_encodings:
encoder = codecs.getencoder(encoding)
- self.assertRaises(TypeError, encoder)
+ with support.check_warnings():
+ # unicode-internal has been deprecated
+ self.assertRaises(TypeError, encoder)
def test_encoding_map_type_initialized(self):
from encodings import cp1140
@@ -1593,6 +1739,12 @@ class TypesTest(unittest.TestCase):
self.assertEqual(codecs.raw_unicode_escape_decode(r"\u1234"), ("\u1234", 6))
self.assertEqual(codecs.raw_unicode_escape_decode(br"\u1234"), ("\u1234", 6))
+ self.assertRaises(UnicodeDecodeError, codecs.unicode_escape_decode, br"\U00110000")
+ self.assertEqual(codecs.unicode_escape_decode(r"\U00110000", "replace"), ("\ufffd", 10))
+
+ self.assertRaises(UnicodeDecodeError, codecs.raw_unicode_escape_decode, br"\U00110000")
+ self.assertEqual(codecs.raw_unicode_escape_decode(r"\U00110000", "replace"), ("\ufffd", 10))
+
class SurrogateEscapeTest(unittest.TestCase):
def test_utf8(self):
@@ -1623,7 +1775,7 @@ class SurrogateEscapeTest(unittest.TestCase):
def test_latin1(self):
# Issue6373
- self.assertEqual("\udce4\udceb\udcef\udcf6\udcfc".encode("latin1", "surrogateescape"),
+ self.assertEqual("\udce4\udceb\udcef\udcf6\udcfc".encode("latin-1", "surrogateescape"),
b"\xe4\xeb\xef\xf6\xfc")
@@ -1732,6 +1884,155 @@ class TransformCodecTest(unittest.TestCase):
self.assertEqual(sout, b"\x80")
+@unittest.skipUnless(sys.platform == 'win32',
+ 'code pages are specific to Windows')
+class CodePageTest(unittest.TestCase):
+ # CP_UTF8 is already tested by CP65001Test
+ CP_UTF8 = 65001
+
+ def test_invalid_code_page(self):
+ self.assertRaises(ValueError, codecs.code_page_encode, -1, 'a')
+ self.assertRaises(ValueError, codecs.code_page_decode, -1, b'a')
+ self.assertRaises(WindowsError, codecs.code_page_encode, 123, 'a')
+ self.assertRaises(WindowsError, codecs.code_page_decode, 123, b'a')
+
+ def test_code_page_name(self):
+ self.assertRaisesRegex(UnicodeEncodeError, 'cp932',
+ codecs.code_page_encode, 932, '\xff')
+ self.assertRaisesRegex(UnicodeDecodeError, 'cp932',
+ codecs.code_page_decode, 932, b'\x81\x00')
+ self.assertRaisesRegex(UnicodeDecodeError, 'CP_UTF8',
+ codecs.code_page_decode, self.CP_UTF8, b'\xff')
+
+ def check_decode(self, cp, tests):
+ for raw, errors, expected in tests:
+ if expected is not None:
+ try:
+ decoded = codecs.code_page_decode(cp, raw, errors)
+ except UnicodeDecodeError as err:
+ self.fail('Unable to decode %a from "cp%s" with '
+ 'errors=%r: %s' % (raw, cp, errors, err))
+ self.assertEqual(decoded[0], expected,
+ '%a.decode("cp%s", %r)=%a != %a'
+ % (raw, cp, errors, decoded[0], expected))
+ # assert 0 <= decoded[1] <= len(raw)
+ self.assertGreaterEqual(decoded[1], 0)
+ self.assertLessEqual(decoded[1], len(raw))
+ else:
+ self.assertRaises(UnicodeDecodeError,
+ codecs.code_page_decode, cp, raw, errors)
+
+ def check_encode(self, cp, tests):
+ for text, errors, expected in tests:
+ if expected is not None:
+ try:
+ encoded = codecs.code_page_encode(cp, text, errors)
+ except UnicodeEncodeError as err:
+ self.fail('Unable to encode %a to "cp%s" with '
+ 'errors=%r: %s' % (text, cp, errors, err))
+ self.assertEqual(encoded[0], expected,
+ '%a.encode("cp%s", %r)=%a != %a'
+ % (text, cp, errors, encoded[0], expected))
+ self.assertEqual(encoded[1], len(text))
+ else:
+ self.assertRaises(UnicodeEncodeError,
+ codecs.code_page_encode, cp, text, errors)
+
+ def test_cp932(self):
+ self.check_encode(932, (
+ ('abc', 'strict', b'abc'),
+ ('\uff44\u9a3e', 'strict', b'\x82\x84\xe9\x80'),
+ # test error handlers
+ ('\xff', 'strict', None),
+ ('[\xff]', 'ignore', b'[]'),
+ ('[\xff]', 'replace', b'[y]'),
+ ('[\u20ac]', 'replace', b'[?]'),
+ ('[\xff]', 'backslashreplace', b'[\\xff]'),
+ ('[\xff]', 'xmlcharrefreplace', b'[&#255;]'),
+ ))
+ self.check_decode(932, (
+ (b'abc', 'strict', 'abc'),
+ (b'\x82\x84\xe9\x80', 'strict', '\uff44\u9a3e'),
+ # invalid bytes
+ (b'[\xff]', 'strict', None),
+ (b'[\xff]', 'ignore', '[]'),
+ (b'[\xff]', 'replace', '[\ufffd]'),
+ (b'[\xff]', 'surrogateescape', '[\udcff]'),
+ (b'\x81\x00abc', 'strict', None),
+ (b'\x81\x00abc', 'ignore', '\x00abc'),
+ (b'\x81\x00abc', 'replace', '\ufffd\x00abc'),
+ ))
+
+ def test_cp1252(self):
+ self.check_encode(1252, (
+ ('abc', 'strict', b'abc'),
+ ('\xe9\u20ac', 'strict', b'\xe9\x80'),
+ ('\xff', 'strict', b'\xff'),
+ ('\u0141', 'strict', None),
+ ('\u0141', 'ignore', b''),
+ ('\u0141', 'replace', b'L'),
+ ))
+ self.check_decode(1252, (
+ (b'abc', 'strict', 'abc'),
+ (b'\xe9\x80', 'strict', '\xe9\u20ac'),
+ (b'\xff', 'strict', '\xff'),
+ ))
+
+ def test_cp_utf7(self):
+ cp = 65000
+ self.check_encode(cp, (
+ ('abc', 'strict', b'abc'),
+ ('\xe9\u20ac', 'strict', b'+AOkgrA-'),
+ ('\U0010ffff', 'strict', b'+2//f/w-'),
+ ('\udc80', 'strict', b'+3IA-'),
+ ('\ufffd', 'strict', b'+//0-'),
+ ))
+ self.check_decode(cp, (
+ (b'abc', 'strict', 'abc'),
+ (b'+AOkgrA-', 'strict', '\xe9\u20ac'),
+ (b'+2//f/w-', 'strict', '\U0010ffff'),
+ (b'+3IA-', 'strict', '\udc80'),
+ (b'+//0-', 'strict', '\ufffd'),
+ # invalid bytes
+ (b'[+/]', 'strict', '[]'),
+ (b'[\xff]', 'strict', '[\xff]'),
+ ))
+
+ def test_multibyte_encoding(self):
+ self.check_decode(932, (
+ (b'\x84\xe9\x80', 'ignore', '\u9a3e'),
+ (b'\x84\xe9\x80', 'replace', '\ufffd\u9a3e'),
+ ))
+ self.check_decode(self.CP_UTF8, (
+ (b'\xff\xf4\x8f\xbf\xbf', 'ignore', '\U0010ffff'),
+ (b'\xff\xf4\x8f\xbf\xbf', 'replace', '\ufffd\U0010ffff'),
+ ))
+ if VISTA_OR_LATER:
+ self.check_encode(self.CP_UTF8, (
+ ('[\U0010ffff\uDC80]', 'ignore', b'[\xf4\x8f\xbf\xbf]'),
+ ('[\U0010ffff\uDC80]', 'replace', b'[\xf4\x8f\xbf\xbf?]'),
+ ))
+
+ def test_incremental(self):
+ decoded = codecs.code_page_decode(932, b'\x82', 'strict', False)
+ self.assertEqual(decoded, ('', 0))
+
+ decoded = codecs.code_page_decode(932,
+ b'\xe9\x80\xe9', 'strict',
+ False)
+ self.assertEqual(decoded, ('\u9a3e', 2))
+
+ decoded = codecs.code_page_decode(932,
+ b'\xe9\x80\xe9\x80', 'strict',
+ False)
+ self.assertEqual(decoded, ('\u9a3e\u9a3e', 4))
+
+ decoded = codecs.code_page_decode(932,
+ b'abc', 'strict',
+ False)
+ self.assertEqual(decoded, ('abc', 3))
+
+
def test_main():
support.run_unittest(
UTF32Test,
@@ -1742,6 +2043,7 @@ def test_main():
UTF16BETest,
UTF8Test,
UTF8SigTest,
+ CP65001Test,
UTF7Test,
UTF16ExTest,
ReadBufferTest,
@@ -1760,6 +2062,7 @@ def test_main():
SurrogateEscapeTest,
BomTest,
TransformCodecTest,
+ CodePageTest,
)
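A Windows-only sketch of the codecs.code_page_* API the new CodePageTest covers; these functions exist only on Windows builds, so the example guards on the platform:

import sys
import codecs

if sys.platform == "win32":
    # Both functions return a (result, units consumed) pair.
    print(codecs.code_page_encode(1252, "\xe9\u20ac"))             # (b'\xe9\x80', 2)
    # With final=False, a truncated multibyte sequence stays pending.
    print(codecs.code_page_decode(932, b"\x82", "strict", False))  # ('', 0)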
diff --git a/Lib/test/test_collections.py b/Lib/test/test_collections.py
index 8dc5559..88c3129 100644
--- a/Lib/test/test_collections.py
+++ b/Lib/test/test_collections.py
@@ -1,6 +1,7 @@
"""Unit tests for collections.py."""
import unittest, doctest, operator
+from test.support import TESTFN, forget, unlink
import inspect
from test import support
from collections import namedtuple, Counter, OrderedDict, _count_elements
@@ -10,21 +11,20 @@ from random import randrange, shuffle
import keyword
import re
import sys
-from collections import _ChainMap
-from collections import Hashable, Iterable, Iterator
-from collections import Sized, Container, Callable
-from collections import Set, MutableSet
-from collections import Mapping, MutableMapping, KeysView, ItemsView, UserDict
-from collections import Sequence, MutableSequence
-from collections import ByteString
+from collections import UserDict
+from collections import ChainMap
+from collections.abc import Hashable, Iterable, Iterator
+from collections.abc import Sized, Container, Callable
+from collections.abc import Set, MutableSet
+from collections.abc import Mapping, MutableMapping, KeysView, ItemsView
+from collections.abc import Sequence, MutableSequence
+from collections.abc import ByteString
################################################################################
-### _ChainMap (helper class for configparser)
+### ChainMap (helper class for configparser and the string module)
################################################################################
-ChainMap = _ChainMap # rename to keep test code in sync with 3.3 version
-
class TestChainMap(unittest.TestCase):
def test_basics(self):
@@ -128,6 +128,7 @@ class TestNamedTuple(unittest.TestCase):
self.assertEqual(Point.__module__, __name__)
self.assertEqual(Point.__getitem__, tuple.__getitem__)
self.assertEqual(Point._fields, ('x', 'y'))
+ self.assertIn('class Point(tuple)', Point._source)
self.assertRaises(ValueError, namedtuple, 'abc%', 'efg ghi') # type has non-alpha char
self.assertRaises(ValueError, namedtuple, 'class', 'efg ghi') # type has keyword
@@ -327,6 +328,17 @@ class TestNamedTuple(unittest.TestCase):
pass
self.assertEqual(repr(B(1)), 'B(x=1)')
+ def test_source(self):
+ # verify that _source can be run through exec()
+ tmp = namedtuple('NTColor', 'red green blue')
+ globals().pop('NTColor', None) # remove artifacts from other tests
+ exec(tmp._source, globals())
+ self.assertIn('NTColor', globals())
+ c = NTColor(10, 20, 30)
+ self.assertEqual((c.red, c.green, c.blue), (10, 20, 30))
+ self.assertEqual(NTColor._fields, ('red', 'green', 'blue'))
+ globals().pop('NTColor', None) # clean-up after this test
+
################################################################################
### Abstract Base Classes
@@ -729,6 +741,44 @@ class TestCollectionABCs(ABCTestCase):
self.validate_abstract_methods(MutableSequence, '__contains__', '__iter__',
'__len__', '__getitem__', '__setitem__', '__delitem__', 'insert')
+ def test_MutableSequence_mixins(self):
+ # Test the mixins of MutableSequence by creating a minimal concrete
+ # class inherited from it.
+ class MutableSequenceSubclass(MutableSequence):
+ def __init__(self):
+ self.lst = []
+
+ def __setitem__(self, index, value):
+ self.lst[index] = value
+
+ def __getitem__(self, index):
+ return self.lst[index]
+
+ def __len__(self):
+ return len(self.lst)
+
+ def __delitem__(self, index):
+ del self.lst[index]
+
+ def insert(self, index, value):
+ self.lst.insert(index, value)
+
+ mss = MutableSequenceSubclass()
+ mss.append(0)
+ mss.extend((1, 2, 3, 4))
+ self.assertEqual(len(mss), 5)
+ self.assertEqual(mss[3], 3)
+ mss.reverse()
+ self.assertEqual(mss[3], 1)
+ mss.pop()
+ self.assertEqual(len(mss), 4)
+ mss.remove(3)
+ self.assertEqual(len(mss), 3)
+ mss += (10, 20, 30)
+ self.assertEqual(len(mss), 6)
+ self.assertEqual(mss[-1], 30)
+ mss.clear()
+ self.assertEqual(len(mss), 0)
################################################################################
### Counter
@@ -882,6 +932,27 @@ class TestCounter(unittest.TestCase):
set_result = setop(set(p.elements()), set(q.elements()))
self.assertEqual(counter_result, dict.fromkeys(set_result, 1))
+ def test_inplace_operations(self):
+ elements = 'abcd'
+ for i in range(1000):
+ # test random pairs of multisets
+ p = Counter(dict((elem, randrange(-2,4)) for elem in elements))
+ p.update(e=1, f=-1, g=0)
+ q = Counter(dict((elem, randrange(-2,4)) for elem in elements))
+ q.update(h=1, i=-1, j=0)
+ for inplace_op, regular_op in [
+ (Counter.__iadd__, Counter.__add__),
+ (Counter.__isub__, Counter.__sub__),
+ (Counter.__ior__, Counter.__or__),
+ (Counter.__iand__, Counter.__and__),
+ ]:
+ c = p.copy()
+ c_id = id(c)
+ regular_result = regular_op(c, q)
+ inplace_result = inplace_op(c, q)
+ self.assertEqual(inplace_result, regular_result)
+ self.assertEqual(id(inplace_result), c_id)
+
def test_subtract(self):
c = Counter(a=-5, b=0, c=5, d=10, e=15,g=40)
c.subtract(a=1, b=2, c=-3, d=10, e=20, f=30, h=-50)
@@ -893,6 +964,11 @@ class TestCounter(unittest.TestCase):
c.subtract('aaaabbcce')
self.assertEqual(c, Counter(a=-1, b=0, c=-1, d=1, e=-1))
+ def test_unary(self):
+ c = Counter(a=-5, b=0, c=5, d=10, e=15, g=40)
+ self.assertEqual(dict(+c), dict(c=5, d=10, e=15, g=40))
+ self.assertEqual(dict(-c), dict(a=5))
+
def test_repr_nonsortable(self):
c = Counter(a=2, b=None)
r = repr(c)
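A quick sketch of the Counter operators the new tests exercise: unary plus and minus strip non-positive counts, and the in-place operators mutate the receiver:

from collections import Counter

c = Counter(a=-5, b=0, c=5)
print(+c)            # Counter({'c': 5})
print(-c)            # Counter({'a': 5})
c += Counter(c=1)    # in-place: updates c itself, then strips counts <= 0
print(c)             # Counter({'c': 6})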
diff --git a/Lib/test/test_compile.py b/Lib/test/test_compile.py
index 58ef297..72342f8 100644
--- a/Lib/test/test_compile.py
+++ b/Lib/test/test_compile.py
@@ -1,10 +1,17 @@
import unittest
import sys
import _ast
+import types
from test import support
class TestSpecifics(unittest.TestCase):
+ def compile_single(self, source):
+ compile(source, "<single>", "single")
+
+ def assertInvalidSingle(self, source):
+ self.assertRaises(SyntaxError, self.compile_single, source)
+
def test_no_ending_newline(self):
compile("hi", "<test>", "exec")
compile("hi\r", "<test>", "exec")
@@ -433,6 +440,39 @@ if 1:
ast.body = [_ast.BoolOp()]
self.assertRaises(TypeError, compile, ast, '<ast>', 'exec')
+ @support.cpython_only
+ def test_same_filename_used(self):
+ s = """def f(): pass\ndef g(): pass"""
+ c = compile(s, "myfile", "exec")
+ for obj in c.co_consts:
+ if isinstance(obj, types.CodeType):
+ self.assertIs(obj.co_filename, c.co_filename)
+
+ def test_single_statement(self):
+ self.compile_single("1 + 2")
+ self.compile_single("\n1 + 2")
+ self.compile_single("1 + 2\n")
+ self.compile_single("1 + 2\n\n")
+ self.compile_single("1 + 2\t\t\n")
+ self.compile_single("1 + 2\t\t\n ")
+ self.compile_single("1 + 2 # one plus two")
+ self.compile_single("1; 2")
+ self.compile_single("import sys; sys")
+ self.compile_single("def f():\n pass")
+ self.compile_single("while False:\n pass")
+ self.compile_single("if x:\n f(x)")
+ self.compile_single("if x:\n f(x)\nelse:\n g(x)")
+ self.compile_single("class T:\n pass")
+
+ def test_bad_single_statement(self):
+ self.assertInvalidSingle('1\n2')
+ self.assertInvalidSingle('def f(): pass')
+ self.assertInvalidSingle('a = 13\nb = 187')
+ self.assertInvalidSingle('del x\ndel y')
+ self.assertInvalidSingle('f()\ng()')
+ self.assertInvalidSingle('f()\n# blah\nblah()')
+ self.assertInvalidSingle('f()\nxy # blah\nblah()')
+ self.assertInvalidSingle('x = 5 # comment\nx = 6\n')
def test_main():
support.run_unittest(TestSpecifics)
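For reference, "single" mode compiles exactly one interactive statement, which is the behaviour assertInvalidSingle checks:

compile("1 + 2\n", "<single>", "single")             # accepted
try:
    compile("a = 1\nb = 2", "<single>", "single")
except SyntaxError as err:
    print("rejected:", err)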
diff --git a/Lib/test/test_concurrent_futures.py b/Lib/test/test_concurrent_futures.py
index 372da27..04ee246 100644
--- a/Lib/test/test_concurrent_futures.py
+++ b/Lib/test/test_concurrent_futures.py
@@ -19,7 +19,7 @@ import unittest
from concurrent import futures
from concurrent.futures._base import (
PENDING, RUNNING, CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED, Future)
-import concurrent.futures.process
+from concurrent.futures.process import BrokenProcessPool
def create_future(state=PENDING, exception=None, result=None):
@@ -34,7 +34,7 @@ PENDING_FUTURE = create_future(state=PENDING)
RUNNING_FUTURE = create_future(state=RUNNING)
CANCELLED_FUTURE = create_future(state=CANCELLED)
CANCELLED_AND_NOTIFIED_FUTURE = create_future(state=CANCELLED_AND_NOTIFIED)
-EXCEPTION_FUTURE = create_future(state=FINISHED, exception=IOError())
+EXCEPTION_FUTURE = create_future(state=FINISHED, exception=OSError())
SUCCESSFUL_FUTURE = create_future(state=FINISHED, result=42)
@@ -160,7 +160,7 @@ class ProcessPoolShutdownTest(ProcessPoolMixin, ExecutorShutdownTest):
processes = self.executor._processes
self.executor.shutdown()
- for p in processes:
+ for p in processes.values():
p.join()
def test_context_manager_shutdown(self):
@@ -169,7 +169,7 @@ class ProcessPoolShutdownTest(ProcessPoolMixin, ExecutorShutdownTest):
self.assertEqual(list(e.map(abs, range(-5, 5))),
[5, 4, 3, 2, 1, 0, 1, 2, 3, 4])
- for p in processes:
+ for p in processes.values():
p.join()
def test_del_shutdown(self):
@@ -180,7 +180,7 @@ class ProcessPoolShutdownTest(ProcessPoolMixin, ExecutorShutdownTest):
del executor
queue_management_thread.join()
- for p in processes:
+ for p in processes.values():
p.join()
class WaitTests(unittest.TestCase):
@@ -266,14 +266,14 @@ class WaitTests(unittest.TestCase):
def test_timeout(self):
future1 = self.executor.submit(mul, 6, 7)
- future2 = self.executor.submit(time.sleep, 3)
+ future2 = self.executor.submit(time.sleep, 6)
finished, pending = futures.wait(
[CANCELLED_AND_NOTIFIED_FUTURE,
EXCEPTION_FUTURE,
SUCCESSFUL_FUTURE,
future1, future2],
- timeout=1.5,
+ timeout=5,
return_when=futures.ALL_COMPLETED)
self.assertEqual(set([CANCELLED_AND_NOTIFIED_FUTURE,
@@ -363,8 +363,8 @@ class ExecutorTest(unittest.TestCase):
results = []
try:
for i in self.executor.map(time.sleep,
- [0, 0, 3],
- timeout=1.5):
+ [0, 0, 6],
+ timeout=5):
results.append(i)
except futures.TimeoutError:
pass
@@ -373,13 +373,38 @@ class ExecutorTest(unittest.TestCase):
self.assertEqual([None, None], results)
+ def test_shutdown_race_issue12456(self):
+ # Issue #12456: race condition at shutdown where trying to post a
+ # sentinel in the call queue blocks (the queue is full while processes
+ # have exited).
+ self.executor.map(str, [2] * (self.worker_count + 1))
+ self.executor.shutdown()
+
class ThreadPoolExecutorTest(ThreadPoolMixin, ExecutorTest):
- pass
+ def test_map_submits_without_iteration(self):
+ """Tests verifying issue 11777."""
+ finished = []
+ def record_finished(n):
+ finished.append(n)
+
+ self.executor.map(record_finished, range(10))
+ self.executor.shutdown(wait=True)
+ self.assertCountEqual(finished, range(10))
class ProcessPoolExecutorTest(ProcessPoolMixin, ExecutorTest):
- pass
+ def test_killed_child(self):
+ # When a child process is abruptly terminated, the whole pool gets
+ # "broken".
+ futures = [self.executor.submit(time.sleep, 3)]
+ # Get one of the processes, and terminate (kill) it
+ p = next(iter(self.executor._processes.values()))
+ p.terminate()
+ for fut in futures:
+ self.assertRaises(BrokenProcessPool, fut.result)
+ # Submitting other jobs fails as well.
+ self.assertRaises(BrokenProcessPool, self.executor.submit, pow, 2, 8)
class FutureTests(unittest.TestCase):
@@ -482,7 +507,7 @@ class FutureTests(unittest.TestCase):
'<Future at 0x[0-9a-f]+ state=cancelled>')
self.assertRegex(
repr(EXCEPTION_FUTURE),
- '<Future at 0x[0-9a-f]+ state=finished raised IOError>')
+ '<Future at 0x[0-9a-f]+ state=finished raised OSError>')
self.assertRegex(
repr(SUCCESSFUL_FUTURE),
'<Future at 0x[0-9a-f]+ state=finished returned int>')
@@ -493,7 +518,7 @@ class FutureTests(unittest.TestCase):
f2 = create_future(state=RUNNING)
f3 = create_future(state=CANCELLED)
f4 = create_future(state=CANCELLED_AND_NOTIFIED)
- f5 = create_future(state=FINISHED, exception=IOError())
+ f5 = create_future(state=FINISHED, exception=OSError())
f6 = create_future(state=FINISHED, result=5)
self.assertTrue(f1.cancel())
@@ -547,7 +572,7 @@ class FutureTests(unittest.TestCase):
CANCELLED_FUTURE.result, timeout=0)
self.assertRaises(futures.CancelledError,
CANCELLED_AND_NOTIFIED_FUTURE.result, timeout=0)
- self.assertRaises(IOError, EXCEPTION_FUTURE.result, timeout=0)
+ self.assertRaises(OSError, EXCEPTION_FUTURE.result, timeout=0)
self.assertEqual(SUCCESSFUL_FUTURE.result(timeout=0), 42)
def test_result_with_success(self):
@@ -586,7 +611,7 @@ class FutureTests(unittest.TestCase):
self.assertRaises(futures.CancelledError,
CANCELLED_AND_NOTIFIED_FUTURE.exception, timeout=0)
self.assertTrue(isinstance(EXCEPTION_FUTURE.exception(timeout=0),
- IOError))
+ OSError))
self.assertEqual(SUCCESSFUL_FUTURE.exception(timeout=0), None)
def test_exception_with_success(self):
@@ -595,14 +620,14 @@ class FutureTests(unittest.TestCase):
time.sleep(1)
with f1._condition:
f1._state = FINISHED
- f1._exception = IOError()
+ f1._exception = OSError()
f1._condition.notify_all()
f1 = create_future(state=PENDING)
t = threading.Thread(target=notification)
t.start()
- self.assertTrue(isinstance(f1.exception(timeout=5), IOError))
+ self.assertTrue(isinstance(f1.exception(timeout=5), OSError))
@test.support.reap_threads
def test_main():
@@ -615,7 +640,8 @@ def test_main():
ThreadPoolAsCompletedTests,
FutureTests,
ProcessPoolShutdownTest,
- ThreadPoolShutdownTest)
+ ThreadPoolShutdownTest,
+ )
finally:
test.support.reap_children()
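A hedged sketch of the BrokenProcessPool behaviour test_killed_child covers; _processes is a private executor attribute, used here only to simulate an abrupt worker death:

import time
from concurrent.futures import ProcessPoolExecutor
from concurrent.futures.process import BrokenProcessPool

def main():
    ex = ProcessPoolExecutor(max_workers=1)
    fut = ex.submit(time.sleep, 30)
    # Kill the single worker out from under the pool (private API).
    next(iter(ex._processes.values())).terminate()
    try:
        fut.result()
    except BrokenProcessPool:
        print("pool marked broken; further submits raise as well")

if __name__ == "__main__":
    main()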
diff --git a/Lib/test/test_cfgparser.py b/Lib/test/test_configparser.py
index a6e9050..a6e9050 100644
--- a/Lib/test/test_cfgparser.py
+++ b/Lib/test/test_configparser.py
diff --git a/Lib/test/test_copy.py b/Lib/test/test_copy.py
index a84c109..c4baae4 100644
--- a/Lib/test/test_copy.py
+++ b/Lib/test/test_copy.py
@@ -17,7 +17,7 @@ class TestCopy(unittest.TestCase):
# Attempt full line coverage of copy.py from top to bottom
def test_exceptions(self):
- self.assertTrue(copy.Error is copy.error)
+ self.assertIs(copy.Error, copy.error)
self.assertTrue(issubclass(copy.Error, Exception))
# The copy() method
@@ -54,20 +54,26 @@ class TestCopy(unittest.TestCase):
def test_copy_reduce_ex(self):
class C(object):
def __reduce_ex__(self, proto):
+ c.append(1)
return ""
def __reduce__(self):
- raise support.TestFailed("shouldn't call this")
+ self.fail("shouldn't call this")
+ c = []
x = C()
y = copy.copy(x)
- self.assertTrue(y is x)
+ self.assertIs(y, x)
+ self.assertEqual(c, [1])
def test_copy_reduce(self):
class C(object):
def __reduce__(self):
+ c.append(1)
return ""
+ c = []
x = C()
y = copy.copy(x)
- self.assertTrue(y is x)
+ self.assertIs(y, x)
+ self.assertEqual(c, [1])
def test_copy_cant(self):
class C(object):
@@ -91,7 +97,7 @@ class TestCopy(unittest.TestCase):
"hello", "hello\u1234", f.__code__,
NewStyle, range(10), Classic, max]
for x in tests:
- self.assertTrue(copy.copy(x) is x, repr(x))
+ self.assertIs(copy.copy(x), x)
def test_copy_list(self):
x = [1, 2, 3]
@@ -185,9 +191,9 @@ class TestCopy(unittest.TestCase):
x = [x, x]
y = copy.deepcopy(x)
self.assertEqual(y, x)
- self.assertTrue(y is not x)
- self.assertTrue(y[0] is not x[0])
- self.assertTrue(y[0] is y[1])
+ self.assertIsNot(y, x)
+ self.assertIsNot(y[0], x[0])
+ self.assertIs(y[0], y[1])
def test_deepcopy_issubclass(self):
# XXX Note: there's no way to test the TypeError coming out of
@@ -227,20 +233,26 @@ class TestCopy(unittest.TestCase):
def test_deepcopy_reduce_ex(self):
class C(object):
def __reduce_ex__(self, proto):
+ c.append(1)
return ""
def __reduce__(self):
- raise support.TestFailed("shouldn't call this")
+ self.fail("shouldn't call this")
+ c = []
x = C()
y = copy.deepcopy(x)
- self.assertTrue(y is x)
+ self.assertIs(y, x)
+ self.assertEqual(c, [1])
def test_deepcopy_reduce(self):
class C(object):
def __reduce__(self):
+ c.append(1)
return ""
+ c = []
x = C()
y = copy.deepcopy(x)
- self.assertTrue(y is x)
+ self.assertIs(y, x)
+ self.assertEqual(c, [1])
def test_deepcopy_cant(self):
class C(object):
@@ -264,14 +276,14 @@ class TestCopy(unittest.TestCase):
"hello", "hello\u1234", f.__code__,
NewStyle, range(10), Classic, max]
for x in tests:
- self.assertTrue(copy.deepcopy(x) is x, repr(x))
+ self.assertIs(copy.deepcopy(x), x)
def test_deepcopy_list(self):
x = [[1, 2], 3]
y = copy.deepcopy(x)
self.assertEqual(y, x)
- self.assertTrue(x is not y)
- self.assertTrue(x[0] is not y[0])
+ self.assertIsNot(x, y)
+ self.assertIsNot(x[0], y[0])
def test_deepcopy_reflexive_list(self):
x = []
@@ -279,16 +291,26 @@ class TestCopy(unittest.TestCase):
y = copy.deepcopy(x)
for op in comparisons:
self.assertRaises(RuntimeError, op, y, x)
- self.assertTrue(y is not x)
- self.assertTrue(y[0] is y)
+ self.assertIsNot(y, x)
+ self.assertIs(y[0], y)
self.assertEqual(len(y), 1)
+ def test_deepcopy_empty_tuple(self):
+ x = ()
+ y = copy.deepcopy(x)
+ self.assertIs(x, y)
+
def test_deepcopy_tuple(self):
x = ([1, 2], 3)
y = copy.deepcopy(x)
self.assertEqual(y, x)
- self.assertTrue(x is not y)
- self.assertTrue(x[0] is not y[0])
+ self.assertIsNot(x, y)
+ self.assertIsNot(x[0], y[0])
+
+ def test_deepcopy_tuple_of_immutables(self):
+ x = ((1, 2), 3)
+ y = copy.deepcopy(x)
+ self.assertIs(x, y)
def test_deepcopy_reflexive_tuple(self):
x = ([],)
@@ -296,16 +318,16 @@ class TestCopy(unittest.TestCase):
y = copy.deepcopy(x)
for op in comparisons:
self.assertRaises(RuntimeError, op, y, x)
- self.assertTrue(y is not x)
- self.assertTrue(y[0] is not x[0])
- self.assertTrue(y[0][0] is y)
+ self.assertIsNot(y, x)
+ self.assertIsNot(y[0], x[0])
+ self.assertIs(y[0][0], y)
def test_deepcopy_dict(self):
x = {"foo": [1, 2], "bar": 3}
y = copy.deepcopy(x)
self.assertEqual(y, x)
- self.assertTrue(x is not y)
- self.assertTrue(x["foo"] is not y["foo"])
+ self.assertIsNot(x, y)
+ self.assertIsNot(x["foo"], y["foo"])
def test_deepcopy_reflexive_dict(self):
x = {}
@@ -315,15 +337,30 @@ class TestCopy(unittest.TestCase):
self.assertRaises(TypeError, op, y, x)
for op in equality_comparisons:
self.assertRaises(RuntimeError, op, y, x)
- self.assertTrue(y is not x)
- self.assertTrue(y['foo'] is y)
+ self.assertIsNot(y, x)
+ self.assertIs(y['foo'], y)
self.assertEqual(len(y), 1)
def test_deepcopy_keepalive(self):
memo = {}
- x = 42
+ x = []
+ y = copy.deepcopy(x, memo)
+ self.assertIs(memo[id(memo)][0], x)
+
+ def test_deepcopy_dont_memo_immutable(self):
+ memo = {}
+ x = [1, 2, 3, 4]
y = copy.deepcopy(x, memo)
- self.assertTrue(memo[id(x)] is x)
+ self.assertEqual(y, x)
+ # There's the entry for the new list, and the keep alive.
+ self.assertEqual(len(memo), 2)
+
+ memo = {}
+ x = [(1, 2)]
+ y = copy.deepcopy(x, memo)
+ self.assertEqual(y, x)
+ # Tuples with immutable contents are immutable for deepcopy.
+ self.assertEqual(len(memo), 2)
def test_deepcopy_inst_vanilla(self):
class C:
@@ -334,7 +371,7 @@ class TestCopy(unittest.TestCase):
x = C([42])
y = copy.deepcopy(x)
self.assertEqual(y, x)
- self.assertTrue(y.foo is not x.foo)
+ self.assertIsNot(y.foo, x.foo)
def test_deepcopy_inst_deepcopy(self):
class C:
@@ -347,8 +384,8 @@ class TestCopy(unittest.TestCase):
x = C([42])
y = copy.deepcopy(x)
self.assertEqual(y, x)
- self.assertTrue(y is not x)
- self.assertTrue(y.foo is not x.foo)
+ self.assertIsNot(y, x)
+ self.assertIsNot(y.foo, x.foo)
def test_deepcopy_inst_getinitargs(self):
class C:
@@ -361,8 +398,8 @@ class TestCopy(unittest.TestCase):
x = C([42])
y = copy.deepcopy(x)
self.assertEqual(y, x)
- self.assertTrue(y is not x)
- self.assertTrue(y.foo is not x.foo)
+ self.assertIsNot(y, x)
+ self.assertIsNot(y.foo, x.foo)
def test_deepcopy_inst_getstate(self):
class C:
@@ -375,8 +412,8 @@ class TestCopy(unittest.TestCase):
x = C([42])
y = copy.deepcopy(x)
self.assertEqual(y, x)
- self.assertTrue(y is not x)
- self.assertTrue(y.foo is not x.foo)
+ self.assertIsNot(y, x)
+ self.assertIsNot(y.foo, x.foo)
def test_deepcopy_inst_setstate(self):
class C:
@@ -389,8 +426,8 @@ class TestCopy(unittest.TestCase):
x = C([42])
y = copy.deepcopy(x)
self.assertEqual(y, x)
- self.assertTrue(y is not x)
- self.assertTrue(y.foo is not x.foo)
+ self.assertIsNot(y, x)
+ self.assertIsNot(y.foo, x.foo)
def test_deepcopy_inst_getstate_setstate(self):
class C:
@@ -405,8 +442,8 @@ class TestCopy(unittest.TestCase):
x = C([42])
y = copy.deepcopy(x)
self.assertEqual(y, x)
- self.assertTrue(y is not x)
- self.assertTrue(y.foo is not x.foo)
+ self.assertIsNot(y, x)
+ self.assertIsNot(y.foo, x.foo)
def test_deepcopy_reflexive_inst(self):
class C:
@@ -414,8 +451,8 @@ class TestCopy(unittest.TestCase):
x = C()
x.foo = x
y = copy.deepcopy(x)
- self.assertTrue(y is not x)
- self.assertTrue(y.foo is y)
+ self.assertIsNot(y, x)
+ self.assertIs(y.foo, y)
# _reconstruct()
@@ -425,9 +462,9 @@ class TestCopy(unittest.TestCase):
return ""
x = C()
y = copy.copy(x)
- self.assertTrue(y is x)
+ self.assertIs(y, x)
y = copy.deepcopy(x)
- self.assertTrue(y is x)
+ self.assertIs(y, x)
def test_reconstruct_nostate(self):
class C(object):
@@ -436,9 +473,9 @@ class TestCopy(unittest.TestCase):
x = C()
x.foo = 42
y = copy.copy(x)
- self.assertTrue(y.__class__ is x.__class__)
+ self.assertIs(y.__class__, x.__class__)
y = copy.deepcopy(x)
- self.assertTrue(y.__class__ is x.__class__)
+ self.assertIs(y.__class__, x.__class__)
def test_reconstruct_state(self):
class C(object):
@@ -452,7 +489,7 @@ class TestCopy(unittest.TestCase):
self.assertEqual(y, x)
y = copy.deepcopy(x)
self.assertEqual(y, x)
- self.assertTrue(y.foo is not x.foo)
+ self.assertIsNot(y.foo, x.foo)
def test_reconstruct_state_setstate(self):
class C(object):
@@ -468,7 +505,7 @@ class TestCopy(unittest.TestCase):
self.assertEqual(y, x)
y = copy.deepcopy(x)
self.assertEqual(y, x)
- self.assertTrue(y.foo is not x.foo)
+ self.assertIsNot(y.foo, x.foo)
def test_reconstruct_reflexive(self):
class C(object):
@@ -476,8 +513,8 @@ class TestCopy(unittest.TestCase):
x = C()
x.foo = x
y = copy.deepcopy(x)
- self.assertTrue(y is not x)
- self.assertTrue(y.foo is y)
+ self.assertIsNot(y, x)
+ self.assertIs(y.foo, y)
# Additions for Python 2.3 and pickle protocol 2
@@ -491,12 +528,12 @@ class TestCopy(unittest.TestCase):
x = C([[1, 2], 3])
y = copy.copy(x)
self.assertEqual(x, y)
- self.assertTrue(x is not y)
- self.assertTrue(x[0] is y[0])
+ self.assertIsNot(x, y)
+ self.assertIs(x[0], y[0])
y = copy.deepcopy(x)
self.assertEqual(x, y)
- self.assertTrue(x is not y)
- self.assertTrue(x[0] is not y[0])
+ self.assertIsNot(x, y)
+ self.assertIsNot(x[0], y[0])
def test_reduce_5tuple(self):
class C(dict):
@@ -508,12 +545,12 @@ class TestCopy(unittest.TestCase):
x = C([("foo", [1, 2]), ("bar", 3)])
y = copy.copy(x)
self.assertEqual(x, y)
- self.assertTrue(x is not y)
- self.assertTrue(x["foo"] is y["foo"])
+ self.assertIsNot(x, y)
+ self.assertIs(x["foo"], y["foo"])
y = copy.deepcopy(x)
self.assertEqual(x, y)
- self.assertTrue(x is not y)
- self.assertTrue(x["foo"] is not y["foo"])
+ self.assertIsNot(x, y)
+ self.assertIsNot(x["foo"], y["foo"])
def test_copy_slots(self):
class C(object):
@@ -521,7 +558,7 @@ class TestCopy(unittest.TestCase):
x = C()
x.foo = [42]
y = copy.copy(x)
- self.assertTrue(x.foo is y.foo)
+ self.assertIs(x.foo, y.foo)
def test_deepcopy_slots(self):
class C(object):
@@ -530,7 +567,7 @@ class TestCopy(unittest.TestCase):
x.foo = [42]
y = copy.deepcopy(x)
self.assertEqual(x.foo, y.foo)
- self.assertTrue(x.foo is not y.foo)
+ self.assertIsNot(x.foo, y.foo)
def test_deepcopy_dict_subclass(self):
class C(dict):
@@ -547,7 +584,7 @@ class TestCopy(unittest.TestCase):
y = copy.deepcopy(x)
self.assertEqual(x, y)
self.assertEqual(x._keys, y._keys)
- self.assertTrue(x is not y)
+ self.assertIsNot(x, y)
x['bar'] = 1
self.assertNotEqual(x, y)
self.assertNotEqual(x._keys, y._keys)
@@ -560,8 +597,8 @@ class TestCopy(unittest.TestCase):
y = copy.copy(x)
self.assertEqual(list(x), list(y))
self.assertEqual(x.foo, y.foo)
- self.assertTrue(x[0] is y[0])
- self.assertTrue(x.foo is y.foo)
+ self.assertIs(x[0], y[0])
+ self.assertIs(x.foo, y.foo)
def test_deepcopy_list_subclass(self):
class C(list):
@@ -571,8 +608,8 @@ class TestCopy(unittest.TestCase):
y = copy.deepcopy(x)
self.assertEqual(list(x), list(y))
self.assertEqual(x.foo, y.foo)
- self.assertTrue(x[0] is not y[0])
- self.assertTrue(x.foo is not y.foo)
+ self.assertIsNot(x[0], y[0])
+ self.assertIsNot(x.foo, y.foo)
def test_copy_tuple_subclass(self):
class C(tuple):
@@ -589,8 +626,8 @@ class TestCopy(unittest.TestCase):
self.assertEqual(tuple(x), ([1, 2], 3))
y = copy.deepcopy(x)
self.assertEqual(tuple(y), ([1, 2], 3))
- self.assertTrue(x is not y)
- self.assertTrue(x[0] is not y[0])
+ self.assertIsNot(x, y)
+ self.assertIsNot(x[0], y[0])
def test_getstate_exc(self):
class EvilState(object):
@@ -618,10 +655,10 @@ class TestCopy(unittest.TestCase):
obj = C()
x = weakref.ref(obj)
y = _copy(x)
- self.assertTrue(y is x)
+ self.assertIs(y, x)
del obj
y = _copy(x)
- self.assertTrue(y is x)
+ self.assertIs(y, x)
def test_copy_weakref(self):
self._check_weakref(copy.copy)
@@ -637,7 +674,7 @@ class TestCopy(unittest.TestCase):
u[a] = b
u[c] = d
v = copy.copy(u)
- self.assertFalse(v is u)
+ self.assertIsNot(v, u)
self.assertEqual(v, u)
self.assertEqual(v[a], b)
self.assertEqual(v[c], d)
@@ -667,8 +704,8 @@ class TestCopy(unittest.TestCase):
v = copy.deepcopy(u)
self.assertNotEqual(v, u)
self.assertEqual(len(v), 2)
- self.assertFalse(v[a] is b)
- self.assertFalse(v[c] is d)
+ self.assertIsNot(v[a], b)
+ self.assertIsNot(v[c], d)
self.assertEqual(v[a].i, b.i)
self.assertEqual(v[c].i, d.i)
del c
@@ -687,12 +724,12 @@ class TestCopy(unittest.TestCase):
self.assertNotEqual(v, u)
self.assertEqual(len(v), 2)
(x, y), (z, t) = sorted(v.items(), key=lambda pair: pair[0].i)
- self.assertFalse(x is a)
+ self.assertIsNot(x, a)
self.assertEqual(x.i, a.i)
- self.assertTrue(y is b)
- self.assertFalse(z is c)
+ self.assertIs(y, b)
+ self.assertIsNot(z, c)
self.assertEqual(z.i, c.i)
- self.assertTrue(t is d)
+ self.assertIs(t, d)
del x, y, z, t
del d
self.assertEqual(len(v), 1)
@@ -705,7 +742,7 @@ class TestCopy(unittest.TestCase):
f.b = f.m
g = copy.deepcopy(f)
self.assertEqual(g.m, g.b)
- self.assertTrue(g.b.__self__ is g)
+ self.assertIs(g.b.__self__, g)
g.b()
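A small sketch of the deepcopy memo behaviour the new tests pin down: immutable inputs, including tuples that contain only immutables, are returned unchanged rather than copied:

import copy

x = ((1, 2), 3)
print(copy.deepcopy(x) is x)     # True -- no copy, no memo entry needed
y = [[1, 2], 3]
z = copy.deepcopy(y)
print(z is y, z[0] is y[0])      # False False -- mutables are duplicated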
diff --git a/Lib/test/test_cprofile.py b/Lib/test/test_cprofile.py
index ae17c2b..5676668 100644
--- a/Lib/test/test_cprofile.py
+++ b/Lib/test/test_cprofile.py
@@ -18,16 +18,19 @@ class CProfileTest(ProfileTest):
def test_bad_counter_during_dealloc(self):
import _lsprof
# Must use a file as StringIO doesn't trigger the bug.
- with open(TESTFN, 'w') as file:
- sys.stderr = file
- try:
- obj = _lsprof.Profiler(lambda: int)
- obj.enable()
- obj = _lsprof.Profiler(1)
- obj.disable()
- finally:
- sys.stderr = sys.__stderr__
- unlink(TESTFN)
+ orig_stderr = sys.stderr
+ try:
+ with open(TESTFN, 'w') as file:
+ sys.stderr = file
+ try:
+ obj = _lsprof.Profiler(lambda: int)
+ obj.enable()
+ obj = _lsprof.Profiler(1)
+ obj.disable()
+ finally:
+ sys.stderr = orig_stderr
+ finally:
+ unlink(TESTFN)
def test_main():
diff --git a/Lib/test/test_crashers.py b/Lib/test/test_crashers.py
new file mode 100644
index 0000000..336ccbe
--- /dev/null
+++ b/Lib/test/test_crashers.py
@@ -0,0 +1,38 @@
+# Tests that the crashers in the Lib/test/crashers directory actually
+# do crash the interpreter as expected
+#
+# If a crasher is fixed, it should be moved elsewhere in the test suite to
+# ensure it continues to work correctly.
+
+import unittest
+import glob
+import os.path
+import test.support
+from test.script_helper import assert_python_failure
+
+CRASHER_DIR = os.path.join(os.path.dirname(__file__), "crashers")
+CRASHER_FILES = os.path.join(CRASHER_DIR, "*.py")
+
+infinite_loops = ["infinite_loop_re.py", "nasty_eq_vs_dict.py"]
+
+class CrasherTest(unittest.TestCase):
+
+ @unittest.skip("these tests are too fragile")
+ @test.support.cpython_only
+ def test_crashers_crash(self):
+ for fname in glob.glob(CRASHER_FILES):
+ if os.path.basename(fname) in infinite_loops:
+ continue
+ # Some "crashers" only trigger an exception rather than a
+ # segfault. Consider that an acceptable outcome.
+ if test.support.verbose:
+ print("Checking crasher:", fname)
+ assert_python_failure(fname)
+
+
+def test_main():
+ test.support.run_unittest(CrasherTest)
+ test.support.reap_children()
+
+if __name__ == "__main__":
+ test_main()
diff --git a/Lib/test/test_crypt.py b/Lib/test/test_crypt.py
index 2adb28d..dc107d8 100644
--- a/Lib/test/test_crypt.py
+++ b/Lib/test/test_crypt.py
@@ -10,6 +10,25 @@ class CryptTestCase(unittest.TestCase):
if support.verbose:
print('Test encryption: ', c)
+ def test_salt(self):
+ self.assertEqual(len(crypt._saltchars), 64)
+ for method in crypt.methods:
+ salt = crypt.mksalt(method)
+ self.assertEqual(len(salt),
+ method.salt_chars + (3 if method.ident else 0))
+
+ def test_saltedcrypt(self):
+ for method in crypt.methods:
+ pw = crypt.crypt('assword', method)
+ self.assertEqual(len(pw), method.total_size)
+ pw = crypt.crypt('assword', crypt.mksalt(method))
+ self.assertEqual(len(pw), method.total_size)
+
+ def test_methods(self):
+        # Guarantee that METHOD_CRYPT is the last method in crypt.methods.
+ self.assertTrue(len(crypt.methods) >= 1)
+ self.assertEqual(crypt.METHOD_CRYPT, crypt.methods[-1])
+
def test_main():
support.run_unittest(CryptTestCase)
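
The salt-length arithmetic being tested: a modular-crypt salt is an optional ident prefix such as '$6$' (three characters) followed by method.salt_chars characters drawn from crypt._saltchars. A quick sketch, assuming a Unix build where the crypt module is available:

    import crypt

    salt = crypt.mksalt(crypt.METHOD_SHA512)
    # '$' + ident + '$' accounts for the extra 3 characters the test adds
    # for methods with an ident; METHOD_CRYPT has no prefix at all.
    assert salt.startswith('$6$')
    assert len(salt) == crypt.METHOD_SHA512.salt_chars + 3
    assert len(crypt.mksalt(crypt.METHOD_CRYPT)) == crypt.METHOD_CRYPT.salt_chars
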
diff --git a/Lib/test/test_curses.py b/Lib/test/test_curses.py
index 5812147..21ac608 100644
--- a/Lib/test/test_curses.py
+++ b/Lib/test/test_curses.py
@@ -264,11 +264,55 @@ def test_issue6243(stdscr):
curses.ungetch(1025)
stdscr.getkey()
+def test_unget_wch(stdscr):
+ if not hasattr(curses, 'unget_wch'):
+ return
+ import locale
+ encoding = locale.getpreferredencoding()
+ for ch in ('a', '\xe9', '\u20ac', '\U0010FFFF'):
+ try:
+ ch.encode(encoding)
+ except UnicodeEncodeError:
+ continue
+ try:
+ curses.unget_wch(ch)
+ except Exception as err:
+ raise Exception("unget_wch(%a) failed with locale encoding %s: %s"
+ % (ch, encoding, err))
+ read = stdscr.get_wch()
+ read = chr(read)
+ if read != ch:
+ raise AssertionError("%r != %r" % (read, ch))
+
+ code = ord(ch)
+ curses.unget_wch(code)
+ read = stdscr.get_wch()
+ if read != code:
+ raise AssertionError("%r != %r" % (read, code))
+
def test_issue10570():
b = curses.tparm(curses.tigetstr("cup"), 5, 3)
assert type(b) is bytes
curses.putp(b)
+def test_encoding(stdscr):
+ import codecs
+ encoding = stdscr.encoding
+ codecs.lookup(encoding)
+ try:
+ stdscr.encoding = 10
+ except TypeError:
+ pass
+ else:
+ raise AssertionError("TypeError not raised")
+ stdscr.encoding = encoding
+ try:
+ del stdscr.encoding
+ except TypeError:
+ pass
+ else:
+ raise AssertionError("TypeError not raised")
+
def main(stdscr):
curses.savetty()
try:
@@ -277,16 +321,18 @@ def main(stdscr):
test_userptr_without_set(stdscr)
test_resize_term(stdscr)
test_issue6243(stdscr)
+ test_unget_wch(stdscr)
test_issue10570()
+ test_encoding(stdscr)
finally:
curses.resetty()
def test_main():
- if not sys.stdout.isatty():
- raise unittest.SkipTest("sys.stdout is not a tty")
+ if not sys.__stdout__.isatty():
+ raise unittest.SkipTest("sys.__stdout__ is not a tty")
# testing setupterm() inside initscr/endwin
# causes terminal breakage
- curses.setupterm(fd=sys.stdout.fileno())
+ curses.setupterm(fd=sys.__stdout__.fileno())
try:
stdscr = curses.initscr()
main(stdscr)
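
The sys.stdout to sys.__stdout__ change matters because regrtest and similar runners replace sys.stdout with a capture object whose isatty() is False, which would skip the test even on a real terminal. A sketch of the distinction:

    import io, sys

    saved = sys.stdout
    sys.stdout = io.StringIO()       # what a capturing test runner does
    assert not sys.stdout.isatty()   # the in-memory replacement is no tty
    # sys.__stdout__ still names the stream the process started with, so
    # its isatty()/fileno() answer for the real terminal.
    assert sys.__stdout__ is not sys.stdout
    sys.stdout = saved
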
diff --git a/Lib/test/test_dbm.py b/Lib/test/test_dbm.py
index 26d4c14..02df7e3 100644
--- a/Lib/test/test_dbm.py
+++ b/Lib/test/test_dbm.py
@@ -71,8 +71,8 @@ class AnyDBMTestCase(unittest.TestCase):
f.close()
def test_anydbm_creation_n_file_exists_with_invalid_contents(self):
- with open(_fname, "w") as w:
- pass # create an empty file
+ # create an empty file
+ test.support.create_empty_file(_fname)
f = dbm.open(_fname, 'n')
self.addCleanup(f.close)
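
create_empty_file replaces the open/close dance with a test.support helper; its implementation is essentially the following (sketched from memory, see Lib/test/support.py for the real one):

    import os

    def create_empty_file(filename):
        # Create the file if missing, truncate it if present, and close
        # the descriptor immediately -- no Python file object needed.
        fd = os.open(filename, os.O_WRONLY | os.O_CREAT | os.O_TRUNC)
        os.close(fd)
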
diff --git a/Lib/test/test_decimal.py b/Lib/test/test_decimal.py
index e46cd91..014e9c7 100644
--- a/Lib/test/test_decimal.py
+++ b/Lib/test/test_decimal.py
@@ -1834,18 +1834,9 @@ class ContextAPItests(unittest.TestCase):
# only, the attribute should be gettable/settable via both
# `clamp` and `_clamp`; in Python 3.3, `_clamp` should be
# removed.
- c = Context(clamp = 0)
- self.assertEqual(c.clamp, 0)
-
- with check_warnings(("", DeprecationWarning)):
- c._clamp = 1
- self.assertEqual(c.clamp, 1)
- with check_warnings(("", DeprecationWarning)):
- self.assertEqual(c._clamp, 1)
- c.clamp = 0
- self.assertEqual(c.clamp, 0)
- with check_warnings(("", DeprecationWarning)):
- self.assertEqual(c._clamp, 0)
+ c = Context()
+ with self.assertRaises(AttributeError):
+ clamp_value = c._clamp
def test_abs(self):
c = Context()
@@ -1970,6 +1961,17 @@ class ContextAPItests(unittest.TestCase):
self.assertRaises(TypeError, c.fma, 2, '3', 4)
self.assertRaises(TypeError, c.fma, 2, 3, '4')
+ # Issue 12079 for Context.fma ...
+ self.assertRaises(TypeError, c.fma,
+ Decimal('Infinity'), Decimal(0), "not a decimal")
+ self.assertRaises(TypeError, c.fma,
+ Decimal(1), Decimal('snan'), 1.222)
+ # ... and for Decimal.fma.
+ self.assertRaises(TypeError, Decimal('Infinity').fma,
+ Decimal(0), "not a decimal")
+ self.assertRaises(TypeError, Decimal(1).fma,
+ Decimal('snan'), 1.222)
+
def test_is_finite(self):
c = Context()
d = c.is_finite(Decimal(10))
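
The new Issue 12079 checks pin down fma's argument handling: int operands convert exactly and are accepted, while float and str third arguments are rejected with TypeError rather than silently coerced. For example:

    from decimal import Decimal, Context

    c = Context()
    # fma computes x*y + z with a single rounding; ints convert exactly.
    assert c.fma(Decimal(2), Decimal(3), 4) == Decimal(10)
    for bad_third in ('not a decimal', 1.222):
        try:
            Decimal(2).fma(Decimal(3), bad_third)
        except TypeError:
            pass
        else:
            raise AssertionError('fma accepted %r' % (bad_third,))
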
diff --git a/Lib/test/test_descr.py b/Lib/test/test_descr.py
index 141d791..bf82a88 100644
--- a/Lib/test/test_descr.py
+++ b/Lib/test/test_descr.py
@@ -654,19 +654,19 @@ class ClassPropertiesAndMethods(unittest.TestCase):
class A(metaclass=AMeta):
pass
self.assertEqual(['AMeta'], new_calls)
- new_calls[:] = []
+ new_calls.clear()
class B(metaclass=BMeta):
pass
# BMeta.__new__ calls AMeta.__new__ with super:
self.assertEqual(['BMeta', 'AMeta'], new_calls)
- new_calls[:] = []
+ new_calls.clear()
class C(A, B):
pass
# The most derived metaclass is BMeta:
self.assertEqual(['BMeta', 'AMeta'], new_calls)
- new_calls[:] = []
+ new_calls.clear()
# BMeta.__prepare__ should've been called:
self.assertIn('BMeta_was_here', C.__dict__)
@@ -674,20 +674,20 @@ class ClassPropertiesAndMethods(unittest.TestCase):
class C2(B, A):
pass
self.assertEqual(['BMeta', 'AMeta'], new_calls)
- new_calls[:] = []
+ new_calls.clear()
self.assertIn('BMeta_was_here', C2.__dict__)
# Check correct metaclass calculation when a metaclass is declared:
class D(C, metaclass=type):
pass
self.assertEqual(['BMeta', 'AMeta'], new_calls)
- new_calls[:] = []
+ new_calls.clear()
self.assertIn('BMeta_was_here', D.__dict__)
class E(C, metaclass=AMeta):
pass
self.assertEqual(['BMeta', 'AMeta'], new_calls)
- new_calls[:] = []
+ new_calls.clear()
self.assertIn('BMeta_was_here', E.__dict__)
# Special case: the given metaclass isn't a class,
@@ -729,33 +729,33 @@ class ClassPropertiesAndMethods(unittest.TestCase):
pass
self.assertIs(ANotMeta, type(A))
self.assertEqual(['ANotMeta'], prepare_calls)
- prepare_calls[:] = []
+ prepare_calls.clear()
self.assertEqual(['ANotMeta'], new_calls)
- new_calls[:] = []
+ new_calls.clear()
class B(metaclass=BNotMeta):
pass
self.assertIs(BNotMeta, type(B))
self.assertEqual(['BNotMeta', 'ANotMeta'], prepare_calls)
- prepare_calls[:] = []
+ prepare_calls.clear()
self.assertEqual(['BNotMeta', 'ANotMeta'], new_calls)
- new_calls[:] = []
+ new_calls.clear()
class C(A, B):
pass
self.assertIs(BNotMeta, type(C))
self.assertEqual(['BNotMeta', 'ANotMeta'], new_calls)
- new_calls[:] = []
+ new_calls.clear()
self.assertEqual(['BNotMeta', 'ANotMeta'], prepare_calls)
- prepare_calls[:] = []
+ prepare_calls.clear()
class C2(B, A):
pass
self.assertIs(BNotMeta, type(C2))
self.assertEqual(['BNotMeta', 'ANotMeta'], new_calls)
- new_calls[:] = []
+ new_calls.clear()
self.assertEqual(['BNotMeta', 'ANotMeta'], prepare_calls)
- prepare_calls[:] = []
+ prepare_calls.clear()
# This is a TypeError, because of a metaclass conflict:
# BNotMeta is neither a subclass, nor a superclass of type
@@ -767,25 +767,25 @@ class ClassPropertiesAndMethods(unittest.TestCase):
pass
self.assertIs(BNotMeta, type(E))
self.assertEqual(['BNotMeta', 'ANotMeta'], new_calls)
- new_calls[:] = []
+ new_calls.clear()
self.assertEqual(['BNotMeta', 'ANotMeta'], prepare_calls)
- prepare_calls[:] = []
+ prepare_calls.clear()
class F(object(), C):
pass
self.assertIs(BNotMeta, type(F))
self.assertEqual(['BNotMeta', 'ANotMeta'], new_calls)
- new_calls[:] = []
+ new_calls.clear()
self.assertEqual(['BNotMeta', 'ANotMeta'], prepare_calls)
- prepare_calls[:] = []
+ prepare_calls.clear()
class F2(C, object()):
pass
self.assertIs(BNotMeta, type(F2))
self.assertEqual(['BNotMeta', 'ANotMeta'], new_calls)
- new_calls[:] = []
+ new_calls.clear()
self.assertEqual(['BNotMeta', 'ANotMeta'], prepare_calls)
- prepare_calls[:] = []
+ prepare_calls.clear()
# TypeError: BNotMeta is neither a
# subclass, nor a superclass of int
@@ -1444,6 +1444,14 @@ order (MRO) for bases """
else:
self.fail("classmethod shouldn't accept keyword args")
+ cm = classmethod(f)
+ self.assertEqual(cm.__dict__, {})
+ cm.x = 42
+ self.assertEqual(cm.x, 42)
+ self.assertEqual(cm.__dict__, {"x" : 42})
+ del cm.x
+ self.assertFalse(hasattr(cm, "x"))
+
@support.impl_detail("the module 'xxsubtype' is internal")
def test_classmethods_in_c(self):
# Testing C-based class methods...
@@ -1475,6 +1483,13 @@ order (MRO) for bases """
self.assertEqual(d.goo(1), (1,))
self.assertEqual(d.foo(1), (d, 1))
self.assertEqual(D.foo(d, 1), (d, 1))
+ sm = staticmethod(None)
+ self.assertEqual(sm.__dict__, {})
+ sm.x = 42
+ self.assertEqual(sm.x, 42)
+ self.assertEqual(sm.__dict__, {"x" : 42})
+ del sm.x
+ self.assertFalse(hasattr(sm, "x"))
@support.impl_detail("the module 'xxsubtype' is internal")
def test_staticmethods_in_c(self):
@@ -1800,12 +1815,7 @@ order (MRO) for bases """
for attr, obj in env.items():
setattr(X, attr, obj)
setattr(X, name, ErrDescr())
- try:
- runner(X())
- except MyException:
- pass
- else:
- self.fail("{0!r} didn't raise".format(name))
+ self.assertRaises(MyException, runner, X())
def test_specials(self):
# Testing special operators...
@@ -2242,9 +2252,6 @@ order (MRO) for bases """
# Two essentially featureless objects, just inheriting stuff from
# object.
self.assertEqual(dir(NotImplemented), dir(Ellipsis))
- if support.check_impl_detail():
- # None differs in PyPy: it has a __nonzero__
- self.assertEqual(dir(None), dir(Ellipsis))
# Nasty test case for proxied objects
class Wrapper(object):
@@ -4432,6 +4439,54 @@ order (MRO) for bases """
foo = Foo()
str(foo)
+ def test_slot_shadows_class_variable(self):
+ with self.assertRaises(ValueError) as cm:
+ class X:
+ __slots__ = ["foo"]
+ foo = None
+ m = str(cm.exception)
+ self.assertEqual("'foo' in __slots__ conflicts with class variable", m)
+
+ def test_set_doc(self):
+ class X:
+ "elephant"
+ X.__doc__ = "banana"
+ self.assertEqual(X.__doc__, "banana")
+ with self.assertRaises(TypeError) as cm:
+ type(list).__dict__["__doc__"].__set__(list, "blah")
+ self.assertIn("can't set list.__doc__", str(cm.exception))
+ with self.assertRaises(TypeError) as cm:
+ type(X).__dict__["__doc__"].__delete__(X)
+ self.assertIn("can't delete X.__doc__", str(cm.exception))
+ self.assertEqual(X.__doc__, "banana")
+
+ def test_qualname(self):
+ descriptors = [str.lower, complex.real, float.real, int.__add__]
+ types = ['method', 'member', 'getset', 'wrapper']
+
+ # make sure we have an example of each type of descriptor
+ for d, n in zip(descriptors, types):
+ self.assertEqual(type(d).__name__, n + '_descriptor')
+
+ for d in descriptors:
+ qualname = d.__objclass__.__qualname__ + '.' + d.__name__
+ self.assertEqual(d.__qualname__, qualname)
+
+ self.assertEqual(str.lower.__qualname__, 'str.lower')
+ self.assertEqual(complex.real.__qualname__, 'complex.real')
+ self.assertEqual(float.real.__qualname__, 'float.real')
+ self.assertEqual(int.__add__.__qualname__, 'int.__add__')
+
+ def test_qualname_dict(self):
+ ns = {'__qualname__': 'some.name'}
+ tp = type('Foo', (), ns)
+ self.assertEqual(tp.__qualname__, 'some.name')
+ self.assertEqual(tp.__dict__['__qualname__'], 'some.name')
+ self.assertEqual(ns, {'__qualname__': 'some.name'})
+
+ ns = {'__qualname__': 1}
+ self.assertRaises(TypeError, type, 'Foo', (), ns)
+
def test_cycle_through_dict(self):
# See bug #1469629
class X(dict):
@@ -4447,6 +4502,7 @@ order (MRO) for bases """
for o in gc.get_objects():
self.assertIsNot(type(o), X)
+
class DictProxyTests(unittest.TestCase):
def setUp(self):
class C(object):
@@ -4454,6 +4510,8 @@ class DictProxyTests(unittest.TestCase):
pass
self.C = C
+ @unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(),
+ 'trace function introduces __local__')
def test_iter_keys(self):
# Testing dict-proxy keys...
it = self.C.__dict__.keys()
@@ -4461,15 +4519,19 @@ class DictProxyTests(unittest.TestCase):
keys = list(it)
keys.sort()
self.assertEqual(keys, ['__dict__', '__doc__', '__module__',
- '__weakref__', 'meth'])
+ '__qualname__', '__weakref__', 'meth'])
+ @unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(),
+ 'trace function introduces __local__')
def test_iter_values(self):
# Testing dict-proxy values...
it = self.C.__dict__.values()
self.assertNotIsInstance(it, list)
values = list(it)
- self.assertEqual(len(values), 5)
+ self.assertEqual(len(values), 6)
+ @unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(),
+ 'trace function introduces __local__')
def test_iter_items(self):
# Testing dict-proxy iteritems...
it = self.C.__dict__.items()
@@ -4477,7 +4539,7 @@ class DictProxyTests(unittest.TestCase):
keys = [item[0] for item in it]
keys.sort()
self.assertEqual(keys, ['__dict__', '__doc__', '__module__',
- '__weakref__', 'meth'])
+ '__qualname__', '__weakref__', 'meth'])
def test_dict_type_with_metaclass(self):
# Testing type of __dict__ when metaclass set...
@@ -4491,19 +4553,14 @@ class DictProxyTests(unittest.TestCase):
self.assertEqual(type(C.__dict__), type(B.__dict__))
def test_repr(self):
- # Testing dict_proxy.__repr__
- def sorted_dict_repr(repr_):
- # Given the repr of a dict, sort the keys
- assert repr_.startswith('{')
- assert repr_.endswith('}')
- kvs = repr_[1:-1].split(', ')
- return '{' + ', '.join(sorted(kvs)) + '}'
- dict_ = {k: v for k, v in self.C.__dict__.items()}
- repr_ = repr(self.C.__dict__)
- self.assertTrue(repr_.startswith('dict_proxy('))
- self.assertTrue(repr_.endswith(')'))
- self.assertEqual(sorted_dict_repr(repr_[len('dict_proxy('):-len(')')]),
- sorted_dict_repr('{!r}'.format(dict_)))
+ # Testing dict_proxy.__repr__.
+ # We can't blindly compare with the repr of another dict as ordering
+ # of keys and values is arbitrary and may differ.
+ r = repr(self.C.__dict__)
+ self.assertTrue(r.startswith('dict_proxy('), r)
+ self.assertTrue(r.endswith(')'), r)
+ for k, v in self.C.__dict__.items():
+ self.assertIn('{!r}: {!r}'.format(k, v), r)
class PTypesLongInitTest(unittest.TestCase):
@@ -4528,10 +4585,38 @@ class PTypesLongInitTest(unittest.TestCase):
type.mro(tuple)
+class MiscTests(unittest.TestCase):
+ def test_type_lookup_mro_reference(self):
+ # Issue #14199: _PyType_Lookup() has to keep a strong reference to
+ # the type MRO because it may be modified during the lookup, if
+ # __bases__ is set during the lookup for example.
+ class MyKey(object):
+ def __hash__(self):
+ return hash('mykey')
+
+ def __eq__(self, other):
+ X.__bases__ = (Base2,)
+
+ class Base(object):
+ mykey = 'from Base'
+ mykey2 = 'from Base'
+
+ class Base2(object):
+ mykey = 'from Base2'
+ mykey2 = 'from Base2'
+
+ X = type('X', (Base,), {MyKey(): 5})
+ # mykey is read from Base
+ self.assertEqual(X.mykey, 'from Base')
+ # mykey2 is read from Base2 because MyKey.__eq__ has set __bases__
+ self.assertEqual(X.mykey2, 'from Base2')
+
+
def test_main():
# Run all local test cases, with PTypesLongInitTest first.
support.run_unittest(PTypesLongInitTest, OperatorsTest,
- ClassPropertiesAndMethods, DictProxyTests)
+ ClassPropertiesAndMethods, DictProxyTests,
+ MiscTests)
if __name__ == "__main__":
test_main()
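
The new_calls[:] = [] to new_calls.clear() substitutions are a straight modernization: list.clear(), new in Python 3.3, empties the list in place exactly like the slice assignment, so every alias sees the change. Compare:

    calls = ['BMeta', 'AMeta']
    alias = calls
    calls[:] = []            # old spelling: in place, aliases affected
    assert alias == []
    calls += ['BMeta']
    calls.clear()            # new spelling: identical in-place semantics
    assert alias == [] and calls is alias
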
diff --git a/Lib/test/test_descrtut.py b/Lib/test/test_descrtut.py
index 2db3d33..f495e18 100644
--- a/Lib/test/test_descrtut.py
+++ b/Lib/test/test_descrtut.py
@@ -170,6 +170,7 @@ You can get the information from the list type:
'__contains__',
'__delattr__',
'__delitem__',
+ '__dir__',
'__doc__',
'__eq__',
'__format__',
@@ -199,6 +200,8 @@ You can get the information from the list type:
'__str__',
'__subclasshook__',
'append',
+ 'clear',
+ 'copy',
'count',
'extend',
'index',
diff --git a/Lib/test/test_devpoll.py b/Lib/test/test_devpoll.py
new file mode 100644
index 0000000..bef4e18
--- /dev/null
+++ b/Lib/test/test_devpoll.py
@@ -0,0 +1,94 @@
+# Test case for the select.devpoll() function
+
+# Initial tests are copied as is from "test_poll.py"
+
+import os, select, random, unittest, sys
+from test.support import TESTFN, run_unittest
+
+try:
+ select.devpoll
+except AttributeError:
+ raise unittest.SkipTest("select.devpoll not defined -- skipping test_devpoll")
+
+
+def find_ready_matching(ready, flag):
+ match = []
+ for fd, mode in ready:
+ if mode & flag:
+ match.append(fd)
+ return match
+
+class DevPollTests(unittest.TestCase):
+
+ def test_devpoll1(self):
+ # Basic functional test of poll object
+ # Create a bunch of pipe and test that poll works with them.
+
+ p = select.devpoll()
+
+ NUM_PIPES = 12
+ MSG = b" This is a test."
+ MSG_LEN = len(MSG)
+ readers = []
+ writers = []
+ r2w = {}
+ w2r = {}
+
+ for i in range(NUM_PIPES):
+ rd, wr = os.pipe()
+ p.register(rd)
+ p.modify(rd, select.POLLIN)
+ p.register(wr, select.POLLOUT)
+ readers.append(rd)
+ writers.append(wr)
+ r2w[rd] = wr
+ w2r[wr] = rd
+
+ bufs = []
+
+ while writers:
+ ready = p.poll()
+ ready_writers = find_ready_matching(ready, select.POLLOUT)
+ if not ready_writers:
+ self.fail("no pipes ready for writing")
+ wr = random.choice(ready_writers)
+ os.write(wr, MSG)
+
+ ready = p.poll()
+ ready_readers = find_ready_matching(ready, select.POLLIN)
+ if not ready_readers:
+ self.fail("no pipes ready for reading")
+ self.assertEqual([w2r[wr]], ready_readers)
+ rd = ready_readers[0]
+ buf = os.read(rd, MSG_LEN)
+ self.assertEqual(len(buf), MSG_LEN)
+ bufs.append(buf)
+ os.close(r2w[rd]) ; os.close(rd)
+ p.unregister(r2w[rd])
+ p.unregister(rd)
+ writers.remove(r2w[rd])
+
+ self.assertEqual(bufs, [MSG] * NUM_PIPES)
+
+ def test_timeout_overflow(self):
+ pollster = select.devpoll()
+ w, r = os.pipe()
+ pollster.register(w)
+
+ pollster.poll(-1)
+ self.assertRaises(OverflowError, pollster.poll, -2)
+ self.assertRaises(OverflowError, pollster.poll, -1 << 31)
+ self.assertRaises(OverflowError, pollster.poll, -1 << 64)
+
+ pollster.poll(0)
+ pollster.poll(1)
+ pollster.poll(1 << 30)
+ self.assertRaises(OverflowError, pollster.poll, 1 << 31)
+ self.assertRaises(OverflowError, pollster.poll, 1 << 63)
+ self.assertRaises(OverflowError, pollster.poll, 1 << 64)
+
+def test_main():
+ run_unittest(DevPollTests)
+
+if __name__ == '__main__':
+ test_main()
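
select.devpoll wraps the Solaris /dev/poll device and exists only there, but it deliberately mirrors the select.poll object protocol, so the same register/poll/unregister code runs against either. A portability sketch for POSIX systems (make_pollster is a hypothetical helper):

    import os, select

    def make_pollster():
        # Prefer /dev/poll where the OS provides it; fall back to poll().
        if hasattr(select, 'devpoll'):
            return select.devpoll()
        return select.poll()

    r, w = os.pipe()
    p = make_pollster()
    p.register(w, select.POLLOUT)
    # The write end of an empty pipe is immediately writable.
    assert any(fd == w and ev & select.POLLOUT for fd, ev in p.poll(0))
    p.unregister(w)
    os.close(r)
    os.close(w)
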
diff --git a/Lib/test/test_dict.py b/Lib/test/test_dict.py
index d2740a3..387dd32 100644
--- a/Lib/test/test_dict.py
+++ b/Lib/test/test_dict.py
@@ -379,7 +379,7 @@ class DictTest(unittest.TestCase):
x.fail = True
self.assertRaises(Exc, d.pop, x)
- def test_mutatingiteration(self):
+ def test_mutating_iteration(self):
# changing dict size during iteration
d = {}
d[1] = 1
@@ -387,6 +387,33 @@ class DictTest(unittest.TestCase):
for i in d:
d[i+1] = 1
+ def test_mutating_lookup(self):
+ # changing dict during a lookup
+ class NastyKey:
+ mutate_dict = None
+
+ def __init__(self, value):
+ self.value = value
+
+ def __hash__(self):
+ # hash collision!
+ return 1
+
+ def __eq__(self, other):
+ if NastyKey.mutate_dict:
+ mydict, key = NastyKey.mutate_dict
+ NastyKey.mutate_dict = None
+ del mydict[key]
+ return self.value == other.value
+
+ key1 = NastyKey(1)
+ key2 = NastyKey(2)
+ d = {key1: 1}
+ NastyKey.mutate_dict = (d, key1)
+ with self.assertRaisesRegex(RuntimeError,
+ 'dictionary changed size during lookup'):
+ d[key2] = 2
+
def test_repr(self):
d = {}
self.assertEqual(repr(d), '{}')
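
NastyKey works because dict lookups call __eq__ only after a hash collision: forcing every key into the same bucket guarantees the comparison (and hence the mutation) runs mid-lookup. The mechanics in isolation:

    class LoudKey:
        def __init__(self, name):
            self.name = name
        def __hash__(self):
            return 1                 # force every key into one bucket
        def __eq__(self, other):
            # Runs during the insertion below, in the middle of the lookup.
            print('comparing', self.name, 'with', other.name)
            return self.name == other.name

    d = {LoudKey('a'): 1}
    d[LoudKey('b')] = 2              # probes 'a' first, so __eq__ fires
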
diff --git a/Lib/test/test_dis.py b/Lib/test/test_dis.py
index 5c59eaa..1506fe9 100644
--- a/Lib/test/test_dis.py
+++ b/Lib/test/test_dis.py
@@ -1,11 +1,35 @@
# Minimal tests for dis module
from test.support import run_unittest, captured_stdout
+import difflib
import unittest
import sys
import dis
import io
+class _C:
+ def __init__(self, x):
+ self.x = x == 1
+
+dis_c_instance_method = """\
+ %-4d 0 LOAD_FAST 1 (x)
+ 3 LOAD_CONST 1 (1)
+ 6 COMPARE_OP 2 (==)
+ 9 LOAD_FAST 0 (self)
+ 12 STORE_ATTR 0 (x)
+ 15 LOAD_CONST 0 (None)
+ 18 RETURN_VALUE
+""" % (_C.__init__.__code__.co_firstlineno + 1,)
+
+dis_c_instance_method_bytes = """\
+ 0 LOAD_FAST 1 (1)
+ 3 LOAD_CONST 1 (1)
+ 6 COMPARE_OP 2 (==)
+ 9 LOAD_FAST 0 (0)
+ 12 STORE_ATTR 0 (0)
+ 15 LOAD_CONST 0 (0)
+ 18 RETURN_VALUE
+"""
def _f(a):
print(a)
@@ -23,6 +47,16 @@ dis_f = """\
_f.__code__.co_firstlineno + 2)
+dis_f_co_code = """\
+ 0 LOAD_GLOBAL 0 (0)
+ 3 LOAD_FAST 0 (0)
+ 6 CALL_FUNCTION 1
+ 9 POP_TOP
+ 10 LOAD_CONST 1 (1)
+ 13 RETURN_VALUE
+"""
+
+
def bug708901():
for res in range(1,
10):
@@ -138,18 +172,27 @@ dis_compound_stmt_str = """\
"""
class DisTests(unittest.TestCase):
- def do_disassembly_test(self, func, expected):
+
+ def get_disassembly(self, func, lasti=-1, wrapper=True):
s = io.StringIO()
save_stdout = sys.stdout
sys.stdout = s
- dis.dis(func)
- sys.stdout = save_stdout
- got = s.getvalue()
+ try:
+ if wrapper:
+ dis.dis(func)
+ else:
+ dis.disassemble(func, lasti)
+ finally:
+ sys.stdout = save_stdout
# Trim trailing blanks (if any).
- lines = got.split('\n')
- lines = [line.rstrip() for line in lines]
- expected = expected.split("\n")
- import difflib
+ return [line.rstrip() for line in s.getvalue().splitlines()]
+
+ def get_disassemble_as_string(self, func, lasti=-1):
+ return '\n'.join(self.get_disassembly(func, lasti, False))
+
+ def do_disassembly_test(self, func, expected):
+ lines = self.get_disassembly(func)
+ expected = expected.splitlines()
if expected != lines:
self.fail(
"events did not match expectation:\n" +
@@ -157,7 +200,7 @@ class DisTests(unittest.TestCase):
lines)))
def test_opmap(self):
- self.assertEqual(dis.opmap["STOP_CODE"], 0)
+ self.assertEqual(dis.opmap["NOP"], 9)
self.assertIn(dis.opmap["LOAD_CONST"], dis.hasconst)
self.assertIn(dis.opmap["STORE_NAME"], dis.hasname)
@@ -211,6 +254,44 @@ class DisTests(unittest.TestCase):
self.do_disassembly_test(simple_stmt_str, dis_simple_stmt_str)
self.do_disassembly_test(compound_stmt_str, dis_compound_stmt_str)
+ def test_disassemble_bytes(self):
+ self.do_disassembly_test(_f.__code__.co_code, dis_f_co_code)
+
+ def test_disassemble_method(self):
+ self.do_disassembly_test(_C(1).__init__, dis_c_instance_method)
+
+ def test_disassemble_method_bytes(self):
+ method_bytecode = _C(1).__init__.__code__.co_code
+ self.do_disassembly_test(method_bytecode, dis_c_instance_method_bytes)
+
+ def test_dis_none(self):
+ try:
+ del sys.last_traceback
+ except AttributeError:
+ pass
+ self.assertRaises(RuntimeError, dis.dis, None)
+
+ def test_dis_object(self):
+ self.assertRaises(TypeError, dis.dis, object())
+
+ def test_dis_traceback(self):
+ try:
+ del sys.last_traceback
+ except AttributeError:
+ pass
+
+ try:
+ 1/0
+ except Exception as e:
+ tb = e.__traceback__
+ sys.last_traceback = tb
+
+ tb_dis = self.get_disassemble_as_string(tb.tb_frame.f_code, tb.tb_lasti)
+ self.do_disassembly_test(None, tb_dis)

+
code_info_code_info = """\
Name: code_info
Filename: (.*)
@@ -258,6 +339,7 @@ Flags: OPTIMIZED, NEWLOCALS, VARARGS, VARKEYWORDS, GENERATOR
Constants:
0: None
1: <code object f at (.*), file "(.*)", line (.*)>
+ 2: 'tricky.<locals>.f'
Variable names:
0: x
1: y
@@ -364,6 +446,13 @@ class CodeInfoTests(unittest.TestCase):
dis.show_code(x)
self.assertRegex(output.getvalue(), expected+"\n")
+ def test_code_info_object(self):
+ self.assertRaises(TypeError, dis.code_info, object())
+
+ def test_pretty_flags_no_flags(self):
+ self.assertEqual(dis.pretty_flags(0), '0x0')
+
+
def test_main():
run_unittest(DisTests, CodeInfoTests)
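
get_disassembly still has to swap sys.stdout because dis.dis at this point writes only to standard output; later releases (3.4+) grew a file= keyword that makes the capture a one-liner, roughly:

    import dis, io

    def disassembly_lines(obj):
        buf = io.StringIO()
        dis.dis(obj, file=buf)       # file= exists on Python 3.4+
        return [line.rstrip() for line in buf.getvalue().splitlines()]

    lines = disassembly_lines(compile('x = 1', '<demo>', 'exec'))
    assert any('LOAD_CONST' in line for line in lines)
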
diff --git a/Lib/test/test_doctest.py b/Lib/test/test_doctest.py
index 5969ce2..cdcd389 100644
--- a/Lib/test/test_doctest.py
+++ b/Lib/test/test_doctest.py
@@ -5,6 +5,7 @@ Test script for doctest.
from test import support
import doctest
import os
+import sys
# NOTE: There are some additional tests relating to interaction with
@@ -432,7 +433,7 @@ We'll simulate a __file__ attr that ends in pyc:
>>> tests = finder.find(sample_func)
>>> print(tests) # doctest: +ELLIPSIS
- [<DocTest sample_func from ...:17 (1 example)>]
+ [<DocTest sample_func from ...:18 (1 example)>]
The exact name depends on how test_doctest was invoked, so allow for
leading path components.
@@ -1745,226 +1746,227 @@ Run the debugger on the docstring, and then restore sys.stdin.
"""
-def test_pdb_set_trace():
- """Using pdb.set_trace from a doctest.
-
- You can use pdb.set_trace from a doctest. To do so, you must
- retrieve the set_trace function from the pdb module at the time
- you use it. The doctest module changes sys.stdout so that it can
- capture program output. It also temporarily replaces pdb.set_trace
- with a version that restores stdout. This is necessary for you to
- see debugger output.
-
- >>> doc = '''
- ... >>> x = 42
- ... >>> raise Exception('clé')
- ... Traceback (most recent call last):
- ... Exception: clé
- ... >>> import pdb; pdb.set_trace()
- ... '''
- >>> parser = doctest.DocTestParser()
- >>> test = parser.get_doctest(doc, {}, "foo-bar@baz", "foo-bar@baz.py", 0)
- >>> runner = doctest.DocTestRunner(verbose=False)
-
- To demonstrate this, we'll create a fake standard input that
- captures our debugger input:
-
- >>> import tempfile
- >>> real_stdin = sys.stdin
- >>> sys.stdin = _FakeInput([
- ... 'print(x)', # print data defined by the example
- ... 'continue', # stop debugging
- ... ''])
-
- >>> try: runner.run(test)
- ... finally: sys.stdin = real_stdin
- --Return--
- > <doctest foo-bar@baz[2]>(1)<module>()->None
- -> import pdb; pdb.set_trace()
- (Pdb) print(x)
- 42
- (Pdb) continue
- TestResults(failed=0, attempted=3)
-
- You can also put pdb.set_trace in a function called from a test:
-
- >>> def calls_set_trace():
- ... y=2
- ... import pdb; pdb.set_trace()
-
- >>> doc = '''
- ... >>> x=1
- ... >>> calls_set_trace()
- ... '''
- >>> test = parser.get_doctest(doc, globals(), "foo-bar@baz", "foo-bar@baz.py", 0)
- >>> real_stdin = sys.stdin
- >>> sys.stdin = _FakeInput([
- ... 'print(y)', # print data defined in the function
- ... 'up', # out of function
- ... 'print(x)', # print data defined by the example
- ... 'continue', # stop debugging
- ... ''])
-
- >>> try:
- ... runner.run(test)
- ... finally:
- ... sys.stdin = real_stdin
- --Return--
- > <doctest test.test_doctest.test_pdb_set_trace[8]>(3)calls_set_trace()->None
- -> import pdb; pdb.set_trace()
- (Pdb) print(y)
- 2
- (Pdb) up
- > <doctest foo-bar@baz[1]>(1)<module>()
- -> calls_set_trace()
- (Pdb) print(x)
- 1
- (Pdb) continue
- TestResults(failed=0, attempted=2)
-
- During interactive debugging, source code is shown, even for
- doctest examples:
-
- >>> doc = '''
- ... >>> def f(x):
- ... ... g(x*2)
- ... >>> def g(x):
- ... ... print(x+3)
- ... ... import pdb; pdb.set_trace()
- ... >>> f(3)
- ... '''
- >>> test = parser.get_doctest(doc, globals(), "foo-bar@baz", "foo-bar@baz.py", 0)
- >>> real_stdin = sys.stdin
- >>> sys.stdin = _FakeInput([
- ... 'list', # list source from example 2
- ... 'next', # return from g()
- ... 'list', # list source from example 1
- ... 'next', # return from f()
- ... 'list', # list source from example 3
- ... 'continue', # stop debugging
- ... ''])
- >>> try: runner.run(test)
- ... finally: sys.stdin = real_stdin
- ... # doctest: +NORMALIZE_WHITESPACE
- --Return--
- > <doctest foo-bar@baz[1]>(3)g()->None
- -> import pdb; pdb.set_trace()
- (Pdb) list
- 1 def g(x):
- 2 print(x+3)
- 3 -> import pdb; pdb.set_trace()
- [EOF]
- (Pdb) next
- --Return--
- > <doctest foo-bar@baz[0]>(2)f()->None
- -> g(x*2)
- (Pdb) list
- 1 def f(x):
- 2 -> g(x*2)
- [EOF]
- (Pdb) next
- --Return--
- > <doctest foo-bar@baz[2]>(1)<module>()->None
- -> f(3)
- (Pdb) list
- 1 -> f(3)
- [EOF]
- (Pdb) continue
- **********************************************************************
- File "foo-bar@baz.py", line 7, in foo-bar@baz
- Failed example:
- f(3)
- Expected nothing
- Got:
- 9
- TestResults(failed=1, attempted=3)
- """
-
-def test_pdb_set_trace_nested():
- """This illustrates more-demanding use of set_trace with nested functions.
-
- >>> class C(object):
- ... def calls_set_trace(self):
- ... y = 1
- ... import pdb; pdb.set_trace()
- ... self.f1()
- ... y = 2
- ... def f1(self):
- ... x = 1
- ... self.f2()
- ... x = 2
- ... def f2(self):
- ... z = 1
- ... z = 2
-
- >>> calls_set_trace = C().calls_set_trace
-
- >>> doc = '''
- ... >>> a = 1
- ... >>> calls_set_trace()
- ... '''
- >>> parser = doctest.DocTestParser()
- >>> runner = doctest.DocTestRunner(verbose=False)
- >>> test = parser.get_doctest(doc, globals(), "foo-bar@baz", "foo-bar@baz.py", 0)
- >>> real_stdin = sys.stdin
- >>> sys.stdin = _FakeInput([
- ... 'print(y)', # print data defined in the function
- ... 'step', 'step', 'step', 'step', 'step', 'step', 'print(z)',
- ... 'up', 'print(x)',
- ... 'up', 'print(y)',
- ... 'up', 'print(foo)',
- ... 'continue', # stop debugging
- ... ''])
-
- >>> try:
- ... runner.run(test)
- ... finally:
- ... sys.stdin = real_stdin
- ... # doctest: +REPORT_NDIFF
- > <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(5)calls_set_trace()
- -> self.f1()
- (Pdb) print(y)
- 1
- (Pdb) step
- --Call--
- > <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(7)f1()
- -> def f1(self):
- (Pdb) step
- > <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(8)f1()
- -> x = 1
- (Pdb) step
- > <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(9)f1()
- -> self.f2()
- (Pdb) step
- --Call--
- > <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(11)f2()
- -> def f2(self):
- (Pdb) step
- > <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(12)f2()
- -> z = 1
- (Pdb) step
- > <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(13)f2()
- -> z = 2
- (Pdb) print(z)
- 1
- (Pdb) up
- > <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(9)f1()
- -> self.f2()
- (Pdb) print(x)
- 1
- (Pdb) up
- > <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(5)calls_set_trace()
- -> self.f1()
- (Pdb) print(y)
- 1
- (Pdb) up
- > <doctest foo-bar@baz[1]>(1)<module>()
- -> calls_set_trace()
- (Pdb) print(foo)
- *** NameError: name 'foo' is not defined
- (Pdb) continue
- TestResults(failed=0, attempted=2)
-"""
+if not hasattr(sys, 'gettrace') or not sys.gettrace():
+ def test_pdb_set_trace():
+ """Using pdb.set_trace from a doctest.
+
+ You can use pdb.set_trace from a doctest. To do so, you must
+ retrieve the set_trace function from the pdb module at the time
+ you use it. The doctest module changes sys.stdout so that it can
+ capture program output. It also temporarily replaces pdb.set_trace
+ with a version that restores stdout. This is necessary for you to
+ see debugger output.
+
+ >>> doc = '''
+ ... >>> x = 42
+ ... >>> raise Exception('clé')
+ ... Traceback (most recent call last):
+ ... Exception: clé
+ ... >>> import pdb; pdb.set_trace()
+ ... '''
+ >>> parser = doctest.DocTestParser()
+ >>> test = parser.get_doctest(doc, {}, "foo-bar@baz", "foo-bar@baz.py", 0)
+ >>> runner = doctest.DocTestRunner(verbose=False)
+
+ To demonstrate this, we'll create a fake standard input that
+ captures our debugger input:
+
+ >>> import tempfile
+ >>> real_stdin = sys.stdin
+ >>> sys.stdin = _FakeInput([
+ ... 'print(x)', # print data defined by the example
+ ... 'continue', # stop debugging
+ ... ''])
+
+ >>> try: runner.run(test)
+ ... finally: sys.stdin = real_stdin
+ --Return--
+ > <doctest foo-bar@baz[2]>(1)<module>()->None
+ -> import pdb; pdb.set_trace()
+ (Pdb) print(x)
+ 42
+ (Pdb) continue
+ TestResults(failed=0, attempted=3)
+
+ You can also put pdb.set_trace in a function called from a test:
+
+ >>> def calls_set_trace():
+ ... y=2
+ ... import pdb; pdb.set_trace()
+
+ >>> doc = '''
+ ... >>> x=1
+ ... >>> calls_set_trace()
+ ... '''
+ >>> test = parser.get_doctest(doc, globals(), "foo-bar@baz", "foo-bar@baz.py", 0)
+ >>> real_stdin = sys.stdin
+ >>> sys.stdin = _FakeInput([
+ ... 'print(y)', # print data defined in the function
+ ... 'up', # out of function
+ ... 'print(x)', # print data defined by the example
+ ... 'continue', # stop debugging
+ ... ''])
+
+ >>> try:
+ ... runner.run(test)
+ ... finally:
+ ... sys.stdin = real_stdin
+ --Return--
+ > <doctest test.test_doctest.test_pdb_set_trace[8]>(3)calls_set_trace()->None
+ -> import pdb; pdb.set_trace()
+ (Pdb) print(y)
+ 2
+ (Pdb) up
+ > <doctest foo-bar@baz[1]>(1)<module>()
+ -> calls_set_trace()
+ (Pdb) print(x)
+ 1
+ (Pdb) continue
+ TestResults(failed=0, attempted=2)
+
+ During interactive debugging, source code is shown, even for
+ doctest examples:
+
+ >>> doc = '''
+ ... >>> def f(x):
+ ... ... g(x*2)
+ ... >>> def g(x):
+ ... ... print(x+3)
+ ... ... import pdb; pdb.set_trace()
+ ... >>> f(3)
+ ... '''
+ >>> test = parser.get_doctest(doc, globals(), "foo-bar@baz", "foo-bar@baz.py", 0)
+ >>> real_stdin = sys.stdin
+ >>> sys.stdin = _FakeInput([
+ ... 'list', # list source from example 2
+ ... 'next', # return from g()
+ ... 'list', # list source from example 1
+ ... 'next', # return from f()
+ ... 'list', # list source from example 3
+ ... 'continue', # stop debugging
+ ... ''])
+ >>> try: runner.run(test)
+ ... finally: sys.stdin = real_stdin
+ ... # doctest: +NORMALIZE_WHITESPACE
+ --Return--
+ > <doctest foo-bar@baz[1]>(3)g()->None
+ -> import pdb; pdb.set_trace()
+ (Pdb) list
+ 1 def g(x):
+ 2 print(x+3)
+ 3 -> import pdb; pdb.set_trace()
+ [EOF]
+ (Pdb) next
+ --Return--
+ > <doctest foo-bar@baz[0]>(2)f()->None
+ -> g(x*2)
+ (Pdb) list
+ 1 def f(x):
+ 2 -> g(x*2)
+ [EOF]
+ (Pdb) next
+ --Return--
+ > <doctest foo-bar@baz[2]>(1)<module>()->None
+ -> f(3)
+ (Pdb) list
+ 1 -> f(3)
+ [EOF]
+ (Pdb) continue
+ **********************************************************************
+ File "foo-bar@baz.py", line 7, in foo-bar@baz
+ Failed example:
+ f(3)
+ Expected nothing
+ Got:
+ 9
+ TestResults(failed=1, attempted=3)
+ """
+
+ def test_pdb_set_trace_nested():
+ """This illustrates more-demanding use of set_trace with nested functions.
+
+ >>> class C(object):
+ ... def calls_set_trace(self):
+ ... y = 1
+ ... import pdb; pdb.set_trace()
+ ... self.f1()
+ ... y = 2
+ ... def f1(self):
+ ... x = 1
+ ... self.f2()
+ ... x = 2
+ ... def f2(self):
+ ... z = 1
+ ... z = 2
+
+ >>> calls_set_trace = C().calls_set_trace
+
+ >>> doc = '''
+ ... >>> a = 1
+ ... >>> calls_set_trace()
+ ... '''
+ >>> parser = doctest.DocTestParser()
+ >>> runner = doctest.DocTestRunner(verbose=False)
+ >>> test = parser.get_doctest(doc, globals(), "foo-bar@baz", "foo-bar@baz.py", 0)
+ >>> real_stdin = sys.stdin
+ >>> sys.stdin = _FakeInput([
+ ... 'print(y)', # print data defined in the function
+ ... 'step', 'step', 'step', 'step', 'step', 'step', 'print(z)',
+ ... 'up', 'print(x)',
+ ... 'up', 'print(y)',
+ ... 'up', 'print(foo)',
+ ... 'continue', # stop debugging
+ ... ''])
+
+ >>> try:
+ ... runner.run(test)
+ ... finally:
+ ... sys.stdin = real_stdin
+ ... # doctest: +REPORT_NDIFF
+ > <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(5)calls_set_trace()
+ -> self.f1()
+ (Pdb) print(y)
+ 1
+ (Pdb) step
+ --Call--
+ > <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(7)f1()
+ -> def f1(self):
+ (Pdb) step
+ > <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(8)f1()
+ -> x = 1
+ (Pdb) step
+ > <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(9)f1()
+ -> self.f2()
+ (Pdb) step
+ --Call--
+ > <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(11)f2()
+ -> def f2(self):
+ (Pdb) step
+ > <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(12)f2()
+ -> z = 1
+ (Pdb) step
+ > <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(13)f2()
+ -> z = 2
+ (Pdb) print(z)
+ 1
+ (Pdb) up
+ > <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(9)f1()
+ -> self.f2()
+ (Pdb) print(x)
+ 1
+ (Pdb) up
+ > <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(5)calls_set_trace()
+ -> self.f1()
+ (Pdb) print(y)
+ 1
+ (Pdb) up
+ > <doctest foo-bar@baz[1]>(1)<module>()
+ -> calls_set_trace()
+ (Pdb) print(foo)
+ *** NameError: name 'foo' is not defined
+ (Pdb) continue
+ TestResults(failed=0, attempted=2)
+ """
def test_DocTestSuite():
"""DocTestSuite creates a unittest test suite from a doctest.
diff --git a/Lib/test/test_dummy_thread.py b/Lib/test/test_dummy_thread.py
index c61078d..2fafe1d 100644
--- a/Lib/test/test_dummy_thread.py
+++ b/Lib/test/test_dummy_thread.py
@@ -35,8 +35,8 @@ class LockTests(unittest.TestCase):
"Lock object did not release properly.")
def test_improper_release(self):
- #Make sure release of an unlocked thread raises _thread.error
- self.assertRaises(_thread.error, self.lock.release)
+        #Make sure release of an unlocked lock raises RuntimeError
+ self.assertRaises(RuntimeError, self.lock.release)
def test_cond_acquire_success(self):
#Make sure the conditional acquiring of the lock works.
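
The RuntimeError expectation reflects a Python 3 unification: _thread.error became an alias of RuntimeError (around 3.3), so the old assertion would still pass but the new one names the exception users actually catch. Demonstrably:

    import _thread

    assert _thread.error is RuntimeError   # alias on modern Python 3
    lock = _thread.allocate_lock()
    try:
        lock.release()                     # not held, so it must raise
    except RuntimeError:
        pass
    else:
        raise AssertionError('releasing an unheld lock did not raise')
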
diff --git a/Lib/test/test_email.py b/Lib/test/test_email.py
deleted file mode 100644
index 5eebba5..0000000
--- a/Lib/test/test_email.py
+++ /dev/null
@@ -1,14 +0,0 @@
-# Copyright (C) 2001-2007 Python Software Foundation
-# email package unit tests
-
-# The specific tests now live in Lib/email/test
-from email.test.test_email import suite
-from email.test.test_email_codecs import suite as codecs_suite
-from test import support
-
-def test_main():
- support.run_unittest(suite())
- support.run_unittest(codecs_suite())
-
-if __name__ == '__main__':
- test_main()
diff --git a/Lib/test/test_email/__init__.py b/Lib/test/test_email/__init__.py
new file mode 100644
index 0000000..d72b50e
--- /dev/null
+++ b/Lib/test/test_email/__init__.py
@@ -0,0 +1,55 @@
+import os
+import sys
+import unittest
+import test.support
+import email
+from test.test_email import __file__ as landmark
+
+# Run all tests in package for '-m unittest test.test_email'
+def load_tests(loader, standard_tests, pattern):
+ this_dir = os.path.dirname(__file__)
+ if pattern is None:
+ pattern = "test*"
+ package_tests = loader.discover(start_dir=this_dir, pattern=pattern)
+ standard_tests.addTests(package_tests)
+ return standard_tests
+
+
+# used by regrtest and __main__.
+def test_main():
+ here = os.path.dirname(__file__)
+ # Unittest mucks with the path, so we have to save and restore
+ # it to keep regrtest happy.
+ savepath = sys.path[:]
+ test.support._run_suite(unittest.defaultTestLoader.discover(here))
+ sys.path[:] = savepath
+
+
+# helper code used by a number of test modules.
+
+def openfile(filename, *args, **kws):
+ path = os.path.join(os.path.dirname(landmark), 'data', filename)
+ return open(path, *args, **kws)
+
+
+# Base test class
+class TestEmailBase(unittest.TestCase):
+
+ maxDiff = None
+
+ def __init__(self, *args, **kw):
+ super().__init__(*args, **kw)
+ self.addTypeEqualityFunc(bytes, self.assertBytesEqual)
+
+ ndiffAssertEqual = unittest.TestCase.assertEqual
+
+ def _msgobj(self, filename):
+ with openfile(filename) as fp:
+ return email.message_from_file(fp)
+
+ def _bytes_repr(self, b):
+ return [repr(x) for x in b.splitlines(keepends=True)]
+
+ def assertBytesEqual(self, first, second, msg):
+ """Our byte strings are really encoded strings; improve diff output"""
+ self.assertEqual(self._bytes_repr(first), self._bytes_repr(second))
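
addTypeEqualityFunc is what makes the bytes-diff trick work: once a comparator is registered for a type, a plain assertEqual routes through it whenever both operands have exactly that type. A self-contained sketch of the same hook:

    import unittest

    class BytesDiffExample(unittest.TestCase):
        def setUp(self):
            # From now on, assertEqual(b1, b2) calls assertBytesEqual.
            self.addTypeEqualityFunc(bytes, self.assertBytesEqual)

        def assertBytesEqual(self, first, second, msg=None):
            # Compare per-line reprs so a failure shows a readable diff.
            self.assertEqual([repr(l) for l in first.splitlines(True)],
                             [repr(l) for l in second.splitlines(True)])

        def test_equal(self):
            self.assertEqual(b'a\nb\n', b'a\nb\n')

    if __name__ == '__main__':
        unittest.main()
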
diff --git a/Lib/test/test_email/__main__.py b/Lib/test/test_email/__main__.py
new file mode 100644
index 0000000..98af9ec
--- /dev/null
+++ b/Lib/test/test_email/__main__.py
@@ -0,0 +1,3 @@
+from test.test_email import test_main
+
+test_main()
diff --git a/Lib/email/test/data/PyBanner048.gif b/Lib/test/test_email/data/PyBanner048.gif
index 1a5c87f..1a5c87f 100644
--- a/Lib/email/test/data/PyBanner048.gif
+++ b/Lib/test/test_email/data/PyBanner048.gif
Binary files differ
diff --git a/Lib/email/test/data/audiotest.au b/Lib/test/test_email/data/audiotest.au
index f76b050..f76b050 100644
--- a/Lib/email/test/data/audiotest.au
+++ b/Lib/test/test_email/data/audiotest.au
Binary files differ
diff --git a/Lib/email/test/data/msg_01.txt b/Lib/test/test_email/data/msg_01.txt
index 7e33bcf..7e33bcf 100644
--- a/Lib/email/test/data/msg_01.txt
+++ b/Lib/test/test_email/data/msg_01.txt
diff --git a/Lib/email/test/data/msg_02.txt b/Lib/test/test_email/data/msg_02.txt
index 43f2480..43f2480 100644
--- a/Lib/email/test/data/msg_02.txt
+++ b/Lib/test/test_email/data/msg_02.txt
diff --git a/Lib/email/test/data/msg_03.txt b/Lib/test/test_email/data/msg_03.txt
index c748ebf..c748ebf 100644
--- a/Lib/email/test/data/msg_03.txt
+++ b/Lib/test/test_email/data/msg_03.txt
diff --git a/Lib/email/test/data/msg_04.txt b/Lib/test/test_email/data/msg_04.txt
index 1f633c4..1f633c4 100644
--- a/Lib/email/test/data/msg_04.txt
+++ b/Lib/test/test_email/data/msg_04.txt
diff --git a/Lib/email/test/data/msg_05.txt b/Lib/test/test_email/data/msg_05.txt
index 87d5e9c..87d5e9c 100644
--- a/Lib/email/test/data/msg_05.txt
+++ b/Lib/test/test_email/data/msg_05.txt
diff --git a/Lib/email/test/data/msg_06.txt b/Lib/test/test_email/data/msg_06.txt
index 69f3a47..69f3a47 100644
--- a/Lib/email/test/data/msg_06.txt
+++ b/Lib/test/test_email/data/msg_06.txt
diff --git a/Lib/email/test/data/msg_07.txt b/Lib/test/test_email/data/msg_07.txt
index 721f3a0..721f3a0 100644
--- a/Lib/email/test/data/msg_07.txt
+++ b/Lib/test/test_email/data/msg_07.txt
diff --git a/Lib/email/test/data/msg_08.txt b/Lib/test/test_email/data/msg_08.txt
index b563083..b563083 100644
--- a/Lib/email/test/data/msg_08.txt
+++ b/Lib/test/test_email/data/msg_08.txt
diff --git a/Lib/email/test/data/msg_09.txt b/Lib/test/test_email/data/msg_09.txt
index 575c4c2..575c4c2 100644
--- a/Lib/email/test/data/msg_09.txt
+++ b/Lib/test/test_email/data/msg_09.txt
diff --git a/Lib/email/test/data/msg_10.txt b/Lib/test/test_email/data/msg_10.txt
index 0790396..0790396 100644
--- a/Lib/email/test/data/msg_10.txt
+++ b/Lib/test/test_email/data/msg_10.txt
diff --git a/Lib/email/test/data/msg_11.txt b/Lib/test/test_email/data/msg_11.txt
index 8f7f199..8f7f199 100644
--- a/Lib/email/test/data/msg_11.txt
+++ b/Lib/test/test_email/data/msg_11.txt
diff --git a/Lib/email/test/data/msg_12.txt b/Lib/test/test_email/data/msg_12.txt
index 4bec8d9..4bec8d9 100644
--- a/Lib/email/test/data/msg_12.txt
+++ b/Lib/test/test_email/data/msg_12.txt
diff --git a/Lib/email/test/data/msg_12a.txt b/Lib/test/test_email/data/msg_12a.txt
index e94224e..e94224e 100644
--- a/Lib/email/test/data/msg_12a.txt
+++ b/Lib/test/test_email/data/msg_12a.txt
diff --git a/Lib/email/test/data/msg_13.txt b/Lib/test/test_email/data/msg_13.txt
index 8e6d52d..8e6d52d 100644
--- a/Lib/email/test/data/msg_13.txt
+++ b/Lib/test/test_email/data/msg_13.txt
diff --git a/Lib/email/test/data/msg_14.txt b/Lib/test/test_email/data/msg_14.txt
index 5d98d2f..5d98d2f 100644
--- a/Lib/email/test/data/msg_14.txt
+++ b/Lib/test/test_email/data/msg_14.txt
diff --git a/Lib/email/test/data/msg_15.txt b/Lib/test/test_email/data/msg_15.txt
index 0025624..0025624 100644
--- a/Lib/email/test/data/msg_15.txt
+++ b/Lib/test/test_email/data/msg_15.txt
diff --git a/Lib/email/test/data/msg_16.txt b/Lib/test/test_email/data/msg_16.txt
index 56167e9..56167e9 100644
--- a/Lib/email/test/data/msg_16.txt
+++ b/Lib/test/test_email/data/msg_16.txt
diff --git a/Lib/email/test/data/msg_17.txt b/Lib/test/test_email/data/msg_17.txt
index 8d86e41..8d86e41 100644
--- a/Lib/email/test/data/msg_17.txt
+++ b/Lib/test/test_email/data/msg_17.txt
diff --git a/Lib/email/test/data/msg_18.txt b/Lib/test/test_email/data/msg_18.txt
index f9f4904..f9f4904 100644
--- a/Lib/email/test/data/msg_18.txt
+++ b/Lib/test/test_email/data/msg_18.txt
diff --git a/Lib/email/test/data/msg_19.txt b/Lib/test/test_email/data/msg_19.txt
index 49bf7fc..49bf7fc 100644
--- a/Lib/email/test/data/msg_19.txt
+++ b/Lib/test/test_email/data/msg_19.txt
diff --git a/Lib/email/test/data/msg_20.txt b/Lib/test/test_email/data/msg_20.txt
index 1a6a887..1a6a887 100644
--- a/Lib/email/test/data/msg_20.txt
+++ b/Lib/test/test_email/data/msg_20.txt
diff --git a/Lib/email/test/data/msg_21.txt b/Lib/test/test_email/data/msg_21.txt
index 23590b2..23590b2 100644
--- a/Lib/email/test/data/msg_21.txt
+++ b/Lib/test/test_email/data/msg_21.txt
diff --git a/Lib/email/test/data/msg_22.txt b/Lib/test/test_email/data/msg_22.txt
index af9de5f..af9de5f 100644
--- a/Lib/email/test/data/msg_22.txt
+++ b/Lib/test/test_email/data/msg_22.txt
diff --git a/Lib/email/test/data/msg_23.txt b/Lib/test/test_email/data/msg_23.txt
index bb2e8ec..bb2e8ec 100644
--- a/Lib/email/test/data/msg_23.txt
+++ b/Lib/test/test_email/data/msg_23.txt
diff --git a/Lib/email/test/data/msg_24.txt b/Lib/test/test_email/data/msg_24.txt
index 4e52339..4e52339 100644
--- a/Lib/email/test/data/msg_24.txt
+++ b/Lib/test/test_email/data/msg_24.txt
diff --git a/Lib/email/test/data/msg_25.txt b/Lib/test/test_email/data/msg_25.txt
index 9e35275..9e35275 100644
--- a/Lib/email/test/data/msg_25.txt
+++ b/Lib/test/test_email/data/msg_25.txt
diff --git a/Lib/email/test/data/msg_26.txt b/Lib/test/test_email/data/msg_26.txt
index 58efaa9..58efaa9 100644
--- a/Lib/email/test/data/msg_26.txt
+++ b/Lib/test/test_email/data/msg_26.txt
diff --git a/Lib/email/test/data/msg_27.txt b/Lib/test/test_email/data/msg_27.txt
index d019176..d019176 100644
--- a/Lib/email/test/data/msg_27.txt
+++ b/Lib/test/test_email/data/msg_27.txt
diff --git a/Lib/email/test/data/msg_28.txt b/Lib/test/test_email/data/msg_28.txt
index 1e4824c..1e4824c 100644
--- a/Lib/email/test/data/msg_28.txt
+++ b/Lib/test/test_email/data/msg_28.txt
diff --git a/Lib/email/test/data/msg_29.txt b/Lib/test/test_email/data/msg_29.txt
index 1fab561..1fab561 100644
--- a/Lib/email/test/data/msg_29.txt
+++ b/Lib/test/test_email/data/msg_29.txt
diff --git a/Lib/email/test/data/msg_30.txt b/Lib/test/test_email/data/msg_30.txt
index 4334bb6..4334bb6 100644
--- a/Lib/email/test/data/msg_30.txt
+++ b/Lib/test/test_email/data/msg_30.txt
diff --git a/Lib/email/test/data/msg_31.txt b/Lib/test/test_email/data/msg_31.txt
index 1e58e56..1e58e56 100644
--- a/Lib/email/test/data/msg_31.txt
+++ b/Lib/test/test_email/data/msg_31.txt
diff --git a/Lib/email/test/data/msg_32.txt b/Lib/test/test_email/data/msg_32.txt
index 07ec5af..07ec5af 100644
--- a/Lib/email/test/data/msg_32.txt
+++ b/Lib/test/test_email/data/msg_32.txt
diff --git a/Lib/email/test/data/msg_33.txt b/Lib/test/test_email/data/msg_33.txt
index 042787a..042787a 100644
--- a/Lib/email/test/data/msg_33.txt
+++ b/Lib/test/test_email/data/msg_33.txt
diff --git a/Lib/email/test/data/msg_34.txt b/Lib/test/test_email/data/msg_34.txt
index 055dfea..055dfea 100644
--- a/Lib/email/test/data/msg_34.txt
+++ b/Lib/test/test_email/data/msg_34.txt
diff --git a/Lib/email/test/data/msg_35.txt b/Lib/test/test_email/data/msg_35.txt
index be7d5a2..be7d5a2 100644
--- a/Lib/email/test/data/msg_35.txt
+++ b/Lib/test/test_email/data/msg_35.txt
diff --git a/Lib/email/test/data/msg_36.txt b/Lib/test/test_email/data/msg_36.txt
index 5632c30..5632c30 100644
--- a/Lib/email/test/data/msg_36.txt
+++ b/Lib/test/test_email/data/msg_36.txt
diff --git a/Lib/email/test/data/msg_37.txt b/Lib/test/test_email/data/msg_37.txt
index 038d34a..038d34a 100644
--- a/Lib/email/test/data/msg_37.txt
+++ b/Lib/test/test_email/data/msg_37.txt
diff --git a/Lib/email/test/data/msg_38.txt b/Lib/test/test_email/data/msg_38.txt
index 006df81..006df81 100644
--- a/Lib/email/test/data/msg_38.txt
+++ b/Lib/test/test_email/data/msg_38.txt
diff --git a/Lib/email/test/data/msg_39.txt b/Lib/test/test_email/data/msg_39.txt
index 124b269..124b269 100644
--- a/Lib/email/test/data/msg_39.txt
+++ b/Lib/test/test_email/data/msg_39.txt
diff --git a/Lib/email/test/data/msg_40.txt b/Lib/test/test_email/data/msg_40.txt
index 1435fa1..1435fa1 100644
--- a/Lib/email/test/data/msg_40.txt
+++ b/Lib/test/test_email/data/msg_40.txt
diff --git a/Lib/email/test/data/msg_41.txt b/Lib/test/test_email/data/msg_41.txt
index 76cdd1c..76cdd1c 100644
--- a/Lib/email/test/data/msg_41.txt
+++ b/Lib/test/test_email/data/msg_41.txt
diff --git a/Lib/email/test/data/msg_42.txt b/Lib/test/test_email/data/msg_42.txt
index a75f8f4..a75f8f4 100644
--- a/Lib/email/test/data/msg_42.txt
+++ b/Lib/test/test_email/data/msg_42.txt
diff --git a/Lib/email/test/data/msg_43.txt b/Lib/test/test_email/data/msg_43.txt
index 797d12c..797d12c 100644
--- a/Lib/email/test/data/msg_43.txt
+++ b/Lib/test/test_email/data/msg_43.txt
diff --git a/Lib/email/test/data/msg_44.txt b/Lib/test/test_email/data/msg_44.txt
index 15a2252..15a2252 100644
--- a/Lib/email/test/data/msg_44.txt
+++ b/Lib/test/test_email/data/msg_44.txt
diff --git a/Lib/email/test/data/msg_45.txt b/Lib/test/test_email/data/msg_45.txt
index 58fde95..58fde95 100644
--- a/Lib/email/test/data/msg_45.txt
+++ b/Lib/test/test_email/data/msg_45.txt
diff --git a/Lib/email/test/data/msg_46.txt b/Lib/test/test_email/data/msg_46.txt
index 1e22c4f..1e22c4f 100644
--- a/Lib/email/test/data/msg_46.txt
+++ b/Lib/test/test_email/data/msg_46.txt
diff --git a/Lib/email/test/test_email_codecs.py b/Lib/test/test_email/test_asian_codecs.py
index ca85f57..a4dd9a9 100644
--- a/Lib/email/test/test_email_codecs.py
+++ b/Lib/test/test_email/test_asian_codecs.py
@@ -5,7 +5,7 @@
import unittest
from test.support import run_unittest
-from email.test.test_email import TestEmailBase
+from test.test_email.test_email import TestEmailBase
from email.charset import Charset
from email.header import Header, decode_header
from email.message import Message
@@ -78,16 +78,5 @@ Hello World! =?iso-2022-jp?b?GyRCJU8lbSE8JW8hPCVrJUkhKhsoQg==?=
-def suite():
- suite = unittest.TestSuite()
- suite.addTest(unittest.makeSuite(TestEmailAsianCodecs))
- return suite
-
-
-def test_main():
- run_unittest(TestEmailAsianCodecs)
-
-
-
if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+ unittest.main()
diff --git a/Lib/email/test/test_email.py b/Lib/test/test_email/test_email.py
index 5655938..08a49f8 100644
--- a/Lib/email/test/test_email.py
+++ b/Lib/test/test_email/test_email.py
@@ -36,40 +36,14 @@ from email import iterators
from email import base64mime
from email import quoprimime
-from test.support import findfile, run_unittest, unlink
-from email.test import __file__ as landmark
-
+from test.support import run_unittest, unlink
+from test.test_email import openfile, TestEmailBase
NL = '\n'
EMPTYSTRING = ''
SPACE = ' '
-
-def openfile(filename, *args, **kws):
- path = os.path.join(os.path.dirname(landmark), 'data', filename)
- return open(path, *args, **kws)
-
-
-
-# Base test class
-class TestEmailBase(unittest.TestCase):
- def ndiffAssertEqual(self, first, second):
- """Like assertEqual except use ndiff for readable output."""
- if first != second:
- sfirst = str(first)
- ssecond = str(second)
- rfirst = [repr(line) for line in sfirst.splitlines()]
- rsecond = [repr(line) for line in ssecond.splitlines()]
- diff = difflib.ndiff(rfirst, rsecond)
- raise self.failureException(NL + NL.join(diff))
-
- def _msgobj(self, filename):
- with openfile(findfile(filename)) as fp:
- return email.message_from_file(fp)
-
-
-
# Test various aspects of the Message class's API
class TestMessageAPI(TestEmailBase):
def test_get_all(self):
@@ -194,7 +168,7 @@ class TestMessageAPI(TestEmailBase):
def test_message_rfc822_only(self):
# Issue 7970: message/rfc822 not in multipart parsed by
# HeaderParser caused an exception when flattened.
- with openfile(findfile('msg_46.txt')) as fp:
+ with openfile('msg_46.txt') as fp:
msgdata = fp.read()
parser = HeaderParser()
msg = parser.parsestr(msgdata)
@@ -203,6 +177,17 @@ class TestMessageAPI(TestEmailBase):
gen.flatten(msg, False)
self.assertEqual(out.getvalue(), msgdata)
+ def test_byte_message_rfc822_only(self):
+ # Make sure new bytes header parser also passes this.
+ with openfile('msg_46.txt', 'rb') as fp:
+ msgdata = fp.read()
+ parser = email.parser.BytesHeaderParser()
+ msg = parser.parsebytes(msgdata)
+ out = BytesIO()
+ gen = email.generator.BytesGenerator(out)
+ gen.flatten(msg)
+ self.assertEqual(out.getvalue(), msgdata)
+
def test_get_decoded_payload(self):
eq = self.assertEqual
msg = self._msgobj('msg_10.txt')
@@ -1243,6 +1228,7 @@ List: List-Unsubscribe:
=?utf-8?q?_folding_white_space_works?=""")+'\n')
+
# Test mangling of "From " lines in the body of a message
class TestFromMangling(unittest.TestCase):
def setUp(self):
@@ -1280,13 +1266,7 @@ Blah blah blah
# Test the basic MIMEAudio class
class TestMIMEAudio(unittest.TestCase):
def setUp(self):
- # Make sure we pick up the audiotest.au that lives in email/test/data.
- # In Python, there's an audiotest.au living in Lib/test but that isn't
- # included in some binary distros that don't include the test
- # package. The trailing empty string on the .join() is significant
- # since findfile() will do a dirname().
- datadir = os.path.join(os.path.dirname(landmark), 'data', '')
- with open(findfile('audiotest.au', datadir), 'rb') as fp:
+ with openfile('audiotest.au', 'rb') as fp:
self._audiodata = fp.read()
self._au = MIMEAudio(self._audiodata)
@@ -1813,7 +1793,12 @@ YXNkZg==
# Test some badly formatted messages
-class TestNonConformant(TestEmailBase):
+class TestNonConformantBase:
+
+ def _msgobj(self, filename):
+ with openfile(filename) as fp:
+ return email.message_from_file(fp, policy=self.policy)
+
def test_parse_missing_minor_type(self):
eq = self.assertEqual
msg = self._msgobj('msg_14.txt')
@@ -1827,19 +1812,65 @@ class TestNonConformant(TestEmailBase):
# XXX We can probably eventually do better
inner = msg.get_payload(0)
unless(hasattr(inner, 'defects'))
- self.assertEqual(len(inner.defects), 1)
- unless(isinstance(inner.defects[0],
+ self.assertEqual(len(self.get_defects(inner)), 1)
+ unless(isinstance(self.get_defects(inner)[0],
errors.StartBoundaryNotFoundDefect))
def test_multipart_no_boundary(self):
unless = self.assertTrue
msg = self._msgobj('msg_25.txt')
unless(isinstance(msg.get_payload(), str))
- self.assertEqual(len(msg.defects), 2)
- unless(isinstance(msg.defects[0], errors.NoBoundaryInMultipartDefect))
- unless(isinstance(msg.defects[1],
+ self.assertEqual(len(self.get_defects(msg)), 2)
+ unless(isinstance(self.get_defects(msg)[0],
+ errors.NoBoundaryInMultipartDefect))
+ unless(isinstance(self.get_defects(msg)[1],
errors.MultipartInvariantViolationDefect))
+ multipart_msg = textwrap.dedent("""\
+ Date: Wed, 14 Nov 2007 12:56:23 GMT
+ From: foo@bar.invalid
+ To: foo@bar.invalid
+ Subject: Content-Transfer-Encoding: base64 and multipart
+ MIME-Version: 1.0
+ Content-Type: multipart/mixed;
+ boundary="===============3344438784458119861=="{}
+
+ --===============3344438784458119861==
+ Content-Type: text/plain
+
+ Test message
+
+ --===============3344438784458119861==
+ Content-Type: application/octet-stream
+ Content-Transfer-Encoding: base64
+
+ YWJj
+
+ --===============3344438784458119861==--
+ """)
+
+ def test_multipart_invalid_cte(self):
+ msg = email.message_from_string(
+ self.multipart_msg.format("\nContent-Transfer-Encoding: base64"),
+ policy = self.policy)
+ self.assertEqual(len(self.get_defects(msg)), 1)
+ self.assertIsInstance(self.get_defects(msg)[0],
+ errors.InvalidMultipartContentTransferEncodingDefect)
+
+ def test_multipart_no_cte_no_defect(self):
+ msg = email.message_from_string(
+ self.multipart_msg.format(''),
+ policy = self.policy)
+ self.assertEqual(len(self.get_defects(msg)), 0)
+
+ def test_multipart_valid_cte_no_defect(self):
+ for cte in ('7bit', '8bit', 'BINary'):
+ msg = email.message_from_string(
+ self.multipart_msg.format(
+ "\nContent-Transfer-Encoding: {}".format(cte)),
+ policy = self.policy)
+ self.assertEqual(len(self.get_defects(msg)), 0)
+
def test_invalid_content_type(self):
eq = self.assertEqual
neq = self.ndiffAssertEqual
@@ -1893,9 +1924,10 @@ counter to RFC 2822, there's no separating newline here
unless = self.assertTrue
msg = self._msgobj('msg_41.txt')
unless(hasattr(msg, 'defects'))
- self.assertEqual(len(msg.defects), 2)
- unless(isinstance(msg.defects[0], errors.NoBoundaryInMultipartDefect))
- unless(isinstance(msg.defects[1],
+ self.assertEqual(len(self.get_defects(msg)), 2)
+ unless(isinstance(self.get_defects(msg)[0],
+ errors.NoBoundaryInMultipartDefect))
+ unless(isinstance(self.get_defects(msg)[1],
errors.MultipartInvariantViolationDefect))
def test_missing_start_boundary(self):
@@ -1909,21 +1941,71 @@ counter to RFC 2822, there's no separating newline here
#
# [*] This message is missing its start boundary
bad = outer.get_payload(1).get_payload(0)
- self.assertEqual(len(bad.defects), 1)
- self.assertTrue(isinstance(bad.defects[0],
+ self.assertEqual(len(self.get_defects(bad)), 1)
+ self.assertTrue(isinstance(self.get_defects(bad)[0],
errors.StartBoundaryNotFoundDefect))
def test_first_line_is_continuation_header(self):
eq = self.assertEqual
m = ' Line 1\nLine 2\nLine 3'
- msg = email.message_from_string(m)
+ msg = email.message_from_string(m, policy=self.policy)
eq(msg.keys(), [])
eq(msg.get_payload(), 'Line 2\nLine 3')
- eq(len(msg.defects), 1)
- self.assertTrue(isinstance(msg.defects[0],
+ eq(len(self.get_defects(msg)), 1)
+ self.assertTrue(isinstance(self.get_defects(msg)[0],
errors.FirstHeaderLineIsContinuationDefect))
- eq(msg.defects[0].line, ' Line 1\n')
+ eq(self.get_defects(msg)[0].line, ' Line 1\n')
+
+
+class TestNonConformant(TestNonConformantBase, TestEmailBase):
+
+ policy=email.policy.default
+
+ def get_defects(self, obj):
+ return obj.defects
+
+
+class TestNonConformantCapture(TestNonConformantBase, TestEmailBase):
+
+ class CapturePolicy(email.policy.Policy):
+ captured = None
+ def register_defect(self, obj, defect):
+ self.captured.append(defect)
+
+ def setUp(self):
+ self.policy = self.CapturePolicy(captured=list())
+
+ def get_defects(self, obj):
+ return self.policy.captured
+
+
+class TestRaisingDefects(TestEmailBase):
+ def _msgobj(self, filename):
+ with openfile(filename) as fp:
+ return email.message_from_file(fp, policy=email.policy.strict)
+
+ def test_same_boundary_inner_outer(self):
+ with self.assertRaises(errors.StartBoundaryNotFoundDefect):
+ self._msgobj('msg_15.txt')
+
+ def test_multipart_no_boundary(self):
+ with self.assertRaises(errors.NoBoundaryInMultipartDefect):
+ self._msgobj('msg_25.txt')
+
+ def test_lying_multipart(self):
+ with self.assertRaises(errors.NoBoundaryInMultipartDefect):
+ self._msgobj('msg_41.txt')
+
+
+ def test_missing_start_boundary(self):
+ with self.assertRaises(errors.StartBoundaryNotFoundDefect):
+ self._msgobj('msg_42.txt')
+
+ def test_first_line_is_continuation_header(self):
+ m = ' Line 1\nLine 2\nLine 3'
+ with self.assertRaises(errors.FirstHeaderLineIsContinuationDefect):
+ msg = email.message_from_string(m, policy=email.policy.strict)
# Test RFC 2047 header encoding and decoding
@@ -2577,6 +2659,13 @@ class TestMiscellaneous(TestEmailBase):
(2002, 4, 3, 14, 58, 26, 0, 1, -1, -28800))
+ def test_parsedate_accepts_time_with_dots(self):
+ eq = self.assertEqual
+ eq(utils.parsedate_tz('5 Feb 2003 13.47.26 -0800'),
+ (2003, 2, 5, 13, 47, 26, 0, 1, -1, -28800))
+ eq(utils.parsedate_tz('5 Feb 2003 13.47 -0800'),
+ (2003, 2, 5, 13, 47, 0, 0, 1, -1, -28800))
+
def test_parsedate_acceptable_to_time_functions(self):
eq = self.assertEqual
timetup = utils.parsedate('5 Feb 2003 13:47:26 -0800')
@@ -2613,7 +2702,10 @@ class TestMiscellaneous(TestEmailBase):
def test_escape_dump(self):
self.assertEqual(
utils.formataddr(('A (Very) Silly Person', 'person@dom.ain')),
- r'"A \(Very\) Silly Person" <person@dom.ain>')
+ r'"A (Very) Silly Person" <person@dom.ain>')
+ self.assertEqual(
+ utils.parseaddr(r'"A \(Very\) Silly Person" <person@dom.ain>'),
+ ('A (Very) Silly Person', 'person@dom.ain'))
a = r'A \(Special\) Person'
b = 'person@dom.ain'
self.assertEqual(utils.parseaddr(utils.formataddr((a, b))), (a, b))
@@ -2626,6 +2718,46 @@ class TestMiscellaneous(TestEmailBase):
b = 'person@dom.ain'
self.assertEqual(utils.parseaddr(utils.formataddr((a, b))), (a, b))
+ def test_quotes_unicode_names(self):
+ # issue 1690608. email.utils.formataddr() should be rfc2047 aware.
+ name = "H\u00e4ns W\u00fcrst"
+ addr = 'person@dom.ain'
+ utf8_base64 = "=?utf-8?b?SMOkbnMgV8O8cnN0?= <person@dom.ain>"
+ latin1_quopri = "=?iso-8859-1?q?H=E4ns_W=FCrst?= <person@dom.ain>"
+ self.assertEqual(utils.formataddr((name, addr)), utf8_base64)
+ self.assertEqual(utils.formataddr((name, addr), 'iso-8859-1'),
+ latin1_quopri)
+
+ def test_accepts_any_charset_like_object(self):
+ # issue 1690608. email.utils.formataddr() should be rfc2047 aware.
+ name = "H\u00e4ns W\u00fcrst"
+ addr = 'person@dom.ain'
+ utf8_base64 = "=?utf-8?b?SMOkbnMgV8O8cnN0?= <person@dom.ain>"
+ foobar = "FOOBAR"
+ class CharsetMock:
+ def header_encode(self, string):
+ return foobar
+ mock = CharsetMock()
+ mock_expected = "%s <%s>" % (foobar, addr)
+ self.assertEqual(utils.formataddr((name, addr), mock), mock_expected)
+ self.assertEqual(utils.formataddr((name, addr), Charset('utf-8')),
+ utf8_base64)
+
+ def test_invalid_charset_like_object_raises_error(self):
+ # issue 1690608. email.utils.formataddr() should be rfc2047 aware.
+ name = "H\u00e4ns W\u00fcrst"
+ addr = 'person@dom.ain'
+        # An object without a header_encode method:
+ bad_charset = object()
+ self.assertRaises(AttributeError, utils.formataddr, (name, addr),
+ bad_charset)
+
+ def test_unicode_address_raises_error(self):
+ # issue 1690608. email.utils.formataddr() should be rfc2047 aware.
+ addr = 'pers\u00f6n@dom.in'
+ self.assertRaises(UnicodeError, utils.formataddr, (None, addr))
+ self.assertRaises(UnicodeError, utils.formataddr, ("Name", addr))
+
def test_name_with_dot(self):
x = 'John X. Doe <jxd@example.com>'
y = '"John X. Doe" <jxd@example.com>'
@@ -2671,6 +2803,15 @@ class TestMiscellaneous(TestEmailBase):
self.assertEqual(('', 'merwok.wok.wok@xample.com'),
utils.parseaddr('merwok. wok . wok@xample.com'))
+ def test_formataddr_does_not_quote_parens_in_quoted_string(self):
+ addr = ("'foo@example.com' (foo@example.com)",
+ 'foo@example.com')
+ addrstr = ('"\'foo@example.com\' '
+ '(foo@example.com)" <foo@example.com>')
+ self.assertEqual(utils.parseaddr(addrstr), addr)
+ self.assertEqual(utils.formataddr(addr), addrstr)
+
+
def test_multiline_from_comment(self):
x = """\
Foo
@@ -2906,6 +3047,7 @@ Do you like this message?
class TestParsers(TestEmailBase):
+
def test_header_parser(self):
eq = self.assertEqual
# Parse only the headers of a complex multipart MIME document
@@ -2917,6 +3059,18 @@ class TestParsers(TestEmailBase):
self.assertFalse(msg.is_multipart())
self.assertTrue(isinstance(msg.get_payload(), str))
+ def test_bytes_header_parser(self):
+ eq = self.assertEqual
+ # Parse only the headers of a complex multipart MIME document
+ with openfile('msg_02.txt', 'rb') as fp:
+ msg = email.parser.BytesHeaderParser().parse(fp)
+ eq(msg['from'], 'ppp-request@zzz.org')
+ eq(msg['to'], 'ppp@zzz.org')
+ eq(msg.get_content_type(), 'multipart/mixed')
+ self.assertFalse(msg.is_multipart())
+ self.assertTrue(isinstance(msg.get_payload(), str))
+ self.assertTrue(isinstance(msg.get_payload(decode=True), bytes))
+
def test_whitespace_continuation(self):
eq = self.assertEqual
# This message contains a line after the Subject: header that has only
@@ -2974,6 +3128,25 @@ Here's the message body
g.flatten(msg, linesep='\r\n')
self.assertEqual(s.getvalue(), text)
+ def test_crlf_control_via_policy(self):
+ with openfile('msg_26.txt', newline='\n') as fp:
+ text = fp.read()
+ msg = email.message_from_string(text)
+ s = StringIO()
+ g = email.generator.Generator(s, policy=email.policy.SMTP)
+ g.flatten(msg)
+ self.assertEqual(s.getvalue(), text)
+
+ def test_flatten_linesep_overrides_policy(self):
+ # msg_27 is lf separated
+ with openfile('msg_27.txt', newline='\n') as fp:
+ text = fp.read()
+ msg = email.message_from_string(text)
+ s = StringIO()
+ g = email.generator.Generator(s, policy=email.policy.SMTP)
+ g.flatten(msg, linesep='\n')
+ self.assertEqual(s.getvalue(), text)
+
maxDiff = None
def test_multipart_digest_with_extra_mime_headers(self):
@@ -3464,6 +3637,44 @@ class Test8BitBytesHandling(unittest.TestCase):
s.getvalue(),
'Subject: =?utf-8?b?xb5sdcWlb3XEjWvDvSBrxa/FiA==?=\r\n\r\n')
+ def test_crlf_control_via_policy(self):
+ # msg_26 is crlf terminated
+ with openfile('msg_26.txt', 'rb') as fp:
+ text = fp.read()
+ msg = email.message_from_bytes(text)
+ s = BytesIO()
+ g = email.generator.BytesGenerator(s, policy=email.policy.SMTP)
+ g.flatten(msg)
+ self.assertEqual(s.getvalue(), text)
+
+ def test_flatten_linesep_overrides_policy(self):
+ # msg_27 is lf separated
+ with openfile('msg_27.txt', 'rb') as fp:
+ text = fp.read()
+ msg = email.message_from_bytes(text)
+ s = BytesIO()
+ g = email.generator.BytesGenerator(s, policy=email.policy.SMTP)
+ g.flatten(msg, linesep='\n')
+ self.assertEqual(s.getvalue(), text)
+
+ def test_must_be_7bit_handles_unknown_8bit(self):
+ msg = email.message_from_bytes(self.non_latin_bin_msg)
+ out = BytesIO()
+ g = email.generator.BytesGenerator(out,
+ policy=email.policy.default.clone(must_be_7bit=True))
+ g.flatten(msg)
+ self.assertEqual(out.getvalue(),
+ self.non_latin_bin_msg_as7bit_wrapped.encode('ascii'))
+
+ def test_must_be_7bit_transforms_8bit_cte(self):
+ msg = email.message_from_bytes(self.latin_bin_msg)
+ out = BytesIO()
+ g = email.generator.BytesGenerator(out,
+ policy=email.policy.default.clone(must_be_7bit=True))
+ g.flatten(msg)
+ self.assertEqual(out.getvalue(),
+ self.latin_bin_msg_as7bit.encode('ascii'))
+
maxDiff = None
@@ -3482,12 +3693,7 @@ class BaseTestBytesGeneratorIdempotent:
b = BytesIO()
g = email.generator.BytesGenerator(b, maxheaderlen=0)
g.flatten(msg, unixfrom=unixfrom, linesep=self.linesep)
- self.assertByteStringsEqual(data, b.getvalue())
-
- def assertByteStringsEqual(self, str1, str2):
- # Not using self.blinesep here is intentional. This way the output
- # is more useful when the failure results in mixed line endings.
- self.assertListEqual(str1.split(b'\n'), str2.split(b'\n'))
+ self.assertEqual(data, b.getvalue())
class TestBytesGeneratorIdempotentNL(BaseTestBytesGeneratorIdempotent,
@@ -4639,7 +4845,7 @@ Content-Type: application/x-foo;
class TestSigned(TestEmailBase):
def _msg_and_obj(self, filename):
- with openfile(findfile(filename)) as fp:
+ with openfile(filename) as fp:
original = fp.read()
msg = email.message_from_string(original)
return original, msg
@@ -4671,23 +4877,5 @@ class TestSigned(TestEmailBase):
-def _testclasses():
- mod = sys.modules[__name__]
- return [getattr(mod, name) for name in dir(mod) if name.startswith('Test')]
-
-
-def suite():
- suite = unittest.TestSuite()
- for testclass in _testclasses():
- suite.addTest(unittest.makeSuite(testclass))
- return suite
-
-
-def test_main():
- for testclass in _testclasses():
- run_unittest(testclass)
-
-
-
if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+ unittest.main()
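
For context, the defect-capture pattern these new test classes exercise can be
reduced to a few lines (a minimal sketch, assuming the Policy API this patch
introduces: keyword-argument construction and an overridable register_defect()
hook):

    import email
    import email.policy

    # Illustration only: route parser defects into a list held by the
    # policy instead of attaching them to the message object.
    class CapturePolicy(email.policy.Policy):
        captured = None   # set per instance via the constructor kwargs

        def register_defect(self, obj, defect):
            self.captured.append(defect)

    policy = CapturePolicy(captured=[])
    # A multipart with no boundary parameter is defective; the defects
    # should land in policy.captured rather than in msg.defects.
    msg = email.message_from_string(
        "Content-Type: multipart/mixed\n\nbody\n", policy=policy)
    print([type(d).__name__ for d in policy.captured])
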
diff --git a/Lib/test/test_email/test_generator.py b/Lib/test/test_email/test_generator.py
new file mode 100644
index 0000000..35ca6c5
--- /dev/null
+++ b/Lib/test/test_email/test_generator.py
@@ -0,0 +1,136 @@
+import io
+import textwrap
+import unittest
+from email import message_from_string, message_from_bytes
+from email.generator import Generator, BytesGenerator
+from email import policy
+from test.test_email import TestEmailBase
+
+# XXX: move generator tests from test_email into here at some point.
+
+
+class TestGeneratorBase:
+
+ long_subject = {
+ 0: textwrap.dedent("""\
+ To: whom_it_may_concern@example.com
+ From: nobody_you_want_to_know@example.com
+ Subject: We the willing led by the unknowing are doing the
+ impossible for the ungrateful. We have done so much for so long with so little
+ we are now qualified to do anything with nothing.
+
+ None
+ """),
+ 40: textwrap.dedent("""\
+ To: whom_it_may_concern@example.com
+ From:\x20
+ nobody_you_want_to_know@example.com
+ Subject: We the willing led by the
+ unknowing are doing the
+ impossible for the ungrateful. We have
+ done so much for so long with so little
+ we are now qualified to do anything
+ with nothing.
+
+ None
+ """),
+ 20: textwrap.dedent("""\
+ To:\x20
+ whom_it_may_concern@example.com
+ From:\x20
+ nobody_you_want_to_know@example.com
+ Subject: We the
+ willing led by the
+ unknowing are doing
+ the
+ impossible for the
+ ungrateful. We have
+ done so much for so
+ long with so little
+ we are now
+ qualified to do
+ anything with
+ nothing.
+
+ None
+ """),
+ }
+ long_subject[100] = long_subject[0]
+
+ def maxheaderlen_parameter_test(self, n):
+ msg = self.msgmaker(self.long_subject[0])
+ s = self.ioclass()
+ g = self.genclass(s, maxheaderlen=n)
+ g.flatten(msg)
+ self.assertEqual(s.getvalue(), self.long_subject[n])
+
+ def test_maxheaderlen_parameter_0(self):
+ self.maxheaderlen_parameter_test(0)
+
+ def test_maxheaderlen_parameter_100(self):
+ self.maxheaderlen_parameter_test(100)
+
+ def test_maxheaderlen_parameter_40(self):
+ self.maxheaderlen_parameter_test(40)
+
+ def test_maxheaderlen_parameter_20(self):
+ self.maxheaderlen_parameter_test(20)
+
+ def maxheaderlen_policy_test(self, n):
+ msg = self.msgmaker(self.long_subject[0])
+ s = self.ioclass()
+ g = self.genclass(s, policy=policy.default.clone(max_line_length=n))
+ g.flatten(msg)
+ self.assertEqual(s.getvalue(), self.long_subject[n])
+
+ def test_maxheaderlen_policy_0(self):
+ self.maxheaderlen_policy_test(0)
+
+ def test_maxheaderlen_policy_100(self):
+ self.maxheaderlen_policy_test(100)
+
+ def test_maxheaderlen_policy_40(self):
+ self.maxheaderlen_policy_test(40)
+
+ def test_maxheaderlen_policy_20(self):
+ self.maxheaderlen_policy_test(20)
+
+ def maxheaderlen_parm_overrides_policy_test(self, n):
+ msg = self.msgmaker(self.long_subject[0])
+ s = self.ioclass()
+ g = self.genclass(s, maxheaderlen=n,
+ policy=policy.default.clone(max_line_length=10))
+ g.flatten(msg)
+ self.assertEqual(s.getvalue(), self.long_subject[n])
+
+ def test_maxheaderlen_parm_overrides_policy_0(self):
+ self.maxheaderlen_parm_overrides_policy_test(0)
+
+ def test_maxheaderlen_parm_overrides_policy_100(self):
+ self.maxheaderlen_parm_overrides_policy_test(100)
+
+ def test_maxheaderlen_parm_overrides_policy_40(self):
+ self.maxheaderlen_parm_overrides_policy_test(40)
+
+ def test_maxheaderlen_parm_overrides_policy_20(self):
+ self.maxheaderlen_parm_overrides_policy_test(20)
+
+
+class TestGenerator(TestGeneratorBase, TestEmailBase):
+
+ msgmaker = staticmethod(message_from_string)
+ genclass = Generator
+ ioclass = io.StringIO
+
+
+class TestBytesGenerator(TestGeneratorBase, TestEmailBase):
+
+ msgmaker = staticmethod(message_from_bytes)
+ genclass = BytesGenerator
+ ioclass = io.BytesIO
+ long_subject = {key: x.encode('ascii')
+ for key, x in TestGeneratorBase.long_subject.items()}
+
+
+if __name__ == '__main__':
+ unittest.main()
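
The split between the constructor argument and the policy setting that these
tests pin down can be seen directly (a minimal sketch using only APIs shown
above; the header text is illustrative):

    import io
    from email import message_from_string, policy
    from email.generator import Generator

    msg = message_from_string("Subject: " + "word " * 30 + "\n\nbody\n")
    out = io.StringIO()
    # An explicit maxheaderlen wins over the policy's max_line_length.
    g = Generator(out, maxheaderlen=20,
                  policy=policy.default.clone(max_line_length=100))
    g.flatten(msg)
    print(out.getvalue())   # Subject: folded at roughly 20 columns, not 100
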
diff --git a/Lib/test/test_email/test_policy.py b/Lib/test/test_email/test_policy.py
new file mode 100644
index 0000000..1c65901
--- /dev/null
+++ b/Lib/test/test_email/test_policy.py
@@ -0,0 +1,150 @@
+import types
+import unittest
+import email.policy
+
+class PolicyAPITests(unittest.TestCase):
+
+ longMessage = True
+
+ # These default values are the ones set on email.policy.default.
+ # If any of these defaults change, the docs must be updated.
+ policy_defaults = {
+ 'max_line_length': 78,
+ 'linesep': '\n',
+ 'must_be_7bit': False,
+ 'raise_on_defect': False,
+ }
+
+ # For each policy under test, we give here the values of the attributes
+ # that are different from the defaults for that policy.
+ policies = {
+ email.policy.Policy(): {},
+ email.policy.default: {},
+ email.policy.SMTP: {'linesep': '\r\n'},
+ email.policy.HTTP: {'linesep': '\r\n', 'max_line_length': None},
+ email.policy.strict: {'raise_on_defect': True},
+ }
+
+ def test_defaults(self):
+ for policy, changed_defaults in self.policies.items():
+ expected = self.policy_defaults.copy()
+ expected.update(changed_defaults)
+ for attr, value in expected.items():
+ self.assertEqual(getattr(policy, attr), value,
+ ("change {} docs/docstrings if defaults have "
+ "changed").format(policy))
+
+ def test_all_attributes_covered(self):
+ for attr in dir(email.policy.default):
+ if (attr.startswith('_') or
+ isinstance(getattr(email.policy.Policy, attr),
+ types.FunctionType)):
+ continue
+ else:
+ self.assertIn(attr, self.policy_defaults,
+ "{} is not fully tested".format(attr))
+
+ def test_policy_is_immutable(self):
+ for policy in self.policies:
+ for attr in self.policy_defaults:
+                with self.assertRaisesRegex(AttributeError, attr + ".*read-only"):
+ setattr(policy, attr, None)
+ with self.assertRaisesRegex(AttributeError, 'no attribute.*foo'):
+ policy.foo = None
+
+    def test_set_policy_attrs_when_called(self):
+ testattrdict = { attr: None for attr in self.policy_defaults }
+ for policyclass in self.policies:
+ policy = policyclass.clone(**testattrdict)
+ for attr in self.policy_defaults:
+ self.assertIsNone(getattr(policy, attr))
+
+ def test_reject_non_policy_keyword_when_called(self):
+ for policyclass in self.policies:
+ with self.assertRaises(TypeError):
+ policyclass(this_keyword_should_not_be_valid=None)
+ with self.assertRaises(TypeError):
+ policyclass(newtline=None)
+
+ def test_policy_addition(self):
+ expected = self.policy_defaults.copy()
+ p1 = email.policy.default.clone(max_line_length=100)
+ p2 = email.policy.default.clone(max_line_length=50)
+ added = p1 + p2
+ expected.update(max_line_length=50)
+ for attr, value in expected.items():
+ self.assertEqual(getattr(added, attr), value)
+ added = p2 + p1
+ expected.update(max_line_length=100)
+ for attr, value in expected.items():
+ self.assertEqual(getattr(added, attr), value)
+ added = added + email.policy.default
+ for attr, value in expected.items():
+ self.assertEqual(getattr(added, attr), value)
+
+ def test_register_defect(self):
+ class Dummy:
+ def __init__(self):
+ self.defects = []
+ obj = Dummy()
+ defect = object()
+ policy = email.policy.Policy()
+ policy.register_defect(obj, defect)
+ self.assertEqual(obj.defects, [defect])
+ defect2 = object()
+ policy.register_defect(obj, defect2)
+ self.assertEqual(obj.defects, [defect, defect2])
+
+ class MyObj:
+ def __init__(self):
+ self.defects = []
+
+ class MyDefect(Exception):
+ pass
+
+ def test_handle_defect_raises_on_strict(self):
+ foo = self.MyObj()
+ defect = self.MyDefect("the telly is broken")
+ with self.assertRaisesRegex(self.MyDefect, "the telly is broken"):
+ email.policy.strict.handle_defect(foo, defect)
+
+ def test_handle_defect_registers_defect(self):
+ foo = self.MyObj()
+ defect1 = self.MyDefect("one")
+ email.policy.default.handle_defect(foo, defect1)
+ self.assertEqual(foo.defects, [defect1])
+ defect2 = self.MyDefect("two")
+ email.policy.default.handle_defect(foo, defect2)
+ self.assertEqual(foo.defects, [defect1, defect2])
+
+ class MyPolicy(email.policy.Policy):
+ defects = None
+ def __init__(self, *args, **kw):
+ super().__init__(*args, defects=[], **kw)
+ def register_defect(self, obj, defect):
+ self.defects.append(defect)
+
+ def test_overridden_register_defect_still_raises(self):
+ foo = self.MyObj()
+ defect = self.MyDefect("the telly is broken")
+ with self.assertRaisesRegex(self.MyDefect, "the telly is broken"):
+ self.MyPolicy(raise_on_defect=True).handle_defect(foo, defect)
+
+    def test_overridden_register_defect_works(self):
+ foo = self.MyObj()
+ defect1 = self.MyDefect("one")
+ my_policy = self.MyPolicy()
+ my_policy.handle_defect(foo, defect1)
+ self.assertEqual(my_policy.defects, [defect1])
+ self.assertEqual(foo.defects, [])
+ defect2 = self.MyDefect("two")
+ my_policy.handle_defect(foo, defect2)
+ self.assertEqual(my_policy.defects, [defect1, defect2])
+ self.assertEqual(foo.defects, [])
+
+ # XXX: Need subclassing tests.
+ # For adding subclassed objects, make sure the usual rules apply (subclass
+ # wins), but that the order still works (right overrides left).
+
+if __name__ == '__main__':
+ unittest.main()
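
The addition semantics checked in test_policy_addition amount to "right side
wins" (a short sketch, using only the clone() and + operations tested above):

    import email.policy

    p1 = email.policy.default.clone(max_line_length=100)
    p2 = email.policy.default.clone(max_line_length=50)
    print((p1 + p2).max_line_length)   # 50  -- right operand overrides
    print((p2 + p1).max_line_length)   # 100
    print((p1 + p2).linesep == email.policy.default.linesep)   # True
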
diff --git a/Lib/test/test_email/test_utils.py b/Lib/test/test_email/test_utils.py
new file mode 100644
index 0000000..e003a64
--- /dev/null
+++ b/Lib/test/test_email/test_utils.py
@@ -0,0 +1,45 @@
+import datetime
+from email import utils
+import unittest
+
+class DateTimeTests(unittest.TestCase):
+
+ datestring = 'Sun, 23 Sep 2001 20:10:55'
+ dateargs = (2001, 9, 23, 20, 10, 55)
+ offsetstring = ' -0700'
+ utcoffset = datetime.timedelta(hours=-7)
+ tz = datetime.timezone(utcoffset)
+ naive_dt = datetime.datetime(*dateargs)
+ aware_dt = datetime.datetime(*dateargs, tzinfo=tz)
+
+ def test_naive_datetime(self):
+ self.assertEqual(utils.format_datetime(self.naive_dt),
+ self.datestring + ' -0000')
+
+ def test_aware_datetime(self):
+ self.assertEqual(utils.format_datetime(self.aware_dt),
+ self.datestring + self.offsetstring)
+
+ def test_usegmt(self):
+ utc_dt = datetime.datetime(*self.dateargs,
+ tzinfo=datetime.timezone.utc)
+ self.assertEqual(utils.format_datetime(utc_dt, usegmt=True),
+ self.datestring + ' GMT')
+
+ def test_usegmt_with_naive_datetime_raises(self):
+ with self.assertRaises(ValueError):
+ utils.format_datetime(self.naive_dt, usegmt=True)
+
+ def test_usegmt_with_non_utc_datetime_raises(self):
+ with self.assertRaises(ValueError):
+ utils.format_datetime(self.aware_dt, usegmt=True)
+
+ def test_parsedate_to_datetime(self):
+ self.assertEqual(
+ utils.parsedate_to_datetime(self.datestring + self.offsetstring),
+ self.aware_dt)
+
+ def test_parsedate_to_datetime_naive(self):
+ self.assertEqual(
+ utils.parsedate_to_datetime(self.datestring + ' -0000'),
+ self.naive_dt)
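
Taken together, these tests describe a round trip between datetime objects and
RFC 2822 date strings (a minimal sketch of the two helpers under test):

    import datetime
    from email import utils

    tz = datetime.timezone(datetime.timedelta(hours=-7))
    dt = datetime.datetime(2001, 9, 23, 20, 10, 55, tzinfo=tz)
    s = utils.format_datetime(dt)       # 'Sun, 23 Sep 2001 20:10:55 -0700'
    print(utils.parsedate_to_datetime(s) == dt)   # True
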
diff --git a/Lib/email/test/test_email_torture.py b/Lib/test/test_email/torture_test.py
index 544b1bb..544b1bb 100644
--- a/Lib/email/test/test_email_torture.py
+++ b/Lib/test/test_email/torture_test.py
diff --git a/Lib/test/test_epoll.py b/Lib/test/test_epoll.py
index 083fd7f..7f9547f 100644
--- a/Lib/test/test_epoll.py
+++ b/Lib/test/test_epoll.py
@@ -75,6 +75,9 @@ class TestEPoll(unittest.TestCase):
ep.close()
self.assertTrue(ep.closed)
self.assertRaises(ValueError, ep.fileno)
+ if hasattr(select, "EPOLL_CLOEXEC"):
+ select.epoll(select.EPOLL_CLOEXEC).close()
+ self.assertRaises(OSError, select.epoll, flags=12356)
def test_badcreate(self):
self.assertRaises(TypeError, select.epoll, 1, 2, 3)
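
The new assertions exercise the EPOLL_CLOEXEC flag; minimal usage looks like
this (Linux-only, and only where the select module exposes the constant):

    import select

    if hasattr(select, "EPOLL_CLOEXEC"):
        # The epoll fd is closed automatically across exec() calls.
        ep = select.epoll(flags=select.EPOLL_CLOEXEC)
        ep.close()
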
diff --git a/Lib/test/test_exceptions.py b/Lib/test/test_exceptions.py
index 7a2dd0c..42536d3 100644
--- a/Lib/test/test_exceptions.py
+++ b/Lib/test/test_exceptions.py
@@ -8,7 +8,7 @@ import weakref
import errno
from test.support import (TESTFN, unlink, run_unittest, captured_output,
- gc_collect, cpython_only)
+ gc_collect, cpython_only, no_tracing)
# XXX This is not really enough, each *operation* should be tested!
@@ -46,8 +46,8 @@ class ExceptionTests(unittest.TestCase):
fp.close()
unlink(TESTFN)
- self.raise_catch(IOError, "IOError")
- self.assertRaises(IOError, open, 'this file does not exist', 'r')
+ self.raise_catch(OSError, "OSError")
+ self.assertRaises(OSError, open, 'this file does not exist', 'r')
self.raise_catch(ImportError, "ImportError")
self.assertRaises(ImportError, __import__, "undefined_module")
@@ -192,11 +192,35 @@ class ExceptionTests(unittest.TestCase):
except NameError:
pass
else:
- self.assertEqual(str(WindowsError(1001)), "1001")
- self.assertEqual(str(WindowsError(1001, "message")),
- "[Error 1001] message")
- self.assertEqual(WindowsError(1001, "message").errno, 22)
- self.assertEqual(WindowsError(1001, "message").winerror, 1001)
+ self.assertIs(WindowsError, OSError)
+ self.assertEqual(str(OSError(1001)), "1001")
+ self.assertEqual(str(OSError(1001, "message")),
+ "[Errno 1001] message")
+ # POSIX errno (9 aka EBADF) is untranslated
+ w = OSError(9, 'foo', 'bar')
+ self.assertEqual(w.errno, 9)
+ self.assertEqual(w.winerror, None)
+ self.assertEqual(str(w), "[Errno 9] foo: 'bar'")
+ # ERROR_PATH_NOT_FOUND (win error 3) becomes ENOENT (2)
+ w = OSError(0, 'foo', 'bar', 3)
+ self.assertEqual(w.errno, 2)
+ self.assertEqual(w.winerror, 3)
+ self.assertEqual(w.strerror, 'foo')
+ self.assertEqual(w.filename, 'bar')
+ self.assertEqual(str(w), "[Error 3] foo: 'bar'")
+ # Unknown win error becomes EINVAL (22)
+ w = OSError(0, 'foo', None, 1001)
+ self.assertEqual(w.errno, 22)
+ self.assertEqual(w.winerror, 1001)
+ self.assertEqual(w.strerror, 'foo')
+ self.assertEqual(w.filename, None)
+ self.assertEqual(str(w), "[Error 1001] foo")
+ # Non-numeric "errno"
+ w = OSError('bar', 'foo')
+ self.assertEqual(w.errno, 'bar')
+ self.assertEqual(w.winerror, None)
+ self.assertEqual(w.strerror, 'foo')
+ self.assertEqual(w.filename, None)
def testAttributes(self):
# test that exception attributes are happy
@@ -274,11 +298,12 @@ class ExceptionTests(unittest.TestCase):
'start' : 0, 'end' : 1}),
]
try:
+ # More tests are in test_WindowsError
exceptionList.append(
(WindowsError, (1, 'strErrorStr', 'filenameStr'),
{'args' : (1, 'strErrorStr'),
- 'strerror' : 'strErrorStr', 'winerror' : 1,
- 'errno' : 22, 'filename' : 'filenameStr'})
+ 'strerror' : 'strErrorStr', 'winerror' : None,
+ 'errno' : 1, 'filename' : 'filenameStr'})
)
except NameError:
pass
@@ -362,19 +387,36 @@ class ExceptionTests(unittest.TestCase):
def testChainingAttrs(self):
e = Exception()
- self.assertEqual(e.__context__, None)
- self.assertEqual(e.__cause__, None)
+ self.assertIsNone(e.__context__)
+ self.assertIs(e.__cause__, Ellipsis)
e = TypeError()
- self.assertEqual(e.__context__, None)
- self.assertEqual(e.__cause__, None)
+ self.assertIsNone(e.__context__)
+ self.assertIs(e.__cause__, Ellipsis)
class MyException(EnvironmentError):
pass
e = MyException()
- self.assertEqual(e.__context__, None)
- self.assertEqual(e.__cause__, None)
+ self.assertIsNone(e.__context__)
+ self.assertIs(e.__cause__, Ellipsis)
+
+ def testChainingDescriptors(self):
+ try:
+ raise Exception()
+ except Exception as exc:
+ e = exc
+
+ self.assertIsNone(e.__context__)
+ self.assertIs(e.__cause__, Ellipsis)
+
+ e.__context__ = NameError()
+ e.__cause__ = None
+ self.assertIsInstance(e.__context__, NameError)
+ self.assertIsNone(e.__cause__)
+
+ e.__cause__ = Ellipsis
+ self.assertIs(e.__cause__, Ellipsis)
def testKeywordArgs(self):
# test that builtin exception don't take keyword args,
@@ -389,6 +431,7 @@ class ExceptionTests(unittest.TestCase):
x = DerivedException(fancy_arg=42)
self.assertEqual(x.fancy_arg, 42)
+ @no_tracing
def testInfiniteRecursion(self):
def f():
return f()
@@ -721,6 +764,7 @@ class ExceptionTests(unittest.TestCase):
u.start = 1000
self.assertEqual(str(u), "can't translate characters in position 1000-4: 965230951443685724997")
+ @no_tracing
def test_badisinstance(self):
# Bug #2542: if issubclass(e, MyException) raises an exception,
# it should be ignored
@@ -831,6 +875,7 @@ class ExceptionTests(unittest.TestCase):
self.fail("MemoryError not raised")
self.assertEqual(wr(), None)
+ @no_tracing
def test_recursion_error_cleanup(self):
# Same test as above, but with "recursion exceeded" errors
class C:
diff --git a/Lib/test/test_extcall.py b/Lib/test/test_extcall.py
index 1f7f630..6b6c12d 100644
--- a/Lib/test/test_extcall.py
+++ b/Lib/test/test_extcall.py
@@ -66,17 +66,17 @@ Verify clearing of SF bug #733667
>>> g()
Traceback (most recent call last):
...
- TypeError: g() takes at least 1 argument (0 given)
+ TypeError: g() missing 1 required positional argument: 'x'
>>> g(*())
Traceback (most recent call last):
...
- TypeError: g() takes at least 1 argument (0 given)
+ TypeError: g() missing 1 required positional argument: 'x'
>>> g(*(), **{})
Traceback (most recent call last):
...
- TypeError: g() takes at least 1 argument (0 given)
+ TypeError: g() missing 1 required positional argument: 'x'
>>> g(1)
1 () {}
@@ -151,7 +151,7 @@ What about willful misconduct?
>>> g(1, 2, 3, **{'x': 4, 'y': 5})
Traceback (most recent call last):
...
- TypeError: g() got multiple values for keyword argument 'x'
+ TypeError: g() got multiple values for argument 'x'
>>> f(**{1:2})
Traceback (most recent call last):
@@ -263,29 +263,80 @@ the function call setup. See <http://bugs.python.org/issue2016>.
>>> f(**x)
1 2
-A obscure message:
+Too many arguments:
- >>> def f(a, b):
- ... pass
- >>> f(b=1)
+ >>> def f(): pass
+ >>> f(1)
+ Traceback (most recent call last):
+ ...
+ TypeError: f() takes 0 positional arguments but 1 was given
+ >>> def f(a): pass
+ >>> f(1, 2)
Traceback (most recent call last):
...
- TypeError: f() takes exactly 2 arguments (1 given)
+ TypeError: f() takes 1 positional argument but 2 were given
+ >>> def f(a, b=1): pass
+ >>> f(1, 2, 3)
+ Traceback (most recent call last):
+ ...
+ TypeError: f() takes from 1 to 2 positional arguments but 3 were given
+ >>> def f(*, kw): pass
+ >>> f(1, kw=3)
+ Traceback (most recent call last):
+ ...
+ TypeError: f() takes 0 positional arguments but 1 positional argument (and 1 keyword-only argument) were given
+ >>> def f(*, kw, b): pass
+ >>> f(1, 2, 3, b=3, kw=3)
+ Traceback (most recent call last):
+ ...
+ TypeError: f() takes 0 positional arguments but 3 positional arguments (and 2 keyword-only arguments) were given
+ >>> def f(a, b=2, *, kw): pass
+ >>> f(2, 3, 4, kw=4)
+ Traceback (most recent call last):
+ ...
+ TypeError: f() takes from 1 to 2 positional arguments but 3 positional arguments (and 1 keyword-only argument) were given
-The number of arguments passed in includes keywords:
+Too few and missing arguments:
- >>> def f(a):
- ... pass
- >>> f(6, a=4, *(1, 2, 3))
+ >>> def f(a): pass
+ >>> f()
Traceback (most recent call last):
...
- TypeError: f() takes exactly 1 positional argument (5 given)
- >>> def f(a, *, kw):
- ... pass
- >>> f(6, 4, kw=4)
+ TypeError: f() missing 1 required positional argument: 'a'
+ >>> def f(a, b): pass
+ >>> f()
Traceback (most recent call last):
...
- TypeError: f() takes exactly 1 positional argument (3 given)
+ TypeError: f() missing 2 required positional arguments: 'a' and 'b'
+ >>> def f(a, b, c): pass
+ >>> f()
+ Traceback (most recent call last):
+ ...
+ TypeError: f() missing 3 required positional arguments: 'a', 'b', and 'c'
+ >>> def f(a, b, c, d, e): pass
+ >>> f()
+ Traceback (most recent call last):
+ ...
+ TypeError: f() missing 5 required positional arguments: 'a', 'b', 'c', 'd', and 'e'
+ >>> def f(a, b=4, c=5, d=5): pass
+ >>> f(c=12, b=9)
+ Traceback (most recent call last):
+ ...
+ TypeError: f() missing 1 required positional argument: 'a'
+
+Same with keyword only args:
+
+ >>> def f(*, w): pass
+ >>> f()
+ Traceback (most recent call last):
+ ...
+ TypeError: f() missing 1 required keyword-only argument: 'w'
+ >>> def f(*, a, b, c, d, e): pass
+ >>> f()
+ Traceback (most recent call last):
+ ...
+ TypeError: f() missing 5 required keyword-only arguments: 'a', 'b', 'c', 'd', and 'e'
+
"""
import sys
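
The reworded messages can be reproduced outside the doctest (the output shown
is what the interpreter patched here emits; other versions word it
differently):

    def f(a, b=2, *, kw):
        pass

    try:
        f()
    except TypeError as e:
        print(e)   # f() missing 1 required positional argument: 'a'

    try:
        f(1, 2, 3, kw=4)
    except TypeError as e:
        # f() takes from 1 to 2 positional arguments but 3 positional
        # arguments (and 1 keyword-only argument) were given
        print(e)
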
diff --git a/Lib/test/test_faulthandler.py b/Lib/test/test_faulthandler.py
new file mode 100644
index 0000000..977cb39
--- /dev/null
+++ b/Lib/test/test_faulthandler.py
@@ -0,0 +1,554 @@
+from contextlib import contextmanager
+import datetime
+import faulthandler
+import os
+import re
+import signal
+import subprocess
+import sys
+from test import support, script_helper
+import tempfile
+import unittest
+
+try:
+ import threading
+ HAVE_THREADS = True
+except ImportError:
+ HAVE_THREADS = False
+
+TIMEOUT = 0.5
+
+try:
+ from resource import setrlimit, RLIMIT_CORE, error as resource_error
+except ImportError:
+ prepare_subprocess = None
+else:
+ def prepare_subprocess():
+ # don't create core file
+ try:
+ setrlimit(RLIMIT_CORE, (0, 0))
+ except (ValueError, resource_error):
+ pass
+
+def expected_traceback(lineno1, lineno2, header, min_count=1):
+ regex = header
+ regex += ' File "<string>", line %s in func\n' % lineno1
+ regex += ' File "<string>", line %s in <module>' % lineno2
+ if 1 < min_count:
+ return '^' + (regex + '\n') * (min_count - 1) + regex
+ else:
+ return '^' + regex + '$'
+
+@contextmanager
+def temporary_filename():
+ filename = tempfile.mktemp()
+ try:
+ yield filename
+ finally:
+ support.unlink(filename)
+
+class FaultHandlerTests(unittest.TestCase):
+ def get_output(self, code, filename=None):
+ """
+ Run the specified code in Python (in a new child process) and read the
+ output from the standard error or from a file (if filename is set).
+ Return the output lines as a list.
+
+        Strip the reference count from the standard error for a Python debug
+        build, and replace "Current thread 0x00007f8d8fbd9700" with "Current
+        thread XXX".
+ """
+ options = {}
+ if prepare_subprocess:
+ options['preexec_fn'] = prepare_subprocess
+ process = script_helper.spawn_python('-c', code, **options)
+ stdout, stderr = process.communicate()
+ exitcode = process.wait()
+ output = support.strip_python_stderr(stdout)
+ output = output.decode('ascii', 'backslashreplace')
+ if filename:
+ self.assertEqual(output, '')
+ with open(filename, "rb") as fp:
+ output = fp.read()
+ output = output.decode('ascii', 'backslashreplace')
+ output = re.sub('Current thread 0x[0-9a-f]+',
+ 'Current thread XXX',
+ output)
+ return output.splitlines(), exitcode
+
+ def check_fatal_error(self, code, line_number, name_regex,
+ filename=None, all_threads=True, other_regex=None):
+ """
+ Check that the fault handler for fatal errors is enabled and check the
+ traceback from the child process output.
+
+ Raise an error if the output doesn't match the expected format.
+ """
+ if all_threads:
+ header = 'Current thread XXX'
+ else:
+ header = 'Traceback (most recent call first)'
+ regex = """
+^Fatal Python error: {name}
+
+{header}:
+ File "<string>", line {lineno} in <module>$
+""".strip()
+ regex = regex.format(
+ lineno=line_number,
+ name=name_regex,
+ header=re.escape(header))
+ if other_regex:
+ regex += '|' + other_regex
+ output, exitcode = self.get_output(code, filename)
+ output = '\n'.join(output)
+ self.assertRegex(output, regex)
+ self.assertNotEqual(exitcode, 0)
+
+ def test_read_null(self):
+ self.check_fatal_error("""
+import faulthandler
+faulthandler.enable()
+faulthandler._read_null()
+""".strip(),
+ 3,
+ # Issue #12700: Read NULL raises SIGILL on Mac OS X Lion
+ '(?:Segmentation fault|Bus error|Illegal instruction)')
+
+ def test_sigsegv(self):
+ self.check_fatal_error("""
+import faulthandler
+faulthandler.enable()
+faulthandler._sigsegv()
+""".strip(),
+ 3,
+ 'Segmentation fault')
+
+ def test_sigabrt(self):
+ self.check_fatal_error("""
+import faulthandler
+faulthandler.enable()
+faulthandler._sigabrt()
+""".strip(),
+ 3,
+ 'Aborted')
+
+ @unittest.skipIf(sys.platform == 'win32',
+ "SIGFPE cannot be caught on Windows")
+ def test_sigfpe(self):
+ self.check_fatal_error("""
+import faulthandler
+faulthandler.enable()
+faulthandler._sigfpe()
+""".strip(),
+ 3,
+ 'Floating point exception')
+
+ @unittest.skipIf(not hasattr(faulthandler, '_sigbus'),
+ "need faulthandler._sigbus()")
+ def test_sigbus(self):
+ self.check_fatal_error("""
+import faulthandler
+faulthandler.enable()
+faulthandler._sigbus()
+""".strip(),
+ 3,
+ 'Bus error')
+
+ @unittest.skipIf(not hasattr(faulthandler, '_sigill'),
+ "need faulthandler._sigill()")
+ def test_sigill(self):
+ self.check_fatal_error("""
+import faulthandler
+faulthandler.enable()
+faulthandler._sigill()
+""".strip(),
+ 3,
+ 'Illegal instruction')
+
+ def test_fatal_error(self):
+ self.check_fatal_error("""
+import faulthandler
+faulthandler._fatal_error(b'xyz')
+""".strip(),
+ 2,
+ 'xyz')
+
+ @unittest.skipIf(sys.platform.startswith('openbsd') and HAVE_THREADS,
+ "Issue #12868: sigaltstack() doesn't work on "
+ "OpenBSD if Python is compiled with pthread")
+ @unittest.skipIf(not hasattr(faulthandler, '_stack_overflow'),
+ 'need faulthandler._stack_overflow()')
+ def test_stack_overflow(self):
+ self.check_fatal_error("""
+import faulthandler
+faulthandler.enable()
+faulthandler._stack_overflow()
+""".strip(),
+ 3,
+ '(?:Segmentation fault|Bus error)',
+ other_regex='unable to raise a stack overflow')
+
+ def test_gil_released(self):
+ self.check_fatal_error("""
+import faulthandler
+faulthandler.enable()
+faulthandler._read_null(True)
+""".strip(),
+ 3,
+ '(?:Segmentation fault|Bus error|Illegal instruction)')
+
+ def test_enable_file(self):
+ with temporary_filename() as filename:
+ self.check_fatal_error("""
+import faulthandler
+output = open({filename}, 'wb')
+faulthandler.enable(output)
+faulthandler._read_null()
+""".strip().format(filename=repr(filename)),
+ 4,
+ '(?:Segmentation fault|Bus error|Illegal instruction)',
+ filename=filename)
+
+ def test_enable_single_thread(self):
+ self.check_fatal_error("""
+import faulthandler
+faulthandler.enable(all_threads=False)
+faulthandler._read_null()
+""".strip(),
+ 3,
+ '(?:Segmentation fault|Bus error|Illegal instruction)',
+ all_threads=False)
+
+ def test_disable(self):
+ code = """
+import faulthandler
+faulthandler.enable()
+faulthandler.disable()
+faulthandler._read_null()
+""".strip()
+ not_expected = 'Fatal Python error'
+ stderr, exitcode = self.get_output(code)
+        stderr = '\n'.join(stderr)
+ self.assertTrue(not_expected not in stderr,
+ "%r is present in %r" % (not_expected, stderr))
+ self.assertNotEqual(exitcode, 0)
+
+ def test_is_enabled(self):
+ orig_stderr = sys.stderr
+ try:
+ # regrtest may replace sys.stderr by io.StringIO object, but
+ # faulthandler.enable() requires that sys.stderr has a fileno()
+ # method
+ sys.stderr = sys.__stderr__
+
+ was_enabled = faulthandler.is_enabled()
+ try:
+ faulthandler.enable()
+ self.assertTrue(faulthandler.is_enabled())
+ faulthandler.disable()
+ self.assertFalse(faulthandler.is_enabled())
+ finally:
+ if was_enabled:
+ faulthandler.enable()
+ else:
+ faulthandler.disable()
+ finally:
+ sys.stderr = orig_stderr
+
+ def check_dump_traceback(self, filename):
+ """
+        Explicitly call the dump_traceback() function and check its output.
+ Raise an error if the output doesn't match the expected format.
+ """
+ code = """
+import faulthandler
+
+def funcB():
+ if {has_filename}:
+ with open({filename}, "wb") as fp:
+ faulthandler.dump_traceback(fp, all_threads=False)
+ else:
+ faulthandler.dump_traceback(all_threads=False)
+
+def funcA():
+ funcB()
+
+funcA()
+""".strip()
+ code = code.format(
+ filename=repr(filename),
+ has_filename=bool(filename),
+ )
+ if filename:
+ lineno = 6
+ else:
+ lineno = 8
+ expected = [
+ 'Traceback (most recent call first):',
+ ' File "<string>", line %s in funcB' % lineno,
+ ' File "<string>", line 11 in funcA',
+ ' File "<string>", line 13 in <module>'
+ ]
+ trace, exitcode = self.get_output(code, filename)
+ self.assertEqual(trace, expected)
+ self.assertEqual(exitcode, 0)
+
+ def test_dump_traceback(self):
+ self.check_dump_traceback(None)
+
+ def test_dump_traceback_file(self):
+ with temporary_filename() as filename:
+ self.check_dump_traceback(filename)
+
+ @unittest.skipIf(not HAVE_THREADS, 'need threads')
+ def check_dump_traceback_threads(self, filename):
+ """
+        Explicitly call dump_traceback(all_threads=True) and check the output.
+ Raise an error if the output doesn't match the expected format.
+ """
+ code = """
+import faulthandler
+from threading import Thread, Event
+import time
+
+def dump():
+ if {filename}:
+ with open({filename}, "wb") as fp:
+ faulthandler.dump_traceback(fp, all_threads=True)
+ else:
+ faulthandler.dump_traceback(all_threads=True)
+
+class Waiter(Thread):
+ # avoid blocking if the main thread raises an exception.
+ daemon = True
+
+ def __init__(self):
+ Thread.__init__(self)
+ self.running = Event()
+ self.stop = Event()
+
+ def run(self):
+ self.running.set()
+ self.stop.wait()
+
+waiter = Waiter()
+waiter.start()
+waiter.running.wait()
+dump()
+waiter.stop.set()
+waiter.join()
+""".strip()
+ code = code.format(filename=repr(filename))
+ output, exitcode = self.get_output(code, filename)
+ output = '\n'.join(output)
+ if filename:
+ lineno = 8
+ else:
+ lineno = 10
+ regex = """
+^Thread 0x[0-9a-f]+:
+(?: File ".*threading.py", line [0-9]+ in [_a-z]+
+){{1,3}} File "<string>", line 23 in run
+ File ".*threading.py", line [0-9]+ in _bootstrap_inner
+ File ".*threading.py", line [0-9]+ in _bootstrap
+
+Current thread XXX:
+ File "<string>", line {lineno} in dump
+ File "<string>", line 28 in <module>$
+""".strip()
+ regex = regex.format(lineno=lineno)
+ self.assertRegex(output, regex)
+ self.assertEqual(exitcode, 0)
+
+ def test_dump_traceback_threads(self):
+ self.check_dump_traceback_threads(None)
+
+ def test_dump_traceback_threads_file(self):
+ with temporary_filename() as filename:
+ self.check_dump_traceback_threads(filename)
+
+ def _check_dump_tracebacks_later(self, repeat, cancel, filename, loops):
+ """
+ Check how many times the traceback is written in timeout x 2.5 seconds,
+ or timeout x 3.5 seconds if cancel is True: 1, 2 or 3 times depending
+        on the repeat and cancel options.
+
+        Raise an error if the output doesn't match the expected format.
+ """
+ timeout_str = str(datetime.timedelta(seconds=TIMEOUT))
+ code = """
+import faulthandler
+import time
+
+def func(timeout, repeat, cancel, file, loops):
+ for loop in range(loops):
+ faulthandler.dump_tracebacks_later(timeout, repeat=repeat, file=file)
+ if cancel:
+ faulthandler.cancel_dump_tracebacks_later()
+ time.sleep(timeout * 5)
+ faulthandler.cancel_dump_tracebacks_later()
+
+timeout = {timeout}
+repeat = {repeat}
+cancel = {cancel}
+loops = {loops}
+if {has_filename}:
+ file = open({filename}, "wb")
+else:
+ file = None
+func(timeout, repeat, cancel, file, loops)
+if file is not None:
+ file.close()
+""".strip()
+ code = code.format(
+ timeout=TIMEOUT,
+ repeat=repeat,
+ cancel=cancel,
+ loops=loops,
+ has_filename=bool(filename),
+ filename=repr(filename),
+ )
+ trace, exitcode = self.get_output(code, filename)
+ trace = '\n'.join(trace)
+
+ if not cancel:
+ count = loops
+ if repeat:
+ count *= 2
+ header = r'Timeout \(%s\)!\nThread 0x[0-9a-f]+:\n' % timeout_str
+ regex = expected_traceback(9, 20, header, min_count=count)
+ self.assertRegex(trace, regex)
+ else:
+ self.assertEqual(trace, '')
+ self.assertEqual(exitcode, 0)
+
+ @unittest.skipIf(not hasattr(faulthandler, 'dump_tracebacks_later'),
+ 'need faulthandler.dump_tracebacks_later()')
+ def check_dump_tracebacks_later(self, repeat=False, cancel=False,
+ file=False, twice=False):
+ if twice:
+ loops = 2
+ else:
+ loops = 1
+ if file:
+ with temporary_filename() as filename:
+ self._check_dump_tracebacks_later(repeat, cancel,
+ filename, loops)
+ else:
+ self._check_dump_tracebacks_later(repeat, cancel, None, loops)
+
+ def test_dump_tracebacks_later(self):
+ self.check_dump_tracebacks_later()
+
+ def test_dump_tracebacks_later_repeat(self):
+ self.check_dump_tracebacks_later(repeat=True)
+
+ def test_dump_tracebacks_later_cancel(self):
+ self.check_dump_tracebacks_later(cancel=True)
+
+ def test_dump_tracebacks_later_file(self):
+ self.check_dump_tracebacks_later(file=True)
+
+ def test_dump_tracebacks_later_twice(self):
+ self.check_dump_tracebacks_later(twice=True)
+
+ @unittest.skipIf(not hasattr(faulthandler, "register"),
+ "need faulthandler.register")
+ def check_register(self, filename=False, all_threads=False,
+ unregister=False, chain=False):
+ """
+ Register a handler displaying the traceback on a user signal. Raise the
+ signal and check the written traceback.
+
+ If chain is True, check that the previous signal handler is called.
+
+ Raise an error if the output doesn't match the expected format.
+ """
+ signum = signal.SIGUSR1
+ code = """
+import faulthandler
+import os
+import signal
+import sys
+
+def func(signum):
+ os.kill(os.getpid(), signum)
+
+def handler(signum, frame):
+ handler.called = True
+handler.called = False
+
+exitcode = 0
+signum = {signum}
+unregister = {unregister}
+chain = {chain}
+
+if {has_filename}:
+ file = open({filename}, "wb")
+else:
+ file = None
+if chain:
+ signal.signal(signum, handler)
+faulthandler.register(signum, file=file,
+ all_threads={all_threads}, chain={chain})
+if unregister:
+ faulthandler.unregister(signum)
+func(signum)
+if chain and not handler.called:
+ if file is not None:
+ output = file
+ else:
+ output = sys.stderr
+ print("Error: signal handler not called!", file=output)
+ exitcode = 1
+if file is not None:
+ file.close()
+sys.exit(exitcode)
+""".strip()
+ code = code.format(
+ filename=repr(filename),
+ has_filename=bool(filename),
+ all_threads=all_threads,
+ signum=signum,
+ unregister=unregister,
+ chain=chain,
+ )
+ trace, exitcode = self.get_output(code, filename)
+ trace = '\n'.join(trace)
+ if not unregister:
+ if all_threads:
+ regex = 'Current thread XXX:\n'
+ else:
+                regex = r'Traceback \(most recent call first\):\n'
+ regex = expected_traceback(7, 28, regex)
+ self.assertRegex(trace, regex)
+ else:
+ self.assertEqual(trace, '')
+ if unregister:
+ self.assertNotEqual(exitcode, 0)
+ else:
+ self.assertEqual(exitcode, 0)
+
+ def test_register(self):
+ self.check_register()
+
+ def test_unregister(self):
+ self.check_register(unregister=True)
+
+ def test_register_file(self):
+ with temporary_filename() as filename:
+ self.check_register(filename=filename)
+
+ def test_register_threads(self):
+ self.check_register(all_threads=True)
+
+ def test_register_chain(self):
+ self.check_register(chain=True)
+
+
+def test_main():
+ support.run_unittest(FaultHandlerTests)
+
+if __name__ == "__main__":
+ test_main()
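
In normal use the module these tests cover is enabled once at startup (a
minimal sketch; both calls are part of the faulthandler API exercised above):

    import sys
    import faulthandler

    # After this, SIGSEGV/SIGFPE/SIGABRT/SIGBUS/SIGILL dump the Python
    # traceback of every thread to stderr before the process dies.
    faulthandler.enable(file=sys.stderr, all_threads=True)

    # A traceback can also be dumped explicitly at any point:
    faulthandler.dump_traceback(all_threads=True)
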
diff --git a/Lib/test/test_fileinput.py b/Lib/test/test_fileinput.py
index f312882..1e70641 100644
--- a/Lib/test/test_fileinput.py
+++ b/Lib/test/test_fileinput.py
@@ -2,18 +2,33 @@
Tests for fileinput module.
Nick Mathewson
'''
-
+import os
+import sys
+import re
+import fileinput
+import collections
+import builtins
import unittest
-from test.support import verbose, TESTFN, run_unittest
-from test.support import unlink as safe_unlink
-import sys, re
+
+try:
+ import bz2
+except ImportError:
+ bz2 = None
+try:
+ import gzip
+except ImportError:
+ gzip = None
+
from io import StringIO
from fileinput import FileInput, hook_encoded
+from test.support import verbose, TESTFN, run_unittest
+from test.support import unlink as safe_unlink
+
+
# The fileinput module has 2 interfaces: the FileInput class which does
# all the work, and a few functions (input, etc.) that use a global _state
-# variable. We only test the FileInput class, since the other functions
-# only provide a thin facade over FileInput.
+# variable.
# Write lines (a list of lines) to temp file number i, and return the
# temp file's name.
@@ -121,7 +136,16 @@ class BufferSizesTests(unittest.TestCase):
self.assertEqual(int(m.group(1)), fi.filelineno())
fi.close()
+class UnconditionallyRaise:
+ def __init__(self, exception_type):
+ self.exception_type = exception_type
+ self.invoked = False
+ def __call__(self, *args, **kwargs):
+ self.invoked = True
+ raise self.exception_type()
+
class FileInputTests(unittest.TestCase):
+
def test_zero_byte_files(self):
t1 = t2 = t3 = t4 = None
try:
@@ -219,17 +243,20 @@ class FileInputTests(unittest.TestCase):
self.fail("FileInput should check openhook for being callable")
except ValueError:
pass
- # XXX The rot13 codec was removed.
- # So this test needs to be changed to use something else.
- # (Or perhaps the API needs to change so we can just pass
- # an encoding rather than using a hook?)
-## try:
-## t1 = writeTmp(1, ["A\nB"], mode="wb")
-## fi = FileInput(files=t1, openhook=hook_encoded("rot13"))
-## lines = list(fi)
-## self.assertEqual(lines, ["N\n", "O"])
-## finally:
-## remove_tempfiles(t1)
+
+ class CustomOpenHook:
+ def __init__(self):
+ self.invoked = False
+ def __call__(self, *args):
+ self.invoked = True
+ return open(*args)
+
+ t = writeTmp(1, ["\n"])
+ self.addCleanup(remove_tempfiles, t)
+ custom_open_hook = CustomOpenHook()
+ with FileInput([t], openhook=custom_open_hook) as fi:
+ fi.readline()
+ self.assertTrue(custom_open_hook.invoked, "openhook not invoked")
def test_context_manager(self):
try:
@@ -254,9 +281,576 @@ class FileInputTests(unittest.TestCase):
finally:
remove_tempfiles(t1)
+ def test_empty_files_list_specified_to_constructor(self):
+ with FileInput(files=[]) as fi:
+ self.assertEqual(fi._files, ('-',))
+
+ def test__getitem__(self):
+ """Tests invoking FileInput.__getitem__() with the current
+ line number"""
+ t = writeTmp(1, ["line1\n", "line2\n"])
+ self.addCleanup(remove_tempfiles, t)
+ with FileInput(files=[t]) as fi:
+ retval1 = fi[0]
+ self.assertEqual(retval1, "line1\n")
+ retval2 = fi[1]
+ self.assertEqual(retval2, "line2\n")
+
+ def test__getitem__invalid_key(self):
+ """Tests invoking FileInput.__getitem__() with an index unequal to
+ the line number"""
+ t = writeTmp(1, ["line1\n", "line2\n"])
+ self.addCleanup(remove_tempfiles, t)
+ with FileInput(files=[t]) as fi:
+ with self.assertRaises(RuntimeError) as cm:
+ fi[1]
+ self.assertEqual(cm.exception.args, ("accessing lines out of order",))
+
+ def test__getitem__eof(self):
+ """Tests invoking FileInput.__getitem__() with the line number but at
+ end-of-input"""
+ t = writeTmp(1, [])
+ self.addCleanup(remove_tempfiles, t)
+ with FileInput(files=[t]) as fi:
+ with self.assertRaises(IndexError) as cm:
+ fi[0]
+ self.assertEqual(cm.exception.args, ("end of input reached",))
+
+ def test_nextfile_oserror_deleting_backup(self):
+ """Tests invoking FileInput.nextfile() when the attempt to delete
+ the backup file would raise OSError. This error is expected to be
+ silently ignored"""
+
+ os_unlink_orig = os.unlink
+ os_unlink_replacement = UnconditionallyRaise(OSError)
+ try:
+ t = writeTmp(1, ["\n"])
+ self.addCleanup(remove_tempfiles, t)
+ with FileInput(files=[t], inplace=True) as fi:
+ next(fi) # make sure the file is opened
+ os.unlink = os_unlink_replacement
+ fi.nextfile()
+ finally:
+ os.unlink = os_unlink_orig
+
+ # sanity check to make sure that our test scenario was actually hit
+ self.assertTrue(os_unlink_replacement.invoked,
+ "os.unlink() was not invoked")
+
+ def test_readline_os_fstat_raises_OSError(self):
+ """Tests invoking FileInput.readline() when os.fstat() raises OSError.
+ This exception should be silently discarded."""
+
+ os_fstat_orig = os.fstat
+ os_fstat_replacement = UnconditionallyRaise(OSError)
+ try:
+ t = writeTmp(1, ["\n"])
+ self.addCleanup(remove_tempfiles, t)
+ with FileInput(files=[t], inplace=True) as fi:
+ os.fstat = os_fstat_replacement
+ fi.readline()
+ finally:
+ os.fstat = os_fstat_orig
+
+ # sanity check to make sure that our test scenario was actually hit
+ self.assertTrue(os_fstat_replacement.invoked,
+ "os.fstat() was not invoked")
+
+ @unittest.skipIf(not hasattr(os, "chmod"), "os.chmod does not exist")
+ def test_readline_os_chmod_raises_OSError(self):
+ """Tests invoking FileInput.readline() when os.chmod() raises OSError.
+ This exception should be silently discarded."""
+
+ os_chmod_orig = os.chmod
+ os_chmod_replacement = UnconditionallyRaise(OSError)
+ try:
+ t = writeTmp(1, ["\n"])
+ self.addCleanup(remove_tempfiles, t)
+ with FileInput(files=[t], inplace=True) as fi:
+ os.chmod = os_chmod_replacement
+ fi.readline()
+ finally:
+ os.chmod = os_chmod_orig
+
+ # sanity check to make sure that our test scenario was actually hit
+ self.assertTrue(os_chmod_replacement.invoked,
+                        "os.chmod() was not invoked")
+
+ def test_fileno_when_ValueError_raised(self):
+ class FilenoRaisesValueError(UnconditionallyRaise):
+ def __init__(self):
+ UnconditionallyRaise.__init__(self, ValueError)
+ def fileno(self):
+ self.__call__()
+
+ unconditionally_raise_ValueError = FilenoRaisesValueError()
+ t = writeTmp(1, ["\n"])
+ self.addCleanup(remove_tempfiles, t)
+ with FileInput(files=[t]) as fi:
+ file_backup = fi._file
+ try:
+ fi._file = unconditionally_raise_ValueError
+ result = fi.fileno()
+ finally:
+ fi._file = file_backup # make sure the file gets cleaned up
+
+ # sanity check to make sure that our test scenario was actually hit
+ self.assertTrue(unconditionally_raise_ValueError.invoked,
+ "_file.fileno() was not invoked")
+
+ self.assertEqual(result, -1, "fileno() should return -1")
+
+class MockFileInput:
+ """A class that mocks out fileinput.FileInput for use during unit tests"""
+
+ def __init__(self, files=None, inplace=False, backup="", bufsize=0,
+ mode="r", openhook=None):
+ self.files = files
+ self.inplace = inplace
+ self.backup = backup
+ self.bufsize = bufsize
+ self.mode = mode
+ self.openhook = openhook
+ self._file = None
+ self.invocation_counts = collections.defaultdict(lambda: 0)
+ self.return_values = {}
+
+ def close(self):
+ self.invocation_counts["close"] += 1
+
+ def nextfile(self):
+ self.invocation_counts["nextfile"] += 1
+ return self.return_values["nextfile"]
+
+ def filename(self):
+ self.invocation_counts["filename"] += 1
+ return self.return_values["filename"]
+
+ def lineno(self):
+ self.invocation_counts["lineno"] += 1
+ return self.return_values["lineno"]
+
+ def filelineno(self):
+ self.invocation_counts["filelineno"] += 1
+ return self.return_values["filelineno"]
+
+ def fileno(self):
+ self.invocation_counts["fileno"] += 1
+ return self.return_values["fileno"]
+
+ def isfirstline(self):
+ self.invocation_counts["isfirstline"] += 1
+ return self.return_values["isfirstline"]
+
+ def isstdin(self):
+ self.invocation_counts["isstdin"] += 1
+ return self.return_values["isstdin"]
+
+class BaseFileInputGlobalMethodsTest(unittest.TestCase):
+ """Base class for unit tests for the global function of
+ the fileinput module."""
+
+ def setUp(self):
+ self._orig_state = fileinput._state
+ self._orig_FileInput = fileinput.FileInput
+ fileinput.FileInput = MockFileInput
+
+ def tearDown(self):
+ fileinput.FileInput = self._orig_FileInput
+ fileinput._state = self._orig_state
+
+ def assertExactlyOneInvocation(self, mock_file_input, method_name):
+ # assert that the method with the given name was invoked once
+ actual_count = mock_file_input.invocation_counts[method_name]
+ self.assertEqual(actual_count, 1, method_name)
+ # assert that no other unexpected methods were invoked
+ actual_total_count = len(mock_file_input.invocation_counts)
+ self.assertEqual(actual_total_count, 1)
+
+class Test_fileinput_input(BaseFileInputGlobalMethodsTest):
+ """Unit tests for fileinput.input()"""
+
+ def test_state_is_not_None_and_state_file_is_not_None(self):
+ """Tests invoking fileinput.input() when fileinput._state is not None
+ and its _file attribute is also not None. Expect RuntimeError to
+ be raised with a meaningful error message and for fileinput._state
+ to *not* be modified."""
+ instance = MockFileInput()
+ instance._file = object()
+ fileinput._state = instance
+ with self.assertRaises(RuntimeError) as cm:
+ fileinput.input()
+ self.assertEqual(("input() already active",), cm.exception.args)
+ self.assertIs(instance, fileinput._state, "fileinput._state")
+
+ def test_state_is_not_None_and_state_file_is_None(self):
+ """Tests invoking fileinput.input() when fileinput._state is not None
+ but its _file attribute *is* None. Expect it to create and return
+ a new fileinput.FileInput object with all method parameters passed
+ explicitly to the __init__() method; also ensure that
+ fileinput._state is set to the returned instance."""
+ instance = MockFileInput()
+ instance._file = None
+ fileinput._state = instance
+ self.do_test_call_input()
+
+ def test_state_is_None(self):
+ """Tests invoking fileinput.input() when fileinput._state is None
+ Expect it to create and return a new fileinput.FileInput object
+ with all method parameters passed explicitly to the __init__()
+ method; also ensure that fileinput._state is set to the returned
+ instance."""
+ fileinput._state = None
+ self.do_test_call_input()
+
+ def do_test_call_input(self):
+ """Tests that fileinput.input() creates a new fileinput.FileInput
+ object, passing the given parameters unmodified to
+ fileinput.FileInput.__init__(). Note that this test depends on the
+ monkey patching of fileinput.FileInput done by setUp()."""
+ files = object()
+ inplace = object()
+ backup = object()
+ bufsize = object()
+ mode = object()
+ openhook = object()
+
+ # call fileinput.input() with different values for each argument
+ result = fileinput.input(files=files, inplace=inplace, backup=backup,
+ bufsize=bufsize,
+ mode=mode, openhook=openhook)
+
+ # ensure fileinput._state was set to the returned object
+ self.assertIs(result, fileinput._state, "fileinput._state")
+
+ # ensure the parameters to fileinput.input() were passed directly
+ # to FileInput.__init__()
+ self.assertIs(files, result.files, "files")
+ self.assertIs(inplace, result.inplace, "inplace")
+ self.assertIs(backup, result.backup, "backup")
+ self.assertIs(bufsize, result.bufsize, "bufsize")
+ self.assertIs(mode, result.mode, "mode")
+ self.assertIs(openhook, result.openhook, "openhook")
+
+class Test_fileinput_close(BaseFileInputGlobalMethodsTest):
+ """Unit tests for fileinput.close()"""
+
+ def test_state_is_None(self):
+ """Tests that fileinput.close() does nothing if fileinput._state
+ is None"""
+ fileinput._state = None
+ fileinput.close()
+ self.assertIsNone(fileinput._state)
+
+ def test_state_is_not_None(self):
+ """Tests that fileinput.close() invokes close() on fileinput._state
+ and sets _state=None"""
+ instance = MockFileInput()
+ fileinput._state = instance
+ fileinput.close()
+ self.assertExactlyOneInvocation(instance, "close")
+ self.assertIsNone(fileinput._state)
+
+class Test_fileinput_nextfile(BaseFileInputGlobalMethodsTest):
+ """Unit tests for fileinput.nextfile()"""
+
+ def test_state_is_None(self):
+ """Tests fileinput.nextfile() when fileinput._state is None.
+ Ensure that it raises RuntimeError with a meaningful error message
+ and does not modify fileinput._state"""
+ fileinput._state = None
+ with self.assertRaises(RuntimeError) as cm:
+ fileinput.nextfile()
+ self.assertEqual(("no active input()",), cm.exception.args)
+ self.assertIsNone(fileinput._state)
+
+ def test_state_is_not_None(self):
+ """Tests fileinput.nextfile() when fileinput._state is not None.
+ Ensure that it invokes fileinput._state.nextfile() exactly once,
+ returns whatever it returns, and does not modify fileinput._state
+ to point to a different object."""
+ nextfile_retval = object()
+ instance = MockFileInput()
+ instance.return_values["nextfile"] = nextfile_retval
+ fileinput._state = instance
+ retval = fileinput.nextfile()
+ self.assertExactlyOneInvocation(instance, "nextfile")
+ self.assertIs(retval, nextfile_retval)
+ self.assertIs(fileinput._state, instance)
+
+class Test_fileinput_filename(BaseFileInputGlobalMethodsTest):
+ """Unit tests for fileinput.filename()"""
+
+ def test_state_is_None(self):
+ """Tests fileinput.filename() when fileinput._state is None.
+ Ensure that it raises RuntimeError with a meaningful error message
+ and does not modify fileinput._state"""
+ fileinput._state = None
+ with self.assertRaises(RuntimeError) as cm:
+ fileinput.filename()
+ self.assertEqual(("no active input()",), cm.exception.args)
+ self.assertIsNone(fileinput._state)
+
+ def test_state_is_not_None(self):
+ """Tests fileinput.filename() when fileinput._state is not None.
+ Ensure that it invokes fileinput._state.filename() exactly once,
+ returns whatever it returns, and does not modify fileinput._state
+ to point to a different object."""
+ filename_retval = object()
+ instance = MockFileInput()
+ instance.return_values["filename"] = filename_retval
+ fileinput._state = instance
+ retval = fileinput.filename()
+ self.assertExactlyOneInvocation(instance, "filename")
+ self.assertIs(retval, filename_retval)
+ self.assertIs(fileinput._state, instance)
+
+class Test_fileinput_lineno(BaseFileInputGlobalMethodsTest):
+ """Unit tests for fileinput.lineno()"""
+
+ def test_state_is_None(self):
+ """Tests fileinput.lineno() when fileinput._state is None.
+ Ensure that it raises RuntimeError with a meaningful error message
+ and does not modify fileinput._state"""
+ fileinput._state = None
+ with self.assertRaises(RuntimeError) as cm:
+ fileinput.lineno()
+ self.assertEqual(("no active input()",), cm.exception.args)
+ self.assertIsNone(fileinput._state)
+
+ def test_state_is_not_None(self):
+ """Tests fileinput.lineno() when fileinput._state is not None.
+ Ensure that it invokes fileinput._state.lineno() exactly once,
+ returns whatever it returns, and does not modify fileinput._state
+ to point to a different object."""
+ lineno_retval = object()
+ instance = MockFileInput()
+ instance.return_values["lineno"] = lineno_retval
+ fileinput._state = instance
+ retval = fileinput.lineno()
+ self.assertExactlyOneInvocation(instance, "lineno")
+ self.assertIs(retval, lineno_retval)
+ self.assertIs(fileinput._state, instance)
+
+class Test_fileinput_filelineno(BaseFileInputGlobalMethodsTest):
+ """Unit tests for fileinput.filelineno()"""
+
+ def test_state_is_None(self):
+ """Tests fileinput.filelineno() when fileinput._state is None.
+ Ensure that it raises RuntimeError with a meaningful error message
+ and does not modify fileinput._state"""
+ fileinput._state = None
+ with self.assertRaises(RuntimeError) as cm:
+ fileinput.filelineno()
+ self.assertEqual(("no active input()",), cm.exception.args)
+ self.assertIsNone(fileinput._state)
+
+ def test_state_is_not_None(self):
+ """Tests fileinput.filelineno() when fileinput._state is not None.
+ Ensure that it invokes fileinput._state.filelineno() exactly once,
+ returns whatever it returns, and does not modify fileinput._state
+ to point to a different object."""
+ filelineno_retval = object()
+ instance = MockFileInput()
+ instance.return_values["filelineno"] = filelineno_retval
+ fileinput._state = instance
+ retval = fileinput.filelineno()
+ self.assertExactlyOneInvocation(instance, "filelineno")
+ self.assertIs(retval, filelineno_retval)
+ self.assertIs(fileinput._state, instance)
+
+class Test_fileinput_fileno(BaseFileInputGlobalMethodsTest):
+ """Unit tests for fileinput.fileno()"""
+
+ def test_state_is_None(self):
+ """Tests fileinput.fileno() when fileinput._state is None.
+ Ensure that it raises RuntimeError with a meaningful error message
+ and does not modify fileinput._state"""
+ fileinput._state = None
+ with self.assertRaises(RuntimeError) as cm:
+ fileinput.fileno()
+ self.assertEqual(("no active input()",), cm.exception.args)
+ self.assertIsNone(fileinput._state)
+
+ def test_state_is_not_None(self):
+ """Tests fileinput.fileno() when fileinput._state is not None.
+ Ensure that it invokes fileinput._state.fileno() exactly once,
+ returns whatever it returns, and does not modify fileinput._state
+ to point to a different object."""
+ fileno_retval = object()
+ instance = MockFileInput()
+ instance.return_values["fileno"] = fileno_retval
+ fileinput._state = instance
+ retval = fileinput.fileno()
+ self.assertExactlyOneInvocation(instance, "fileno")
+ self.assertIs(retval, fileno_retval)
+ self.assertIs(fileinput._state, instance)
+
+class Test_fileinput_isfirstline(BaseFileInputGlobalMethodsTest):
+ """Unit tests for fileinput.isfirstline()"""
+
+ def test_state_is_None(self):
+ """Tests fileinput.isfirstline() when fileinput._state is None.
+ Ensure that it raises RuntimeError with a meaningful error message
+ and does not modify fileinput._state"""
+ fileinput._state = None
+ with self.assertRaises(RuntimeError) as cm:
+ fileinput.isfirstline()
+ self.assertEqual(("no active input()",), cm.exception.args)
+ self.assertIsNone(fileinput._state)
+
+ def test_state_is_not_None(self):
+ """Tests fileinput.isfirstline() when fileinput._state is not None.
+ Ensure that it invokes fileinput._state.isfirstline() exactly once,
+ returns whatever it returns, and does not modify fileinput._state
+ to point to a different object."""
+ isfirstline_retval = object()
+ instance = MockFileInput()
+ instance.return_values["isfirstline"] = isfirstline_retval
+ fileinput._state = instance
+ retval = fileinput.isfirstline()
+ self.assertExactlyOneInvocation(instance, "isfirstline")
+ self.assertIs(retval, isfirstline_retval)
+ self.assertIs(fileinput._state, instance)
+
+class Test_fileinput_isstdin(BaseFileInputGlobalMethodsTest):
+ """Unit tests for fileinput.isstdin()"""
+
+ def test_state_is_None(self):
+ """Tests fileinput.isstdin() when fileinput._state is None.
+ Ensure that it raises RuntimeError with a meaningful error message
+ and does not modify fileinput._state"""
+ fileinput._state = None
+ with self.assertRaises(RuntimeError) as cm:
+ fileinput.isstdin()
+ self.assertEqual(("no active input()",), cm.exception.args)
+ self.assertIsNone(fileinput._state)
+
+ def test_state_is_not_None(self):
+ """Tests fileinput.isstdin() when fileinput._state is not None.
+ Ensure that it invokes fileinput._state.isstdin() exactly once,
+ returns whatever it returns, and does not modify fileinput._state
+ to point to a different object."""
+ isstdin_retval = object()
+ instance = MockFileInput()
+ instance.return_values["isstdin"] = isstdin_retval
+ fileinput._state = instance
+ retval = fileinput.isstdin()
+ self.assertExactlyOneInvocation(instance, "isstdin")
+ self.assertIs(retval, isstdin_retval)
+ self.assertIs(fileinput._state, instance)
+
+class InvocationRecorder:
+ def __init__(self):
+ self.invocation_count = 0
+ def __call__(self, *args, **kwargs):
+ self.invocation_count += 1
+ self.last_invocation = (args, kwargs)
+
+class Test_hook_compressed(unittest.TestCase):
+ """Unit tests for fileinput.hook_compressed()"""
+
+ def setUp(self):
+ self.fake_open = InvocationRecorder()
+
+ def test_empty_string(self):
+ self.do_test_use_builtin_open("", 1)
+
+ def test_no_ext(self):
+ self.do_test_use_builtin_open("abcd", 2)
+
+ @unittest.skipUnless(gzip, "Requires gzip and zlib")
+ def test_gz_ext_fake(self):
+ original_open = gzip.open
+ gzip.open = self.fake_open
+ try:
+ result = fileinput.hook_compressed("test.gz", 3)
+ finally:
+ gzip.open = original_open
+
+ self.assertEqual(self.fake_open.invocation_count, 1)
+ self.assertEqual(self.fake_open.last_invocation, (("test.gz", 3), {}))
+
+ @unittest.skipUnless(bz2, "Requires bz2")
+ def test_bz2_ext_fake(self):
+ original_open = bz2.BZ2File
+ bz2.BZ2File = self.fake_open
+ try:
+ result = fileinput.hook_compressed("test.bz2", 4)
+ finally:
+ bz2.BZ2File = original_open
+
+ self.assertEqual(self.fake_open.invocation_count, 1)
+ self.assertEqual(self.fake_open.last_invocation, (("test.bz2", 4), {}))
+
+ def test_blah_ext(self):
+ self.do_test_use_builtin_open("abcd.blah", 5)
+
+ def test_gz_ext_builtin(self):
+ self.do_test_use_builtin_open("abcd.Gz", 6)
+
+ def test_bz2_ext_builtin(self):
+ self.do_test_use_builtin_open("abcd.Bz2", 7)
+
+ def do_test_use_builtin_open(self, filename, mode):
+ original_open = self.replace_builtin_open(self.fake_open)
+ try:
+ result = fileinput.hook_compressed(filename, mode)
+ finally:
+ self.replace_builtin_open(original_open)
+
+ self.assertEqual(self.fake_open.invocation_count, 1)
+ self.assertEqual(self.fake_open.last_invocation,
+ ((filename, mode), {}))
+
+ @staticmethod
+ def replace_builtin_open(new_open_func):
+ original_open = builtins.open
+ builtins.open = new_open_func
+ return original_open
+
+class Test_hook_encoded(unittest.TestCase):
+ """Unit tests for fileinput.hook_encoded()"""
+
+ def test(self):
+ encoding = object()
+ result = fileinput.hook_encoded(encoding)
+
+ fake_open = InvocationRecorder()
+ original_open = builtins.open
+ builtins.open = fake_open
+ try:
+ filename = object()
+ mode = object()
+ open_result = result(filename, mode)
+ finally:
+ builtins.open = original_open
+
+ self.assertEqual(fake_open.invocation_count, 1)
+
+ args, kwargs = fake_open.last_invocation
+ self.assertIs(args[0], filename)
+ self.assertIs(args[1], mode)
+ self.assertIs(kwargs.pop('encoding'), encoding)
+ self.assertFalse(kwargs)
def test_main():
- run_unittest(BufferSizesTests, FileInputTests)
+ run_unittest(
+ BufferSizesTests,
+ FileInputTests,
+ Test_fileinput_input,
+ Test_fileinput_close,
+ Test_fileinput_nextfile,
+ Test_fileinput_filename,
+ Test_fileinput_lineno,
+ Test_fileinput_filelineno,
+ Test_fileinput_fileno,
+ Test_fileinput_isfirstline,
+ Test_fileinput_isstdin,
+ Test_hook_compressed,
+ Test_hook_encoded,
+ )
if __name__ == "__main__":
test_main()
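
An aside on what these new fileinput tests pin down: the module keeps a single
FileInput instance in the module-global _state, and every free function
(filename(), lineno(), close(), ...) simply forwards to it, raising
RuntimeError when no input() is active. A minimal sketch of that contract,
using a throwaway temp file rather than any particular input:

    import fileinput, os, tempfile

    # Create a small file to iterate over; the path itself is arbitrary.
    fd, path = tempfile.mkstemp(text=True)
    with os.fdopen(fd, 'w') as f:
        f.write('alpha\nbeta\n')

    fi = fileinput.input(files=(path,))
    assert fi is fileinput._state            # input() installs the singleton
    for line in fi:
        print(fileinput.filename(), fileinput.lineno(), line.rstrip())
    fileinput.close()                        # close() clears it again
    assert fileinput._state is None
    os.unlink(path)
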
diff --git a/Lib/test/test_fileio.py b/Lib/test/test_fileio.py
index 3588fb4..9854d0c 100644
--- a/Lib/test/test_fileio.py
+++ b/Lib/test/test_fileio.py
@@ -2,6 +2,7 @@
import sys
import os
+import io
import errno
import unittest
from array import array
@@ -339,10 +340,10 @@ class OtherFileTests(unittest.TestCase):
self.assertEqual(f.tell(), 10)
f.truncate(5)
self.assertEqual(f.tell(), 10)
- self.assertEqual(f.seek(0, os.SEEK_END), 5)
+ self.assertEqual(f.seek(0, io.SEEK_END), 5)
f.truncate(15)
self.assertEqual(f.tell(), 5)
- self.assertEqual(f.seek(0, os.SEEK_END), 15)
+ self.assertEqual(f.seek(0, io.SEEK_END), 15)
f.close()
def testTruncateOnWindows(self):
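
For context on the SEEK_END change just above: os.SEEK_END and io.SEEK_END are
both 2, so the edit only makes the io-level test take its constants from io.
The behavior under test, sketched against a scratch file:

    import io, tempfile

    with tempfile.TemporaryFile() as f:
        f.write(b'0123456789')
        assert f.tell() == 10
        f.truncate(5)                        # truncate() leaves the cursor alone
        assert f.tell() == 10
        assert f.seek(0, io.SEEK_END) == 5   # but the file is now 5 bytes
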
diff --git a/Lib/test/test_float.py b/Lib/test/test_float.py
index 4d7bbba..dc0c291 100644
--- a/Lib/test/test_float.py
+++ b/Lib/test/test_float.py
@@ -88,7 +88,7 @@ class GeneralFloatCases(unittest.TestCase):
self.assertRaises(ValueError, float, " -0x3.p-1 ")
self.assertRaises(ValueError, float, " +0x3.p-1 ")
self.assertEqual(float(" 25.e-1 "), 2.5)
- self.assertEqual(support.fcmp(float(" .25e-1 "), .025), 0)
+ self.assertAlmostEqual(float(" .25e-1 "), .025)
def test_floatconversion(self):
# Make sure that calls to __float__() work properly
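
The fcmp() call removed above came from the old test.support helpers;
assertAlmostEqual is the unittest-native replacement and by default compares
to seven decimal places, which is plenty here. A restatement of those default
semantics without the test harness:

    # assertAlmostEqual(a, b) with no 'places' argument means:
    assert round(float(" .25e-1 ") - .025, 7) == 0
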
diff --git a/Lib/test/test_format.py b/Lib/test/test_format.py
index 7fa950d..d43cea3 100644
--- a/Lib/test/test_format.py
+++ b/Lib/test/test_format.py
@@ -1,4 +1,5 @@
from test.support import verbose, TestFailed
+import locale
import sys
import test.support as support
import unittest
@@ -263,6 +264,49 @@ class FormatTest(unittest.TestCase):
else:
raise TestFailed('"%*d"%(maxsize, -127) should fail')
+ def test_non_ascii(self):
+ self.assertEqual(format("abc", "\u2007<5"), "abc\u2007\u2007")
+ self.assertEqual(format(123, "\u2007<5"), "123\u2007\u2007")
+ self.assertEqual(format(12.3, "\u2007<6"), "12.3\u2007\u2007")
+ self.assertEqual(format(0j, "\u2007<4"), "0j\u2007\u2007")
+ self.assertEqual(format(1+2j, "\u2007<8"), "(1+2j)\u2007\u2007")
+
+ self.assertEqual(format("abc", "\u2007>5"), "\u2007\u2007abc")
+ self.assertEqual(format(123, "\u2007>5"), "\u2007\u2007123")
+ self.assertEqual(format(12.3, "\u2007>6"), "\u2007\u200712.3")
+ self.assertEqual(format(1+2j, "\u2007>8"), "\u2007\u2007(1+2j)")
+ self.assertEqual(format(0j, "\u2007>4"), "\u2007\u20070j")
+
+ self.assertEqual(format("abc", "\u2007^5"), "\u2007abc\u2007")
+ self.assertEqual(format(123, "\u2007^5"), "\u2007123\u2007")
+ self.assertEqual(format(12.3, "\u2007^6"), "\u200712.3\u2007")
+ self.assertEqual(format(1+2j, "\u2007^8"), "\u2007(1+2j)\u2007")
+ self.assertEqual(format(0j, "\u2007^4"), "\u20070j\u2007")
+
+ def test_locale(self):
+ try:
+ oldloc = locale.setlocale(locale.LC_ALL)
+ locale.setlocale(locale.LC_ALL, '')
+ except locale.Error as err:
+ self.skipTest("Cannot set locale: {}".format(err))
+ try:
+ localeconv = locale.localeconv()
+ sep = localeconv['thousands_sep']
+ point = localeconv['decimal_point']
+
+ text = format(123456789, "n")
+ self.assertIn(sep, text)
+ self.assertEqual(text.replace(sep, ''), '123456789')
+
+ text = format(1234.5, "n")
+ self.assertIn(sep, text)
+ self.assertIn(point, text)
+ self.assertEqual(text.replace(sep, ''), '1234' + point + '5')
+ finally:
+ locale.setlocale(locale.LC_ALL, oldloc)
+
+
def test_main():
support.run_unittest(FormatTest)
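
The new test_locale case exercises the locale-aware 'n' presentation type. A
minimal sketch of what it asserts, assuming the process may switch to the
environment's default locale:

    import locale

    # 'n' behaves like 'd' but inserts the locale's thousands separator;
    # stripping that separator must give back the plain digit string.
    locale.setlocale(locale.LC_ALL, '')
    sep = locale.localeconv()['thousands_sep']
    text = format(123456789, 'n')
    assert text.replace(sep, '') == '123456789'
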
diff --git a/Lib/test/test_ftplib.py b/Lib/test/test_ftplib.py
index 71bc23e..7028f46 100644
--- a/Lib/test/test_ftplib.py
+++ b/Lib/test/test_ftplib.py
@@ -22,10 +22,25 @@ from test.support import HOST
threading = support.import_module('threading')
# the dummy data returned by server over the data channel when
-# RETR, LIST and NLST commands are issued
+# RETR, LIST, NLST, MLSD commands are issued
RETR_DATA = 'abcde12345\r\n' * 1000
LIST_DATA = 'foo\r\nbar\r\n'
NLST_DATA = 'foo\r\nbar\r\n'
+MLSD_DATA = ("type=cdir;perm=el;unique==keVO1+ZF4; test\r\n"
+ "type=pdir;perm=e;unique==keVO1+d?3; ..\r\n"
+ "type=OS.unix=slink:/foobar;perm=;unique==keVO1+4G4; foobar\r\n"
+ "type=OS.unix=chr-13/29;perm=;unique==keVO1+5G4; device\r\n"
+ "type=OS.unix=blk-11/108;perm=;unique==keVO1+6G4; block\r\n"
+ "type=file;perm=awr;unique==keVO1+8G4; writable\r\n"
+ "type=dir;perm=cpmel;unique==keVO1+7G4; promiscuous\r\n"
+ "type=dir;perm=;unique==keVO1+1t2; no-exec\r\n"
+ "type=file;perm=r;unique==keVO1+EG4; two words\r\n"
+ "type=file;perm=r;unique==keVO1+IH4; leading space\r\n"
+ "type=file;perm=r;unique==keVO1+1G4; file1\r\n"
+ "type=dir;perm=cpmel;unique==keVO1+7G4; incoming\r\n"
+ "type=file;perm=r;unique==keVO1+1G4; file2\r\n"
+ "type=file;perm=r;unique==keVO1+1G4; file3\r\n"
+ "type=file;perm=r;unique==keVO1+1G4; file4\r\n")
class DummyDTPHandler(asynchat.async_chat):
@@ -49,6 +64,11 @@ class DummyDTPHandler(asynchat.async_chat):
self.dtp_conn_closed = True
def push(self, what):
+ if self.baseclass.next_data is not None:
+ what = self.baseclass.next_data
+ self.baseclass.next_data = None
+ if not what:
+ return self.close_when_done()
super(DummyDTPHandler, self).push(what.encode('ascii'))
def handle_error(self):
@@ -69,6 +89,7 @@ class DummyFTPHandler(asynchat.async_chat):
self.last_received_cmd = None
self.last_received_data = ''
self.next_response = ''
+ self.next_data = None
self.rest = None
self.push('220 welcome')
@@ -104,7 +125,7 @@ class DummyFTPHandler(asynchat.async_chat):
addr = list(map(int, arg.split(',')))
ip = '%d.%d.%d.%d' %tuple(addr[:4])
port = (addr[4] * 256) + addr[5]
- s = socket.create_connection((ip, port), timeout=10)
+ s = socket.create_connection((ip, port), timeout=2)
self.dtp = self.dtp_handler(s, baseclass=self)
self.push('200 active data connection established')
@@ -122,7 +143,7 @@ class DummyFTPHandler(asynchat.async_chat):
def cmd_eprt(self, arg):
af, ip, port = arg.split(arg[0])[1:-1]
port = int(port)
- s = socket.create_connection((ip, port), timeout=10)
+ s = socket.create_connection((ip, port), timeout=2)
self.dtp = self.dtp_handler(s, baseclass=self)
self.push('200 active data connection established')
@@ -213,6 +234,14 @@ class DummyFTPHandler(asynchat.async_chat):
self.dtp.push(NLST_DATA)
self.dtp.close_when_done()
+ def cmd_opts(self, arg):
+ self.push('200 opts ok')
+
+ def cmd_mlsd(self, arg):
+ self.push('125 mlsd ok')
+ self.dtp.push(MLSD_DATA)
+ self.dtp.close_when_done()
+
class DummyFTPServer(asyncore.dispatcher, threading.Thread):
@@ -274,11 +303,11 @@ if ssl is not None:
_ssl_closing = False
def secure_connection(self):
- self.del_channel()
socket = ssl.wrap_socket(self.socket, suppress_ragged_eofs=False,
certfile=CERTFILE, server_side=True,
do_handshake_on_connect=False,
ssl_version=ssl.PROTOCOL_SSLv23)
+ self.del_channel()
self.set_socket(socket)
self._ssl_accepting = True
@@ -313,7 +342,10 @@ if ssl is not None:
# http://www.mail-archive.com/openssl-users@openssl.org/msg60710.html
pass
self._ssl_closing = False
- super(SSLConnection, self).close()
+        if not getattr(self, '_ccc', False):
+            super(SSLConnection, self).close()
def handle_read_event(self):
if self._ssl_accepting:
@@ -381,12 +413,18 @@ if ssl is not None:
def __init__(self, conn):
DummyFTPHandler.__init__(self, conn)
self.secure_data_channel = False
+ self._ccc = False
def cmd_auth(self, line):
"""Set up secure control channel."""
self.push('234 AUTH TLS successful')
self.secure_connection()
+ def cmd_ccc(self, line):
+ self.push('220 Reverting back to clear-text')
+ self._ccc = True
+ self._do_ssl_shutdown()
+
def cmd_pbsz(self, line):
"""Negotiate size of buffer for secure data transfer.
For TLS/SSL the only valid value for the parameter is '0'.
@@ -416,13 +454,17 @@ class TestFTPClass(TestCase):
def setUp(self):
self.server = DummyFTPServer((HOST, 0))
self.server.start()
- self.client = ftplib.FTP(timeout=10)
+ self.client = ftplib.FTP(timeout=2)
self.client.connect(self.server.host, self.server.port)
def tearDown(self):
self.client.close()
self.server.stop()
+ def check_data(self, received, expected):
+ self.assertEqual(len(received), len(expected))
+ self.assertEqual(received, expected)
+
def test_getwelcome(self):
self.assertEqual(self.client.getwelcome(), '220 welcome')
@@ -504,7 +546,7 @@ class TestFTPClass(TestCase):
received.append(data.decode('ascii'))
received = []
self.client.retrbinary('retr', callback)
- self.assertEqual(''.join(received), RETR_DATA)
+ self.check_data(''.join(received), RETR_DATA)
def test_retrbinary_rest(self):
def callback(data):
@@ -512,20 +554,17 @@ class TestFTPClass(TestCase):
for rest in (0, 10, 20):
received = []
self.client.retrbinary('retr', callback, rest=rest)
- self.assertEqual(''.join(received), RETR_DATA[rest:],
- msg='rest test case %d %d %d' % (rest,
- len(''.join(received)),
- len(RETR_DATA[rest:])))
+ self.check_data(''.join(received), RETR_DATA[rest:])
def test_retrlines(self):
received = []
self.client.retrlines('retr', received.append)
- self.assertEqual(''.join(received), RETR_DATA.replace('\r\n', ''))
+ self.check_data(''.join(received), RETR_DATA.replace('\r\n', ''))
def test_storbinary(self):
f = io.BytesIO(RETR_DATA.encode('ascii'))
self.client.storbinary('stor', f)
- self.assertEqual(self.server.handler_instance.last_received_data, RETR_DATA)
+ self.check_data(self.server.handler_instance.last_received_data, RETR_DATA)
# test new callback arg
flag = []
f.seek(0)
@@ -542,7 +581,7 @@ class TestFTPClass(TestCase):
def test_storlines(self):
f = io.BytesIO(RETR_DATA.replace('\r\n', '\n').encode('ascii'))
self.client.storlines('stor', f)
- self.assertEqual(self.server.handler_instance.last_received_data, RETR_DATA)
+ self.check_data(self.server.handler_instance.last_received_data, RETR_DATA)
# test new callback arg
flag = []
f.seek(0)
@@ -558,6 +597,64 @@ class TestFTPClass(TestCase):
self.client.dir(lambda x: l.append(x))
self.assertEqual(''.join(l), LIST_DATA.replace('\r\n', ''))
+ def test_mlsd(self):
+ list(self.client.mlsd())
+ list(self.client.mlsd(path='/'))
+ list(self.client.mlsd(path='/', facts=['size', 'type']))
+
+ ls = list(self.client.mlsd())
+ for name, facts in ls:
+ self.assertIsInstance(name, str)
+ self.assertIsInstance(facts, dict)
+ self.assertTrue(name)
+ self.assertIn('type', facts)
+ self.assertIn('perm', facts)
+ self.assertIn('unique', facts)
+
+ def set_data(data):
+ self.server.handler_instance.next_data = data
+
+ def test_entry(line, type=None, perm=None, unique=None, name=None):
+ type = 'type' if type is None else type
+ perm = 'perm' if perm is None else perm
+ unique = 'unique' if unique is None else unique
+ name = 'name' if name is None else name
+ set_data(line)
+ _name, facts = next(self.client.mlsd())
+ self.assertEqual(_name, name)
+ self.assertEqual(facts['type'], type)
+ self.assertEqual(facts['perm'], perm)
+ self.assertEqual(facts['unique'], unique)
+
+ # plain
+ test_entry('type=type;perm=perm;unique=unique; name\r\n')
+ # "=" in fact value
+ test_entry('type=ty=pe;perm=perm;unique=unique; name\r\n', type="ty=pe")
+ test_entry('type==type;perm=perm;unique=unique; name\r\n', type="=type")
+ test_entry('type=t=y=pe;perm=perm;unique=unique; name\r\n', type="t=y=pe")
+ test_entry('type=====;perm=perm;unique=unique; name\r\n', type="====")
+ # spaces in name
+ test_entry('type=type;perm=perm;unique=unique; na me\r\n', name="na me")
+ test_entry('type=type;perm=perm;unique=unique; name \r\n', name="name ")
+        test_entry('type=type;perm=perm;unique=unique;  name\r\n', name=" name")
+ test_entry('type=type;perm=perm;unique=unique; n am e\r\n', name="n am e")
+ # ";" in name
+ test_entry('type=type;perm=perm;unique=unique; na;me\r\n', name="na;me")
+ test_entry('type=type;perm=perm;unique=unique; ;name\r\n', name=";name")
+ test_entry('type=type;perm=perm;unique=unique; ;name;\r\n', name=";name;")
+ test_entry('type=type;perm=perm;unique=unique; ;;;;\r\n', name=";;;;")
+ # case sensitiveness
+ set_data('Type=type;TyPe=perm;UNIQUE=unique; name\r\n')
+ _name, facts = next(self.client.mlsd())
+ for x in facts:
+ self.assertTrue(x.islower())
+ # no data (directory empty)
+ set_data('')
+ self.assertRaises(StopIteration, next, self.client.mlsd())
+ set_data('')
+ for x in self.client.mlsd():
+ self.fail("unexpected data %s" % data)
+
def test_makeport(self):
with self.client.makeport():
# IPv4 is in use, just make sure send_eprt has not been used
@@ -584,7 +681,7 @@ class TestFTPClass(TestCase):
return True
# base test
- with ftplib.FTP(timeout=10) as self.client:
+ with ftplib.FTP(timeout=2) as self.client:
self.client.connect(self.server.host, self.server.port)
self.client.sendcmd('noop')
self.assertTrue(is_client_connected())
@@ -592,7 +689,7 @@ class TestFTPClass(TestCase):
self.assertFalse(is_client_connected())
# QUIT sent inside the with block
- with ftplib.FTP(timeout=10) as self.client:
+ with ftplib.FTP(timeout=2) as self.client:
self.client.connect(self.server.host, self.server.port)
self.client.sendcmd('noop')
self.client.quit()
@@ -602,7 +699,7 @@ class TestFTPClass(TestCase):
# force a wrong response code to be sent on QUIT: error_perm
# is expected and the connection is supposed to be closed
try:
- with ftplib.FTP(timeout=10) as self.client:
+ with ftplib.FTP(timeout=2) as self.client:
self.client.connect(self.server.host, self.server.port)
self.client.sendcmd('noop')
self.server.handler_instance.next_response = '550 error on quit'
@@ -616,6 +713,30 @@ class TestFTPClass(TestCase):
self.assertEqual(self.server.handler_instance.last_received_cmd, 'quit')
self.assertFalse(is_client_connected())
+ def test_source_address(self):
+ self.client.quit()
+ port = support.find_unused_port()
+ try:
+ self.client.connect(self.server.host, self.server.port,
+ source_address=(HOST, port))
+ self.assertEqual(self.client.sock.getsockname()[1], port)
+ self.client.quit()
+ except IOError as e:
+ if e.errno == errno.EADDRINUSE:
+ self.skipTest("couldn't bind to port %d" % port)
+ raise
+
+ def test_source_address_passive_connection(self):
+ port = support.find_unused_port()
+ self.client.source_address = (HOST, port)
+ try:
+ with self.client.transfercmd('list') as sock:
+ self.assertEqual(sock.getsockname()[1], port)
+ except IOError as e:
+ if e.errno == errno.EADDRINUSE:
+ self.skipTest("couldn't bind to port %d" % port)
+ raise
+
def test_parse257(self):
self.assertEqual(ftplib.parse257('257 "/foo/bar"'), '/foo/bar')
self.assertEqual(ftplib.parse257('257 "/foo/bar" created'), '/foo/bar')
@@ -632,7 +753,7 @@ class TestFTPClass(TestCase):
class TestIPv6Environment(TestCase):
def setUp(self):
- self.server = DummyFTPServer((HOST, 0), af=socket.AF_INET6)
+ self.server = DummyFTPServer(('::1', 0), af=socket.AF_INET6)
self.server.start()
self.client = ftplib.FTP()
self.client.connect(self.server.host, self.server.port)
@@ -661,6 +782,7 @@ class TestIPv6Environment(TestCase):
received.append(data.decode('ascii'))
received = []
self.client.retrbinary('retr', callback)
+ self.assertEqual(len(''.join(received)), len(RETR_DATA))
self.assertEqual(''.join(received), RETR_DATA)
self.client.set_pasv(True)
retr()
@@ -676,7 +798,7 @@ class TestTLS_FTPClassMixin(TestFTPClass):
def setUp(self):
self.server = DummyTLS_FTPServer((HOST, 0))
self.server.start()
- self.client = ftplib.FTP_TLS(timeout=10)
+ self.client = ftplib.FTP_TLS(timeout=2)
self.client.connect(self.server.host, self.server.port)
# enable TLS
self.client.auth()
@@ -689,7 +811,7 @@ class TestTLS_FTPClass(TestCase):
def setUp(self):
self.server = DummyTLS_FTPServer((HOST, 0))
self.server.start()
- self.client = ftplib.FTP_TLS(timeout=10)
+ self.client = ftplib.FTP_TLS(timeout=2)
self.client.connect(self.server.host, self.server.port)
def tearDown(self):
@@ -749,7 +871,7 @@ class TestTLS_FTPClass(TestCase):
self.assertRaises(ValueError, ftplib.FTP_TLS, certfile=CERTFILE,
keyfile=CERTFILE, context=ctx)
- self.client = ftplib.FTP_TLS(context=ctx, timeout=10)
+ self.client = ftplib.FTP_TLS(context=ctx, timeout=2)
self.client.connect(self.server.host, self.server.port)
self.assertNotIsInstance(self.client.sock, ssl.SSLSocket)
self.client.auth()
@@ -761,6 +883,13 @@ class TestTLS_FTPClass(TestCase):
self.assertIs(sock.context, ctx)
self.assertIsInstance(sock, ssl.SSLSocket)
+ def test_ccc(self):
+ self.assertRaises(ValueError, self.client.ccc)
+ self.client.login(secure=True)
+ self.assertIsInstance(self.client.sock, ssl.SSLSocket)
+ self.client.ccc()
+ self.assertRaises(ValueError, self.client.sock.unwrap)
+
class TestTimeouts(TestCase):
@@ -857,13 +986,8 @@ class TestTimeouts(TestCase):
def test_main():
tests = [TestFTPClass, TestTimeouts]
- if socket.has_ipv6:
- try:
- DummyFTPServer((HOST, 0), af=socket.AF_INET6)
- except socket.error:
- pass
- else:
- tests.append(TestIPv6Environment)
+ if support.IPV6_ENABLED:
+ tests.append(TestIPv6Environment)
if ssl is not None:
tests.extend([TestTLS_FTPClassMixin, TestTLS_FTPClass])
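
The MLSD_DATA samples and test_entry() cases above encode the RFC 3659 line
format: semicolon-separated fact=value pairs, one space, then the pathname,
where the name may itself contain spaces, semicolons, or '=' and only the
first '=' of each fact is a delimiter. A standalone sketch of that split,
purely illustrative and not ftplib's own parser:

    def parse_mlsd_line(line):
        # Facts end at the first space; everything after it is the name.
        facts_part, _, name = line.rstrip('\r\n').partition(' ')
        facts = {}
        for fact in facts_part.split(';'):
            if not fact:
                continue                  # trailing ';' yields an empty chunk
            key, _, value = fact.partition('=')
            facts[key.lower()] = value    # fact names are case-insensitive
        return name, facts

    name, facts = parse_mlsd_line('type=dir;unique==keVO1+7G4; two words\r\n')
    assert name == 'two words' and facts['unique'] == '=keVO1+7G4'
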
diff --git a/Lib/test/test_funcattrs.py b/Lib/test/test_funcattrs.py
index 4d19368..c8ed830 100644
--- a/Lib/test/test_funcattrs.py
+++ b/Lib/test/test_funcattrs.py
@@ -2,6 +2,15 @@ from test import support
import types
import unittest
+
+def global_function():
+ def inner_function():
+ class LocalClass:
+ pass
+ return LocalClass
+ return lambda: inner_function
+
+
class FuncAttrsTest(unittest.TestCase):
def setUp(self):
class F:
@@ -96,6 +105,24 @@ class FunctionPropertiesTest(FuncAttrsTest):
self.assertEqual(self.fi.a.__name__, 'a')
self.cannot_set_attr(self.fi.a, "__name__", 'a', AttributeError)
+ def test___qualname__(self):
+ # PEP 3155
+ self.assertEqual(self.b.__qualname__, 'FuncAttrsTest.setUp.<locals>.b')
+ self.assertEqual(FuncAttrsTest.setUp.__qualname__, 'FuncAttrsTest.setUp')
+ self.assertEqual(global_function.__qualname__, 'global_function')
+ self.assertEqual(global_function().__qualname__,
+ 'global_function.<locals>.<lambda>')
+ self.assertEqual(global_function()().__qualname__,
+ 'global_function.<locals>.inner_function')
+ self.assertEqual(global_function()()().__qualname__,
+ 'global_function.<locals>.inner_function.<locals>.LocalClass')
+ self.b.__qualname__ = 'c'
+ self.assertEqual(self.b.__qualname__, 'c')
+ self.b.__qualname__ = 'd'
+ self.assertEqual(self.b.__qualname__, 'd')
+ # __qualname__ must be a string
+ self.cannot_set_attr(self.b, '__qualname__', 7, TypeError)
+
def test___code__(self):
num_one, num_two = 7, 8
def a(): pass
@@ -315,11 +342,37 @@ class StaticMethodAttrsTest(unittest.TestCase):
self.assertTrue(s.__func__ is f)
+class BuiltinFunctionPropertiesTest(unittest.TestCase):
+ # XXX Not sure where this should really go since I can't find a
+ # test module specifically for builtin_function_or_method.
+
+ def test_builtin__qualname__(self):
+ import time
+
+ # builtin function:
+ self.assertEqual(len.__qualname__, 'len')
+ self.assertEqual(time.time.__qualname__, 'time')
+
+ # builtin classmethod:
+ self.assertEqual(dict.fromkeys.__qualname__, 'dict.fromkeys')
+ self.assertEqual(float.__getformat__.__qualname__,
+ 'float.__getformat__')
+
+ # builtin staticmethod:
+ self.assertEqual(str.maketrans.__qualname__, 'str.maketrans')
+ self.assertEqual(bytes.maketrans.__qualname__, 'bytes.maketrans')
+
+ # builtin bound instance method:
+ self.assertEqual([1, 2, 3].append.__qualname__, 'list.append')
+ self.assertEqual({'foo': 'bar'}.pop.__qualname__, 'dict.pop')
+
+
def test_main():
support.run_unittest(FunctionPropertiesTest, InstancemethodAttrTest,
ArbitraryFunctionAttrTest, FunctionDictsTest,
FunctionDocstringTest, CellTest,
- StaticMethodAttrsTest)
+ StaticMethodAttrsTest,
+ BuiltinFunctionPropertiesTest)
if __name__ == "__main__":
test_main()
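
PEP 3155's __qualname__, exercised above for both pure-Python and builtin
callables, records the dotted lexical path rather than just the trailing
name. A small sketch of the values being asserted:

    def outer():
        def inner():
            pass
        return inner

    # __name__ keeps only the last component; __qualname__ keeps the nesting.
    assert outer.__qualname__ == 'outer'
    assert outer().__qualname__ == 'outer.<locals>.inner'
    assert str.maketrans.__qualname__ == 'str.maketrans'
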
diff --git a/Lib/test/test_functools.py b/Lib/test/test_functools.py
index 270cab0..c4910a7 100644
--- a/Lib/test/test_functools.py
+++ b/Lib/test/test_functools.py
@@ -226,6 +226,7 @@ class TestUpdateWrapper(unittest.TestCase):
self.check_wrapper(wrapper, f)
self.assertIs(wrapper.__wrapped__, f)
self.assertEqual(wrapper.__name__, 'f')
+ self.assertEqual(wrapper.__qualname__, f.__qualname__)
self.assertEqual(wrapper.attr, 'This is also a test')
self.assertEqual(wrapper.__annotations__['a'], 'This is a new annotation')
self.assertNotIn('b', wrapper.__annotations__)
@@ -246,6 +247,7 @@ class TestUpdateWrapper(unittest.TestCase):
functools.update_wrapper(wrapper, f, (), ())
self.check_wrapper(wrapper, f, (), ())
self.assertEqual(wrapper.__name__, 'wrapper')
+ self.assertNotEqual(wrapper.__qualname__, f.__qualname__)
self.assertEqual(wrapper.__doc__, None)
self.assertEqual(wrapper.__annotations__, {})
self.assertFalse(hasattr(wrapper, 'attr'))
@@ -263,6 +265,7 @@ class TestUpdateWrapper(unittest.TestCase):
functools.update_wrapper(wrapper, f, assign, update)
self.check_wrapper(wrapper, f, assign, update)
self.assertEqual(wrapper.__name__, 'wrapper')
+ self.assertNotEqual(wrapper.__qualname__, f.__qualname__)
self.assertEqual(wrapper.__doc__, None)
self.assertEqual(wrapper.attr, 'This is a different test')
self.assertEqual(wrapper.dict_attr, f.dict_attr)
@@ -309,17 +312,18 @@ class TestWraps(TestUpdateWrapper):
def wrapper():
pass
self.check_wrapper(wrapper, f)
- return wrapper
+ return wrapper, f
def test_default_update(self):
- wrapper = self._default_update()
+ wrapper, f = self._default_update()
self.assertEqual(wrapper.__name__, 'f')
+ self.assertEqual(wrapper.__qualname__, f.__qualname__)
self.assertEqual(wrapper.attr, 'This is also a test')
@unittest.skipIf(not sys.flags.optimize <= 1,
"Docstrings are omitted with -O2 and above")
def test_default_update_doc(self):
- wrapper = self._default_update()
+ wrapper, _ = self._default_update()
self.assertEqual(wrapper.__doc__, 'This is a test')
def test_no_update(self):
@@ -332,6 +336,7 @@ class TestWraps(TestUpdateWrapper):
pass
self.check_wrapper(wrapper, f, (), ())
self.assertEqual(wrapper.__name__, 'wrapper')
+ self.assertNotEqual(wrapper.__qualname__, f.__qualname__)
self.assertEqual(wrapper.__doc__, None)
self.assertFalse(hasattr(wrapper, 'attr'))
@@ -351,6 +356,7 @@ class TestWraps(TestUpdateWrapper):
pass
self.check_wrapper(wrapper, f, assign, update)
self.assertEqual(wrapper.__name__, 'wrapper')
+ self.assertNotEqual(wrapper.__qualname__, f.__qualname__)
self.assertEqual(wrapper.__doc__, None)
self.assertEqual(wrapper.attr, 'This is a different test')
self.assertEqual(wrapper.dict_attr, f.dict_attr)
@@ -436,19 +442,82 @@ class TestReduce(unittest.TestCase):
self.assertEqual(self.func(add, d), "".join(d.keys()))
class TestCmpToKey(unittest.TestCase):
+
def test_cmp_to_key(self):
+ def cmp1(x, y):
+ return (x > y) - (x < y)
+ key = functools.cmp_to_key(cmp1)
+ self.assertEqual(key(3), key(3))
+ self.assertGreater(key(3), key(1))
+ def cmp2(x, y):
+ return int(x) - int(y)
+ key = functools.cmp_to_key(cmp2)
+ self.assertEqual(key(4.0), key('4'))
+ self.assertLess(key(2), key('35'))
+
+ def test_cmp_to_key_arguments(self):
+ def cmp1(x, y):
+ return (x > y) - (x < y)
+ key = functools.cmp_to_key(mycmp=cmp1)
+ self.assertEqual(key(obj=3), key(obj=3))
+ self.assertGreater(key(obj=3), key(obj=1))
+ with self.assertRaises((TypeError, AttributeError)):
+ key(3) > 1 # rhs is not a K object
+ with self.assertRaises((TypeError, AttributeError)):
+ 1 < key(3) # lhs is not a K object
+ with self.assertRaises(TypeError):
+ key = functools.cmp_to_key() # too few args
+ with self.assertRaises(TypeError):
+ key = functools.cmp_to_key(cmp1, None) # too many args
+ key = functools.cmp_to_key(cmp1)
+ with self.assertRaises(TypeError):
+ key() # too few args
+ with self.assertRaises(TypeError):
+ key(None, None) # too many args
+
+ def test_bad_cmp(self):
+ def cmp1(x, y):
+ raise ZeroDivisionError
+ key = functools.cmp_to_key(cmp1)
+ with self.assertRaises(ZeroDivisionError):
+ key(3) > key(1)
+
+ class BadCmp:
+ def __lt__(self, other):
+ raise ZeroDivisionError
+        def cmp1(x, y):
+            return BadCmp()
+        key = functools.cmp_to_key(cmp1)
+        with self.assertRaises(ZeroDivisionError):
+            key(3) < key(1)
+
+ def test_obj_field(self):
+ def cmp1(x, y):
+ return (x > y) - (x < y)
+ key = functools.cmp_to_key(mycmp=cmp1)
+ self.assertEqual(key(50).obj, 50)
+
+ def test_sort_int(self):
def mycmp(x, y):
return y - x
self.assertEqual(sorted(range(5), key=functools.cmp_to_key(mycmp)),
[4, 3, 2, 1, 0])
+ def test_sort_int_str(self):
+ def mycmp(x, y):
+ x, y = int(x), int(y)
+ return (x > y) - (x < y)
+ values = [5, '3', 7, 2, '0', '1', 4, '10', 1]
+ values = sorted(values, key=functools.cmp_to_key(mycmp))
+ self.assertEqual([int(value) for value in values],
+ [0, 1, 1, 2, 3, 4, 5, 7, 10])
+
def test_hash(self):
def mycmp(x, y):
return y - x
key = functools.cmp_to_key(mycmp)
k = key(10)
self.assertRaises(TypeError, hash, k)
- self.assertFalse(isinstance(k, collections.Hashable))
+ self.assertNotIsInstance(k, collections.Hashable)
class TestTotalOrdering(unittest.TestCase):
@@ -671,6 +740,22 @@ class TestLRU(unittest.TestCase):
with self.assertRaises(IndexError):
func(15)
+ def test_lru_with_types(self):
+ for maxsize in (None, 100):
+ @functools.lru_cache(maxsize=maxsize, typed=True)
+ def square(x):
+ return x * x
+ self.assertEqual(square(3), 9)
+ self.assertEqual(type(square(3)), type(9))
+ self.assertEqual(square(3.0), 9.0)
+ self.assertEqual(type(square(3.0)), type(9.0))
+ self.assertEqual(square(x=3), 9)
+ self.assertEqual(type(square(x=3)), type(9))
+ self.assertEqual(square(x=3.0), 9.0)
+ self.assertEqual(type(square(x=3.0)), type(9.0))
+ self.assertEqual(square.cache_info().hits, 4)
+ self.assertEqual(square.cache_info().misses, 4)
+
def test_main(verbose=None):
test_classes = (
TestPartial,
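
One of the hunks above (test_lru_with_types) covers the new typed=True flag:
the cache then keys on argument types as well as values, so int and float
arguments populate separate slots. A minimal sketch:

    import functools

    @functools.lru_cache(maxsize=None, typed=True)
    def square(x):
        return x * x

    square(3); square(3.0)     # two misses: 3 and 3.0 are cached separately
    square(3); square(3.0)     # two hits
    info = square.cache_info()
    assert (info.hits, info.misses) == (2, 2)
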
diff --git a/Lib/test/test_future.py b/Lib/test/test_future.py
index c6689a1..3a25eb1 100644
--- a/Lib/test/test_future.py
+++ b/Lib/test/test_future.py
@@ -13,14 +13,14 @@ def get_error_location(msg):
class FutureTest(unittest.TestCase):
def test_future1(self):
- support.unload('test_future1')
- from test import test_future1
- self.assertEqual(test_future1.result, 6)
+ support.unload('future_test1')
+ from test import future_test1
+ self.assertEqual(future_test1.result, 6)
def test_future2(self):
- support.unload('test_future2')
- from test import test_future2
- self.assertEqual(test_future2.result, 6)
+ support.unload('future_test2')
+ from test import future_test2
+ self.assertEqual(future_test2.result, 6)
def test_future3(self):
support.unload('test_future3')
diff --git a/Lib/test/test_gc.py b/Lib/test/test_gc.py
index 100c767..19313db 100644
--- a/Lib/test/test_gc.py
+++ b/Lib/test/test_gc.py
@@ -1,5 +1,6 @@
import unittest
-from test.support import verbose, run_unittest, strip_python_stderr
+from test.support import (verbose, refcount_test, run_unittest,
+ strip_python_stderr)
import sys
import gc
import weakref
@@ -175,6 +176,7 @@ class GCTests(unittest.TestCase):
del d
self.assertEqual(gc.collect(), 2)
+ @refcount_test
def test_frame(self):
def f():
frame = sys._getframe()
@@ -242,6 +244,7 @@ class GCTests(unittest.TestCase):
# For example, disposed tuples are not freed, but reused.
# To minimize variations, though, we first store the get_count() results
# and check them at the end.
+ @refcount_test
def test_get_count(self):
gc.collect()
a, b, c = gc.get_count()
@@ -255,6 +258,7 @@ class GCTests(unittest.TestCase):
# created (the list).
self.assertGreater(d, a)
+ @refcount_test
def test_collect_generations(self):
gc.collect()
# This object will "trickle" into generation N + 1 after
diff --git a/Lib/test/test_gdb.py b/Lib/test/test_gdb.py
index aea7c0c..c4c4803 100644
--- a/Lib/test/test_gdb.py
+++ b/Lib/test/test_gdb.py
@@ -321,7 +321,7 @@ class Foo:
foo = Foo()
foo.an_int = 42
id(foo)''')
- m = re.match(r'<Foo\(an_int=42\) at remote 0x[0-9a-f]+>', gdb_repr)
+ m = re.match(r'<Foo\(an_int=42\) at remote 0x-?[0-9a-f]+>', gdb_repr)
self.assertTrue(m,
msg='Unexpected new-style class rendering %r' % gdb_repr)
@@ -334,7 +334,7 @@ foo = Foo()
foo += [1, 2, 3]
foo.an_int = 42
id(foo)''')
- m = re.match(r'<Foo\(an_int=42\) at remote 0x[0-9a-f]+>', gdb_repr)
+ m = re.match(r'<Foo\(an_int=42\) at remote 0x-?[0-9a-f]+>', gdb_repr)
self.assertTrue(m,
msg='Unexpected new-style class rendering %r' % gdb_repr)
@@ -349,7 +349,7 @@ class Foo(tuple):
foo = Foo((1, 2, 3))
foo.an_int = 42
id(foo)''')
- m = re.match(r'<Foo\(an_int=42\) at remote 0x[0-9a-f]+>', gdb_repr)
+ m = re.match(r'<Foo\(an_int=42\) at remote 0x-?[0-9a-f]+>', gdb_repr)
self.assertTrue(m,
msg='Unexpected new-style class rendering %r' % gdb_repr)
@@ -376,7 +376,7 @@ id(foo)''')
# Match anything for the type name; 0xDEADBEEF could point to
# something arbitrary (see http://bugs.python.org/issue8330)
- pattern = '<.* at remote 0x[0-9a-f]+>'
+ pattern = '<.* at remote 0x-?[0-9a-f]+>'
m = re.match(pattern, gdb_repr)
if not m:
@@ -422,7 +422,7 @@ id(foo)''')
# http://bugs.python.org/issue8032#msg100537 )
gdb_repr, gdb_output = self.get_gdb_repr('id(__builtins__.help)', import_site=True)
- m = re.match(r'<_Helper at remote 0x[0-9a-f]+>', gdb_repr)
+ m = re.match(r'<_Helper at remote 0x-?[0-9a-f]+>', gdb_repr)
self.assertTrue(m,
msg='Unexpected rendering %r' % gdb_repr)
@@ -453,7 +453,7 @@ class Foo:
foo = Foo()
foo.an_attr = foo
id(foo)''')
- self.assertTrue(re.match('<Foo\(an_attr=<\.\.\.>\) at remote 0x[0-9a-f]+>',
+ self.assertTrue(re.match('<Foo\(an_attr=<\.\.\.>\) at remote 0x-?[0-9a-f]+>',
gdb_repr),
'Unexpected gdb representation: %r\n%s' % \
(gdb_repr, gdb_output))
@@ -466,7 +466,7 @@ class Foo(object):
foo = Foo()
foo.an_attr = foo
id(foo)''')
- self.assertTrue(re.match('<Foo\(an_attr=<\.\.\.>\) at remote 0x[0-9a-f]+>',
+ self.assertTrue(re.match('<Foo\(an_attr=<\.\.\.>\) at remote 0x-?[0-9a-f]+>',
gdb_repr),
'Unexpected gdb representation: %r\n%s' % \
(gdb_repr, gdb_output))
@@ -480,7 +480,7 @@ b = Foo()
a.an_attr = b
b.an_attr = a
id(a)''')
- self.assertTrue(re.match('<Foo\(an_attr=<Foo\(an_attr=<\.\.\.>\) at remote 0x[0-9a-f]+>\) at remote 0x[0-9a-f]+>',
+ self.assertTrue(re.match('<Foo\(an_attr=<Foo\(an_attr=<\.\.\.>\) at remote 0x-?[0-9a-f]+>\) at remote 0x-?[0-9a-f]+>',
gdb_repr),
'Unexpected gdb representation: %r\n%s' % \
(gdb_repr, gdb_output))
@@ -515,7 +515,7 @@ id(a)''')
def test_builtin_method(self):
gdb_repr, gdb_output = self.get_gdb_repr('import sys; id(sys.stdout.readlines)')
- self.assertTrue(re.match('<built-in method readlines of _io.TextIOWrapper object at remote 0x[0-9a-f]+>',
+ self.assertTrue(re.match('<built-in method readlines of _io.TextIOWrapper object at remote 0x-?[0-9a-f]+>',
gdb_repr),
'Unexpected gdb representation: %r\n%s' % \
(gdb_repr, gdb_output))
@@ -530,7 +530,7 @@ id(foo.__code__)''',
breakpoint='builtin_id',
cmds_after_breakpoint=['print (PyFrameObject*)(((PyCodeObject*)v)->co_zombieframe)']
)
- self.assertTrue(re.match('.*\s+\$1 =\s+Frame 0x[0-9a-f]+, for file <string>, line 3, in foo \(\)\s+.*',
+ self.assertTrue(re.match('.*\s+\$1 =\s+Frame 0x-?[0-9a-f]+, for file <string>, line 3, in foo \(\)\s+.*',
gdb_output,
re.DOTALL),
'Unexpected gdb representation: %r\n%s' % (gdb_output, gdb_output))
@@ -587,7 +587,7 @@ class StackNavigationTests(DebuggerTests):
cmds_after_breakpoint=['py-up'])
self.assertMultilineMatches(bt,
r'''^.*
-#[0-9]+ Frame 0x[0-9a-f]+, for file .*gdb_sample.py, line 7, in bar \(a=1, b=2, c=3\)
+#[0-9]+ Frame 0x-?[0-9a-f]+, for file .*gdb_sample.py, line 7, in bar \(a=1, b=2, c=3\)
baz\(a, b, c\)
$''')
@@ -616,9 +616,9 @@ $''')
cmds_after_breakpoint=['py-up', 'py-down'])
self.assertMultilineMatches(bt,
r'''^.*
-#[0-9]+ Frame 0x[0-9a-f]+, for file .*gdb_sample.py, line 7, in bar \(a=1, b=2, c=3\)
+#[0-9]+ Frame 0x-?[0-9a-f]+, for file .*gdb_sample.py, line 7, in bar \(a=1, b=2, c=3\)
baz\(a, b, c\)
-#[0-9]+ Frame 0x[0-9a-f]+, for file .*gdb_sample.py, line 10, in baz \(args=\(1, 2, 3\)\)
+#[0-9]+ Frame 0x-?[0-9a-f]+, for file .*gdb_sample.py, line 10, in baz \(args=\(1, 2, 3\)\)
id\(42\)
$''')
@@ -650,11 +650,11 @@ Traceback \(most recent call first\):
cmds_after_breakpoint=['py-bt-full'])
self.assertMultilineMatches(bt,
r'''^.*
-#[0-9]+ Frame 0x[0-9a-f]+, for file .*gdb_sample.py, line 7, in bar \(a=1, b=2, c=3\)
+#[0-9]+ Frame 0x-?[0-9a-f]+, for file .*gdb_sample.py, line 7, in bar \(a=1, b=2, c=3\)
baz\(a, b, c\)
-#[0-9]+ Frame 0x[0-9a-f]+, for file .*gdb_sample.py, line 4, in foo \(a=1, b=2, c=3\)
+#[0-9]+ Frame 0x-?[0-9a-f]+, for file .*gdb_sample.py, line 4, in foo \(a=1, b=2, c=3\)
bar\(a, b, c\)
-#[0-9]+ Frame 0x[0-9a-f]+, for file .*gdb_sample.py, line 12, in <module> \(\)
+#[0-9]+ Frame 0x-?[0-9a-f]+, for file .*gdb_sample.py, line 12, in <module> \(\)
foo\(1, 2, 3\)
''')
@@ -691,7 +691,7 @@ class PyPrintTests(DebuggerTests):
bt = self.get_stack_trace(script=self.get_sample_script(),
cmds_after_breakpoint=['py-print len'])
self.assertMultilineMatches(bt,
- r".*\nbuiltin 'len' = <built-in method len of module object at remote 0x[0-9a-f]+>\n.*")
+ r".*\nbuiltin 'len' = <built-in method len of module object at remote 0x-?[0-9a-f]+>\n.*")
class PyLocalsTests(DebuggerTests):
@unittest.skipIf(python_is_optimized(),
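
All the gdb-integration patterns above grow an optional '-' after '0x';
presumably this is because gdb can render pointers with the high bit set as
negative hex on some platforms. Checking the widened pattern directly with re:

    import re

    pattern = r'<Foo\(an_int=42\) at remote 0x-?[0-9a-f]+>'
    for rendering in ('<Foo(an_int=42) at remote 0xdeadbeef>',
                      '<Foo(an_int=42) at remote 0x-7fff9cf0>'):
        assert re.match(pattern, rendering)
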
diff --git a/Lib/test/test_generators.py b/Lib/test/test_generators.py
index 5f47b3e..06f67c2 100644
--- a/Lib/test/test_generators.py
+++ b/Lib/test/test_generators.py
@@ -728,29 +728,6 @@ Ye olde Fibonacci generator, tee style.
syntax_tests = """
->>> def f():
-... return 22
-... yield 1
-Traceback (most recent call last):
- ..
-SyntaxError: 'return' with argument inside generator
-
->>> def f():
-... yield 1
-... return 22
-Traceback (most recent call last):
- ..
-SyntaxError: 'return' with argument inside generator
-
-"return None" is not the same as "return" in a generator:
-
->>> def f():
-... yield 1
-... return None
-Traceback (most recent call last):
- ..
-SyntaxError: 'return' with argument inside generator
-
These are fine:
>>> def f():
@@ -866,20 +843,6 @@ These are fine:
>>> type(f())
<class 'generator'>
-
->>> def f():
-... if 0:
-... lambda x: x # shouldn't trigger here
-... return # or here
-... def f(i):
-... return 2*i # or here
-... if 0:
-... return 3 # but *this* sucks (line 8)
-... if 0:
-... yield 2 # because it's a generator (line 10)
-Traceback (most recent call last):
-SyntaxError: 'return' with argument inside generator
-
This one caused a crash (see SF bug 567538):
>>> def f():
@@ -1566,11 +1529,6 @@ Traceback (most recent call last):
...
SyntaxError: 'yield' outside function
->>> def f(): return lambda x=(yield): 1
-Traceback (most recent call last):
- ...
-SyntaxError: 'return' with argument inside generator
-
>>> def f(): x = yield = y
Traceback (most recent call last):
...
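
The doctests deleted above enforced the old rule that 'return' with a value is
a SyntaxError inside a generator; under PEP 380 that is now legal and the
value travels on StopIteration. A sketch of the new behavior:

    def gen():
        yield 1
        return 22              # legal now; becomes StopIteration.value

    g = gen()
    assert next(g) == 1
    try:
        next(g)
    except StopIteration as exc:
        assert exc.value == 22
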
diff --git a/Lib/test/test_genericpath.py b/Lib/test/test_genericpath.py
index 50638a1..ebb8396 100644
--- a/Lib/test/test_genericpath.py
+++ b/Lib/test/test_genericpath.py
@@ -2,11 +2,12 @@
Tests common to genericpath, macpath, ntpath and posixpath
"""
-import unittest
-from test import support
-import os
import genericpath
+import os
import sys
+import unittest
+import warnings
+from test import support
def safe_rmdir(dirname):
@@ -258,15 +259,21 @@ class CommonTest(GenericTest):
def test_abspath(self):
self.assertIn("foo", self.pathmodule.abspath("foo"))
- self.assertIn(b"foo", self.pathmodule.abspath(b"foo"))
+ with warnings.catch_warnings():
+ warnings.simplefilter("ignore", DeprecationWarning)
+ self.assertIn(b"foo", self.pathmodule.abspath(b"foo"))
# Abspath returns bytes when the arg is bytes
- for path in (b'', b'foo', b'f\xf2\xf2', b'/foo', b'C:\\'):
- self.assertIsInstance(self.pathmodule.abspath(path), bytes)
+ with warnings.catch_warnings():
+ warnings.simplefilter("ignore", DeprecationWarning)
+ for path in (b'', b'foo', b'f\xf2\xf2', b'/foo', b'C:\\'):
+ self.assertIsInstance(self.pathmodule.abspath(path), bytes)
def test_realpath(self):
self.assertIn("foo", self.pathmodule.realpath("foo"))
- self.assertIn(b"foo", self.pathmodule.realpath(b"foo"))
+ with warnings.catch_warnings():
+ warnings.simplefilter("ignore", DeprecationWarning)
+ self.assertIn(b"foo", self.pathmodule.realpath(b"foo"))
def test_normpath_issue5827(self):
# Make sure normpath preserves unicode
@@ -296,8 +303,10 @@ class CommonTest(GenericTest):
"Mac OS X denies the creation of a directory with an invalid utf8 name")
def test_nonascii_abspath(self):
# Test non-ASCII, non-UTF8 bytes in the path.
- with support.temp_cwd(b'\xe7w\xf0'):
- self.test_abspath()
+ with warnings.catch_warnings():
+ warnings.simplefilter("ignore", DeprecationWarning)
+ with support.temp_cwd(b'\xe7w\xf0'):
+ self.test_abspath()
def test_main():
diff --git a/Lib/test/test_genexps.py b/Lib/test/test_genexps.py
index 1f46af1..413043c 100644
--- a/Lib/test/test_genexps.py
+++ b/Lib/test/test_genexps.py
@@ -257,11 +257,15 @@ Verify that genexps are weakly referencable
"""
+import sys
-__test__ = {'doctests' : doctests}
+# Trace function can throw off the tuple reuse test.
+if hasattr(sys, 'gettrace') and sys.gettrace():
+ __test__ = {}
+else:
+ __test__ = {'doctests' : doctests}
def test_main(verbose=None):
- import sys
from test import support
from test import test_genexps
support.run_doctest(test_genexps, verbose)
diff --git a/Lib/test/test_getargs2.py b/Lib/test/test_getargs2.py
index 3d9c06a..768ea8d 100644
--- a/Lib/test/test_getargs2.py
+++ b/Lib/test/test_getargs2.py
@@ -294,6 +294,15 @@ class Keywords_TestCase(unittest.TestCase):
self.fail('TypeError should have been raised')
class Bytes_TestCase(unittest.TestCase):
+ def test_c(self):
+ from _testcapi import getargs_c
+ self.assertRaises(TypeError, getargs_c, b'abc') # len > 1
+ self.assertEqual(getargs_c(b'a'), b'a')
+ self.assertEqual(getargs_c(bytearray(b'a')), b'a')
+ self.assertRaises(TypeError, getargs_c, memoryview(b'a'))
+ self.assertRaises(TypeError, getargs_c, 's')
+ self.assertRaises(TypeError, getargs_c, None)
+
def test_s(self):
from _testcapi import getargs_s
self.assertEqual(getargs_s('abc\xe9'), b'abc\xc3\xa9')
diff --git a/Lib/test/test_glob.py b/Lib/test/test_glob.py
index 1560a6b..6ee08db 100644
--- a/Lib/test/test_glob.py
+++ b/Lib/test/test_glob.py
@@ -1,5 +1,6 @@
import unittest
-from test.support import run_unittest, TESTFN, skip_unless_symlink, can_symlink
+from test.support import (run_unittest, TESTFN, skip_unless_symlink,
+ can_symlink, create_empty_file)
import glob
import os
import shutil
@@ -14,8 +15,7 @@ class GlobTests(unittest.TestCase):
base, file = os.path.split(filename)
if not os.path.exists(base):
os.makedirs(base)
- f = open(filename, 'w')
- f.close()
+ create_empty_file(filename)
def setUp(self):
self.tempdir = TESTFN+"_dir"
diff --git a/Lib/test/test_grammar.py b/Lib/test/test_grammar.py
index 268a633..6b326bd 100644
--- a/Lib/test/test_grammar.py
+++ b/Lib/test/test_grammar.py
@@ -10,7 +10,7 @@ from sys import *
class TokenTests(unittest.TestCase):
- def testBackslash(self):
+ def test_backslash(self):
# Backslash means line continuation:
x = 1 \
+ 1
@@ -20,7 +20,7 @@ class TokenTests(unittest.TestCase):
x = 0
self.assertEqual(x, 0, 'backslash ending comment')
- def testPlainIntegers(self):
+ def test_plain_integers(self):
self.assertEqual(type(000), type(0))
self.assertEqual(0xff, 255)
self.assertEqual(0o377, 255)
@@ -56,7 +56,7 @@ class TokenTests(unittest.TestCase):
else:
self.fail('Weird maxsize value %r' % maxsize)
- def testLongIntegers(self):
+ def test_long_integers(self):
x = 0
x = 0xffffffffffffffff
x = 0Xffffffffffffffff
@@ -66,7 +66,7 @@ class TokenTests(unittest.TestCase):
x = 0b100000000000000000000000000000000000000000000000000000000000000000000
x = 0B111111111111111111111111111111111111111111111111111111111111111111111
- def testFloats(self):
+ def test_floats(self):
x = 3.14
x = 314.
x = 0.314
@@ -80,7 +80,7 @@ class TokenTests(unittest.TestCase):
x = .3e14
x = 3.1e4
- def testStringLiterals(self):
+ def test_string_literals(self):
x = ''; y = ""; self.assertTrue(len(x) == 0 and x == y)
x = '\''; y = "'"; self.assertTrue(len(x) == 1 and x == y and ord(x) == 39)
x = '"'; y = "\""; self.assertTrue(len(x) == 1 and x == y and ord(x) == 34)
@@ -120,11 +120,18 @@ the \'lazy\' dog.\n\
'
self.assertEqual(x, y)
- def testEllipsis(self):
+ def test_ellipsis(self):
x = ...
self.assertTrue(x is Ellipsis)
self.assertRaises(SyntaxError, eval, ".. .")
+ def test_eof_error(self):
+ samples = ("def foo(", "\ndef foo(", "def foo(\n")
+ for s in samples:
+ with self.assertRaises(SyntaxError) as cm:
+ compile(s, "<test>", "exec")
+ self.assertIn("unexpected EOF", str(cm.exception))
+
class GrammarTests(unittest.TestCase):
# single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE
@@ -136,11 +143,11 @@ class GrammarTests(unittest.TestCase):
# expr_input: testlist NEWLINE
# XXX Hard to test -- used only in calls to input()
- def testEvalInput(self):
+ def test_eval_input(self):
# testlist ENDMARKER
x = eval('1, 0 or 1')
- def testFuncdef(self):
+ def test_funcdef(self):
### [decorators] 'def' NAME parameters ['->' test] ':' suite
### decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE
### decorators: decorator+
@@ -324,7 +331,7 @@ class GrammarTests(unittest.TestCase):
check_syntax_error(self, "f(*g(1=2))")
check_syntax_error(self, "f(**g(1=2))")
- def testLambdef(self):
+ def test_lambdef(self):
### lambdef: 'lambda' [varargslist] ':' test
l1 = lambda : 0
self.assertEqual(l1(), 0)
@@ -346,7 +353,7 @@ class GrammarTests(unittest.TestCase):
### stmt: simple_stmt | compound_stmt
# Tested below
- def testSimpleStmt(self):
+ def test_simple_stmt(self):
### simple_stmt: small_stmt (';' small_stmt)* [';']
x = 1; pass; del x
def foo():
@@ -357,7 +364,7 @@ class GrammarTests(unittest.TestCase):
### small_stmt: expr_stmt | pass_stmt | del_stmt | flow_stmt | import_stmt | global_stmt | access_stmt
# Tested below
- def testExprStmt(self):
+ def test_expr_stmt(self):
# (exprlist '=')* exprlist
1
1, 2, 3
@@ -370,7 +377,7 @@ class GrammarTests(unittest.TestCase):
check_syntax_error(self, "x + 1 = 1")
check_syntax_error(self, "a + 1 = b + 2")
- def testDelStmt(self):
+ def test_del_stmt(self):
# 'del' exprlist
abc = [1,2,3]
x, y, z = abc
@@ -379,18 +386,18 @@ class GrammarTests(unittest.TestCase):
del abc
del x, y, (z, xyz)
- def testPassStmt(self):
+ def test_pass_stmt(self):
# 'pass'
pass
# flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt
# Tested below
- def testBreakStmt(self):
+ def test_break_stmt(self):
# 'break'
while 1: break
- def testContinueStmt(self):
+ def test_continue_stmt(self):
# 'continue'
i = 1
while i: i = 0; continue
@@ -442,7 +449,7 @@ class GrammarTests(unittest.TestCase):
self.fail("continue then break in try/except in loop broken!")
test_inner()
- def testReturn(self):
+ def test_return(self):
# 'return' [testlist]
def g1(): return
def g2(): return 1
@@ -450,17 +457,49 @@ class GrammarTests(unittest.TestCase):
x = g2()
check_syntax_error(self, "class foo:return 1")
- def testYield(self):
+ def test_yield(self):
+ # Allowed as standalone statement
+ def g(): yield 1
+ def g(): yield from ()
+ # Allowed as RHS of assignment
+ def g(): x = yield 1
+ def g(): x = yield from ()
+ # Ordinary yield accepts implicit tuples
+ def g(): yield 1, 1
+ def g(): x = yield 1, 1
+ # 'yield from' does not
+ check_syntax_error(self, "def g(): yield from (), 1")
+ check_syntax_error(self, "def g(): x = yield from (), 1")
+ # Requires parentheses as subexpression
+ def g(): 1, (yield 1)
+ def g(): 1, (yield from ())
+ check_syntax_error(self, "def g(): 1, yield 1")
+ check_syntax_error(self, "def g(): 1, yield from ()")
+ # Requires parentheses as call argument
+ def g(): f((yield 1))
+ def g(): f((yield 1), 1)
+ def g(): f((yield from ()))
+ def g(): f((yield from ()), 1)
+ check_syntax_error(self, "def g(): f(yield 1)")
+ check_syntax_error(self, "def g(): f(yield 1, 1)")
+ check_syntax_error(self, "def g(): f(yield from ())")
+ check_syntax_error(self, "def g(): f(yield from (), 1)")
+ # Not allowed at top level
+ check_syntax_error(self, "yield")
+ check_syntax_error(self, "yield from")
+ # Not allowed at class scope
check_syntax_error(self, "class foo:yield 1")
+ check_syntax_error(self, "class foo:yield from ()")
+
- def testRaise(self):
+ def test_raise(self):
# 'raise' test [',' test]
try: raise RuntimeError('just testing')
except RuntimeError: pass
try: raise KeyboardInterrupt
except KeyboardInterrupt: pass
- def testImport(self):
+ def test_import(self):
# 'import' dotted_as_names
import sys
import time, sys
@@ -473,13 +512,13 @@ class GrammarTests(unittest.TestCase):
from sys import (path, argv)
from sys import (path, argv,)
- def testGlobal(self):
+ def test_global(self):
# 'global' NAME (',' NAME)*
global a
global a, b
global one, two, three, four, five, six, seven, eight, nine, ten
- def testNonlocal(self):
+ def test_nonlocal(self):
# 'nonlocal' NAME (',' NAME)*
x = 0
y = 0
@@ -487,7 +526,7 @@ class GrammarTests(unittest.TestCase):
nonlocal x
nonlocal x, y
- def testAssert(self):
+ def test_assert(self):
# assertTruestmt: 'assert' test [',' test]
assert 1
assert 1, 1
@@ -526,7 +565,7 @@ class GrammarTests(unittest.TestCase):
### compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | funcdef | classdef
# Tested below
- def testIf(self):
+ def test_if(self):
# 'if' test ':' suite ('elif' test ':' suite)* ['else' ':' suite]
if 1: pass
if 1: pass
@@ -539,7 +578,7 @@ class GrammarTests(unittest.TestCase):
elif 0: pass
else: pass
- def testWhile(self):
+ def test_while(self):
# 'while' test ':' suite ['else' ':' suite]
while 0: pass
while 0: pass
@@ -554,7 +593,7 @@ class GrammarTests(unittest.TestCase):
x = 2
self.assertEqual(x, 2)
- def testFor(self):
+ def test_for(self):
# 'for' exprlist 'in' exprlist ':' suite ['else' ':' suite]
for i in 1, 2, 3: pass
for i, j, k in (): pass
@@ -581,7 +620,7 @@ class GrammarTests(unittest.TestCase):
result.append(x)
self.assertEqual(result, [1, 2, 3])
- def testTry(self):
+ def test_try(self):
### try_stmt: 'try' ':' suite (except_clause ':' suite)+ ['else' ':' suite]
### | 'try' ':' suite 'finally' ':' suite
### except_clause: 'except' [expr ['as' expr]]
@@ -604,7 +643,7 @@ class GrammarTests(unittest.TestCase):
try: pass
finally: pass
- def testSuite(self):
+ def test_suite(self):
# simple_stmt | NEWLINE INDENT NEWLINE* (stmt NEWLINE*)+ DEDENT
if 1: pass
if 1:
@@ -619,7 +658,7 @@ class GrammarTests(unittest.TestCase):
pass
#
- def testTest(self):
+ def test_test(self):
### and_test ('or' and_test)*
### and_test: not_test ('and' not_test)*
### not_test: 'not' not_test | comparison
@@ -630,7 +669,7 @@ class GrammarTests(unittest.TestCase):
if not 1 and 1 and 1: pass
if 1 and 1 or 1 and 1 and 1 or not 1 and 1: pass
- def testComparison(self):
+ def test_comparison(self):
### comparison: expr (comp_op expr)*
### comp_op: '<'|'>'|'=='|'>='|'<='|'!='|'in'|'not' 'in'|'is'|'is' 'not'
if 1: pass
@@ -647,36 +686,36 @@ class GrammarTests(unittest.TestCase):
if 1 not in (): pass
if 1 < 1 > 1 == 1 >= 1 <= 1 != 1 in 1 not in 1 is 1 is not 1: pass
- def testBinaryMaskOps(self):
+ def test_binary_mask_ops(self):
x = 1 & 1
x = 1 ^ 1
x = 1 | 1
- def testShiftOps(self):
+ def test_shift_ops(self):
x = 1 << 1
x = 1 >> 1
x = 1 << 1 >> 1
- def testAdditiveOps(self):
+ def test_additive_ops(self):
x = 1
x = 1 + 1
x = 1 - 1 - 1
x = 1 - 1 + 1 - 1 + 1
- def testMultiplicativeOps(self):
+ def test_multiplicative_ops(self):
x = 1 * 1
x = 1 / 1
x = 1 % 1
x = 1 / 1 * 1 % 1
- def testUnaryOps(self):
+ def test_unary_ops(self):
x = +1
x = -1
x = ~1
x = ~1 ^ 1 & 1 | 1 & 1 ^ -1
x = -1*1/1 + 1*1 - ---1*1
- def testSelectors(self):
+ def test_selectors(self):
### trailer: '(' [testlist] ')' | '[' subscript ']' | '.' NAME
### subscript: expr | [expr] ':' [expr]
@@ -706,7 +745,7 @@ class GrammarTests(unittest.TestCase):
L.sort(key=lambda x: x if isinstance(x, tuple) else ())
self.assertEqual(str(L), '[1, (1,), (1, 2), (1, 2, 3)]')
- def testAtoms(self):
+ def test_atoms(self):
### atom: '(' [testlist] ')' | '[' [testlist] ']' | '{' [dictsetmaker] '}' | NAME | NUMBER | STRING
### dictsetmaker: (test ':' test (',' test ':' test)* [',']) | (test (',' test)* [','])
@@ -741,7 +780,7 @@ class GrammarTests(unittest.TestCase):
### testlist: test (',' test)* [',']
# These have been exercised enough above
- def testClassdef(self):
+ def test_classdef(self):
# 'class' NAME ['(' [testlist] ')'] ':' suite
class B: pass
class B2(): pass
@@ -760,14 +799,14 @@ class GrammarTests(unittest.TestCase):
@class_decorator
class G: pass
- def testDictcomps(self):
+ def test_dictcomps(self):
# dictorsetmaker: ( (test ':' test (comp_for |
# (',' test ':' test)* [','])) |
# (test (comp_for | (',' test)* [','])) )
nums = [1, 2, 3]
self.assertEqual({i:i+1 for i in nums}, {1: 2, 2: 3, 3: 4})
- def testListcomps(self):
+ def test_listcomps(self):
# list comprehension tests
nums = [1, 2, 3, 4, 5]
strs = ["Apple", "Banana", "Coconut"]
@@ -830,7 +869,7 @@ class GrammarTests(unittest.TestCase):
self.assertEqual(x, [('Boeing', 'Airliner'), ('Boeing', 'Engine'), ('Ford', 'Engine'),
('Macdonalds', 'Cheeseburger')])
- def testGenexps(self):
+ def test_genexps(self):
# generator expression tests
g = ([x for x in range(10)] for x in range(1))
self.assertEqual(next(g), [x for x in range(10)])
@@ -865,7 +904,7 @@ class GrammarTests(unittest.TestCase):
check_syntax_error(self, "foo(x for x in range(10), 100)")
check_syntax_error(self, "foo(100, x for x in range(10))")
- def testComprehensionSpecials(self):
+ def test_comprehension_specials(self):
# test for outermost iterable precomputation
x = 10; g = (i for i in range(x)); x = 5
self.assertEqual(len(list(g)), 10)
@@ -904,7 +943,7 @@ class GrammarTests(unittest.TestCase):
with manager() as x, manager():
pass
- def testIfElseExpr(self):
+ def test_if_else_expr(self):
# Test ifelse expressions in various cases
def _checkeval(msg, ret):
"helper to check that evaluation of expressions is done correctly"
diff --git a/Lib/test/test_gzip.py b/Lib/test/test_gzip.py
index 5ae7467..d2b4871 100644
--- a/Lib/test/test_gzip.py
+++ b/Lib/test/test_gzip.py
@@ -64,6 +64,21 @@ class TestGzip(unittest.TestCase):
d = f.read()
self.assertEqual(d, data1*50)
+ def test_read1(self):
+ self.test_write()
+ blocks = []
+ nread = 0
+ with gzip.GzipFile(self.filename, 'r') as f:
+ while True:
+ d = f.read1()
+ if not d:
+ break
+ blocks.append(d)
+ nread += len(d)
+ # Check that position was updated correctly (see issue10791).
+ self.assertEqual(f.tell(), nread)
+ self.assertEqual(b''.join(blocks), data1 * 50)
+
def test_io_on_closed_object(self):
# Test that I/O operations on closed GzipFile objects raise a
# ValueError, just like the corresponding functions on file objects.
@@ -124,7 +139,7 @@ class TestGzip(unittest.TestCase):
with io.BufferedReader(f) as r:
lines = [line for line in r]
- self.assertEqual(lines, 50 * data1.splitlines(True))
+ self.assertEqual(lines, 50 * data1.splitlines(keepends=True))
def test_readline(self):
self.test_write()
@@ -323,6 +338,14 @@ class TestGzip(unittest.TestCase):
self.assertEqual(f.read(100), b'')
self.assertEqual(nread, len(uncompressed))
+ def test_textio_readlines(self):
+ # Issue #10791: TextIOWrapper.readlines() fails when wrapping GzipFile.
+ lines = (data1 * 50).decode("ascii").splitlines(keepends=True)
+ self.test_write()
+ with gzip.GzipFile(self.filename, 'r') as f:
+ with io.TextIOWrapper(f, encoding="ascii") as t:
+ self.assertEqual(t.readlines(), lines)
+
def test_fileobj_from_fdopen(self):
# Issue #13781: Opening a GzipFile for writing fails when using a
# fileobj created with os.fdopen().
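For context, the pattern the new test_read1 and test_textio_readlines cases exercise looks like this in application code. A minimal sketch, assuming GzipFile.read1() behaves as the patch intends; the filename is hypothetical:

import gzip
import io

# Wrapping a GzipFile in TextIOWrapper relies on read1() returning data
# and tell() tracking the decompressed position (issue #10791).
with gzip.GzipFile("example.txt.gz", "r") as f:      # hypothetical file
    with io.TextIOWrapper(f, encoding="ascii") as t:
        for line in t.readlines():
            print(line, end="")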
diff --git a/Lib/test/test_hash.py b/Lib/test/test_hash.py
index c0dd34d..0776779 100644
--- a/Lib/test/test_hash.py
+++ b/Lib/test/test_hash.py
@@ -113,8 +113,7 @@ class DefaultIterSeq(object):
return self.seq[index]
class HashBuiltinsTestCase(unittest.TestCase):
- hashes_to_check = [range(10),
- enumerate(range(10)),
+ hashes_to_check = [enumerate(range(10)),
iter(DefaultIterSeq()),
iter(lambda: 0, 0),
]
@@ -160,8 +159,8 @@ class StringlikeHashRandomizationTests(HashRandomizationTests):
else:
known_hash_of_obj = -1600925533
- # Randomization is disabled by default:
- self.assertEqual(self.get_hash(self.repr_), known_hash_of_obj)
+ # Randomization is enabled by default:
+ self.assertNotEqual(self.get_hash(self.repr_), known_hash_of_obj)
# It can also be disabled by setting the seed to 0:
self.assertEqual(self.get_hash(self.repr_, seed=0), known_hash_of_obj)
@@ -193,6 +192,12 @@ class BytesHashRandomizationTests(StringlikeHashRandomizationTests):
def test_empty_string(self):
self.assertEqual(hash(b""), 0)
+class MemoryviewHashRandomizationTests(StringlikeHashRandomizationTests):
+ repr_ = "memoryview(b'abc')"
+
+ def test_empty_string(self):
+ self.assertEqual(hash(memoryview(b"")), 0)
+
class DatetimeTests(HashRandomizationTests):
def get_hash_command(self, repr_):
return 'import datetime; print(hash(%s))' % repr_
@@ -213,6 +218,7 @@ def test_main():
HashBuiltinsTestCase,
StrHashRandomizationTests,
BytesHashRandomizationTests,
+ MemoryviewHashRandomizationTests,
DatetimeDateTests,
DatetimeDatetimeTests,
DatetimeTimeTests)
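The flipped assertion above reflects that string hash randomization is now on by default, while seed 0 still disables it. A minimal sketch of observing this from a harness; the values are illustrative and vary between runs:

import os
import subprocess
import sys

cmd = [sys.executable, "-c", "print(hash('abc'))"]
# With randomization on (the default), the hash differs between runs.
print(subprocess.check_output(cmd).strip())
# PYTHONHASHSEED=0 disables randomization, giving the known stable hash.
env = dict(os.environ, PYTHONHASHSEED="0")
print(subprocess.check_output(cmd, env=env).strip())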
diff --git a/Lib/test/test_http_cookiejar.py b/Lib/test/test_http_cookiejar.py
index 41e0dfd..a35ec95 100644
--- a/Lib/test/test_http_cookiejar.py
+++ b/Lib/test/test_http_cookiejar.py
@@ -248,18 +248,19 @@ class FileCookieJarTests(unittest.TestCase):
self.assertEqual(c._cookies["www.acme.com"]["/"]["boo"].value, None)
def test_bad_magic(self):
- # IOErrors (eg. file doesn't exist) are allowed to propagate
+ # OSErrors (eg. file doesn't exist) are allowed to propagate
filename = test.support.TESTFN
for cookiejar_class in LWPCookieJar, MozillaCookieJar:
c = cookiejar_class()
try:
c.load(filename="for this test to work, a file with this "
"filename should not exist")
- except IOError as exc:
- # exactly IOError, not LoadError
- self.assertIs(exc.__class__, IOError)
+ except OSError as exc:
+ # an OSError subclass (likely FileNotFoundError), but not
+ # LoadError
+ self.assertIsNot(exc.__class__, LoadError)
else:
- self.fail("expected IOError for invalid filename")
+ self.fail("expected OSError for invalid filename")
# Invalid contents of cookies file (eg. bad magic string)
# causes a LoadError.
try:
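The rewrite above leans on the PEP 3151 exception hierarchy: IOError is now an alias of OSError, and a missing file raises the FileNotFoundError subclass, so asserting "not LoadError" is the robust check. A minimal sketch; the path is hypothetical:

# IOError and OSError are the same class in 3.3.
assert IOError is OSError
try:
    open("no-such-file-anywhere")
except OSError as exc:
    # FileNotFoundError is an OSError subclass, not a LoadError.
    print(type(exc).__name__)   # -> FileNotFoundError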
diff --git a/Lib/test/test_httplib.py b/Lib/test/test_httplib.py
index ff03321..2a0b3e3 100644
--- a/Lib/test/test_httplib.py
+++ b/Lib/test/test_httplib.py
@@ -158,6 +158,23 @@ class BasicTest(TestCase):
self.assertEqual(resp.read(2), b'xt')
self.assertTrue(resp.isclosed())
+ def test_partial_readintos(self):
+        # if we have a length, the system knows when to close itself,
+        # same behaviour as when we read the whole thing with read()
+ body = "HTTP/1.1 200 Ok\r\nContent-Length: 4\r\n\r\nText"
+ sock = FakeSocket(body)
+ resp = client.HTTPResponse(sock)
+ resp.begin()
+ b = bytearray(2)
+ n = resp.readinto(b)
+ self.assertEqual(n, 2)
+ self.assertEqual(bytes(b), b'Te')
+ self.assertFalse(resp.isclosed())
+ n = resp.readinto(b)
+ self.assertEqual(n, 2)
+ self.assertEqual(bytes(b), b'xt')
+ self.assertTrue(resp.isclosed())
+
def test_host_port(self):
# Check invalid host_port
@@ -206,6 +223,21 @@ class BasicTest(TestCase):
if resp.read():
self.fail("Did not expect response from HEAD request")
+ def test_readinto_head(self):
+ # Test that the library doesn't attempt to read any data
+ # from a HEAD request. (Tickles SF bug #622042.)
+ sock = FakeSocket(
+ 'HTTP/1.1 200 OK\r\n'
+ 'Content-Length: 14432\r\n'
+ '\r\n',
+ NoEOFStringIO)
+ resp = client.HTTPResponse(sock, method="HEAD")
+ resp.begin()
+ b = bytearray(5)
+ if resp.readinto(b) != 0:
+ self.fail("Did not expect response from HEAD request")
+ self.assertEqual(bytes(b), b'\x00'*5)
+
def test_send_file(self):
expected = (b'GET /foo HTTP/1.1\r\nHost: example.com\r\n'
b'Accept-Encoding: identity\r\nContent-Length:')
@@ -261,15 +293,28 @@ class BasicTest(TestCase):
'Transfer-Encoding: chunked\r\n\r\n'
'a\r\n'
'hello worl\r\n'
- '1\r\n'
- 'd\r\n'
+ '3\r\n'
+ 'd! \r\n'
+ '8\r\n'
+ 'and now \r\n'
+ '22\r\n'
+ 'for something completely different\r\n'
)
+ expected = b'hello world! and now for something completely different'
sock = FakeSocket(chunked_start + '0\r\n')
resp = client.HTTPResponse(sock, method="GET")
resp.begin()
- self.assertEqual(resp.read(), b'hello world')
+ self.assertEqual(resp.read(), expected)
resp.close()
+ # Various read sizes
+ for n in range(1, 12):
+ sock = FakeSocket(chunked_start + '0\r\n')
+ resp = client.HTTPResponse(sock, method="GET")
+ resp.begin()
+ self.assertEqual(resp.read(n) + resp.read(n) + resp.read(), expected)
+ resp.close()
+
for x in ('', 'foo\r\n'):
sock = FakeSocket(chunked_start + x)
resp = client.HTTPResponse(sock, method="GET")
@@ -277,9 +322,64 @@ class BasicTest(TestCase):
try:
resp.read()
except client.IncompleteRead as i:
- self.assertEqual(i.partial, b'hello world')
- self.assertEqual(repr(i),'IncompleteRead(11 bytes read)')
- self.assertEqual(str(i),'IncompleteRead(11 bytes read)')
+ self.assertEqual(i.partial, expected)
+ expected_message = 'IncompleteRead(%d bytes read)' % len(expected)
+ self.assertEqual(repr(i), expected_message)
+ self.assertEqual(str(i), expected_message)
+ else:
+ self.fail('IncompleteRead expected')
+ finally:
+ resp.close()
+
+ def test_readinto_chunked(self):
+ chunked_start = (
+ 'HTTP/1.1 200 OK\r\n'
+ 'Transfer-Encoding: chunked\r\n\r\n'
+ 'a\r\n'
+ 'hello worl\r\n'
+ '3\r\n'
+ 'd! \r\n'
+ '8\r\n'
+ 'and now \r\n'
+ '22\r\n'
+ 'for something completely different\r\n'
+ )
+ expected = b'hello world! and now for something completely different'
+ nexpected = len(expected)
+ b = bytearray(128)
+
+ sock = FakeSocket(chunked_start + '0\r\n')
+ resp = client.HTTPResponse(sock, method="GET")
+ resp.begin()
+ n = resp.readinto(b)
+ self.assertEqual(b[:nexpected], expected)
+ self.assertEqual(n, nexpected)
+ resp.close()
+
+ # Various read sizes
+ for n in range(1, 12):
+ sock = FakeSocket(chunked_start + '0\r\n')
+ resp = client.HTTPResponse(sock, method="GET")
+ resp.begin()
+ m = memoryview(b)
+ i = resp.readinto(m[0:n])
+ i += resp.readinto(m[i:n + i])
+ i += resp.readinto(m[i:])
+ self.assertEqual(b[:nexpected], expected)
+ self.assertEqual(i, nexpected)
+ resp.close()
+
+ for x in ('', 'foo\r\n'):
+ sock = FakeSocket(chunked_start + x)
+ resp = client.HTTPResponse(sock, method="GET")
+ resp.begin()
+ try:
+ n = resp.readinto(b)
+ except client.IncompleteRead as i:
+ self.assertEqual(i.partial, expected)
+ expected_message = 'IncompleteRead(%d bytes read)' % len(expected)
+ self.assertEqual(repr(i), expected_message)
+ self.assertEqual(str(i), expected_message)
else:
self.fail('IncompleteRead expected')
finally:
@@ -302,6 +402,26 @@ class BasicTest(TestCase):
self.assertEqual(resp.reason, 'OK')
self.assertTrue(resp.isclosed())
+ def test_readinto_chunked_head(self):
+ chunked_start = (
+ 'HTTP/1.1 200 OK\r\n'
+ 'Transfer-Encoding: chunked\r\n\r\n'
+ 'a\r\n'
+ 'hello world\r\n'
+ '1\r\n'
+ 'd\r\n'
+ )
+ sock = FakeSocket(chunked_start + '0\r\n')
+ resp = client.HTTPResponse(sock, method="HEAD")
+ resp.begin()
+ b = bytearray(5)
+ n = resp.readinto(b)
+ self.assertEqual(n, 0)
+ self.assertEqual(bytes(b), b'\x00'*5)
+ self.assertEqual(resp.status, 200)
+ self.assertEqual(resp.reason, 'OK')
+ self.assertTrue(resp.isclosed())
+
def test_negative_content_length(self):
sock = FakeSocket(
'HTTP/1.1 200 OK\r\nContent-Length: -1\r\n\r\nHello\r\n')
@@ -508,8 +628,7 @@ class HTTPSTest(TestCase):
def test_local_good_hostname(self):
# The (valid) cert validates the HTTP hostname
import ssl
- from test.ssl_servers import make_https_server
- server = make_https_server(self, CERT_localhost)
+ server = self.make_server(CERT_localhost)
context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
context.verify_mode = ssl.CERT_REQUIRED
context.load_verify_locations(CERT_localhost)
@@ -517,12 +636,12 @@ class HTTPSTest(TestCase):
h.request('GET', '/nonexistent')
resp = h.getresponse()
self.assertEqual(resp.status, 404)
+ del server
def test_local_bad_hostname(self):
# The (valid) cert doesn't validate the HTTP hostname
import ssl
- from test.ssl_servers import make_https_server
- server = make_https_server(self, CERT_fakehostname)
+ server = self.make_server(CERT_fakehostname)
context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
context.verify_mode = ssl.CERT_REQUIRED
context.load_verify_locations(CERT_fakehostname)
@@ -540,6 +659,7 @@ class HTTPSTest(TestCase):
h.request('GET', '/nonexistent')
resp = h.getresponse()
self.assertEqual(resp.status, 404)
+ del server
@unittest.skipIf(not hasattr(client, 'HTTPSConnection'),
'http.client.HTTPSConnection not available')
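The readinto tests above fill one caller-supplied buffer piecewise; slicing a memoryview lets successive readinto() calls land in adjacent regions of the same bytearray without copying. The same pattern against a plain BytesIO (HTTPResponse behaves alike for body data), as a minimal sketch:

import io

src = io.BytesIO(b"hello world")
buf = bytearray(11)
m = memoryview(buf)
n = src.readinto(m[0:5])     # first five bytes land at the start
n += src.readinto(m[n:])     # the rest is written right after them
assert n == 11 and bytes(buf) == b"hello world"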
diff --git a/Lib/test/test_httpservers.py b/Lib/test/test_httpservers.py
index 4d58e4b..1b807dd 100644
--- a/Lib/test/test_httpservers.py
+++ b/Lib/test/test_httpservers.py
@@ -466,6 +466,23 @@ class RejectingSocketlessRequestHandler(SocketlessRequestHandler):
self.send_error(417)
return False
+
+class AuditableBytesIO:
+
+ def __init__(self):
+ self.datas = []
+
+ def write(self, data):
+ self.datas.append(data)
+
+ def getData(self):
+ return b''.join(self.datas)
+
+ @property
+ def numWrites(self):
+ return len(self.datas)
+
+
class BaseHTTPRequestHandlerTestCase(unittest.TestCase):
"""Test the functionality of the BaseHTTPServer.
@@ -532,27 +549,49 @@ class BaseHTTPRequestHandlerTestCase(unittest.TestCase):
self.verify_get_called()
self.assertEqual(result[-1], b'<html><body>Data</body></html>\r\n')
- def test_header_buffering(self):
+ def test_header_buffering_of_send_error(self):
- def _readAndReseek(f):
- pos = f.tell()
- f.seek(0)
- data = f.read()
- f.seek(pos)
- return data
+ input = BytesIO(b'GET / HTTP/1.1\r\n\r\n')
+ output = AuditableBytesIO()
+ handler = SocketlessRequestHandler()
+ handler.rfile = input
+ handler.wfile = output
+ handler.request_version = 'HTTP/1.1'
+ handler.requestline = ''
+ handler.command = None
+
+ handler.send_error(418)
+ self.assertEqual(output.numWrites, 2)
+
+ def test_header_buffering_of_send_response_only(self):
input = BytesIO(b'GET / HTTP/1.1\r\n\r\n')
- output = BytesIO()
- self.handler.rfile = input
- self.handler.wfile = output
- self.handler.request_version = 'HTTP/1.1'
+ output = AuditableBytesIO()
+ handler = SocketlessRequestHandler()
+ handler.rfile = input
+ handler.wfile = output
+ handler.request_version = 'HTTP/1.1'
- self.handler.send_header('Foo', 'foo')
- self.handler.send_header('bar', 'bar')
- self.assertEqual(_readAndReseek(output), b'')
- self.handler.end_headers()
- self.assertEqual(_readAndReseek(output),
- b'Foo: foo\r\nbar: bar\r\n\r\n')
+ handler.send_response_only(418)
+ self.assertEqual(output.numWrites, 0)
+ handler.end_headers()
+ self.assertEqual(output.numWrites, 1)
+
+ def test_header_buffering_of_send_header(self):
+
+ input = BytesIO(b'GET / HTTP/1.1\r\n\r\n')
+ output = AuditableBytesIO()
+ handler = SocketlessRequestHandler()
+ handler.rfile = input
+ handler.wfile = output
+ handler.request_version = 'HTTP/1.1'
+
+ handler.send_header('Foo', 'foo')
+ handler.send_header('bar', 'bar')
+ self.assertEqual(output.numWrites, 0)
+ handler.end_headers()
+ self.assertEqual(output.getData(), b'Foo: foo\r\nbar: bar\r\n\r\n')
+ self.assertEqual(output.numWrites, 1)
def test_header_unbuffered_when_continue(self):
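AuditableBytesIO counts write() calls so the three tests above can prove that send_header() only buffers and end_headers() flushes everything in one write. The buffer-then-flush behaviour in isolation, as a minimal sketch independent of http.server:

import io

class BufferingWriter:
    # Sketch: accumulate headers, emit exactly one write() on flush.
    def __init__(self, sink):
        self.sink = sink
        self._buffer = []

    def send_header(self, key, value):
        self._buffer.append(("%s: %s\r\n" % (key, value)).encode("latin-1"))

    def end_headers(self):
        self._buffer.append(b"\r\n")
        self.sink.write(b"".join(self._buffer))   # a single write() call
        self._buffer = []

sink = io.BytesIO()
w = BufferingWriter(sink)
w.send_header("Foo", "foo")
w.send_header("bar", "bar")
assert sink.getvalue() == b""                     # nothing flushed yet
w.end_headers()
assert sink.getvalue() == b"Foo: foo\r\nbar: bar\r\n\r\n"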
diff --git a/Lib/test/test_imaplib.py b/Lib/test/test_imaplib.py
index 8034000..c4c7ecc 100644
--- a/Lib/test/test_imaplib.py
+++ b/Lib/test/test_imaplib.py
@@ -258,11 +258,58 @@ class RemoteIMAP_SSLTest(RemoteIMAPTest):
port = 993
imap_class = IMAP4_SSL
+ def setUp(self):
+ pass
+
+ def tearDown(self):
+ pass
+
+ def create_ssl_context(self):
+ ssl_context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
+ ssl_context.load_cert_chain(CERTFILE)
+ return ssl_context
+
+ def check_logincapa(self, server):
+ try:
+ for cap in server.capabilities:
+ self.assertIsInstance(cap, str)
+ self.assertFalse('LOGINDISABLED' in server.capabilities)
+ self.assertTrue('AUTH=PLAIN' in server.capabilities)
+ rs = server.login(self.username, self.password)
+ self.assertEqual(rs[0], 'OK')
+ finally:
+ server.logout()
+
def test_logincapa(self):
- for cap in self.server.capabilities:
- self.assertIsInstance(cap, str)
- self.assertFalse('LOGINDISABLED' in self.server.capabilities)
- self.assertTrue('AUTH=PLAIN' in self.server.capabilities)
+ with transient_internet(self.host):
+ _server = self.imap_class(self.host, self.port)
+    :param bind_and_activate: If True (the default), binds the server and starts it
+
+ def test_logincapa_with_client_certfile(self):
+ with transient_internet(self.host):
+ _server = self.imap_class(self.host, self.port, certfile=CERTFILE)
+ self.check_logincapa(_server)
+
+ def test_logincapa_with_client_ssl_context(self):
+ with transient_internet(self.host):
+ _server = self.imap_class(self.host, self.port, ssl_context=self.create_ssl_context())
+ self.check_logincapa(_server)
+
+ def test_logout(self):
+ with transient_internet(self.host):
+ _server = self.imap_class(self.host, self.port)
+ rs = _server.logout()
+ self.assertEqual(rs[0], 'BYE')
+
+ def test_ssl_context_certfile_exclusive(self):
+ with transient_internet(self.host):
+ self.assertRaises(ValueError, self.imap_class, self.host, self.port,
+ certfile=CERTFILE, ssl_context=self.create_ssl_context())
+
+ def test_ssl_context_keyfile_exclusive(self):
+ with transient_internet(self.host):
+ self.assertRaises(ValueError, self.imap_class, self.host, self.port,
+ keyfile=CERTFILE, ssl_context=self.create_ssl_context())
def test_main():
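The new tests construct IMAP4_SSL either with certfile/keyfile or with a prebuilt ssl.SSLContext, and check that mixing the two raises ValueError. A minimal sketch of the context-based form; host, port and certificate path are hypothetical placeholders:

import imaplib
import ssl

ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
ctx.load_cert_chain("client.pem")                 # hypothetical cert file
server = imaplib.IMAP4_SSL("imap.example.com", 993, ssl_context=ctx)
typ, data = server.login("user", "password")      # hypothetical credentials
server.logout()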
diff --git a/Lib/test/test_imp.py b/Lib/test/test_imp.py
index 551ad1b..cd34a46 100644
--- a/Lib/test/test_imp.py
+++ b/Lib/test/test_imp.py
@@ -58,6 +58,12 @@ class ImportTests(unittest.TestCase):
with imp.find_module('module_' + mod, self.test_path)[0] as fd:
self.assertEqual(fd.encoding, encoding)
+ path = [os.path.dirname(__file__)]
+ self.assertRaisesRegex(SyntaxError,
+ r"Non-UTF-8 code starting with '\\xf6'"
+ r" in file .*badsyntax_pep3120.py",
+ imp.find_module, 'badsyntax_pep3120', path)
+
def test_issue1267(self):
for mod, encoding, _ in self.test_strings:
fp, filename, info = imp.find_module('module_' + mod,
@@ -215,6 +221,10 @@ class PEP3147Tests(unittest.TestCase):
self.assertEqual(
imp.cache_from_source('/foo/bar/baz/qux.py', True),
'/foo/bar/baz/__pycache__/qux.{}.pyc'.format(self.tag))
+ # Directory with a dot, filename without dot
+ self.assertEqual(
+ imp.cache_from_source('/foo.bar/file', True),
+ '/foo.bar/__pycache__/file{}.pyc'.format(self.tag))
def test_cache_from_source_optimized(self):
# Given the path to a .py file, return the path to its PEP 3147
@@ -314,14 +324,16 @@ class PEP3147Tests(unittest.TestCase):
shutil.rmtree('pep3147')
self.addCleanup(cleanup)
# Touch the __init__.py file.
- with open('pep3147/__init__.py', 'w'):
- pass
+ support.create_empty_file('pep3147/__init__.py')
+ importlib.invalidate_caches()
+ expected___file__ = os.sep.join(('.', 'pep3147', '__init__.py'))
m = __import__('pep3147')
+ self.assertEqual(m.__file__, expected___file__, (m.__file__, m.__path__, sys.path, sys.path_importer_cache))
# Ensure we load the pyc file.
- support.forget('pep3147')
+ support.unload('pep3147')
m = __import__('pep3147')
- self.assertEqual(m.__file__,
- os.sep.join(('.', 'pep3147', '__init__.py')))
+ support.unload('pep3147')
+ self.assertEqual(m.__file__, expected___file__, (m.__file__, m.__path__, sys.path, sys.path_importer_cache))
class NullImporterTests(unittest.TestCase):
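The added assertion covers a dotted directory combined with an extension-less source name; cache_from_source() maps a source path to its PEP 3147 __pycache__ location using the interpreter's magic tag. A minimal sketch:

import imp

tag = imp.get_tag()   # e.g. "cpython-33", depends on the interpreter
print(imp.cache_from_source('/foo/bar/baz/qux.py', True))
# -> /foo/bar/baz/__pycache__/qux.<tag>.pyc
print(imp.cache_from_source('/foo.bar/file', True))
# -> /foo.bar/__pycache__/file<tag>.pyc (the new corner case)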
diff --git a/Lib/test/test_import.py b/Lib/test/test_import.py
index 48443ea..bd2da72 100644
--- a/Lib/test/test_import.py
+++ b/Lib/test/test_import.py
@@ -2,6 +2,7 @@ import builtins
import imp
from importlib.test.import_ import test_relative_imports
from importlib.test.import_ import util as importlib_util
+import importlib
import marshal
import os
import platform
@@ -16,7 +17,7 @@ import errno
from test.support import (
EnvironmentVarGuard, TESTFN, check_warnings, forget, is_jython,
make_legacy_pyc, rmtree, run_unittest, swap_attr, swap_item, temp_umask,
- unlink, unload)
+ unlink, unload, create_empty_file)
from test import script_helper
@@ -34,6 +35,7 @@ class ImportTests(unittest.TestCase):
def setUp(self):
remove_files(TESTFN)
+ importlib.invalidate_caches()
def tearDown(self):
unload(TESTFN)
@@ -98,25 +100,24 @@ class ImportTests(unittest.TestCase):
@unittest.skipUnless(os.name == 'posix',
"test meaningful only on posix systems")
- def test_execute_bit_not_copied(self):
- # Issue 6070: under posix .pyc files got their execute bit set if
- # the .py file had the execute bit set, but they aren't executable.
- with temp_umask(0o022):
+ def test_creation_mode(self):
+ mask = 0o022
+ with temp_umask(mask):
sys.path.insert(0, os.curdir)
try:
fname = TESTFN + os.extsep + "py"
- open(fname, 'w').close()
- os.chmod(fname, (stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH |
- stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH))
+ create_empty_file(fname)
fn = imp.cache_from_source(fname)
unlink(fn)
+ importlib.invalidate_caches()
__import__(TESTFN)
if not os.path.exists(fn):
self.fail("__import__ did not result in creation of "
"either a .pyc or .pyo file")
s = os.stat(fn)
- self.assertEqual(stat.S_IMODE(s.st_mode),
- stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH)
+ # Check that the umask is respected, and the executable bits
+ # aren't set.
+ self.assertEqual(stat.S_IMODE(s.st_mode), 0o666 & ~mask)
finally:
del sys.path[0]
remove_files(TESTFN)
@@ -262,6 +263,7 @@ class ImportTests(unittest.TestCase):
os.remove(source)
del sys.modules[TESTFN]
make_legacy_pyc(source)
+ importlib.invalidate_caches()
mod = __import__(TESTFN)
base, ext = os.path.splitext(mod.__file__)
self.assertIn(ext, ('.pyc', '.pyo'))
@@ -297,8 +299,6 @@ class ImportTests(unittest.TestCase):
self.skipTest('path is not encodable to {}'.format(encoding))
with self.assertRaises(ImportError) as c:
__import__(path)
- self.assertEqual("Import by filename is not supported.",
- c.exception.args[0])
def test_import_in_del_does_not_crash(self):
# Issue 4236
@@ -362,6 +362,7 @@ func_filename = func.__code__.co_filename
with open(self.file_name, "w") as f:
f.write(self.module_source)
sys.path.insert(0, self.dir_name)
+ importlib.invalidate_caches()
def tearDown(self):
sys.path[:] = self.sys_path
@@ -409,7 +410,7 @@ func_filename = func.__code__.co_filename
def test_foreign_code(self):
py_compile.compile(self.file_name)
with open(self.compiled_name, "rb") as f:
- header = f.read(8)
+ header = f.read(12)
code = marshal.load(f)
constants = list(code.co_consts)
foreign_code = test_main.__code__
@@ -556,6 +557,7 @@ class PycacheTests(unittest.TestCase):
with open(self.source, 'w') as fp:
print('# This is a test file written by test_import.py', file=fp)
sys.path.insert(0, os.curdir)
+ importlib.invalidate_caches()
def tearDown(self):
assert sys.path[0] == os.curdir, 'Unexpected sys.path[0]'
@@ -603,6 +605,7 @@ class PycacheTests(unittest.TestCase):
pyc_file = make_legacy_pyc(self.source)
os.remove(self.source)
unload(TESTFN)
+ importlib.invalidate_caches()
m = __import__(TESTFN)
self.assertEqual(m.__file__,
os.path.join(os.curdir, os.path.relpath(pyc_file)))
@@ -623,6 +626,7 @@ class PycacheTests(unittest.TestCase):
pyc_file = make_legacy_pyc(self.source)
os.remove(self.source)
unload(TESTFN)
+ importlib.invalidate_caches()
m = __import__(TESTFN)
self.assertEqual(m.__cached__,
os.path.join(os.curdir, os.path.relpath(pyc_file)))
@@ -673,6 +677,16 @@ class PycacheTests(unittest.TestCase):
self.assertEqual(sys.modules['pep3147.foo'].__cached__,
os.path.join(os.curdir, foo_pyc))
+ def test_recompute_pyc_same_second(self):
+ # Even when the source file doesn't change timestamp, a change in
+ # source size is enough to trigger recomputation of the pyc file.
+ __import__(TESTFN)
+ unload(TESTFN)
+ with open(self.source, 'a') as fp:
+ print("x = 5", file=fp)
+ m = __import__(TESTFN)
+ self.assertEqual(m.x, 5)
+
class RelativeImportFromImportlibTests(test_relative_imports.RelativeImports):
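The importlib.invalidate_caches() calls sprinkled through these tests are needed because the 3.3 import system caches directory listings: after a test writes a new source or legacy pyc file, the finders must drop their caches or __import__ can miss it. A minimal sketch; "newmod" is a hypothetical module created on the fly:

import importlib
import sys

with open("newmod.py", "w") as fp:
    fp.write("x = 5\n")
sys.path.insert(0, ".")
importlib.invalidate_caches()   # without this, the finder may not see newmod
m = __import__("newmod")
assert m.x == 5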
diff --git a/Lib/test/test_importhooks.py b/Lib/test/test_importhooks.py
index ec6730e..7a25657 100644
--- a/Lib/test/test_importhooks.py
+++ b/Lib/test/test_importhooks.py
@@ -51,7 +51,7 @@ class TestImporter:
def __init__(self, path=test_path):
if path != test_path:
- # if out class is on sys.path_hooks, we must raise
+ # if our class is on sys.path_hooks, we must raise
# ImportError for any path item that we can't handle.
raise ImportError
self.path = path
@@ -229,7 +229,9 @@ class ImportHooksTestCase(ImportHooksBaseTestCase):
i = ImpWrapper()
sys.meta_path.append(i)
sys.path_hooks.append(ImpWrapper)
- mnames = ("colorsys", "urllib.parse", "distutils.core")
+ mnames = (
+ "colorsys", "urllib.parse", "distutils.core", "sys",
+ )
for mname in mnames:
parent = mname.split(".")[0]
for n in list(sys.modules):
@@ -237,7 +239,8 @@ class ImportHooksTestCase(ImportHooksBaseTestCase):
del sys.modules[n]
for mname in mnames:
m = __import__(mname, globals(), locals(), ["__dummy__"])
- m.__loader__ # to make sure we actually handled the import
+ # to make sure we actually handled the import
+ self.assertTrue(hasattr(m, "__loader__"))
def test_main():
diff --git a/Lib/test/test_inspect.py b/Lib/test/test_inspect.py
index 4b7ee4e..d840bbe 100644
--- a/Lib/test/test_inspect.py
+++ b/Lib/test/test_inspect.py
@@ -304,7 +304,7 @@ class TestRetrievingSourceCode(GetSourceBase):
getlines = linecache.getlines
def monkey(filename, module_globals=None):
if filename == fn:
- return source.splitlines(True)
+ return source.splitlines(keepends=True)
else:
return getlines(filename, module_globals)
linecache.getlines = monkey
diff --git a/Lib/test/test_io.py b/Lib/test/test_io.py
index ea82cea..96258b4 100644
--- a/Lib/test/test_io.py
+++ b/Lib/test/test_io.py
@@ -49,7 +49,7 @@ except ImportError:
def _default_chunk_size():
"""Get the default TextIOWrapper chunk size"""
- with open(__file__, "r", encoding="latin1") as f:
+ with open(__file__, "r", encoding="latin-1") as f:
return f._CHUNK_SIZE
@@ -634,6 +634,15 @@ class IOTest(unittest.TestCase):
for obj in test:
self.assertTrue(hasattr(obj, "__dict__"))
+ def test_opener(self):
+ with self.open(support.TESTFN, "w") as f:
+ f.write("egg\n")
+ fd = os.open(support.TESTFN, os.O_RDONLY)
+ def opener(path, flags):
+ return fd
+ with self.open("non-existent", "r", opener=opener) as f:
+ self.assertEqual(f.read(), "egg\n")
+
class CIOTest(IOTest):
def test_IOBase_finalize(self):
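test_opener above exercises the opener parameter new to open(): a callable receiving (path, flags) and returning a file descriptor, letting callers control how the underlying fd is produced. A minimal sketch; the filename is hypothetical:

import os

def opener(path, flags):
    # e.g. force a restrictive mode on creation
    return os.open(path, flags | os.O_CREAT, 0o600)

with open("scratch.txt", "w", opener=opener) as f:
    f.write("egg\n")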
@@ -835,6 +844,12 @@ class BufferedReaderTest(unittest.TestCase, CommonBufferedTests):
self.assertEqual(b, b"gf")
self.assertEqual(bufio.readinto(b), 0)
self.assertEqual(b, b"gf")
+ rawio = self.MockRawIO((b"abc", None))
+ bufio = self.tp(rawio)
+ self.assertEqual(bufio.readinto(b), 2)
+ self.assertEqual(b, b"ab")
+ self.assertEqual(bufio.readinto(b), 1)
+ self.assertEqual(b, b"cb")
def test_readlines(self):
def bufio():
@@ -1802,11 +1817,11 @@ class TextIOWrapperTest(unittest.TestCase):
r = self.BytesIO(b"\xc3\xa9\n\n")
b = self.BufferedReader(r, 1000)
t = self.TextIOWrapper(b)
- t.__init__(b, encoding="latin1", newline="\r\n")
- self.assertEqual(t.encoding, "latin1")
+ t.__init__(b, encoding="latin-1", newline="\r\n")
+ self.assertEqual(t.encoding, "latin-1")
self.assertEqual(t.line_buffering, False)
- t.__init__(b, encoding="utf8", line_buffering=True)
- self.assertEqual(t.encoding, "utf8")
+ t.__init__(b, encoding="utf-8", line_buffering=True)
+ self.assertEqual(t.encoding, "utf-8")
self.assertEqual(t.line_buffering, True)
self.assertEqual("\xe9\n", t.readline())
self.assertRaises(TypeError, t.__init__, b, newline=42)
@@ -1856,8 +1871,8 @@ class TextIOWrapperTest(unittest.TestCase):
def test_encoding(self):
# Check the encoding attribute is always set, and valid
b = self.BytesIO()
- t = self.TextIOWrapper(b, encoding="utf8")
- self.assertEqual(t.encoding, "utf8")
+ t = self.TextIOWrapper(b, encoding="utf-8")
+ self.assertEqual(t.encoding, "utf-8")
t = self.TextIOWrapper(b)
self.assertTrue(t.encoding is not None)
codecs.lookup(t.encoding)
@@ -1950,8 +1965,8 @@ class TextIOWrapperTest(unittest.TestCase):
testdata = b"AAA\nBB\x00B\nCCC\rDDD\rEEE\r\nFFF\r\nGGG"
normalized = testdata.replace(b"\r\n", b"\n").replace(b"\r", b"\n")
for newline, expected in [
- (None, normalized.decode("ascii").splitlines(True)),
- ("", testdata.decode("ascii").splitlines(True)),
+ (None, normalized.decode("ascii").splitlines(keepends=True)),
+ ("", testdata.decode("ascii").splitlines(keepends=True)),
("\n", ["AAA\n", "BB\x00B\n", "CCC\rDDD\rEEE\r\n", "FFF\r\n", "GGG"]),
("\r\n", ["AAA\nBB\x00B\nCCC\rDDD\rEEE\r\n", "FFF\r\n", "GGG"]),
("\r", ["AAA\nBB\x00B\nCCC\r", "DDD\r", "EEE\r", "\nFFF\r", "\nGGG"]),
@@ -2036,7 +2051,7 @@ class TextIOWrapperTest(unittest.TestCase):
def test_basic_io(self):
for chunksize in (1, 2, 3, 4, 5, 15, 16, 17, 31, 32, 33, 63, 64, 65):
- for enc in "ascii", "latin1", "utf8" :# , "utf-16-be", "utf-16-le":
+ for enc in "ascii", "latin-1", "utf-8" :# , "utf-16-be", "utf-16-le":
f = self.open(support.TESTFN, "w+", encoding=enc)
f._CHUNK_SIZE = chunksize
self.assertEqual(f.write("abc"), 3)
@@ -2086,7 +2101,7 @@ class TextIOWrapperTest(unittest.TestCase):
self.assertEqual(rlines, wlines)
def test_telling(self):
- f = self.open(support.TESTFN, "w+", encoding="utf8")
+ f = self.open(support.TESTFN, "w+", encoding="utf-8")
p0 = f.tell()
f.write("\xff\n")
p1 = f.tell()
@@ -2332,6 +2347,7 @@ class TextIOWrapperTest(unittest.TestCase):
with self.open(support.TESTFN, "w", errors="replace") as f:
self.assertEqual(f.errors, "replace")
+ @support.no_tracing
@unittest.skipUnless(threading, 'Threading required for this test.')
def test_threads_write(self):
# Issue6750: concurrent writes could duplicate data
@@ -2649,12 +2665,6 @@ class MiscIOTest(unittest.TestCase):
def test_blockingioerror(self):
# Various BlockingIOError issues
- self.assertRaises(TypeError, self.BlockingIOError)
- self.assertRaises(TypeError, self.BlockingIOError, 1)
- self.assertRaises(TypeError, self.BlockingIOError, 1, 2, 3, 4)
- self.assertRaises(TypeError, self.BlockingIOError, 1, "", None)
- b = self.BlockingIOError(1, "")
- self.assertEqual(b.characters_written, 0)
class C(str):
pass
c = C("")
@@ -2796,6 +2806,7 @@ class MiscIOTest(unittest.TestCase):
except self.BlockingIOError as e:
self.assertEqual(e.args[0], errno.EAGAIN)
+ self.assertEqual(e.args[2], e.characters_written)
sent[-1] = sent[-1][:e.characters_written]
received.append(rf.read())
msg = b'BLOCKED'
@@ -2808,6 +2819,7 @@ class MiscIOTest(unittest.TestCase):
break
except self.BlockingIOError as e:
self.assertEqual(e.args[0], errno.EAGAIN)
+ self.assertEqual(e.args[2], e.characters_written)
self.assertEqual(e.characters_written, 0)
received.append(rf.read())
@@ -2818,6 +2830,19 @@ class MiscIOTest(unittest.TestCase):
self.assertTrue(wf.closed)
self.assertTrue(rf.closed)
+ def test_create_fail(self):
+ # 'x' mode fails if file is existing
+ with self.open(support.TESTFN, 'w'):
+ pass
+ self.assertRaises(FileExistsError, self.open, support.TESTFN, 'x')
+
+ def test_create_writes(self):
+ # 'x' mode opens for writing
+ with self.open(support.TESTFN, 'xb') as f:
+ f.write(b"spam")
+ with self.open(support.TESTFN, 'rb') as f:
+ self.assertEqual(b"spam", f.read())
+
class CMiscIOTest(MiscIOTest):
io = io
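The two test_create_* cases cover the exclusive-creation mode: 'x' opens for writing but raises FileExistsError if the file already exists, closing the classic check-then-open race. A minimal sketch; the filename is hypothetical:

with open("fresh.bin", "xb") as f:
    f.write(b"spam")
try:
    open("fresh.bin", "x")
except FileExistsError:
    print("already there - nothing was truncated")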
@@ -2838,14 +2863,14 @@ class SignalsTest(unittest.TestCase):
1/0
@unittest.skipUnless(threading, 'Threading required for this test.')
- @unittest.skipIf(sys.platform in ('freebsd5', 'freebsd6', 'freebsd7'),
- 'issue #12429: skip test on FreeBSD <= 7')
def check_interrupted_write(self, item, bytes, **fdopen_kwargs):
"""Check that a partial write, when it gets interrupted, properly
invokes the signal handler, and bubbles up the exception raised
in the latter."""
read_results = []
def _read():
+ if hasattr(signal, 'pthread_sigmask'):
+ signal.pthread_sigmask(signal.SIG_BLOCK, [signal.SIGALRM])
s = os.read(r, 1)
read_results.append(s)
t = threading.Thread(target=_read)
@@ -2863,7 +2888,7 @@ class SignalsTest(unittest.TestCase):
# The buffered IO layer must check for pending signal
# handlers, which in this case will invoke alarm_interrupt().
self.assertRaises(ZeroDivisionError,
- wio.write, item * (1024 * 1024))
+ wio.write, item * (support.PIPE_MAX_SIZE // len(item)))
t.join()
# We got one byte, get another one and check that it isn't a
# repeat of the first one.
@@ -2890,6 +2915,7 @@ class SignalsTest(unittest.TestCase):
def test_interrupted_write_text(self):
self.check_interrupted_write("xy", b"xy", mode="w", encoding="ascii")
+ @support.no_tracing
def check_reentrant_write(self, data, **fdopen_kwargs):
def on_alarm(*args):
# Will be called reentrantly from the same thread
diff --git a/Lib/test/test_itertools.py b/Lib/test/test_itertools.py
index 8cdc597..957991c 100644
--- a/Lib/test/test_itertools.py
+++ b/Lib/test/test_itertools.py
@@ -69,11 +69,21 @@ class TestBasicOps(unittest.TestCase):
self.assertEqual(list(accumulate('abc')), ['a', 'ab', 'abc']) # works with non-numeric
self.assertEqual(list(accumulate([])), []) # empty iterable
self.assertEqual(list(accumulate([7])), [7]) # iterable of length one
- self.assertRaises(TypeError, accumulate, range(10), 5) # too many args
+ self.assertRaises(TypeError, accumulate, range(10), 5, 6) # too many args
self.assertRaises(TypeError, accumulate) # too few args
self.assertRaises(TypeError, accumulate, x=range(10)) # unexpected kwd arg
self.assertRaises(TypeError, list, accumulate([1, []])) # args that don't add
+ s = [2, 8, 9, 5, 7, 0, 3, 4, 1, 6]
+ self.assertEqual(list(accumulate(s, min)),
+ [2, 2, 2, 2, 2, 0, 0, 0, 0, 0])
+ self.assertEqual(list(accumulate(s, max)),
+ [2, 8, 9, 9, 9, 9, 9, 9, 9, 9])
+ self.assertEqual(list(accumulate(s, operator.mul)),
+ [2, 16, 144, 720, 5040, 0, 0, 0, 0, 0])
+ with self.assertRaises(TypeError):
+ list(accumulate(s, chr)) # unary-operation
+
def test_chain(self):
def chain2(*iterables):
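The added cases show accumulate()'s optional binary-function argument: instead of running sums, any two-argument callable folds the running value with each element. A minimal sketch:

import operator
from itertools import accumulate

print(list(accumulate([1, 2, 3, 4])))                # [1, 3, 6, 10]
print(list(accumulate([1, 2, 3, 4], operator.mul)))  # [1, 2, 6, 24]
print(list(accumulate([2, 8, 1, 5], max)))           # [2, 8, 8, 8]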
@@ -158,7 +168,8 @@ class TestBasicOps(unittest.TestCase):
self.assertEqual(result, list(combinations2(values, r))) # matches second pure python version
self.assertEqual(result, list(combinations3(values, r))) # matches second pure python version
- # Test implementation detail: tuple re-use
+ @support.impl_detail("tuple reuse is specific to CPython")
+ def test_combinations_tuple_reuse(self):
self.assertEqual(len(set(map(id, combinations('abcde', 3)))), 1)
self.assertNotEqual(len(set(map(id, list(combinations('abcde', 3))))), 1)
@@ -228,7 +239,9 @@ class TestBasicOps(unittest.TestCase):
self.assertEqual(result, list(cwr1(values, r))) # matches first pure python version
self.assertEqual(result, list(cwr2(values, r))) # matches second pure python version
- # Test implementation detail: tuple re-use
+ @support.impl_detail("tuple reuse is specific to CPython")
+ def test_combinations_with_replacement_tuple_reuse(self):
+ cwr = combinations_with_replacement
self.assertEqual(len(set(map(id, cwr('abcde', 3)))), 1)
self.assertNotEqual(len(set(map(id, list(cwr('abcde', 3))))), 1)
@@ -292,7 +305,8 @@ class TestBasicOps(unittest.TestCase):
self.assertEqual(result, list(permutations(values, None))) # test r as None
self.assertEqual(result, list(permutations(values))) # test default r
- # Test implementation detail: tuple re-use
+    @support.impl_detail("tuple reuse is specific to CPython")
+ def test_permutations_tuple_reuse(self):
self.assertEqual(len(set(map(id, permutations('abcde', 3)))), 1)
self.assertNotEqual(len(set(map(id, list(permutations('abcde', 3))))), 1)
@@ -556,11 +570,13 @@ class TestBasicOps(unittest.TestCase):
self.assertEqual(list(zip()), lzip())
self.assertRaises(TypeError, zip, 3)
self.assertRaises(TypeError, zip, range(3), 3)
- # Check tuple re-use (implementation detail)
self.assertEqual([tuple(list(pair)) for pair in zip('abc', 'def')],
lzip('abc', 'def'))
self.assertEqual([pair for pair in zip('abc', 'def')],
lzip('abc', 'def'))
+
+ @support.impl_detail("tuple reuse is specific to CPython")
+ def test_zip_tuple_reuse(self):
ids = list(map(id, zip('abc', 'def')))
self.assertEqual(min(ids), max(ids))
ids = list(map(id, list(zip('abc', 'def'))))
@@ -603,11 +619,13 @@ class TestBasicOps(unittest.TestCase):
else:
self.fail('Did not raise Type in: ' + stmt)
- # Check tuple re-use (implementation detail)
self.assertEqual([tuple(list(pair)) for pair in zip_longest('abc', 'def')],
list(zip('abc', 'def')))
self.assertEqual([pair for pair in zip_longest('abc', 'def')],
list(zip('abc', 'def')))
+
+ @support.impl_detail("tuple reuse is specific to CPython")
+ def test_zip_longest_tuple_reuse(self):
ids = list(map(id, zip_longest('abc', 'def')))
self.assertEqual(min(ids), max(ids))
ids = list(map(id, list(zip_longest('abc', 'def'))))
@@ -711,7 +729,8 @@ class TestBasicOps(unittest.TestCase):
args = map(iter, args)
self.assertEqual(len(list(product(*args))), expected_len)
- # Test implementation detail: tuple re-use
+ @support.impl_detail("tuple reuse is specific to CPython")
+ def test_product_tuple_reuse(self):
self.assertEqual(len(set(map(id, product('abc', 'def')))), 1)
self.assertNotEqual(len(set(map(id, list(product('abc', 'def'))))), 1)
diff --git a/Lib/test/test_keywordonlyarg.py b/Lib/test/test_keywordonlyarg.py
index 3aebd68..61d40f9 100644
--- a/Lib/test/test_keywordonlyarg.py
+++ b/Lib/test/test_keywordonlyarg.py
@@ -78,7 +78,7 @@ class KeywordOnlyArgTestCase(unittest.TestCase):
pass
with self.assertRaises(TypeError) as exc:
f(1, 2, 3)
- expected = "f() takes at most 2 positional arguments (3 given)"
+ expected = "f() takes from 1 to 2 positional arguments but 3 were given"
self.assertEqual(str(exc.exception), expected)
def testSyntaxErrorForFunctionCall(self):
diff --git a/Lib/test/test_lib2to3.py b/Lib/test/test_lib2to3.py
index 1afaf70..df4c37b 100644
--- a/Lib/test/test_lib2to3.py
+++ b/Lib/test/test_lib2to3.py
@@ -9,8 +9,8 @@ from test.support import run_unittest
def suite():
tests = unittest.TestSuite()
loader = unittest.TestLoader()
- for m in (test_fixers, test_pytree,test_util, test_refactor,
- test_parser, test_main_):
+ for m in (test_fixers, test_pytree, test_util, test_refactor, test_parser,
+ test_main_):
tests.addTests(loader.loadTestsFromModule(m))
return tests
diff --git a/Lib/test/test_locale.py b/Lib/test/test_locale.py
index 8f7574b..7fdb6da 100644
--- a/Lib/test/test_locale.py
+++ b/Lib/test/test_locale.py
@@ -401,6 +401,8 @@ class TestMiscellaneous(unittest.TestCase):
# Unsupported locale on this system
self.skipTest('test needs Turkish locale')
loc = locale.getlocale(locale.LC_CTYPE)
+ if verbose:
+ print('got locale %a' % (loc,))
locale.setlocale(locale.LC_CTYPE, loc)
self.assertEqual(loc, locale.getlocale(locale.LC_CTYPE))
diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py
index ab66596..b0f8f9f 100644
--- a/Lib/test/test_logging.py
+++ b/Lib/test/test_logging.py
@@ -36,20 +36,43 @@ import queue
import re
import select
import socket
-from socketserver import ThreadingTCPServer, StreamRequestHandler
import struct
import sys
import tempfile
-from test.support import captured_stdout, run_with_locale, run_unittest
-from test.support import TestHandler, Matcher
+from test.support import (captured_stdout, run_with_locale, run_unittest,
+ patch, requires_zlib, TestHandler, Matcher)
import textwrap
+import time
import unittest
import warnings
import weakref
try:
import threading
+ # The following imports are needed only for tests which
+ # require threading
+ import asynchat
+ import asyncore
+ import errno
+ from http.server import HTTPServer, BaseHTTPRequestHandler
+ import smtpd
+ from urllib.parse import urlparse, parse_qs
+ from socketserver import (ThreadingUDPServer, DatagramRequestHandler,
+ ThreadingTCPServer, StreamRequestHandler)
except ImportError:
threading = None
+try:
+ import win32evtlog
+except ImportError:
+ win32evtlog = None
+try:
+ import win32evtlogutil
+except ImportError:
+ win32evtlogutil = None
+ win32evtlog = None
+try:
+ import zlib
+except ImportError:
+ pass
class BaseTest(unittest.TestCase):
@@ -77,9 +100,7 @@ class BaseTest(unittest.TestCase):
finally:
logging._releaseLock()
- # Set two unused loggers: one non-ASCII and one Unicode.
- # This is to test correct operation when sorting existing
- # loggers in the configuration code. See issue 8201.
+ # Set two unused loggers
self.logger1 = logging.getLogger("\xab\xd7\xbb")
self.logger2 = logging.getLogger("\u013f\u00d6\u0047")
@@ -140,8 +161,7 @@ class BaseTest(unittest.TestCase):
except AttributeError:
# StringIO.StringIO lacks a reset() method.
actual_lines = stream.getvalue().splitlines()
- self.assertEqual(len(actual_lines), len(expected_values),
- '%s vs. %s' % (actual_lines, expected_values))
+ self.assertEqual(len(actual_lines), len(expected_values))
for actual, expected in zip(actual_lines, expected_values):
match = pat.search(actual)
if not match:
@@ -179,17 +199,17 @@ class BuiltinLevelsTest(BaseTest):
INF.log(logging.CRITICAL, m())
INF.error(m())
- INF.warn(m())
+ INF.warning(m())
INF.info(m())
DEB.log(logging.CRITICAL, m())
DEB.error(m())
- DEB.warn (m())
- DEB.info (m())
+ DEB.warning(m())
+ DEB.info(m())
DEB.debug(m())
# These should not log.
- ERR.warn(m())
+ ERR.warning(m())
ERR.info(m())
ERR.debug(m())
@@ -223,7 +243,7 @@ class BuiltinLevelsTest(BaseTest):
INF_ERR.error(m())
# These should not log.
- INF_ERR.warn(m())
+ INF_ERR.warning(m())
INF_ERR.info(m())
INF_ERR.debug(m())
@@ -247,14 +267,14 @@ class BuiltinLevelsTest(BaseTest):
# These should log.
INF_UNDEF.log(logging.CRITICAL, m())
INF_UNDEF.error(m())
- INF_UNDEF.warn(m())
+ INF_UNDEF.warning(m())
INF_UNDEF.info(m())
INF_ERR_UNDEF.log(logging.CRITICAL, m())
INF_ERR_UNDEF.error(m())
# These should not log.
INF_UNDEF.debug(m())
- INF_ERR_UNDEF.warn(m())
+ INF_ERR_UNDEF.warning(m())
INF_ERR_UNDEF.info(m())
INF_ERR_UNDEF.debug(m())
@@ -293,8 +313,6 @@ class BuiltinLevelsTest(BaseTest):
('INF.BADPARENT', 'INFO', '4'),
])
- def test_invalid_name(self):
- self.assertRaises(TypeError, logging.getLogger, any)
class BasicFilterTest(BaseTest):
@@ -353,6 +371,10 @@ class BasicFilterTest(BaseTest):
finally:
handler.removeFilter(filterfunc)
+ def test_empty_filter(self):
+ f = logging.Filter()
+ r = logging.makeLogRecord({'name': 'spam.eggs'})
+ self.assertTrue(f.filter(r))
#
# First, we define our levels. There can be as many as you want - the only
@@ -496,6 +518,439 @@ class CustomLevelsAndFiltersTest(BaseTest):
handler.removeFilter(garr)
+class HandlerTest(BaseTest):
+ def test_name(self):
+ h = logging.Handler()
+ h.name = 'generic'
+ self.assertEqual(h.name, 'generic')
+ h.name = 'anothergeneric'
+ self.assertEqual(h.name, 'anothergeneric')
+ self.assertRaises(NotImplementedError, h.emit, None)
+
+ def test_builtin_handlers(self):
+ # We can't actually *use* too many handlers in the tests,
+ # but we can try instantiating them with various options
+ if sys.platform in ('linux', 'darwin'):
+ for existing in (True, False):
+ fd, fn = tempfile.mkstemp()
+ os.close(fd)
+ if not existing:
+ os.unlink(fn)
+ h = logging.handlers.WatchedFileHandler(fn, delay=True)
+ if existing:
+ dev, ino = h.dev, h.ino
+ self.assertNotEqual(dev, -1)
+ self.assertNotEqual(ino, -1)
+ r = logging.makeLogRecord({'msg': 'Test'})
+ h.handle(r)
+ # Now remove the file.
+ os.unlink(fn)
+ self.assertFalse(os.path.exists(fn))
+ # The next call should recreate the file.
+ h.handle(r)
+ self.assertTrue(os.path.exists(fn))
+ else:
+ self.assertEqual(h.dev, -1)
+ self.assertEqual(h.ino, -1)
+ h.close()
+ if existing:
+ os.unlink(fn)
+ if sys.platform == 'darwin':
+ sockname = '/var/run/syslog'
+ else:
+ sockname = '/dev/log'
+ try:
+ h = logging.handlers.SysLogHandler(sockname)
+ self.assertEqual(h.facility, h.LOG_USER)
+ self.assertTrue(h.unixsocket)
+ h.close()
+ except socket.error: # syslogd might not be available
+ pass
+ for method in ('GET', 'POST', 'PUT'):
+ if method == 'PUT':
+ self.assertRaises(ValueError, logging.handlers.HTTPHandler,
+ 'localhost', '/log', method)
+ else:
+ h = logging.handlers.HTTPHandler('localhost', '/log', method)
+ h.close()
+ h = logging.handlers.BufferingHandler(0)
+ r = logging.makeLogRecord({})
+ self.assertTrue(h.shouldFlush(r))
+ h.close()
+ h = logging.handlers.BufferingHandler(1)
+ self.assertFalse(h.shouldFlush(r))
+ h.close()
+
+class BadStream(object):
+ def write(self, data):
+ raise RuntimeError('deliberate mistake')
+
+class TestStreamHandler(logging.StreamHandler):
+ def handleError(self, record):
+ self.error_record = record
+
+class StreamHandlerTest(BaseTest):
+ def test_error_handling(self):
+ h = TestStreamHandler(BadStream())
+ r = logging.makeLogRecord({})
+ old_raise = logging.raiseExceptions
+ old_stderr = sys.stderr
+ try:
+ h.handle(r)
+ self.assertIs(h.error_record, r)
+ h = logging.StreamHandler(BadStream())
+ sys.stderr = sio = io.StringIO()
+ h.handle(r)
+ self.assertIn('\nRuntimeError: deliberate mistake\n',
+ sio.getvalue())
+ logging.raiseExceptions = False
+ sys.stderr = sio = io.StringIO()
+ h.handle(r)
+ self.assertEqual('', sio.getvalue())
+ finally:
+ logging.raiseExceptions = old_raise
+ sys.stderr = old_stderr
+
+# -- The following section could be moved into a server_helper.py module
+# -- if it proves to be of wider utility than just test_logging
+
+if threading:
+ class TestSMTPChannel(smtpd.SMTPChannel):
+ """
+ This derived class has had to be created because smtpd does not
+ support use of custom channel maps, although they are allowed by
+ asyncore's design. Issue #11959 has been raised to address this,
+ and if resolved satisfactorily, some of this code can be removed.
+ """
+ def __init__(self, server, conn, addr, sockmap):
+ asynchat.async_chat.__init__(self, conn, sockmap)
+ self.smtp_server = server
+ self.conn = conn
+ self.addr = addr
+ self.received_lines = []
+ self.smtp_state = self.COMMAND
+ self.seen_greeting = ''
+ self.mailfrom = None
+ self.rcpttos = []
+ self.received_data = ''
+ self.fqdn = socket.getfqdn()
+ self.num_bytes = 0
+ try:
+ self.peer = conn.getpeername()
+ except socket.error as err:
+ # a race condition may occur if the other end is closing
+ # before we can get the peername
+ self.close()
+ if err.args[0] != errno.ENOTCONN:
+ raise
+ return
+ self.push('220 %s %s' % (self.fqdn, smtpd.__version__))
+ self.set_terminator(b'\r\n')
+
+
+ class TestSMTPServer(smtpd.SMTPServer):
+ """
+ This class implements a test SMTP server.
+
+ :param addr: A (host, port) tuple which the server listens on.
+ You can specify a port value of zero: the server's
+ *port* attribute will hold the actual port number
+ used, which can be used in client connections.
+ :param handler: A callable which will be called to process
+ incoming messages. The handler will be passed
+ the client address tuple, who the message is from,
+ a list of recipients and the message data.
+ :param poll_interval: The interval, in seconds, used in the underlying
+ :func:`select` or :func:`poll` call by
+ :func:`asyncore.loop`.
+ :param sockmap: A dictionary which will be used to hold
+ :class:`asyncore.dispatcher` instances used by
+ :func:`asyncore.loop`. This avoids changing the
+ :mod:`asyncore` module's global state.
+ """
+ channel_class = TestSMTPChannel
+
+ def __init__(self, addr, handler, poll_interval, sockmap):
+ self._localaddr = addr
+ self._remoteaddr = None
+ self.sockmap = sockmap
+ asyncore.dispatcher.__init__(self, map=sockmap)
+ try:
+ sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ sock.setblocking(0)
+ self.set_socket(sock, map=sockmap)
+ # try to re-use a server port if possible
+ self.set_reuse_addr()
+ self.bind(addr)
+ self.port = sock.getsockname()[1]
+ self.listen(5)
+ except:
+ self.close()
+ raise
+ self._handler = handler
+ self._thread = None
+ self.poll_interval = poll_interval
+
+ def handle_accepted(self, conn, addr):
+ """
+ Redefined only because the base class does not pass in a
+ map, forcing use of a global in :mod:`asyncore`.
+ """
+ channel = self.channel_class(self, conn, addr, self.sockmap)
+
+ def process_message(self, peer, mailfrom, rcpttos, data):
+ """
+ Delegates to the handler passed in to the server's constructor.
+
+ Typically, this will be a test case method.
+ :param peer: The client (host, port) tuple.
+ :param mailfrom: The address of the sender.
+ :param rcpttos: The addresses of the recipients.
+ :param data: The message.
+ """
+ self._handler(peer, mailfrom, rcpttos, data)
+
+ def start(self):
+ """
+ Start the server running on a separate daemon thread.
+ """
+ self._thread = t = threading.Thread(target=self.serve_forever,
+ args=(self.poll_interval,))
+ t.setDaemon(True)
+ t.start()
+
+ def serve_forever(self, poll_interval):
+ """
+ Run the :mod:`asyncore` loop until normal termination
+ conditions arise.
+ :param poll_interval: The interval, in seconds, used in the underlying
+ :func:`select` or :func:`poll` call by
+ :func:`asyncore.loop`.
+ """
+ try:
+ asyncore.loop(poll_interval, map=self.sockmap)
+ except select.error:
+                # On FreeBSD 8, closing the server repeatedly
+ # raises this error. We swallow it if the
+ # server has been closed.
+ if self.connected or self.accepting:
+ raise
+
+ def stop(self, timeout=None):
+ """
+ Stop the thread by closing the server instance.
+ Wait for the server thread to terminate.
+
+ :param timeout: How long to wait for the server thread
+ to terminate.
+ """
+ self.close()
+ self._thread.join(timeout)
+ self._thread = None
+
+ class ControlMixin(object):
+ """
+ This mixin is used to start a server on a separate thread, and
+ shut it down programmatically. Request handling is simplified - instead
+ of needing to derive a suitable RequestHandler subclass, you just
+ provide a callable which will be passed each received request to be
+ processed.
+
+ :param handler: A handler callable which will be called with a
+ single parameter - the request - in order to
+ process the request. This handler is called on the
+ server thread, effectively meaning that requests are
+ processed serially. While not quite Web scale ;-),
+ this should be fine for testing applications.
+ :param poll_interval: The polling interval in seconds.
+ """
+ def __init__(self, handler, poll_interval):
+ self._thread = None
+ self.poll_interval = poll_interval
+ self._handler = handler
+ self.ready = threading.Event()
+
+ def start(self):
+ """
+ Create a daemon thread to run the server, and start it.
+ """
+ self._thread = t = threading.Thread(target=self.serve_forever,
+ args=(self.poll_interval,))
+ t.setDaemon(True)
+ t.start()
+
+ def serve_forever(self, poll_interval):
+ """
+ Run the server. Set the ready flag before entering the
+ service loop.
+ """
+ self.ready.set()
+ super(ControlMixin, self).serve_forever(poll_interval)
+
+ def stop(self, timeout=None):
+ """
+ Tell the server thread to stop, and wait for it to do so.
+
+ :param timeout: How long to wait for the server thread
+ to terminate.
+ """
+ self.shutdown()
+ if self._thread is not None:
+ self._thread.join(timeout)
+ self._thread = None
+ self.server_close()
+ self.ready.clear()
+
+ class TestHTTPServer(ControlMixin, HTTPServer):
+ """
+ An HTTP server which is controllable using :class:`ControlMixin`.
+
+ :param addr: A tuple with the IP address and port to listen on.
+ :param handler: A handler callable which will be called with a
+ single parameter - the request - in order to
+ process the request.
+ :param poll_interval: The polling interval in seconds.
+ :param log: Pass ``True`` to enable log messages.
+ """
+ def __init__(self, addr, handler, poll_interval=0.5,
+ log=False, sslctx=None):
+ class DelegatingHTTPRequestHandler(BaseHTTPRequestHandler):
+ def __getattr__(self, name, default=None):
+ if name.startswith('do_'):
+ return self.process_request
+ raise AttributeError(name)
+
+ def process_request(self):
+ self.server._handler(self)
+
+ def log_message(self, format, *args):
+ if log:
+ super(DelegatingHTTPRequestHandler,
+ self).log_message(format, *args)
+ HTTPServer.__init__(self, addr, DelegatingHTTPRequestHandler)
+ ControlMixin.__init__(self, handler, poll_interval)
+ self.sslctx = sslctx
+
+ def get_request(self):
+ try:
+ sock, addr = self.socket.accept()
+ if self.sslctx:
+ sock = self.sslctx.wrap_socket(sock, server_side=True)
+ except socket.error as e:
+ # socket errors are silenced by the caller, print them here
+ sys.stderr.write("Got an error:\n%s\n" % e)
+ raise
+ return sock, addr
+
+ class TestTCPServer(ControlMixin, ThreadingTCPServer):
+ """
+ A TCP server which is controllable using :class:`ControlMixin`.
+
+ :param addr: A tuple with the IP address and port to listen on.
+ :param handler: A handler callable which will be called with a single
+ parameter - the request - in order to process the request.
+ :param poll_interval: The polling interval in seconds.
+ :bind_and_activate: If True (the default), binds the server and starts it
+ listening. If False, you need to call
+ :meth:`server_bind` and :meth:`server_activate` at
+ some later time before calling :meth:`start`, so that
+ the server will set up the socket and listen on it.
+ """
+
+ allow_reuse_address = True
+
+ def __init__(self, addr, handler, poll_interval=0.5,
+ bind_and_activate=True):
+ class DelegatingTCPRequestHandler(StreamRequestHandler):
+
+ def handle(self):
+ self.server._handler(self)
+ ThreadingTCPServer.__init__(self, addr, DelegatingTCPRequestHandler,
+ bind_and_activate)
+ ControlMixin.__init__(self, handler, poll_interval)
+
+ def server_bind(self):
+ super(TestTCPServer, self).server_bind()
+ self.port = self.socket.getsockname()[1]
+
+ class TestUDPServer(ControlMixin, ThreadingUDPServer):
+ """
+ A UDP server which is controllable using :class:`ControlMixin`.
+
+ :param addr: A tuple with the IP address and port to listen on.
+ :param handler: A handler callable which will be called with a
+ single parameter - the request - in order to
+ process the request.
+ :param poll_interval: The polling interval for shutdown requests,
+ in seconds.
+    :param bind_and_activate: If True (the default), binds the server and
+ starts it listening. If False, you need to
+ call :meth:`server_bind` and
+ :meth:`server_activate` at some later time
+ before calling :meth:`start`, so that the server will
+ set up the socket and listen on it.
+ """
+ def __init__(self, addr, handler, poll_interval=0.5,
+ bind_and_activate=True):
+ class DelegatingUDPRequestHandler(DatagramRequestHandler):
+
+ def handle(self):
+ self.server._handler(self)
+
+ def finish(self):
+ data = self.wfile.getvalue()
+ if data:
+ try:
+ super(DelegatingUDPRequestHandler, self).finish()
+ except socket.error:
+ if not self.server._closed:
+ raise
+
+ ThreadingUDPServer.__init__(self, addr,
+ DelegatingUDPRequestHandler,
+ bind_and_activate)
+ ControlMixin.__init__(self, handler, poll_interval)
+ self._closed = False
+
+ def server_bind(self):
+ super(TestUDPServer, self).server_bind()
+ self.port = self.socket.getsockname()[1]
+
+ def server_close(self):
+ super(TestUDPServer, self).server_close()
+ self._closed = True
+
+# - end of server_helper section
+
+@unittest.skipUnless(threading, 'Threading required for this test.')
+class SMTPHandlerTest(BaseTest):
+ def test_basic(self):
+ sockmap = {}
+ server = TestSMTPServer(('localhost', 0), self.process_message, 0.001,
+ sockmap)
+ server.start()
+ addr = ('localhost', server.port)
+ h = logging.handlers.SMTPHandler(addr, 'me', 'you', 'Log')
+ self.assertEqual(h.toaddrs, ['you'])
+ self.messages = []
+ r = logging.makeLogRecord({'msg': 'Hello'})
+ self.handled = threading.Event()
+ h.handle(r)
+        self.handled.wait(5.0)  # Issue #14314: don't wait forever
+ server.stop()
+ self.assertTrue(self.handled.is_set())
+ self.assertEqual(len(self.messages), 1)
+ peer, mailfrom, rcpttos, data = self.messages[0]
+ self.assertEqual(mailfrom, 'me')
+ self.assertEqual(rcpttos, ['you'])
+ self.assertIn('\nSubject: Log\n', data)
+ self.assertTrue(data.endswith('\n\nHello'))
+ h.close()
+
+ def process_message(self, *args):
+ self.messages.append(args)
+ self.handled.set()
+
class MemoryHandlerTest(BaseTest):
"""Tests for the MemoryHandler."""
@@ -523,7 +978,7 @@ class MemoryHandlerTest(BaseTest):
self.mem_logger.info(self.next_message())
self.assert_log_lines([])
# This will flush because the level is >= logging.WARNING
- self.mem_logger.warn(self.next_message())
+ self.mem_logger.warning(self.next_message())
lines = [
('DEBUG', '1'),
('INFO', '2'),
@@ -868,116 +1323,280 @@ class ConfigFileTest(BaseTest):
# Original logger output is empty.
self.assert_log_lines([])
-class LogRecordStreamHandler(StreamRequestHandler):
- """Handler for a streaming logging request. It saves the log message in the
- TCP server's 'log_output' attribute."""
+@unittest.skipUnless(threading, 'Threading required for this test.')
+class SocketHandlerTest(BaseTest):
+
+ """Test for SocketHandler objects."""
+
+ def setUp(self):
+ """Set up a TCP server to receive log messages, and a SocketHandler
+ pointing to that server's address and port."""
+ BaseTest.setUp(self)
+ addr = ('localhost', 0)
+ self.server = server = TestTCPServer(addr, self.handle_socket,
+ 0.01)
+ server.start()
+ server.ready.wait()
+ self.sock_hdlr = logging.handlers.SocketHandler('localhost',
+ server.port)
+ self.log_output = ''
+ self.root_logger.removeHandler(self.root_logger.handlers[0])
+ self.root_logger.addHandler(self.sock_hdlr)
+ self.handled = threading.Semaphore(0)
- TCP_LOG_END = "!!!END!!!"
+ def tearDown(self):
+ """Shutdown the TCP server."""
+ try:
+ self.server.stop(2.0)
+ self.root_logger.removeHandler(self.sock_hdlr)
+ self.sock_hdlr.close()
+ finally:
+ BaseTest.tearDown(self)
- def handle(self):
- """Handle multiple requests - each expected to be of 4-byte length,
- followed by the LogRecord in pickle format. Logs the record
- according to whatever policy is configured locally."""
+ def handle_socket(self, request):
+ conn = request.connection
while True:
- chunk = self.connection.recv(4)
+ chunk = conn.recv(4)
if len(chunk) < 4:
break
slen = struct.unpack(">L", chunk)[0]
- chunk = self.connection.recv(slen)
+ chunk = conn.recv(slen)
while len(chunk) < slen:
- chunk = chunk + self.connection.recv(slen - len(chunk))
- obj = self.unpickle(chunk)
+ chunk = chunk + conn.recv(slen - len(chunk))
+ obj = pickle.loads(chunk)
record = logging.makeLogRecord(obj)
- self.handle_log_record(record)
+ self.log_output += record.msg + '\n'
+ self.handled.release()
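+
+ # Framing note: SocketHandler sends each record as a 4-byte big-endian
+ # length followed by a pickle of the record's attribute dict, roughly:
+ # payload = pickle.dumps(dict(record.__dict__), 1)
+ # frame = struct.pack('>L', len(payload)) + payload
+ # which is what the receive loop above unpacks.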
- def unpickle(self, data):
- return pickle.loads(data)
+ def test_output(self):
+ # The log message sent to the SocketHandler is properly received.
+ logger = logging.getLogger("tcp")
+ logger.error("spam")
+ self.handled.acquire()
+ logger.debug("eggs")
+ self.handled.acquire()
+ self.assertEqual(self.log_output, "spam\neggs\n")
- def handle_log_record(self, record):
- # If the end-of-messages sentinel is seen, tell the server to
- # terminate.
- if self.TCP_LOG_END in record.msg:
- self.server.abort = 1
- return
- self.server.log_output += record.msg + "\n"
+ def test_noserver(self):
+ # Kill the server
+ self.server.stop(2.0)
+ # The logging call should try to connect, which should fail
+ try:
+ raise RuntimeError('Deliberate mistake')
+ except RuntimeError:
+ self.root_logger.exception('Never sent')
+ self.root_logger.error('Never sent, either')
+ now = time.time()
+ self.assertTrue(self.sock_hdlr.retryTime > now)
+ time.sleep(self.sock_hdlr.retryTime - now + 0.001)
+ self.root_logger.error('Nor this')
-class LogRecordSocketReceiver(ThreadingTCPServer):
+@unittest.skipUnless(threading, 'Threading required for this test.')
+class DatagramHandlerTest(BaseTest):
- """A simple-minded TCP socket-based logging receiver suitable for test
- purposes."""
+ """Test for DatagramHandler."""
- allow_reuse_address = 1
- log_output = ""
+ def setUp(self):
+ """Set up a UDP server to receive log messages, and a DatagramHandler
+ pointing to that server's address and port."""
+ BaseTest.setUp(self)
+ addr = ('localhost', 0)
+ self.server = server = TestUDPServer(addr, self.handle_datagram, 0.01)
+ server.start()
+ server.ready.wait()
+ self.sock_hdlr = logging.handlers.DatagramHandler('localhost',
+ server.port)
+ self.log_output = ''
+ self.root_logger.removeHandler(self.root_logger.handlers[0])
+ self.root_logger.addHandler(self.sock_hdlr)
+ self.handled = threading.Event()
- def __init__(self, host='localhost',
- port=logging.handlers.DEFAULT_TCP_LOGGING_PORT,
- handler=LogRecordStreamHandler):
- ThreadingTCPServer.__init__(self, (host, port), handler)
- self.abort = False
- self.timeout = 0.1
- self.finished = threading.Event()
+ def tearDown(self):
+ """Shutdown the UDP server."""
+ try:
+ self.server.stop(2.0)
+ self.root_logger.removeHandler(self.sock_hdlr)
+ self.sock_hdlr.close()
+ finally:
+ BaseTest.tearDown(self)
+
+ def handle_datagram(self, request):
+ slen = struct.pack('>L', 0) # dummy packing; only the 4-byte prefix length is needed
+ packet = request.packet[len(slen):]
+ obj = pickle.loads(packet)
+ record = logging.makeLogRecord(obj)
+ self.log_output += record.msg + '\n'
+ self.handled.set()
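+
+ # DatagramHandler reuses SocketHandler's pickling, so each datagram
+ # still begins with the 4-byte length prefix (redundant over UDP,
+ # where datagram boundaries already frame the record); the handler
+ # above simply strips it.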
- def serve_until_stopped(self):
- while not self.abort:
- rd, wr, ex = select.select([self.socket.fileno()], [], [],
- self.timeout)
- if rd:
- self.handle_request()
- # Notify the main thread that we're about to exit
- self.finished.set()
- # close the listen socket
- self.server_close()
+ def test_output(self):
+ # The log message sent to the DatagramHandler is properly received.
+ logger = logging.getLogger("udp")
+ logger.error("spam")
+ self.handled.wait()
+ self.handled.clear()
+ logger.error("eggs")
+ self.handled.wait()
+ self.assertEqual(self.log_output, "spam\neggs\n")
@unittest.skipUnless(threading, 'Threading required for this test.')
-class SocketHandlerTest(BaseTest):
+class SysLogHandlerTest(BaseTest):
- """Test for SocketHandler objects."""
+ """Test for SysLogHandler using UDP."""
def setUp(self):
- """Set up a TCP server to receive log messages, and a SocketHandler
+ """Set up a UDP server to receive log messages, and a SysLogHandler
pointing to that server's address and port."""
BaseTest.setUp(self)
- self.tcpserver = LogRecordSocketReceiver(port=0)
- self.port = self.tcpserver.socket.getsockname()[1]
- self.threads = [
- threading.Thread(target=self.tcpserver.serve_until_stopped)]
- for thread in self.threads:
- thread.start()
-
- self.sock_hdlr = logging.handlers.SocketHandler('localhost', self.port)
- self.sock_hdlr.setFormatter(self.root_formatter)
+ addr = ('localhost', 0)
+ self.server = server = TestUDPServer(addr, self.handle_datagram,
+ 0.01)
+ server.start()
+ server.ready.wait()
+ self.sl_hdlr = logging.handlers.SysLogHandler(('localhost',
+ server.port))
+ self.log_output = ''
self.root_logger.removeHandler(self.root_logger.handlers[0])
- self.root_logger.addHandler(self.sock_hdlr)
+ self.root_logger.addHandler(self.sl_hdlr)
+ self.handled = threading.Event()
def tearDown(self):
- """Shutdown the TCP server."""
+ """Shutdown the UDP server."""
try:
- self.tcpserver.abort = True
- del self.tcpserver
- self.root_logger.removeHandler(self.sock_hdlr)
- self.sock_hdlr.close()
- for thread in self.threads:
- thread.join(2.0)
+ self.server.stop(2.0)
+ self.root_logger.removeHandler(self.sl_hdlr)
+ self.sl_hdlr.close()
finally:
BaseTest.tearDown(self)
- def get_output(self):
- """Get the log output as received by the TCP server."""
- # Signal the TCP receiver and wait for it to terminate.
- self.root_logger.critical(LogRecordStreamHandler.TCP_LOG_END)
- self.tcpserver.finished.wait(2.0)
- return self.tcpserver.log_output
+ def handle_datagram(self, request):
+ self.log_output = request.packet
+ self.handled.set()
def test_output(self):
- # The log message sent to the SocketHandler is properly received.
- logger = logging.getLogger("tcp")
- logger.error("spam")
- logger.debug("eggs")
- self.assertEqual(self.get_output(), "spam\neggs\n")
+ # The log message sent to the SysLogHandler is properly received.
+ logger = logging.getLogger("slh")
+ logger.error("sp\xe4m")
+ self.handled.wait()
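+ # '<11>' is the syslog priority: facility LOG_USER (1) * 8 plus
+ # severity LOG_ERR (3); the handler encodes the message as UTF-8,
+ # prefixed with a BOM, and appends a NUL by default.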
+ self.assertEqual(self.log_output, b'<11>\xef\xbb\xbfsp\xc3\xa4m\x00')
+ self.handled.clear()
+ self.sl_hdlr.append_nul = False
+ logger.error("sp\xe4m")
+ self.handled.wait()
+ self.assertEqual(self.log_output, b'<11>\xef\xbb\xbfsp\xc3\xa4m')
+ self.handled.clear()
+ self.sl_hdlr.ident = "h\xe4m-"
+ logger.error("sp\xe4m")
+ self.handled.wait()
+ self.assertEqual(self.log_output, b'<11>\xef\xbb\xbfh\xc3\xa4m-sp\xc3\xa4m')
+
+
+@unittest.skipUnless(threading, 'Threading required for this test.')
+class HTTPHandlerTest(BaseTest):
+ """Test for HTTPHandler."""
+
+ PEMFILE = """-----BEGIN RSA PRIVATE KEY-----
+MIICXQIBAAKBgQDGT4xS5r91rbLJQK2nUDenBhBG6qFk+bVOjuAGC/LSHlAoBnvG
+zQG3agOG+e7c5z2XT8m2ktORLqG3E4mYmbxgyhDrzP6ei2Anc+pszmnxPoK3Puh5
+aXV+XKt0bU0C1m2+ACmGGJ0t3P408art82nOxBw8ZHgIg9Dtp6xIUCyOqwIDAQAB
+AoGBAJFTnFboaKh5eUrIzjmNrKsG44jEyy+vWvHN/FgSC4l103HxhmWiuL5Lv3f7
+0tMp1tX7D6xvHwIG9VWvyKb/Cq9rJsDibmDVIOslnOWeQhG+XwJyitR0pq/KlJIB
+5LjORcBw795oKWOAi6RcOb1ON59tysEFYhAGQO9k6VL621gRAkEA/Gb+YXULLpbs
+piXN3q4zcHzeaVANo69tUZ6TjaQqMeTxE4tOYM0G0ZoSeHEdaP59AOZGKXXNGSQy
+2z/MddcYGQJBAMkjLSYIpOLJY11ja8OwwswFG2hEzHe0cS9bzo++R/jc1bHA5R0Y
+i6vA5iPi+wopPFvpytdBol7UuEBe5xZrxWMCQQCWxELRHiP2yWpEeLJ3gGDzoXMN
+PydWjhRju7Bx3AzkTtf+D6lawz1+eGTuEss5i0JKBkMEwvwnN2s1ce+EuF4JAkBb
+E96h1lAzkVW5OAfYOPY8RCPA90ZO/hoyg7PpSxR0ECuDrgERR8gXIeYUYfejBkEa
+rab4CfRoVJKKM28Yq/xZAkBvuq670JRCwOgfUTdww7WpdOQBYPkzQccsKNCslQW8
+/DyW6y06oQusSENUvynT6dr3LJxt/NgZPhZX2+k1eYDV
+-----END RSA PRIVATE KEY-----
+-----BEGIN CERTIFICATE-----
+MIICGzCCAYSgAwIBAgIJAIq84a2Q/OvlMA0GCSqGSIb3DQEBBQUAMBQxEjAQBgNV
+BAMTCWxvY2FsaG9zdDAeFw0xMTA1MjExMDIzMzNaFw03NTAzMjEwMzU1MTdaMBQx
+EjAQBgNVBAMTCWxvY2FsaG9zdDCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA
+xk+MUua/da2yyUCtp1A3pwYQRuqhZPm1To7gBgvy0h5QKAZ7xs0Bt2oDhvnu3Oc9
+l0/JtpLTkS6htxOJmJm8YMoQ68z+notgJ3PqbM5p8T6Ctz7oeWl1flyrdG1NAtZt
+vgAphhidLdz+NPGq7fNpzsQcPGR4CIPQ7aesSFAsjqsCAwEAAaN1MHMwHQYDVR0O
+BBYEFLWaUPO6N7efGiuoS9i3DVYcUwn0MEQGA1UdIwQ9MDuAFLWaUPO6N7efGiuo
+S9i3DVYcUwn0oRikFjAUMRIwEAYDVQQDEwlsb2NhbGhvc3SCCQCKvOGtkPzr5TAM
+BgNVHRMEBTADAQH/MA0GCSqGSIb3DQEBBQUAA4GBAMK5whPjLNQK1Ivvk88oqJqq
+4f889OwikGP0eUhOBhbFlsZs+jq5YZC2UzHz+evzKBlgAP1u4lP/cB85CnjvWqM+
+1c/lywFHQ6HOdDeQ1L72tSYMrNOG4XNmLn0h7rx6GoTU7dcFRfseahBCq8mv0IDt
+IRbTpvlHWPjsSvHz0ZOH
+-----END CERTIFICATE-----"""
+ def setUp(self):
+ """Set up an HTTP server to receive log messages, and a HTTPHandler
+ pointing to that server's address and port."""
+ BaseTest.setUp(self)
+ self.handled = threading.Event()
+
+ def handle_request(self, request):
+ self.command = request.command
+ self.log_data = urlparse(request.path)
+ if self.command == 'POST':
+ try:
+ rlen = int(request.headers['Content-Length'])
+ self.post_data = request.rfile.read(rlen)
+ except Exception:
+ self.post_data = None
+ request.send_response(200)
+ request.end_headers()
+ self.handled.set()
+
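+ # HTTPHandler sends the record's attribute dict as URL-encoded form
+ # data, so parse_qs below recovers fields such as 'msg' and 'funcName'
+ # from the query string (GET) or the request body (POST).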
+ def test_output(self):
+ # The log message sent to the HTTPHandler is properly received.
+ logger = logging.getLogger("http")
+ root_logger = self.root_logger
+ root_logger.removeHandler(self.root_logger.handlers[0])
+ for secure in (False, True):
+ addr = ('localhost', 0)
+ if secure:
+ try:
+ import ssl
+ fd, fn = tempfile.mkstemp()
+ os.close(fd)
+ with open(fn, 'w') as f:
+ f.write(self.PEMFILE)
+ sslctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
+ sslctx.load_cert_chain(fn)
+ os.unlink(fn)
+ except ImportError:
+ sslctx = None
+ else:
+ sslctx = None
+ self.server = server = TestHTTPServer(addr, self.handle_request,
+ 0.01, sslctx=sslctx)
+ server.start()
+ server.ready.wait()
+ host = 'localhost:%d' % server.server_port
+ secure_client = secure and sslctx
+ self.h_hdlr = logging.handlers.HTTPHandler(host, '/frob',
+ secure=secure_client)
+ self.log_data = None
+ root_logger.addHandler(self.h_hdlr)
+
+ for method in ('GET', 'POST'):
+ self.h_hdlr.method = method
+ self.handled.clear()
+ msg = "sp\xe4m"
+ logger.error(msg)
+ self.handled.wait()
+ self.assertEqual(self.log_data.path, '/frob')
+ self.assertEqual(self.command, method)
+ if method == 'GET':
+ d = parse_qs(self.log_data.query)
+ else:
+ d = parse_qs(self.post_data.decode('utf-8'))
+ self.assertEqual(d['name'], ['http'])
+ self.assertEqual(d['funcName'], ['test_output'])
+ self.assertEqual(d['msg'], [msg])
+
+ self.server.stop(2.0)
+ self.root_logger.removeHandler(self.h_hdlr)
+ self.h_hdlr.close()
class MemoryTest(BaseTest):
@@ -1085,28 +1704,39 @@ class WarningsTest(BaseTest):
def test_warnings(self):
with warnings.catch_warnings():
logging.captureWarnings(True)
- try:
- warnings.filterwarnings("always", category=UserWarning)
- file = io.StringIO()
- h = logging.StreamHandler(file)
- logger = logging.getLogger("py.warnings")
- logger.addHandler(h)
- warnings.warn("I'm warning you...")
- logger.removeHandler(h)
- s = file.getvalue()
- h.close()
- self.assertTrue(s.find("UserWarning: I'm warning you...\n") > 0)
-
- #See if an explicit file uses the original implementation
- file = io.StringIO()
- warnings.showwarning("Explicit", UserWarning, "dummy.py", 42,
- file, "Dummy line")
- s = file.getvalue()
- file.close()
- self.assertEqual(s,
- "dummy.py:42: UserWarning: Explicit\n Dummy line\n")
- finally:
- logging.captureWarnings(False)
+ self.addCleanup(logging.captureWarnings, False)
+ warnings.filterwarnings("always", category=UserWarning)
+ stream = io.StringIO()
+ h = logging.StreamHandler(stream)
+ logger = logging.getLogger("py.warnings")
+ logger.addHandler(h)
+ warnings.warn("I'm warning you...")
+ logger.removeHandler(h)
+ s = stream.getvalue()
+ h.close()
+ self.assertTrue(s.find("UserWarning: I'm warning you...\n") > 0)
+
+ # See if an explicit file uses the original implementation
+ a_file = io.StringIO()
+ warnings.showwarning("Explicit", UserWarning, "dummy.py", 42,
+ a_file, "Dummy line")
+ s = a_file.getvalue()
+ a_file.close()
+ self.assertEqual(s,
+ "dummy.py:42: UserWarning: Explicit\n Dummy line\n")
+
+ def test_warnings_no_handlers(self):
+ with warnings.catch_warnings():
+ logging.captureWarnings(True)
+ self.addCleanup(logging.captureWarnings, False)
+
+ # confirm our assumption: no loggers are set
+ logger = logging.getLogger("py.warnings")
+ self.assertEqual(logger.handlers, [])
+
+ warnings.showwarning("Explicit", UserWarning, "dummy.py", 42)
+ self.assertEqual(len(logger.handlers), 1)
+ self.assertIsInstance(logger.handlers[0], logging.NullHandler)
def formatFunc(format, datefmt=None):
@@ -1959,6 +2589,7 @@ class ConfigDictTest(BaseTest):
logging.config.stopListening()
t.join(2.0)
+ @unittest.skipUnless(threading, 'Threading required for this test.')
def test_listen_config_10_ok(self):
with captured_stdout() as output:
self.setup_via_listener(json.dumps(self.config10))
@@ -1978,6 +2609,7 @@ class ConfigDictTest(BaseTest):
('ERROR', '4'),
], stream=output)
+ @unittest.skipUnless(threading, 'Threading required for this test.')
def test_listen_config_1_ok(self):
with captured_stdout() as output:
self.setup_via_listener(textwrap.dedent(ConfigFileTest.config1))
@@ -1992,6 +2624,27 @@ class ConfigDictTest(BaseTest):
# Original logger output is empty.
self.assert_log_lines([])
+ def test_baseconfig(self):
+ d = {
+ 'atuple': (1, 2, 3),
+ 'alist': ['a', 'b', 'c'],
+ 'adict': {'d': 'e', 'f': 3},
+ 'nest1': ('g', ('h', 'i'), 'j'),
+ 'nest2': ['k', ['l', 'm'], 'n'],
+ 'nest3': ['o', 'cfg://alist', 'p'],
+ }
+ bc = logging.config.BaseConfigurator(d)
+ self.assertEqual(bc.convert('cfg://atuple[1]'), 2)
+ self.assertEqual(bc.convert('cfg://alist[1]'), 'b')
+ self.assertEqual(bc.convert('cfg://nest1[1][0]'), 'h')
+ self.assertEqual(bc.convert('cfg://nest2[1][1]'), 'm')
+ self.assertEqual(bc.convert('cfg://adict.d'), 'e')
+ self.assertEqual(bc.convert('cfg://adict[f]'), 3)
+ v = bc.convert('cfg://nest3')
+ self.assertEqual(v.pop(1), ['a', 'b', 'c'])
+ self.assertRaises(KeyError, bc.convert, 'cfg://nosuch')
+ self.assertRaises(ValueError, bc.convert, 'cfg://!')
+ self.assertRaises(KeyError, bc.convert, 'cfg://adict[2]')
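+
+ # cfg:// references resolve against the configuration dict itself:
+ # dots traverse mappings, [i] indexes sequences or mappings, and
+ # unresolvable or malformed references raise KeyError or ValueError,
+ # as exercised above.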
class ManagerTest(BaseTest):
def test_manager_loggerclass(self):
@@ -2010,6 +2663,11 @@ class ManagerTest(BaseTest):
self.assertEqual(logged, ['should appear in logged'])
+ def test_set_log_record_factory(self):
+ man = logging.Manager(None)
+ expected = object()
+ man.setLogRecordFactory(expected)
+ self.assertEqual(man.logRecordFactory, expected)
class ChildLoggerTest(BaseTest):
def test_child_loggers(self):
@@ -2111,6 +2769,18 @@ class QueueHandlerTest(BaseTest):
self.assertTrue(handler.matches(levelno=logging.ERROR, message='2'))
self.assertTrue(handler.matches(levelno=logging.CRITICAL, message='3'))
+ZERO = datetime.timedelta(0)
+
+class UTC(datetime.tzinfo):
+ def utcoffset(self, dt):
+ return ZERO
+
+ dst = utcoffset
+
+ def tzname(self, dt):
+ return 'UTC'
+
+utc = UTC()
class FormatterTest(unittest.TestCase):
def setUp(self):
@@ -2184,6 +2854,69 @@ class FormatterTest(unittest.TestCase):
f = logging.Formatter('asctime', style='$')
self.assertFalse(f.usesTime())
+ def test_invalid_style(self):
+ self.assertRaises(ValueError, logging.Formatter, None, None, 'x')
+
+ def test_time(self):
+ r = self.get_record()
+ dt = datetime.datetime(1993, 4, 21, 8, 3, 0, 0, utc)
+ r.created = time.mktime(dt.timetuple()) - time.timezone
+ r.msecs = 123
+ f = logging.Formatter('%(asctime)s %(message)s')
+ f.converter = time.gmtime
+ self.assertEqual(f.formatTime(r), '1993-04-21 08:03:00,123')
+ self.assertEqual(f.formatTime(r, '%Y:%d'), '1993:21')
+ f.format(r)
+ self.assertEqual(r.asctime, '1993-04-21 08:03:00,123')
+
+class TestBufferingFormatter(logging.BufferingFormatter):
+ def formatHeader(self, records):
+ return '[(%d)' % len(records)
+
+ def formatFooter(self, records):
+ return '(%d)]' % len(records)
+
+class BufferingFormatterTest(unittest.TestCase):
+ def setUp(self):
+ self.records = [
+ logging.makeLogRecord({'msg': 'one'}),
+ logging.makeLogRecord({'msg': 'two'}),
+ ]
+
+ def test_default(self):
+ f = logging.BufferingFormatter()
+ self.assertEqual('', f.format([]))
+ self.assertEqual('onetwo', f.format(self.records))
+
+ def test_custom(self):
+ f = TestBufferingFormatter()
+ self.assertEqual('[(2)onetwo(2)]', f.format(self.records))
+ lf = logging.Formatter('<%(message)s>')
+ f = TestBufferingFormatter(lf)
+ self.assertEqual('[(2)<one><two>(2)]', f.format(self.records))
+
+class ExceptionTest(BaseTest):
+ def test_formatting(self):
+ r = self.root_logger
+ h = RecordingHandler()
+ r.addHandler(h)
+ try:
+ raise RuntimeError('deliberate mistake')
+ except:
+ logging.exception('failed', stack_info=True)
+ r.removeHandler(h)
+ h.close()
+ r = h.records[0]
+ self.assertTrue(r.exc_text.startswith('Traceback (most recent '
+ 'call last):\n'))
+ self.assertTrue(r.exc_text.endswith('\nRuntimeError: '
+ 'deliberate mistake'))
+ self.assertTrue(r.stack_info.startswith('Stack (most recent '
+ 'call last):\n'))
+ self.assertTrue(r.stack_info.endswith('logging.exception(\'failed\', '
+ 'stack_info=True)'))
+
+
class LastResortTest(BaseTest):
def test_last_resort(self):
# Test the last resort handler
@@ -2194,6 +2927,8 @@ class LastResortTest(BaseTest):
old_raise_exceptions = logging.raiseExceptions
try:
sys.stderr = sio = io.StringIO()
+ root.debug('This should not appear')
+ self.assertEqual(sio.getvalue(), '')
root.warning('This is your final chance!')
self.assertEqual(sio.getvalue(), 'This is your final chance!\n')
#No handlers and no last resort, so 'No handlers' message
@@ -2218,6 +2953,586 @@ class LastResortTest(BaseTest):
logging.raiseExceptions = old_raise_exceptions
+class FakeHandler:
+
+ def __init__(self, identifier, called):
+ for method in ('acquire', 'flush', 'close', 'release'):
+ setattr(self, method, self.record_call(identifier, method, called))
+
+ def record_call(self, identifier, method_name, called):
+ def inner():
+ called.append('{} - {}'.format(identifier, method_name))
+ return inner
+
+
+class RecordingHandler(logging.NullHandler):
+
+ def __init__(self, *args, **kwargs):
+ super(RecordingHandler, self).__init__(*args, **kwargs)
+ self.records = []
+
+ def handle(self, record):
+ """Keep track of all the emitted records."""
+ self.records.append(record)
+
+
+class ShutdownTest(BaseTest):
+
+ """Test suite for the shutdown method."""
+
+ def setUp(self):
+ super(ShutdownTest, self).setUp()
+ self.called = []
+
+ raise_exceptions = logging.raiseExceptions
+ self.addCleanup(setattr, logging, 'raiseExceptions', raise_exceptions)
+
+ def raise_error(self, error):
+ def inner():
+ raise error()
+ return inner
+
+ def test_no_failure(self):
+ # create some fake handlers
+ handler0 = FakeHandler(0, self.called)
+ handler1 = FakeHandler(1, self.called)
+ handler2 = FakeHandler(2, self.called)
+
+ # create live weakrefs to those handlers
+ handlers = map(logging.weakref.ref, [handler0, handler1, handler2])
+
+ logging.shutdown(handlerList=list(handlers))
+
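+ # shutdown() finalizes handlers in reverse order of registration,
+ # hence handler 2 is acquired, flushed and closed first.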
+ expected = ['2 - acquire', '2 - flush', '2 - close', '2 - release',
+ '1 - acquire', '1 - flush', '1 - close', '1 - release',
+ '0 - acquire', '0 - flush', '0 - close', '0 - release']
+ self.assertEqual(expected, self.called)
+
+ def _test_with_failure_in_method(self, method, error):
+ handler = FakeHandler(0, self.called)
+ setattr(handler, method, self.raise_error(error))
+ handlers = [logging.weakref.ref(handler)]
+
+ logging.shutdown(handlerList=list(handlers))
+
+ self.assertEqual('0 - release', self.called[-1])
+
+ def test_with_ioerror_in_acquire(self):
+ self._test_with_failure_in_method('acquire', IOError)
+
+ def test_with_ioerror_in_flush(self):
+ self._test_with_failure_in_method('flush', IOError)
+
+ def test_with_ioerror_in_close(self):
+ self._test_with_failure_in_method('close', IOError)
+
+ def test_with_valueerror_in_acquire(self):
+ self._test_with_failure_in_method('acquire', ValueError)
+
+ def test_with_valueerror_in_flush(self):
+ self._test_with_failure_in_method('flush', ValueError)
+
+ def test_with_valueerror_in_close(self):
+ self._test_with_failure_in_method('close', ValueError)
+
+ def test_with_other_error_in_acquire_without_raise(self):
+ logging.raiseExceptions = False
+ self._test_with_failure_in_method('acquire', IndexError)
+
+ def test_with_other_error_in_flush_without_raise(self):
+ logging.raiseExceptions = False
+ self._test_with_failure_in_method('flush', IndexError)
+
+ def test_with_other_error_in_close_without_raise(self):
+ logging.raiseExceptions = False
+ self._test_with_failure_in_method('close', IndexError)
+
+ def test_with_other_error_in_acquire_with_raise(self):
+ logging.raiseExceptions = True
+ self.assertRaises(IndexError, self._test_with_failure_in_method,
+ 'acquire', IndexError)
+
+ def test_with_other_error_in_flush_with_raise(self):
+ logging.raiseExceptions = True
+ self.assertRaises(IndexError, self._test_with_failure_in_method,
+ 'flush', IndexError)
+
+ def test_with_other_error_in_close_with_raise(self):
+ logging.raiseExceptions = True
+ self.assertRaises(IndexError, self._test_with_failure_in_method,
+ 'close', IndexError)
+
+
+class ModuleLevelMiscTest(BaseTest):
+
+ """Test suite for some module level methods."""
+
+ def test_disable(self):
+ old_disable = logging.root.manager.disable
+ # confirm our assumptions are correct
+ self.assertEqual(old_disable, 0)
+ self.addCleanup(logging.disable, old_disable)
+
+ logging.disable(83)
+ self.assertEqual(logging.root.manager.disable, 83)
+
+ def _test_log(self, method, level=None):
+ called = []
+ patch(self, logging, 'basicConfig',
+ lambda *a, **kw: called.append((a, kw)))
+
+ recording = RecordingHandler()
+ logging.root.addHandler(recording)
+
+ log_method = getattr(logging, method)
+ if level is not None:
+ log_method(level, "test me: %r", recording)
+ else:
+ log_method("test me: %r", recording)
+
+ self.assertEqual(len(recording.records), 1)
+ record = recording.records[0]
+ self.assertEqual(record.getMessage(), "test me: %r" % recording)
+
+ expected_level = level if level is not None else getattr(logging, method.upper())
+ self.assertEqual(record.levelno, expected_level)
+
+ # basicConfig was not called!
+ self.assertEqual(called, [])
+
+ def test_log(self):
+ self._test_log('log', logging.ERROR)
+
+ def test_debug(self):
+ self._test_log('debug')
+
+ def test_info(self):
+ self._test_log('info')
+
+ def test_warning(self):
+ self._test_log('warning')
+
+ def test_error(self):
+ self._test_log('error')
+
+ def test_critical(self):
+ self._test_log('critical')
+
+ def test_set_logger_class(self):
+ self.assertRaises(TypeError, logging.setLoggerClass, object)
+
+ class MyLogger(logging.Logger):
+ pass
+
+ logging.setLoggerClass(MyLogger)
+ self.assertEqual(logging.getLoggerClass(), MyLogger)
+
+ logging.setLoggerClass(logging.Logger)
+ self.assertEqual(logging.getLoggerClass(), logging.Logger)
+
+class LogRecordTest(BaseTest):
+ def test_str_rep(self):
+ r = logging.makeLogRecord({})
+ s = str(r)
+ self.assertTrue(s.startswith('<LogRecord: '))
+ self.assertTrue(s.endswith('>'))
+
+ def test_dict_arg(self):
+ h = RecordingHandler()
+ r = logging.getLogger()
+ r.addHandler(h)
+ d = {'less': 'more'}
+ logging.warning('less is %(less)s', d)
+ self.assertIs(h.records[0].args, d)
+ self.assertEqual(h.records[0].message, 'less is more')
+ r.removeHandler(h)
+ h.close()
+
+ def test_multiprocessing(self):
+ r = logging.makeLogRecord({})
+ self.assertEqual(r.processName, 'MainProcess')
+ try:
+ import multiprocessing as mp
+ r = logging.makeLogRecord({})
+ self.assertEqual(r.processName, mp.current_process().name)
+ except ImportError:
+ pass
+
+ def test_optional(self):
+ r = logging.makeLogRecord({})
+ NOT_NONE = self.assertIsNotNone
+ if threading:
+ NOT_NONE(r.thread)
+ NOT_NONE(r.threadName)
+ NOT_NONE(r.process)
+ NOT_NONE(r.processName)
+ log_threads = logging.logThreads
+ log_processes = logging.logProcesses
+ log_multiprocessing = logging.logMultiprocessing
+ try:
+ logging.logThreads = False
+ logging.logProcesses = False
+ logging.logMultiprocessing = False
+ r = logging.makeLogRecord({})
+ NONE = self.assertIsNone
+ NONE(r.thread)
+ NONE(r.threadName)
+ NONE(r.process)
+ NONE(r.processName)
+ finally:
+ logging.logThreads = log_threads
+ logging.logProcesses = log_processes
+ logging.logMultiprocessing = log_multiprocessing
+
+class BasicConfigTest(unittest.TestCase):
+
+ """Test suite for logging.basicConfig."""
+
+ def setUp(self):
+ super(BasicConfigTest, self).setUp()
+ self.handlers = logging.root.handlers
+ self.saved_handlers = logging._handlers.copy()
+ self.saved_handler_list = logging._handlerList[:]
+ self.original_logging_level = logging.root.level
+ self.addCleanup(self.cleanup)
+ logging.root.handlers = []
+
+ def tearDown(self):
+ for h in logging.root.handlers[:]:
+ logging.root.removeHandler(h)
+ h.close()
+ super(BasicConfigTest, self).tearDown()
+
+ def cleanup(self):
+ setattr(logging.root, 'handlers', self.handlers)
+ logging._handlers.clear()
+ logging._handlers.update(self.saved_handlers)
+ logging._handlerList[:] = self.saved_handler_list
+ logging.root.level = self.original_logging_level
+
+ def test_no_kwargs(self):
+ logging.basicConfig()
+
+ # handler defaults to a StreamHandler to sys.stderr
+ self.assertEqual(len(logging.root.handlers), 1)
+ handler = logging.root.handlers[0]
+ self.assertIsInstance(handler, logging.StreamHandler)
+ self.assertEqual(handler.stream, sys.stderr)
+
+ formatter = handler.formatter
+ # format defaults to logging.BASIC_FORMAT
+ self.assertEqual(formatter._style._fmt, logging.BASIC_FORMAT)
+ # datefmt defaults to None
+ self.assertIsNone(formatter.datefmt)
+ # style defaults to %
+ self.assertIsInstance(formatter._style, logging.PercentStyle)
+
+ # level is not explicitly set
+ self.assertEqual(logging.root.level, self.original_logging_level)
+
+ def test_filename(self):
+ logging.basicConfig(filename='test.log')
+
+ self.assertEqual(len(logging.root.handlers), 1)
+ handler = logging.root.handlers[0]
+ self.assertIsInstance(handler, logging.FileHandler)
+
+ expected = logging.FileHandler('test.log', 'a')
+ self.addCleanup(expected.close)
+ self.assertEqual(handler.stream.mode, expected.stream.mode)
+ self.assertEqual(handler.stream.name, expected.stream.name)
+
+ def test_filemode(self):
+ logging.basicConfig(filename='test.log', filemode='wb')
+
+ handler = logging.root.handlers[0]
+ expected = logging.FileHandler('test.log', 'wb')
+ self.addCleanup(expected.close)
+ self.assertEqual(handler.stream.mode, expected.stream.mode)
+
+ def test_stream(self):
+ stream = io.StringIO()
+ self.addCleanup(stream.close)
+ logging.basicConfig(stream=stream)
+
+ self.assertEqual(len(logging.root.handlers), 1)
+ handler = logging.root.handlers[0]
+ self.assertIsInstance(handler, logging.StreamHandler)
+ self.assertEqual(handler.stream, stream)
+
+ def test_format(self):
+ logging.basicConfig(format='foo')
+
+ formatter = logging.root.handlers[0].formatter
+ self.assertEqual(formatter._style._fmt, 'foo')
+
+ def test_datefmt(self):
+ logging.basicConfig(datefmt='bar')
+
+ formatter = logging.root.handlers[0].formatter
+ self.assertEqual(formatter.datefmt, 'bar')
+
+ def test_style(self):
+ logging.basicConfig(style='$')
+
+ formatter = logging.root.handlers[0].formatter
+ self.assertIsInstance(formatter._style, logging.StringTemplateStyle)
+
+ def test_level(self):
+ old_level = logging.root.level
+ self.addCleanup(logging.root.setLevel, old_level)
+
+ logging.basicConfig(level=57)
+ self.assertEqual(logging.root.level, 57)
+ # Test that second call has no effect
+ logging.basicConfig(level=58)
+ self.assertEqual(logging.root.level, 57)
+
+ def test_incompatible(self):
+ assertRaises = self.assertRaises
+ handlers = [logging.StreamHandler()]
+ stream = sys.stderr
+ assertRaises(ValueError, logging.basicConfig, filename='test.log',
+ stream=stream)
+ assertRaises(ValueError, logging.basicConfig, filename='test.log',
+ handlers=handlers)
+ assertRaises(ValueError, logging.basicConfig, stream=stream,
+ handlers=handlers)
+
+ def test_handlers(self):
+ handlers = [
+ logging.StreamHandler(),
+ logging.StreamHandler(sys.stdout),
+ logging.StreamHandler(),
+ ]
+ f = logging.Formatter()
+ handlers[2].setFormatter(f)
+ logging.basicConfig(handlers=handlers)
+ self.assertIs(handlers[0], logging.root.handlers[0])
+ self.assertIs(handlers[1], logging.root.handlers[1])
+ self.assertIs(handlers[2], logging.root.handlers[2])
+ self.assertIsNotNone(handlers[0].formatter)
+ self.assertIsNotNone(handlers[1].formatter)
+ self.assertIs(handlers[2].formatter, f)
+ self.assertIs(handlers[0].formatter, handlers[1].formatter)
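+
+ # basicConfig() creates a single default Formatter and assigns it to
+ # every handler that lacks one, which is why handlers[0] and
+ # handlers[1] share a formatter while handlers[2] keeps its own.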
+
+ def _test_log(self, method, level=None):
+ # logging.root has no handlers so basicConfig should be called
+ called = []
+
+ old_basic_config = logging.basicConfig
+ def my_basic_config(*a, **kw):
+ old_basic_config()
+ old_level = logging.root.level
+ logging.root.setLevel(100) # avoid having messages in stderr
+ self.addCleanup(logging.root.setLevel, old_level)
+ called.append((a, kw))
+
+ patch(self, logging, 'basicConfig', my_basic_config)
+
+ log_method = getattr(logging, method)
+ if level is not None:
+ log_method(level, "test me")
+ else:
+ log_method("test me")
+
+ # basicConfig was called with no arguments
+ self.assertEqual(called, [((), {})])
+
+ def test_log(self):
+ self._test_log('log', logging.WARNING)
+
+ def test_debug(self):
+ self._test_log('debug')
+
+ def test_info(self):
+ self._test_log('info')
+
+ def test_warning(self):
+ self._test_log('warning')
+
+ def test_error(self):
+ self._test_log('error')
+
+ def test_critical(self):
+ self._test_log('critical')
+
+
+class LoggerAdapterTest(unittest.TestCase):
+
+ def setUp(self):
+ super(LoggerAdapterTest, self).setUp()
+ old_handler_list = logging._handlerList[:]
+
+ self.recording = RecordingHandler()
+ self.logger = logging.root
+ self.logger.addHandler(self.recording)
+ self.addCleanup(self.logger.removeHandler, self.recording)
+ self.addCleanup(self.recording.close)
+
+ def cleanup():
+ logging._handlerList[:] = old_handler_list
+
+ self.addCleanup(cleanup)
+ self.addCleanup(logging.shutdown)
+ self.adapter = logging.LoggerAdapter(logger=self.logger, extra=None)
+
+ def test_exception(self):
+ msg = 'testing exception: %r'
+ exc = None
+ try:
+ 1 / 0
+ except ZeroDivisionError as e:
+ exc = e
+ self.adapter.exception(msg, self.recording)
+
+ self.assertEqual(len(self.recording.records), 1)
+ record = self.recording.records[0]
+ self.assertEqual(record.levelno, logging.ERROR)
+ self.assertEqual(record.msg, msg)
+ self.assertEqual(record.args, (self.recording,))
+ self.assertEqual(record.exc_info,
+ (exc.__class__, exc, exc.__traceback__))
+
+ def test_critical(self):
+ msg = 'critical test! %r'
+ self.adapter.critical(msg, self.recording)
+
+ self.assertEqual(len(self.recording.records), 1)
+ record = self.recording.records[0]
+ self.assertEqual(record.levelno, logging.CRITICAL)
+ self.assertEqual(record.msg, msg)
+ self.assertEqual(record.args, (self.recording,))
+
+ def test_is_enabled_for(self):
+ old_disable = self.adapter.logger.manager.disable
+ self.adapter.logger.manager.disable = 33
+ self.addCleanup(setattr, self.adapter.logger.manager, 'disable',
+ old_disable)
+ self.assertFalse(self.adapter.isEnabledFor(32))
+
+ def test_has_handlers(self):
+ self.assertTrue(self.adapter.hasHandlers())
+
+ for handler in self.logger.handlers:
+ self.logger.removeHandler(handler)
+
+ self.assertFalse(self.logger.hasHandlers())
+ self.assertFalse(self.adapter.hasHandlers())
+
+
+class LoggerTest(BaseTest):
+
+ def setUp(self):
+ super(LoggerTest, self).setUp()
+ self.recording = RecordingHandler()
+ self.logger = logging.Logger(name='blah')
+ self.logger.addHandler(self.recording)
+ self.addCleanup(self.logger.removeHandler, self.recording)
+ self.addCleanup(self.recording.close)
+ self.addCleanup(logging.shutdown)
+
+ def test_set_invalid_level(self):
+ self.assertRaises(TypeError, self.logger.setLevel, object())
+
+ def test_exception(self):
+ msg = 'testing exception: %r'
+ exc = None
+ try:
+ 1 / 0
+ except ZeroDivisionError as e:
+ exc = e
+ self.logger.exception(msg, self.recording)
+
+ self.assertEqual(len(self.recording.records), 1)
+ record = self.recording.records[0]
+ self.assertEqual(record.levelno, logging.ERROR)
+ self.assertEqual(record.msg, msg)
+ self.assertEqual(record.args, (self.recording,))
+ self.assertEqual(record.exc_info,
+ (exc.__class__, exc, exc.__traceback__))
+
+ def test_log_invalid_level_with_raise(self):
+ old_raise = logging.raiseExceptions
+ self.addCleanup(setattr, logging, 'raiseExceptions', old_raise)
+
+ logging.raiseExceptions = True
+ self.assertRaises(TypeError, self.logger.log, '10', 'test message')
+
+ def test_log_invalid_level_no_raise(self):
+ old_raise = logging.raiseExceptions
+ self.addCleanup(setattr, logging, 'raiseExceptions', old_raise)
+
+ logging.raiseExceptions = False
+ self.logger.log('10', 'test message') # no exception happens
+
+ def test_find_caller_with_stack_info(self):
+ called = []
+ patch(self, logging.traceback, 'print_stack',
+ lambda f, file: called.append(file.getvalue()))
+
+ self.logger.findCaller(stack_info=True)
+
+ self.assertEqual(len(called), 1)
+ self.assertEqual('Stack (most recent call last):\n', called[0])
+
+ def test_make_record_with_extra_overwrite(self):
+ name = 'my record'
+ level = 13
+ fn = lno = msg = args = exc_info = func = sinfo = None
+ rv = logging._logRecordFactory(name, level, fn, lno, msg, args,
+ exc_info, func, sinfo)
+
+ for key in ('message', 'asctime') + tuple(rv.__dict__.keys()):
+ extra = {key: 'some value'}
+ self.assertRaises(KeyError, self.logger.makeRecord, name, level,
+ fn, lno, msg, args, exc_info,
+ extra=extra, sinfo=sinfo)
+
+ def test_make_record_with_extra_no_overwrite(self):
+ name = 'my record'
+ level = 13
+ fn = lno = msg = args = exc_info = func = sinfo = None
+ extra = {'valid_key': 'some value'}
+ result = self.logger.makeRecord(name, level, fn, lno, msg, args,
+ exc_info, extra=extra, sinfo=sinfo)
+ self.assertIn('valid_key', result.__dict__)
+
+ def test_has_handlers(self):
+ self.assertTrue(self.logger.hasHandlers())
+
+ for handler in self.logger.handlers:
+ self.logger.removeHandler(handler)
+ self.assertFalse(self.logger.hasHandlers())
+
+ def test_has_handlers_no_propagate(self):
+ child_logger = logging.getLogger('blah.child')
+ child_logger.propagate = False
+ self.assertFalse(child_logger.hasHandlers())
+
+ def test_is_enabled_for(self):
+ old_disable = self.logger.manager.disable
+ self.logger.manager.disable = 23
+ self.addCleanup(setattr, self.logger.manager, 'disable', old_disable)
+ self.assertFalse(self.logger.isEnabledFor(22))
+
+ def test_root_logger_aliases(self):
+ root = logging.getLogger()
+ self.assertIs(root, logging.root)
+ self.assertIs(root, logging.getLogger(None))
+ self.assertIs(root, logging.getLogger(''))
+ self.assertIs(root, logging.getLogger('foo').root)
+ self.assertIs(root, logging.getLogger('foo.bar').root)
+ self.assertIs(root, logging.getLogger('foo').parent)
+
+ self.assertIsNot(root, logging.getLogger('\0'))
+ self.assertIsNot(root, logging.getLogger('foo.bar').parent)
+
+ def test_invalid_names(self):
+ self.assertRaises(TypeError, logging.getLogger, any)
+ self.assertRaises(TypeError, logging.getLogger, b'foo')
+
+
class BaseFileTest(BaseTest):
"Base class for handler tests that write log files"
@@ -2237,10 +3552,21 @@ class BaseFileTest(BaseTest):
def assertLogFile(self, filename):
"Assert a log file is there and register it for deletion"
self.assertTrue(os.path.exists(filename),
- msg="Log file %r does not exist")
+ msg="Log file %r does not exist" % filename)
self.rmfiles.append(filename)
+class FileHandlerTest(BaseFileTest):
+ def test_delay(self):
+ os.unlink(self.fn)
+ fh = logging.FileHandler(self.fn, delay=True)
+ self.assertIsNone(fh.stream)
+ self.assertFalse(os.path.exists(self.fn))
+ fh.handle(logging.makeLogRecord({}))
+ self.assertIsNotNone(fh.stream)
+ self.assertTrue(os.path.exists(self.fn))
+ fh.close()
+
class RotatingFileHandlerTest(BaseFileTest):
def next_rec(self):
return logging.LogRecord('n', logging.DEBUG, 'p', 1,
@@ -2266,20 +3592,117 @@ class RotatingFileHandlerTest(BaseFileTest):
rh.close()
def test_rollover_filenames(self):
+ def namer(name):
+ return name + ".test"
rh = logging.handlers.RotatingFileHandler(
self.fn, backupCount=2, maxBytes=1)
+ rh.namer = namer
rh.emit(self.next_rec())
self.assertLogFile(self.fn)
rh.emit(self.next_rec())
- self.assertLogFile(self.fn + ".1")
+ self.assertLogFile(namer(self.fn + ".1"))
rh.emit(self.next_rec())
- self.assertLogFile(self.fn + ".2")
- self.assertFalse(os.path.exists(self.fn + ".3"))
+ self.assertLogFile(namer(self.fn + ".2"))
+ self.assertFalse(os.path.exists(namer(self.fn + ".3")))
+ rh.close()
+
+ @requires_zlib
+ def test_rotator(self):
+ def namer(name):
+ return name + ".gz"
+
+ def rotator(source, dest):
+ with open(source, "rb") as sf:
+ data = sf.read()
+ compressed = zlib.compress(data, 9)
+ with open(dest, "wb") as df:
+ df.write(compressed)
+ os.remove(source)
+
+ rh = logging.handlers.RotatingFileHandler(
+ self.fn, backupCount=2, maxBytes=1)
+ rh.rotator = rotator
+ rh.namer = namer
+ m1 = self.next_rec()
+ rh.emit(m1)
+ self.assertLogFile(self.fn)
+ m2 = self.next_rec()
+ rh.emit(m2)
+ fn = namer(self.fn + ".1")
+ self.assertLogFile(fn)
+ newline = os.linesep
+ with open(fn, "rb") as f:
+ compressed = f.read()
+ data = zlib.decompress(compressed)
+ self.assertEqual(data.decode("ascii"), m1.msg + newline)
+ rh.emit(self.next_rec())
+ fn = namer(self.fn + ".2")
+ self.assertLogFile(fn)
+ with open(fn, "rb") as f:
+ compressed = f.read()
+ data = zlib.decompress(compressed)
+ self.assertEqual(data.decode("ascii"), m1.msg + newline)
+ rh.emit(self.next_rec())
+ fn = namer(self.fn + ".2")
+ with open(fn, "rb") as f:
+ compressed = f.read()
+ data = zlib.decompress(compressed)
+ self.assertEqual(data.decode("ascii"), m2.msg + newline)
+ self.assertFalse(os.path.exists(namer(self.fn + ".3")))
rh.close()
class TimedRotatingFileHandlerTest(BaseFileTest):
- # test methods added below
- pass
+ # other test methods added below
+ def test_rollover(self):
+ fh = logging.handlers.TimedRotatingFileHandler(self.fn, 'S',
+ backupCount=1)
+ fmt = logging.Formatter('%(asctime)s %(message)s')
+ fh.setFormatter(fmt)
+ r1 = logging.makeLogRecord({'msg': 'testing - initial'})
+ fh.emit(r1)
+ self.assertLogFile(self.fn)
+ time.sleep(1.1) # a little over a second ...
+ r2 = logging.makeLogRecord({'msg': 'testing - after delay'})
+ fh.emit(r2)
+ fh.close()
+ # At this point, we should have a recent rotated file which we
+ # can test for the existence of. However, in practice, on some
+ # machines which run really slowly, we don't know how far back
+ # in time to go to look for the log file. So, we go back a fair
+ # bit, and stop as soon as we see a rotated file. In theory this
+ # could of course still fail, but the chances are lower.
+ found = False
+ now = datetime.datetime.now()
+ GO_BACK = 5 * 60 # seconds
+ for secs in range(GO_BACK):
+ prev = now - datetime.timedelta(seconds=secs)
+ fn = self.fn + prev.strftime(".%Y-%m-%d_%H-%M-%S")
+ found = os.path.exists(fn)
+ if found:
+ self.rmfiles.append(fn)
+ break
+ msg = 'No rotated files found, went back %d seconds' % GO_BACK
+ if not found:
+ # print additional diagnostics (all to stderr, like the lines above)
+ dn, fn = os.path.split(self.fn)
+ files = [f for f in os.listdir(dn) if f.startswith(fn)]
+ print('Test time: %s' % now.strftime("%Y-%m-%d %H-%M-%S"), file=sys.stderr)
+ print('The only matching files are: %s' % files, file=sys.stderr)
+ for f in files:
+ print('Contents of %s:' % f, file=sys.stderr)
+ path = os.path.join(dn, f)
+ with open(path, 'r') as tf:
+ print(tf.read(), file=sys.stderr)
+ self.assertTrue(found, msg=msg)
+
+ def test_invalid(self):
+ assertRaises = self.assertRaises
+ assertRaises(ValueError, logging.handlers.TimedRotatingFileHandler,
+ self.fn, 'X', delay=True)
+ assertRaises(ValueError, logging.handlers.TimedRotatingFileHandler,
+ self.fn, 'W', delay=True)
+ assertRaises(ValueError, logging.handlers.TimedRotatingFileHandler,
+ self.fn, 'W7', delay=True)
def secs(**kw):
return datetime.timedelta(**kw) // datetime.timedelta(seconds=1)
@@ -2328,19 +3751,51 @@ for when, exp in (('S', 1),
rh.close()
setattr(TimedRotatingFileHandlerTest, "test_compute_rollover_%s" % when, test_compute_rollover)
+
+@unittest.skipUnless(win32evtlog, 'win32evtlog/win32evtlogutil required for this test.')
+class NTEventLogHandlerTest(BaseTest):
+ def test_basic(self):
+ logtype = 'Application'
+ elh = win32evtlog.OpenEventLog(None, logtype)
+ num_recs = win32evtlog.GetNumberOfEventLogRecords(elh)
+ h = logging.handlers.NTEventLogHandler('test_logging')
+ r = logging.makeLogRecord({'msg': 'Test Log Message'})
+ h.handle(r)
+ h.close()
+ # Now see if the event is recorded
+ self.assertTrue(num_recs < win32evtlog.GetNumberOfEventLogRecords(elh))
+ flags = win32evtlog.EVENTLOG_BACKWARDS_READ | \
+ win32evtlog.EVENTLOG_SEQUENTIAL_READ
+ found = False
+ GO_BACK = 100
+ events = win32evtlog.ReadEventLog(elh, flags, GO_BACK)
+ for e in events:
+ if e.SourceName != 'test_logging':
+ continue
+ msg = win32evtlogutil.SafeFormatMessage(e, logtype)
+ if msg != 'Test Log Message\r\n':
+ continue
+ found = True
+ break
+ msg = 'Record not found in event log, went back %d records' % GO_BACK
+ self.assertTrue(found, msg=msg)
+
# Set the locale to the platform-dependent default. I have no idea
# why the test does this, but in any case we save the current locale
# first and restore it at the end.
@run_with_locale('LC_ALL', '')
def test_main():
run_unittest(BuiltinLevelsTest, BasicFilterTest,
- CustomLevelsAndFiltersTest, MemoryHandlerTest,
- ConfigFileTest, SocketHandlerTest, MemoryTest,
- EncodingTest, WarningsTest, ConfigDictTest, ManagerTest,
- FormatterTest,
- LogRecordFactoryTest, ChildLoggerTest, QueueHandlerTest,
- RotatingFileHandlerTest,
- LastResortTest,
+ CustomLevelsAndFiltersTest, HandlerTest, MemoryHandlerTest,
+ ConfigFileTest, SocketHandlerTest, DatagramHandlerTest,
+ MemoryTest, EncodingTest, WarningsTest, ConfigDictTest,
+ ManagerTest, FormatterTest, BufferingFormatterTest,
+ StreamHandlerTest, LogRecordFactoryTest, ChildLoggerTest,
+ QueueHandlerTest, ShutdownTest, ModuleLevelMiscTest,
+ BasicConfigTest, LoggerAdapterTest, LoggerTest,
+ SMTPHandlerTest, FileHandlerTest, RotatingFileHandlerTest,
+ LastResortTest, LogRecordTest, ExceptionTest,
+ SysLogHandlerTest, HTTPHandlerTest, NTEventLogHandlerTest,
TimedRotatingFileHandlerTest
)
diff --git a/Lib/test/test_long.py b/Lib/test/test_long.py
index 04066ec..05b3e3e 100644
--- a/Lib/test/test_long.py
+++ b/Lib/test/test_long.py
@@ -43,6 +43,53 @@ DBL_MIN_EXP = sys.float_info.min_exp
DBL_MANT_DIG = sys.float_info.mant_dig
DBL_MIN_OVERFLOW = 2**DBL_MAX_EXP - 2**(DBL_MAX_EXP - DBL_MANT_DIG - 1)
+
+# Pure Python version of correctly-rounded integer-to-float conversion.
+def int_to_float(n):
+ """
+ Correctly-rounded integer-to-float conversion.
+
+ """
+ # Constants, depending only on the floating-point format in use.
+ # We use an extra 2 bits of precision for rounding purposes.
+ PRECISION = sys.float_info.mant_dig + 2
+ SHIFT_MAX = sys.float_info.max_exp - PRECISION
+ Q_MAX = 1 << PRECISION
+ ROUND_HALF_TO_EVEN_CORRECTION = [0, -1, -2, 1, 0, -1, 2, 1]
+
+ # Reduce to the case where n is positive.
+ if n == 0:
+ return 0.0
+ elif n < 0:
+ return -int_to_float(-n)
+
+ # Convert n to a 'floating-point' number q * 2**shift, where q is an
+ # integer with 'PRECISION' significant bits. When shifting n to create q,
+ # the least significant bit of q is treated as 'sticky'. That is, the
+ # least significant bit of q is set if either the corresponding bit of n
+ # was already set, or any one of the bits of n lost in the shift was set.
+ shift = n.bit_length() - PRECISION
+ q = n << -shift if shift < 0 else (n >> shift) | bool(n & ~(-1 << shift))
+
+ # Round half to even (actually rounds to the nearest multiple of 4,
+ # rounding ties to a multiple of 8).
+ q += ROUND_HALF_TO_EVEN_CORRECTION[q & 7]
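+
+ # Worked example (IEEE 754 doubles, so PRECISION == 55): for
+ # n = 2**55 + 1, shift == 1 and the sticky shift gives q == 2**54 + 1;
+ # q & 7 == 1, so the correction is -1 and q becomes 2**54, yielding
+ # math.ldexp(2**54, 1) == 2.0**55, the same value float(2**55 + 1)
+ # produces.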
+
+ # Detect overflow.
+ if shift + (q == Q_MAX) > SHIFT_MAX:
+ raise OverflowError("integer too large to convert to float")
+
+ # Checks: q is exactly representable, and q * 2**shift doesn't overflow.
+ assert q % 4 == 0 and q // 4 <= 2**(sys.float_info.mant_dig)
+ assert q * 2**shift <= sys.float_info.max
+
+ # Some circularity here, since float(q) is doing an int-to-float
+ # conversion. But here q is of bounded size, and is exactly representable
+ # as a float. In a low-level C-like language, this operation would be a
+ # simple cast (e.g., from unsigned long long to double).
+ return math.ldexp(float(q), shift)
+
+
# pure Python version of correctly-rounded true division
def truediv(a, b):
"""Correctly-rounded true division for integers."""
@@ -367,6 +414,23 @@ class LongTest(unittest.TestCase):
return 1729
self.assertEqual(int(LongTrunc()), 1729)
+ def check_float_conversion(self, n):
+ # Check that int -> float conversion behaviour matches
+ # that of the pure Python version above.
+ try:
+ actual = float(n)
+ except OverflowError:
+ actual = 'overflow'
+
+ try:
+ expected = int_to_float(n)
+ except OverflowError:
+ expected = 'overflow'
+
+ msg = ("Error in conversion of integer {} to float. "
+ "Got {}, expected {}.".format(n, actual, expected))
+ self.assertEqual(actual, expected, msg)
+
@support.requires_IEEE_754
def test_float_conversion(self):
@@ -421,6 +485,22 @@ class LongTest(unittest.TestCase):
y = 2**p * 2**53
self.assertEqual(int(float(x)), y)
+ # Compare builtin float conversion with pure Python int_to_float
+ # function above.
+ test_values = [
+ int_dbl_max-1, int_dbl_max, int_dbl_max+1,
+ halfway-1, halfway, halfway + 1,
+ top_power-1, top_power, top_power+1,
+ 2*top_power-1, 2*top_power, top_power*top_power,
+ ]
+ test_values.extend(exact_values)
+ for p in range(-4, 8):
+ for x in range(-128, 128):
+ test_values.append(2**(p+53) + x)
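+ # These neighborhoods straddle powers of two from 2**49 to 2**60:
+ # integers below 2**53 are all exactly representable (a sanity check),
+ # while the larger ones force the conversion to round.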
+ for value in test_values:
+ self.check_float_conversion(value)
+ self.check_float_conversion(-value)
+
def test_float_overflow(self):
for x in -2.0, -1.0, 0.0, 1.0, 2.0:
self.assertEqual(float(int(x)), x)
diff --git a/Lib/test/test_lzma.py b/Lib/test/test_lzma.py
new file mode 100644
index 0000000..ffde557
--- /dev/null
+++ b/Lib/test/test_lzma.py
@@ -0,0 +1,1344 @@
+from io import BytesIO, UnsupportedOperation
+import os
+import random
+import unittest
+
+from test.support import (
+ _4G, TESTFN, import_module, bigmemtest, run_unittest, unlink
+)
+
+lzma = import_module("lzma")
+from lzma import LZMACompressor, LZMADecompressor, LZMAError, LZMAFile
+
+
+class CompressorDecompressorTestCase(unittest.TestCase):
+
+ # Test error cases.
+
+ def test_simple_bad_args(self):
+ self.assertRaises(TypeError, LZMACompressor, [])
+ self.assertRaises(TypeError, LZMACompressor, format=3.45)
+ self.assertRaises(TypeError, LZMACompressor, check="")
+ self.assertRaises(TypeError, LZMACompressor, preset="asdf")
+ self.assertRaises(TypeError, LZMACompressor, filters=3)
+ # Can't specify FORMAT_AUTO when compressing.
+ self.assertRaises(ValueError, LZMACompressor, format=lzma.FORMAT_AUTO)
+ # Can't specify a preset and a custom filter chain at the same time.
+ with self.assertRaises(ValueError):
+ LZMACompressor(preset=7, filters=[{"id": lzma.FILTER_LZMA2}])
+
+ self.assertRaises(TypeError, LZMADecompressor, ())
+ self.assertRaises(TypeError, LZMADecompressor, memlimit=b"qw")
+ with self.assertRaises(TypeError):
+ LZMADecompressor(lzma.FORMAT_RAW, filters="zzz")
+ # Cannot specify a memory limit with FORMAT_RAW.
+ with self.assertRaises(ValueError):
+ LZMADecompressor(lzma.FORMAT_RAW, memlimit=0x1000000)
+ # Can only specify a custom filter chain with FORMAT_RAW.
+ self.assertRaises(ValueError, LZMADecompressor, filters=FILTERS_RAW_1)
+ with self.assertRaises(ValueError):
+ LZMADecompressor(format=lzma.FORMAT_XZ, filters=FILTERS_RAW_1)
+ with self.assertRaises(ValueError):
+ LZMADecompressor(format=lzma.FORMAT_ALONE, filters=FILTERS_RAW_1)
+
+ lzc = LZMACompressor()
+ self.assertRaises(TypeError, lzc.compress)
+ self.assertRaises(TypeError, lzc.compress, b"foo", b"bar")
+ self.assertRaises(TypeError, lzc.flush, b"blah")
+ empty = lzc.flush()
+ self.assertRaises(ValueError, lzc.compress, b"quux")
+ self.assertRaises(ValueError, lzc.flush)
+
+ lzd = LZMADecompressor()
+ self.assertRaises(TypeError, lzd.decompress)
+ self.assertRaises(TypeError, lzd.decompress, b"foo", b"bar")
+ lzd.decompress(empty)
+ self.assertRaises(EOFError, lzd.decompress, b"quux")
+
+ def test_bad_filter_spec(self):
+ self.assertRaises(TypeError, LZMACompressor, filters=[b"wobsite"])
+ self.assertRaises(ValueError, LZMACompressor, filters=[{"xyzzy": 3}])
+ self.assertRaises(ValueError, LZMACompressor, filters=[{"id": 98765}])
+ with self.assertRaises(ValueError):
+ LZMACompressor(filters=[{"id": lzma.FILTER_LZMA2, "foo": 0}])
+ with self.assertRaises(ValueError):
+ LZMACompressor(filters=[{"id": lzma.FILTER_DELTA, "foo": 0}])
+ with self.assertRaises(ValueError):
+ LZMACompressor(filters=[{"id": lzma.FILTER_X86, "foo": 0}])
+
+ def test_decompressor_after_eof(self):
+ lzd = LZMADecompressor()
+ lzd.decompress(COMPRESSED_XZ)
+ self.assertRaises(EOFError, lzd.decompress, b"nyan")
+
+ def test_decompressor_memlimit(self):
+ lzd = LZMADecompressor(memlimit=1024)
+ self.assertRaises(LZMAError, lzd.decompress, COMPRESSED_XZ)
+
+ lzd = LZMADecompressor(lzma.FORMAT_XZ, memlimit=1024)
+ self.assertRaises(LZMAError, lzd.decompress, COMPRESSED_XZ)
+
+ lzd = LZMADecompressor(lzma.FORMAT_ALONE, memlimit=1024)
+ self.assertRaises(LZMAError, lzd.decompress, COMPRESSED_ALONE)
+
+ # Test LZMADecompressor on known-good input data.
+
+ def _test_decompressor(self, lzd, data, check, unused_data=b""):
+ self.assertFalse(lzd.eof)
+ out = lzd.decompress(data)
+ self.assertEqual(out, INPUT)
+ self.assertEqual(lzd.check, check)
+ self.assertTrue(lzd.eof)
+ self.assertEqual(lzd.unused_data, unused_data)
+
+ def test_decompressor_auto(self):
+ lzd = LZMADecompressor()
+ self._test_decompressor(lzd, COMPRESSED_XZ, lzma.CHECK_CRC64)
+
+ lzd = LZMADecompressor()
+ self._test_decompressor(lzd, COMPRESSED_ALONE, lzma.CHECK_NONE)
+
+ def test_decompressor_xz(self):
+ lzd = LZMADecompressor(lzma.FORMAT_XZ)
+ self._test_decompressor(lzd, COMPRESSED_XZ, lzma.CHECK_CRC64)
+
+ def test_decompressor_alone(self):
+ lzd = LZMADecompressor(lzma.FORMAT_ALONE)
+ self._test_decompressor(lzd, COMPRESSED_ALONE, lzma.CHECK_NONE)
+
+ def test_decompressor_raw_1(self):
+ lzd = LZMADecompressor(lzma.FORMAT_RAW, filters=FILTERS_RAW_1)
+ self._test_decompressor(lzd, COMPRESSED_RAW_1, lzma.CHECK_NONE)
+
+ def test_decompressor_raw_2(self):
+ lzd = LZMADecompressor(lzma.FORMAT_RAW, filters=FILTERS_RAW_2)
+ self._test_decompressor(lzd, COMPRESSED_RAW_2, lzma.CHECK_NONE)
+
+ def test_decompressor_raw_3(self):
+ lzd = LZMADecompressor(lzma.FORMAT_RAW, filters=FILTERS_RAW_3)
+ self._test_decompressor(lzd, COMPRESSED_RAW_3, lzma.CHECK_NONE)
+
+ def test_decompressor_raw_4(self):
+ lzd = LZMADecompressor(lzma.FORMAT_RAW, filters=FILTERS_RAW_4)
+ self._test_decompressor(lzd, COMPRESSED_RAW_4, lzma.CHECK_NONE)
+
+ def test_decompressor_chunks(self):
+ lzd = LZMADecompressor()
+ out = []
+ for i in range(0, len(COMPRESSED_XZ), 10):
+ self.assertFalse(lzd.eof)
+ out.append(lzd.decompress(COMPRESSED_XZ[i:i+10]))
+ out = b"".join(out)
+ self.assertEqual(out, INPUT)
+ self.assertEqual(lzd.check, lzma.CHECK_CRC64)
+ self.assertTrue(lzd.eof)
+ self.assertEqual(lzd.unused_data, b"")
+
+ def test_decompressor_unused_data(self):
+ lzd = LZMADecompressor()
+ extra = b"fooblibar"
+ self._test_decompressor(lzd, COMPRESSED_XZ + extra, lzma.CHECK_CRC64,
+ unused_data=extra)
+
+ def test_decompressor_bad_input(self):
+ lzd = LZMADecompressor()
+ self.assertRaises(LZMAError, lzd.decompress, COMPRESSED_RAW_1)
+
+ lzd = LZMADecompressor(lzma.FORMAT_XZ)
+ self.assertRaises(LZMAError, lzd.decompress, COMPRESSED_ALONE)
+
+ lzd = LZMADecompressor(lzma.FORMAT_ALONE)
+ self.assertRaises(LZMAError, lzd.decompress, COMPRESSED_XZ)
+
+ lzd = LZMADecompressor(lzma.FORMAT_RAW, filters=FILTERS_RAW_1)
+ self.assertRaises(LZMAError, lzd.decompress, COMPRESSED_XZ)
+
+ # Test that LZMACompressor->LZMADecompressor preserves the input data.
+
+ def test_roundtrip_xz(self):
+ lzc = LZMACompressor()
+ cdata = lzc.compress(INPUT) + lzc.flush()
+ lzd = LZMADecompressor()
+ self._test_decompressor(lzd, cdata, lzma.CHECK_CRC64)
+
+ def test_roundtrip_alone(self):
+ lzc = LZMACompressor(lzma.FORMAT_ALONE)
+ cdata = lzc.compress(INPUT) + lzc.flush()
+ lzd = LZMADecompressor()
+ self._test_decompressor(lzd, cdata, lzma.CHECK_NONE)
+
+ def test_roundtrip_raw(self):
+ lzc = LZMACompressor(lzma.FORMAT_RAW, filters=FILTERS_RAW_4)
+ cdata = lzc.compress(INPUT) + lzc.flush()
+ lzd = LZMADecompressor(lzma.FORMAT_RAW, filters=FILTERS_RAW_4)
+ self._test_decompressor(lzd, cdata, lzma.CHECK_NONE)
+
+ def test_roundtrip_chunks(self):
+ lzc = LZMACompressor()
+ cdata = []
+ for i in range(0, len(INPUT), 10):
+ cdata.append(lzc.compress(INPUT[i:i+10]))
+ cdata.append(lzc.flush())
+ cdata = b"".join(cdata)
+ lzd = LZMADecompressor()
+ self._test_decompressor(lzd, cdata, lzma.CHECK_CRC64)
+
+ # LZMADecompressor intentionally does not handle concatenated streams.
+
+ def test_decompressor_multistream(self):
+ lzd = LZMADecompressor()
+ self._test_decompressor(lzd, COMPRESSED_XZ + COMPRESSED_ALONE,
+ lzma.CHECK_CRC64, unused_data=COMPRESSED_ALONE)
+
+ # Test with inputs larger than 4GiB.
+
+ @bigmemtest(size=_4G + 100, memuse=2)
+ def test_compressor_bigmem(self, size):
+ lzc = LZMACompressor()
+ cdata = lzc.compress(b"x" * size) + lzc.flush()
+ ddata = lzma.decompress(cdata)
+ try:
+ self.assertEqual(len(ddata), size)
+ self.assertEqual(len(ddata.strip(b"x")), 0)
+ finally:
+ ddata = None
+
+ @bigmemtest(size=_4G + 100, memuse=3)
+ def test_decompressor_bigmem(self, size):
+ lzd = LZMADecompressor()
+ blocksize = 10 * 1024 * 1024
+ block = random.getrandbits(blocksize * 8).to_bytes(blocksize, "little")
+ try:
+ input = block * (size // blocksize + 1)
+ cdata = lzma.compress(input)
+ ddata = lzd.decompress(cdata)
+ self.assertEqual(ddata, input)
+ finally:
+ input = cdata = ddata = None
+
+
+class CompressDecompressFunctionTestCase(unittest.TestCase):
+
+ # Test error cases:
+
+ def test_bad_args(self):
+ self.assertRaises(TypeError, lzma.compress)
+ self.assertRaises(TypeError, lzma.compress, [])
+ self.assertRaises(TypeError, lzma.compress, b"", format="xz")
+ self.assertRaises(TypeError, lzma.compress, b"", check="none")
+ self.assertRaises(TypeError, lzma.compress, b"", preset="blah")
+ self.assertRaises(TypeError, lzma.compress, b"", filters=1024)
+ # Can't specify a preset and a custom filter chain at the same time.
+ with self.assertRaises(ValueError):
+ lzma.compress(b"", preset=3, filters=[{"id": lzma.FILTER_LZMA2}])
+
+ self.assertRaises(TypeError, lzma.decompress)
+ self.assertRaises(TypeError, lzma.decompress, [])
+ self.assertRaises(TypeError, lzma.decompress, b"", format="lzma")
+ self.assertRaises(TypeError, lzma.decompress, b"", memlimit=7.3e9)
+ with self.assertRaises(TypeError):
+ lzma.decompress(b"", format=lzma.FORMAT_RAW, filters={})
+ # Cannot specify a memory limit with FORMAT_RAW.
+ with self.assertRaises(ValueError):
+ lzma.decompress(b"", format=lzma.FORMAT_RAW, memlimit=0x1000000)
+ # Can only specify a custom filter chain with FORMAT_RAW.
+ with self.assertRaises(ValueError):
+ lzma.decompress(b"", filters=FILTERS_RAW_1)
+ with self.assertRaises(ValueError):
+ lzma.decompress(b"", format=lzma.FORMAT_XZ, filters=FILTERS_RAW_1)
+ with self.assertRaises(ValueError):
+ lzma.decompress(
+ b"", format=lzma.FORMAT_ALONE, filters=FILTERS_RAW_1)
+
+ def test_decompress_memlimit(self):
+ with self.assertRaises(LZMAError):
+ lzma.decompress(COMPRESSED_XZ, memlimit=1024)
+ with self.assertRaises(LZMAError):
+ lzma.decompress(
+ COMPRESSED_XZ, format=lzma.FORMAT_XZ, memlimit=1024)
+ with self.assertRaises(LZMAError):
+ lzma.decompress(
+ COMPRESSED_ALONE, format=lzma.FORMAT_ALONE, memlimit=1024)
+
+ # Test LZMADecompressor on known-good input data.
+
+ def test_decompress_good_input(self):
+ ddata = lzma.decompress(COMPRESSED_XZ)
+ self.assertEqual(ddata, INPUT)
+
+ ddata = lzma.decompress(COMPRESSED_ALONE)
+ self.assertEqual(ddata, INPUT)
+
+ ddata = lzma.decompress(COMPRESSED_XZ, lzma.FORMAT_XZ)
+ self.assertEqual(ddata, INPUT)
+
+ ddata = lzma.decompress(COMPRESSED_ALONE, lzma.FORMAT_ALONE)
+ self.assertEqual(ddata, INPUT)
+
+ ddata = lzma.decompress(
+ COMPRESSED_RAW_1, lzma.FORMAT_RAW, filters=FILTERS_RAW_1)
+ self.assertEqual(ddata, INPUT)
+
+ ddata = lzma.decompress(
+ COMPRESSED_RAW_2, lzma.FORMAT_RAW, filters=FILTERS_RAW_2)
+ self.assertEqual(ddata, INPUT)
+
+ ddata = lzma.decompress(
+ COMPRESSED_RAW_3, lzma.FORMAT_RAW, filters=FILTERS_RAW_3)
+ self.assertEqual(ddata, INPUT)
+
+ ddata = lzma.decompress(
+ COMPRESSED_RAW_4, lzma.FORMAT_RAW, filters=FILTERS_RAW_4)
+ self.assertEqual(ddata, INPUT)
+
+ def test_decompress_incomplete_input(self):
+ self.assertRaises(LZMAError, lzma.decompress, COMPRESSED_XZ[:128])
+ self.assertRaises(LZMAError, lzma.decompress, COMPRESSED_ALONE[:128])
+ self.assertRaises(LZMAError, lzma.decompress, COMPRESSED_RAW_1[:128],
+ format=lzma.FORMAT_RAW, filters=FILTERS_RAW_1)
+ self.assertRaises(LZMAError, lzma.decompress, COMPRESSED_RAW_2[:128],
+ format=lzma.FORMAT_RAW, filters=FILTERS_RAW_2)
+ self.assertRaises(LZMAError, lzma.decompress, COMPRESSED_RAW_3[:128],
+ format=lzma.FORMAT_RAW, filters=FILTERS_RAW_3)
+ self.assertRaises(LZMAError, lzma.decompress, COMPRESSED_RAW_4[:128],
+ format=lzma.FORMAT_RAW, filters=FILTERS_RAW_4)
+
+ def test_decompress_bad_input(self):
+ with self.assertRaises(LZMAError):
+ lzma.decompress(COMPRESSED_RAW_1)
+ with self.assertRaises(LZMAError):
+ lzma.decompress(COMPRESSED_ALONE, format=lzma.FORMAT_XZ)
+ with self.assertRaises(LZMAError):
+ lzma.decompress(COMPRESSED_XZ, format=lzma.FORMAT_ALONE)
+ with self.assertRaises(LZMAError):
+ lzma.decompress(COMPRESSED_XZ, format=lzma.FORMAT_RAW,
+ filters=FILTERS_RAW_1)
+
+ # Test that compress()->decompress() preserves the input data.
+
+ def test_roundtrip(self):
+ cdata = lzma.compress(INPUT)
+ ddata = lzma.decompress(cdata)
+ self.assertEqual(ddata, INPUT)
+
+ cdata = lzma.compress(INPUT, lzma.FORMAT_XZ)
+ ddata = lzma.decompress(cdata)
+ self.assertEqual(ddata, INPUT)
+
+ cdata = lzma.compress(INPUT, lzma.FORMAT_ALONE)
+ ddata = lzma.decompress(cdata)
+ self.assertEqual(ddata, INPUT)
+
+ cdata = lzma.compress(INPUT, lzma.FORMAT_RAW, filters=FILTERS_RAW_4)
+ ddata = lzma.decompress(cdata, lzma.FORMAT_RAW, filters=FILTERS_RAW_4)
+ self.assertEqual(ddata, INPUT)
+
+ # Unlike LZMADecompressor, decompress() *does* handle concatenated streams.
+
+ def test_decompress_multistream(self):
+ ddata = lzma.decompress(COMPRESSED_XZ + COMPRESSED_ALONE)
+ self.assertEqual(ddata, INPUT * 2)
+
+
+class TempFile:
+ """Context manager - creates a file, and deletes it on __exit__."""
+
+ def __init__(self, filename, data=b""):
+ self.filename = filename
+ self.data = data
+
+ def __enter__(self):
+ with open(self.filename, "wb") as f:
+ f.write(self.data)
+
+ def __exit__(self, *args):
+ unlink(self.filename)
+
+
+class FileTestCase(unittest.TestCase):
+
+ def test_init(self):
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f:
+ pass
+ with LZMAFile(fileobj=BytesIO(), mode="w") as f:
+ pass
+ with LZMAFile(fileobj=BytesIO(), mode="a") as f:
+ pass
+
+ def test_init_with_filename(self):
+ with TempFile(TESTFN, COMPRESSED_XZ):
+ with LZMAFile(TESTFN) as f:
+ pass
+ with LZMAFile(TESTFN, "w") as f:
+ pass
+ with LZMAFile(TESTFN, "a") as f:
+ pass
+
+ def test_init_bad_mode(self):
+ with self.assertRaises(ValueError):
+ LZMAFile(fileobj=BytesIO(COMPRESSED_XZ), mode=(3, "x"))
+ with self.assertRaises(ValueError):
+ LZMAFile(fileobj=BytesIO(COMPRESSED_XZ), mode="")
+ with self.assertRaises(ValueError):
+ LZMAFile(fileobj=BytesIO(COMPRESSED_XZ), mode="x")
+ with self.assertRaises(ValueError):
+ LZMAFile(fileobj=BytesIO(COMPRESSED_XZ), mode="rb")
+ with self.assertRaises(ValueError):
+ LZMAFile(fileobj=BytesIO(COMPRESSED_XZ), mode="r+")
+ with self.assertRaises(ValueError):
+ LZMAFile(fileobj=BytesIO(COMPRESSED_XZ), mode="wb")
+ with self.assertRaises(ValueError):
+ LZMAFile(fileobj=BytesIO(COMPRESSED_XZ), mode="w+")
+ with self.assertRaises(ValueError):
+ LZMAFile(fileobj=BytesIO(COMPRESSED_XZ), mode="rw")
+
+ def test_init_bad_check(self):
+ with self.assertRaises(TypeError):
+ LZMAFile(fileobj=BytesIO(), mode="w", check=b"asd")
+ # CHECK_UNKNOWN and anything above CHECK_ID_MAX should be invalid.
+ with self.assertRaises(LZMAError):
+ LZMAFile(fileobj=BytesIO(), mode="w", check=lzma.CHECK_UNKNOWN)
+ with self.assertRaises(LZMAError):
+ LZMAFile(fileobj=BytesIO(), mode="w", check=lzma.CHECK_ID_MAX + 3)
+ # Cannot specify a check with mode="r".
+ with self.assertRaises(ValueError):
+ LZMAFile(fileobj=BytesIO(COMPRESSED_XZ), check=lzma.CHECK_NONE)
+ with self.assertRaises(ValueError):
+ LZMAFile(fileobj=BytesIO(COMPRESSED_XZ), check=lzma.CHECK_CRC32)
+ with self.assertRaises(ValueError):
+ LZMAFile(fileobj=BytesIO(COMPRESSED_XZ), check=lzma.CHECK_CRC64)
+ with self.assertRaises(ValueError):
+ LZMAFile(fileobj=BytesIO(COMPRESSED_XZ), check=lzma.CHECK_SHA256)
+ with self.assertRaises(ValueError):
+ LZMAFile(fileobj=BytesIO(COMPRESSED_XZ), check=lzma.CHECK_UNKNOWN)
+
+ def test_init_bad_preset(self):
+ with self.assertRaises(TypeError):
+ LZMAFile(fileobj=BytesIO(), mode="w", preset=4.39)
+ with self.assertRaises(LZMAError):
+ LZMAFile(fileobj=BytesIO(), mode="w", preset=10)
+ with self.assertRaises(LZMAError):
+ LZMAFile(fileobj=BytesIO(), mode="w", preset=23)
+ with self.assertRaises(OverflowError):
+ LZMAFile(fileobj=BytesIO(), mode="w", preset=-1)
+ with self.assertRaises(OverflowError):
+ LZMAFile(fileobj=BytesIO(), mode="w", preset=-7)
+ with self.assertRaises(TypeError):
+ LZMAFile(fileobj=BytesIO(), mode="w", preset="foo")
+ # Cannot specify a preset with mode="r".
+ with self.assertRaises(ValueError):
+ LZMAFile(fileobj=BytesIO(COMPRESSED_XZ), preset=3)
+
+ def test_init_bad_filter_spec(self):
+ with self.assertRaises(TypeError):
+ LZMAFile(fileobj=BytesIO(), mode="w", filters=[b"wobsite"])
+ with self.assertRaises(ValueError):
+ LZMAFile(fileobj=BytesIO(), mode="w", filters=[{"xyzzy": 3}])
+ with self.assertRaises(ValueError):
+ LZMAFile(fileobj=BytesIO(), mode="w", filters=[{"id": 98765}])
+ with self.assertRaises(ValueError):
+ LZMAFile(fileobj=BytesIO(), mode="w",
+ filters=[{"id": lzma.FILTER_LZMA2, "foo": 0}])
+ with self.assertRaises(ValueError):
+ LZMAFile(fileobj=BytesIO(), mode="w",
+ filters=[{"id": lzma.FILTER_DELTA, "foo": 0}])
+ with self.assertRaises(ValueError):
+ LZMAFile(fileobj=BytesIO(), mode="w",
+ filters=[{"id": lzma.FILTER_X86, "foo": 0}])
+
+ def test_init_with_preset_and_filters(self):
+ with self.assertRaises(ValueError):
+ LZMAFile(fileobj=BytesIO(), mode="w", format=lzma.FORMAT_RAW,
+ preset=6, filters=FILTERS_RAW_1)
+
+ def test_init_with_filename_and_fileobj(self):
+ with self.assertRaises(ValueError):
+ LZMAFile("/dev/null", fileobj=BytesIO())
+
+ def test_close(self):
+ with BytesIO(COMPRESSED_XZ) as src:
+ f = LZMAFile(fileobj=src)
+ f.close()
+ # LZMAFile.close() should not close the underlying file object.
+ self.assertFalse(src.closed)
+ # Try closing an already-closed LZMAFile.
+ f.close()
+ self.assertFalse(src.closed)
+
+ # Test with a real file on disk, opened directly by LZMAFile.
+ with TempFile(TESTFN, COMPRESSED_XZ):
+ f = LZMAFile(TESTFN)
+ fp = f._fp
+ f.close()
+ # Here, LZMAFile.close() *should* close the underlying file object.
+ self.assertTrue(fp.closed)
+ # Try closing an already-closed LZMAFile.
+ f.close()
+
+ def test_closed(self):
+ f = LZMAFile(fileobj=BytesIO(COMPRESSED_XZ))
+ try:
+ self.assertFalse(f.closed)
+ f.read()
+ self.assertFalse(f.closed)
+ finally:
+ f.close()
+ self.assertTrue(f.closed)
+
+ f = LZMAFile(fileobj=BytesIO(), mode="w")
+ try:
+ self.assertFalse(f.closed)
+ finally:
+ f.close()
+ self.assertTrue(f.closed)
+
+ def test_fileno(self):
+ f = LZMAFile(fileobj=BytesIO(COMPRESSED_XZ))
+ try:
+ self.assertRaises(UnsupportedOperation, f.fileno)
+ finally:
+ f.close()
+ self.assertRaises(ValueError, f.fileno)
+ with TempFile(TESTFN, COMPRESSED_XZ):
+ f = LZMAFile(TESTFN)
+ try:
+ self.assertEqual(f.fileno(), f._fp.fileno())
+ self.assertIsInstance(f.fileno(), int)
+ finally:
+ f.close()
+ self.assertRaises(ValueError, f.fileno)
+
+ def test_seekable(self):
+ f = LZMAFile(fileobj=BytesIO(COMPRESSED_XZ))
+ try:
+ self.assertTrue(f.seekable())
+ f.read()
+ self.assertTrue(f.seekable())
+ finally:
+ f.close()
+ self.assertRaises(ValueError, f.seekable)
+
+ f = LZMAFile(fileobj=BytesIO(), mode="w")
+ try:
+ self.assertFalse(f.seekable())
+ finally:
+ f.close()
+ self.assertRaises(ValueError, f.seekable)
+
+ src = BytesIO(COMPRESSED_XZ)
+ src.seekable = lambda: False
+ f = LZMAFile(fileobj=src)
+ try:
+ self.assertFalse(f.seekable())
+ finally:
+ f.close()
+ self.assertRaises(ValueError, f.seekable)
+
+ def test_readable(self):
+ f = LZMAFile(fileobj=BytesIO(COMPRESSED_XZ))
+ try:
+ self.assertTrue(f.readable())
+ f.read()
+ self.assertTrue(f.readable())
+ finally:
+ f.close()
+ self.assertRaises(ValueError, f.readable)
+
+ f = LZMAFile(fileobj=BytesIO(), mode="w")
+ try:
+ self.assertFalse(f.readable())
+ finally:
+ f.close()
+ self.assertRaises(ValueError, f.readable)
+
+ def test_writable(self):
+ f = LZMAFile(fileobj=BytesIO(COMPRESSED_XZ))
+ try:
+ self.assertFalse(f.writable())
+ f.read()
+ self.assertFalse(f.writable())
+ finally:
+ f.close()
+ self.assertRaises(ValueError, f.writable)
+
+ f = LZMAFile(fileobj=BytesIO(), mode="w")
+ try:
+ self.assertTrue(f.writable())
+ finally:
+ f.close()
+ self.assertRaises(ValueError, f.writable)
+
+ def test_read(self):
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f:
+ self.assertEqual(f.read(), INPUT)
+ self.assertEqual(f.read(), b"")
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_ALONE)) as f:
+ self.assertEqual(f.read(), INPUT)
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ),
+ format=lzma.FORMAT_XZ) as f:
+ self.assertEqual(f.read(), INPUT)
+ self.assertEqual(f.read(), b"")
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_ALONE),
+ format=lzma.FORMAT_ALONE) as f:
+ self.assertEqual(f.read(), INPUT)
+ self.assertEqual(f.read(), b"")
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_RAW_1),
+ format=lzma.FORMAT_RAW, filters=FILTERS_RAW_1) as f:
+ self.assertEqual(f.read(), INPUT)
+ self.assertEqual(f.read(), b"")
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_RAW_2),
+ format=lzma.FORMAT_RAW, filters=FILTERS_RAW_2) as f:
+ self.assertEqual(f.read(), INPUT)
+ self.assertEqual(f.read(), b"")
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_RAW_3),
+ format=lzma.FORMAT_RAW, filters=FILTERS_RAW_3) as f:
+ self.assertEqual(f.read(), INPUT)
+ self.assertEqual(f.read(), b"")
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_RAW_4),
+ format=lzma.FORMAT_RAW, filters=FILTERS_RAW_4) as f:
+ self.assertEqual(f.read(), INPUT)
+ self.assertEqual(f.read(), b"")
+
+ def test_read_0(self):
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f:
+ self.assertEqual(f.read(0), b"")
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_ALONE)) as f:
+ self.assertEqual(f.read(0), b"")
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ),
+ format=lzma.FORMAT_XZ) as f:
+ self.assertEqual(f.read(0), b"")
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_ALONE),
+ format=lzma.FORMAT_ALONE) as f:
+ self.assertEqual(f.read(0), b"")
+
+ def test_read_10(self):
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f:
+ chunks = []
+ while True:
+ result = f.read(10)
+ if not result:
+ break
+ self.assertLessEqual(len(result), 10)
+ chunks.append(result)
+ self.assertEqual(b"".join(chunks), INPUT)
+
+ def test_read_multistream(self):
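+ # Unlike LZMADecompressor, LZMAFile reads straight through stream boundaries.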
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ * 5)) as f:
+ self.assertEqual(f.read(), INPUT * 5)
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ + COMPRESSED_ALONE)) as f:
+ self.assertEqual(f.read(), INPUT * 2)
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_RAW_3 * 4),
+ format=lzma.FORMAT_RAW, filters=FILTERS_RAW_3) as f:
+ self.assertEqual(f.read(), INPUT * 4)
+
+ def test_read_multistream_buffer_size_aligned(self):
+ # Test the case where a stream boundary coincides with the end
+ # of the raw read buffer.
+ saved_buffer_size = lzma._BUFFER_SIZE
+ lzma._BUFFER_SIZE = len(COMPRESSED_XZ)
+ try:
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ * 5)) as f:
+ self.assertEqual(f.read(), INPUT * 5)
+ finally:
+ lzma._BUFFER_SIZE = saved_buffer_size
+
+ def test_read_from_file(self):
+ with TempFile(TESTFN, COMPRESSED_XZ):
+ with LZMAFile(TESTFN) as f:
+ self.assertEqual(f.read(), INPUT)
+ self.assertEqual(f.read(), b"")
+
+ def test_read_incomplete(self):
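+ # A truncated stream should raise EOFError once the available data runs out.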
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ[:128])) as f:
+ self.assertRaises(EOFError, f.read)
+
+ def test_read_bad_args(self):
+ f = LZMAFile(fileobj=BytesIO(COMPRESSED_XZ))
+ f.close()
+ self.assertRaises(ValueError, f.read)
+ with LZMAFile(fileobj=BytesIO(), mode="w") as f:
+ self.assertRaises(ValueError, f.read)
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f:
+ self.assertRaises(TypeError, f.read, None)
+
+ def test_read1(self):
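+ # Each read1() call returns a chunk of decompressed data; an empty
+ # result signals end-of-file.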
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f:
+ blocks = []
+ while True:
+ result = f.read1()
+ if not result:
+ break
+ blocks.append(result)
+ self.assertEqual(b"".join(blocks), INPUT)
+ self.assertEqual(f.read1(), b"")
+
+ def test_read1_0(self):
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f:
+ self.assertEqual(f.read1(0), b"")
+
+ def test_read1_10(self):
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f:
+ blocks = []
+ while True:
+ result = f.read1(10)
+ if not result:
+ break
+ blocks.append(result)
+ self.assertEqual(b"".join(blocks), INPUT)
+ self.assertEqual(f.read1(), b"")
+
+ def test_read1_multistream(self):
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ * 5)) as f:
+ blocks = []
+ while True:
+ result = f.read1()
+ if not result:
+ break
+ blocks.append(result)
+ self.assertEqual(b"".join(blocks), INPUT * 5)
+ self.assertEqual(f.read1(), b"")
+
+ def test_read1_bad_args(self):
+ f = LZMAFile(fileobj=BytesIO(COMPRESSED_XZ))
+ f.close()
+ self.assertRaises(ValueError, f.read1)
+ with LZMAFile(fileobj=BytesIO(), mode="w") as f:
+ self.assertRaises(ValueError, f.read1)
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f:
+ self.assertRaises(TypeError, f.read1, None)
+
+ def test_peek(self):
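+ # peek() returns data without advancing the file position, so the
+ # subsequent read() must still return the full input.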
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f:
+ result = f.peek()
+ self.assertGreater(len(result), 0)
+ self.assertTrue(INPUT.startswith(result))
+ self.assertEqual(f.read(), INPUT)
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f:
+ result = f.peek(10)
+ self.assertGreater(len(result), 0)
+ self.assertTrue(INPUT.startswith(result))
+ self.assertEqual(f.read(), INPUT)
+
+ def test_peek_bad_args(self):
+ with LZMAFile(fileobj=BytesIO(), mode="w") as f:
+ self.assertRaises(ValueError, f.peek)
+
+ def test_iterator(self):
+ with BytesIO(INPUT) as f:
+ lines = f.readlines()
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f:
+ self.assertListEqual(list(iter(f)), lines)
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_ALONE)) as f:
+ self.assertListEqual(list(iter(f)), lines)
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ),
+ format=lzma.FORMAT_XZ) as f:
+ self.assertListEqual(list(iter(f)), lines)
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_ALONE),
+ format=lzma.FORMAT_ALONE) as f:
+ self.assertListEqual(list(iter(f)), lines)
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_RAW_2),
+ format=lzma.FORMAT_RAW, filters=FILTERS_RAW_2) as f:
+ self.assertListEqual(list(iter(f)), lines)
+
+ def test_readline(self):
+ with BytesIO(INPUT) as f:
+ lines = f.readlines()
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f:
+ for line in lines:
+ self.assertEqual(f.readline(), line)
+
+ def test_readlines(self):
+ with BytesIO(INPUT) as f:
+ lines = f.readlines()
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f:
+ self.assertListEqual(f.readlines(), lines)
+
+ def test_write(self):
+ with BytesIO() as dst:
+ with LZMAFile(fileobj=dst, mode="w") as f:
+ f.write(INPUT)
+ expected = lzma.compress(INPUT)
+ self.assertEqual(dst.getvalue(), expected)
+ with BytesIO() as dst:
+ with LZMAFile(fileobj=dst, mode="w", format=lzma.FORMAT_XZ) as f:
+ f.write(INPUT)
+ expected = lzma.compress(INPUT, format=lzma.FORMAT_XZ)
+ self.assertEqual(dst.getvalue(), expected)
+ with BytesIO() as dst:
+ with LZMAFile(fileobj=dst, mode="w", format=lzma.FORMAT_ALONE) as f:
+ f.write(INPUT)
+ expected = lzma.compress(INPUT, format=lzma.FORMAT_ALONE)
+ self.assertEqual(dst.getvalue(), expected)
+ with BytesIO() as dst:
+ with LZMAFile(fileobj=dst, mode="w", format=lzma.FORMAT_RAW,
+ filters=FILTERS_RAW_2) as f:
+ f.write(INPUT)
+ expected = lzma.compress(INPUT, format=lzma.FORMAT_RAW,
+ filters=FILTERS_RAW_2)
+ self.assertEqual(dst.getvalue(), expected)
+
+ def test_write_10(self):
+ with BytesIO() as dst:
+ with LZMAFile(fileobj=dst, mode="w") as f:
+ for start in range(0, len(INPUT), 10):
+ f.write(INPUT[start:start+10])
+ expected = lzma.compress(INPUT)
+ self.assertEqual(dst.getvalue(), expected)
+
+ def test_write_append(self):
+ part1 = INPUT[:1024]
+ part2 = INPUT[1024:1536]
+ part3 = INPUT[1536:]
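+ # Opening in append mode starts a new .xz stream, so the expected
+ # result is three independently compressed streams concatenated.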
+ expected = b"".join(lzma.compress(x) for x in (part1, part2, part3))
+ with BytesIO() as dst:
+ with LZMAFile(fileobj=dst, mode="w") as f:
+ f.write(part1)
+ with LZMAFile(fileobj=dst, mode="a") as f:
+ f.write(part2)
+ with LZMAFile(fileobj=dst, mode="a") as f:
+ f.write(part3)
+ self.assertEqual(dst.getvalue(), expected)
+
+ def test_write_to_file(self):
+ try:
+ with LZMAFile(TESTFN, "w") as f:
+ f.write(INPUT)
+ expected = lzma.compress(INPUT)
+ with open(TESTFN, "rb") as f:
+ self.assertEqual(f.read(), expected)
+ finally:
+ unlink(TESTFN)
+
+ def test_write_append_to_file(self):
+ part1 = INPUT[:1024]
+ part2 = INPUT[1024:1536]
+ part3 = INPUT[1536:]
+ expected = b"".join(lzma.compress(x) for x in (part1, part2, part3))
+ try:
+ with LZMAFile(TESTFN, "w") as f:
+ f.write(part1)
+ with LZMAFile(TESTFN, "a") as f:
+ f.write(part2)
+ with LZMAFile(TESTFN, "a") as f:
+ f.write(part3)
+ with open(TESTFN, "rb") as f:
+ self.assertEqual(f.read(), expected)
+ finally:
+ unlink(TESTFN)
+
+ def test_write_bad_args(self):
+ f = LZMAFile(fileobj=BytesIO(), mode="w")
+ f.close()
+ self.assertRaises(ValueError, f.write, b"foo")
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ), mode="r") as f:
+ self.assertRaises(ValueError, f.write, b"bar")
+ with LZMAFile(fileobj=BytesIO(), mode="w") as f:
+ self.assertRaises(TypeError, f.write, None)
+ self.assertRaises(TypeError, f.write, "text")
+ self.assertRaises(TypeError, f.write, 789)
+
+ def test_writelines(self):
+ with BytesIO(INPUT) as f:
+ lines = f.readlines()
+ with BytesIO() as dst:
+ with LZMAFile(fileobj=dst, mode="w") as f:
+ f.writelines(lines)
+ expected = lzma.compress(INPUT)
+ self.assertEqual(dst.getvalue(), expected)
+
+ def test_seek_forward(self):
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f:
+ f.seek(555)
+ self.assertEqual(f.read(), INPUT[555:])
+
+ def test_seek_forward_across_streams(self):
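+ # Seeking forward is emulated by reading and discarding decompressed
+ # data, so it works across stream boundaries too.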
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ * 2)) as f:
+ f.seek(len(INPUT) + 123)
+ self.assertEqual(f.read(), INPUT[123:])
+
+ def test_seek_forward_relative_to_current(self):
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f:
+ f.read(100)
+ f.seek(1236, 1)
+ self.assertEqual(f.read(), INPUT[1336:])
+
+ def test_seek_forward_relative_to_end(self):
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f:
+ f.seek(-555, 2)
+ self.assertEqual(f.read(), INPUT[-555:])
+
+ def test_seek_backward(self):
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f:
+ f.read(1001)
+ f.seek(211)
+ self.assertEqual(f.read(), INPUT[211:])
+
+ def test_seek_backward_across_streams(self):
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ * 2)) as f:
+ f.read(len(INPUT) + 333)
+ f.seek(737)
+ self.assertEqual(f.read(), INPUT[737:] + INPUT)
+
+ def test_seek_backward_relative_to_end(self):
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f:
+ f.seek(-150, 2)
+ self.assertEqual(f.read(), INPUT[-150:])
+
+ def test_seek_past_end(self):
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f:
+ f.seek(len(INPUT) + 9001)
+ self.assertEqual(f.tell(), len(INPUT))
+ self.assertEqual(f.read(), b"")
+
+ def test_seek_past_start(self):
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f:
+ f.seek(-88)
+ self.assertEqual(f.tell(), 0)
+ self.assertEqual(f.read(), INPUT)
+
+ def test_seek_bad_args(self):
+ f = LZMAFile(fileobj=BytesIO(COMPRESSED_XZ))
+ f.close()
+ self.assertRaises(ValueError, f.seek, 0)
+ with LZMAFile(fileobj=BytesIO(), mode="w") as f:
+ self.assertRaises(ValueError, f.seek, 0)
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f:
+ self.assertRaises(ValueError, f.seek, 0, 3)
+ self.assertRaises(ValueError, f.seek, 9, ())
+ self.assertRaises(TypeError, f.seek, None)
+ self.assertRaises(TypeError, f.seek, b"derp")
+
+ def test_tell(self):
+ with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f:
+ pos = 0
+ while True:
+ self.assertEqual(f.tell(), pos)
+ result = f.read(183)
+ if not result:
+ break
+ pos += len(result)
+ self.assertEqual(f.tell(), len(INPUT))
+ with LZMAFile(fileobj=BytesIO(), mode="w") as f:
+ for pos in range(0, len(INPUT), 144):
+ self.assertEqual(f.tell(), pos)
+ f.write(INPUT[pos:pos+144])
+ self.assertEqual(f.tell(), len(INPUT))
+
+ def test_tell_bad_args(self):
+ f = LZMAFile(fileobj=BytesIO(COMPRESSED_XZ))
+ f.close()
+ self.assertRaises(ValueError, f.tell)
+
+
+class MiscellaneousTestCase(unittest.TestCase):
+
+ def test_is_check_supported(self):
+ # CHECK_NONE and CHECK_CRC32 should always be supported,
+ # regardless of the options liblzma was compiled with.
+ self.assertTrue(lzma.check_is_supported(lzma.CHECK_NONE))
+ self.assertTrue(lzma.check_is_supported(lzma.CHECK_CRC32))
+
+ # The .xz format spec cannot store check IDs above this value.
+ self.assertFalse(lzma.check_is_supported(lzma.CHECK_ID_MAX + 1))
+
+ # This value should not be a valid check ID.
+ self.assertFalse(lzma.check_is_supported(lzma.CHECK_UNKNOWN))
+
+
+# Test data:
+
+INPUT = b"""
+LAERTES
+
+ O, fear me not.
+ I stay too long: but here my father comes.
+
+ Enter POLONIUS
+
+ A double blessing is a double grace,
+ Occasion smiles upon a second leave.
+
+LORD POLONIUS
+
+ Yet here, Laertes! aboard, aboard, for shame!
+ The wind sits in the shoulder of your sail,
+ And you are stay'd for. There; my blessing with thee!
+ And these few precepts in thy memory
+ See thou character. Give thy thoughts no tongue,
+ Nor any unproportioned thought his act.
+ Be thou familiar, but by no means vulgar.
+ Those friends thou hast, and their adoption tried,
+ Grapple them to thy soul with hoops of steel;
+ But do not dull thy palm with entertainment
+ Of each new-hatch'd, unfledged comrade. Beware
+ Of entrance to a quarrel, but being in,
+ Bear't that the opposed may beware of thee.
+ Give every man thy ear, but few thy voice;
+ Take each man's censure, but reserve thy judgment.
+ Costly thy habit as thy purse can buy,
+ But not express'd in fancy; rich, not gaudy;
+ For the apparel oft proclaims the man,
+ And they in France of the best rank and station
+ Are of a most select and generous chief in that.
+ Neither a borrower nor a lender be;
+ For loan oft loses both itself and friend,
+ And borrowing dulls the edge of husbandry.
+ This above all: to thine ownself be true,
+ And it must follow, as the night the day,
+ Thou canst not then be false to any man.
+ Farewell: my blessing season this in thee!
+
+LAERTES
+
+ Most humbly do I take my leave, my lord.
+
+LORD POLONIUS
+
+ The time invites you; go; your servants tend.
+
+LAERTES
+
+ Farewell, Ophelia; and remember well
+ What I have said to you.
+
+OPHELIA
+
+ 'Tis in my memory lock'd,
+ And you yourself shall keep the key of it.
+
+LAERTES
+
+ Farewell.
+"""
+
+COMPRESSED_XZ = (
+ b"\xfd7zXZ\x00\x00\x04\xe6\xd6\xb4F\x02\x00!\x01\x16\x00\x00\x00t/\xe5\xa3"
+ b"\xe0\x07\x80\x03\xdf]\x00\x05\x14\x07bX\x19\xcd\xddn\x98\x15\xe4\xb4\x9d"
+ b"o\x1d\xc4\xe5\n\x03\xcc2h\xc7\\\x86\xff\xf8\xe2\xfc\xe7\xd9\xfe6\xb8("
+ b"\xa8wd\xc2\"u.n\x1e\xc3\xf2\x8e\x8d\x8f\x02\x17/\xa6=\xf0\xa2\xdf/M\x89"
+ b"\xbe\xde\xa7\x1cz\x18-]\xd5\xef\x13\x8frZ\x15\x80\x8c\xf8\x8do\xfa\x12"
+ b"\x9b#z/\xef\xf0\xfaF\x01\x82\xa3M\x8e\xa1t\xca6 BF$\xe5Q\xa4\x98\xee\xde"
+ b"l\xe8\x7f\xf0\x9d,bn\x0b\x13\xd4\xa8\x81\xe4N\xc8\x86\x153\xf5x2\xa2O"
+ b"\x13@Q\xa1\x00/\xa5\xd0O\x97\xdco\xae\xf7z\xc4\xcdS\xb6t<\x16\xf2\x9cI#"
+ b"\x89ud\xc66Y\xd9\xee\xe6\xce\x12]\xe5\xf0\xaa\x96-Pe\xade:\x04\t\x1b\xf7"
+ b"\xdb7\n\x86\x1fp\xc8J\xba\xf4\xf0V\xa9\xdc\xf0\x02%G\xf9\xdf=?\x15\x1b"
+ b"\xe1(\xce\x82=\xd6I\xac3\x12\x0cR\xb7\xae\r\xb1i\x03\x95\x01\xbd\xbe\xfa"
+ b"\x02s\x01P\x9d\x96X\xb12j\xc8L\xa8\x84b\xf6\xc3\xd4c-H\x93oJl\xd0iQ\xe4k"
+ b"\x84\x0b\xc1\xb7\xbc\xb1\x17\x88\xb1\xca?@\xf6\x07\xea\xe6x\xf1H12P\x0f"
+ b"\x8a\xc9\xeauw\xe3\xbe\xaai\xa9W\xd0\x80\xcd#cb5\x99\xd8]\xa9d\x0c\xbd"
+ b"\xa2\xdcWl\xedUG\xbf\x89yF\xf77\x81v\xbd5\x98\xbeh8\x18W\x08\xf0\x1b\x99"
+ b"5:\x1a?rD\x96\xa1\x04\x0f\xae\xba\x85\xeb\x9d5@\xf5\x83\xd37\x83\x8ac"
+ b"\x06\xd4\x97i\xcdt\x16S\x82k\xf6K\x01vy\x88\x91\x9b6T\xdae\r\xfd]:k\xbal"
+ b"\xa9\xbba\xc34\xf9r\xeb}r\xdb\xc7\xdb*\x8f\x03z\xdc8h\xcc\xc9\xd3\xbcl"
+ b"\xa5-\xcb\xeaK\xa2\xc5\x15\xc0\xe3\xc1\x86Z\xfb\xebL\xe13\xcf\x9c\xe3"
+ b"\x1d\xc9\xed\xc2\x06\xcc\xce!\x92\xe5\xfe\x9c^\xa59w \x9bP\xa3PK\x08d"
+ b"\xf9\xe2Z}\xa7\xbf\xed\xeb%$\x0c\x82\xb8/\xb0\x01\xa9&,\xf7qh{Q\x96)\xf2"
+ b"q\x96\xc3\x80\xb4\x12\xb0\xba\xe6o\xf4!\xb4[\xd4\x8aw\x10\xf7t\x0c\xb3"
+ b"\xd9\xd5\xc3`^\x81\x11??\\\xa4\x99\x85R\xd4\x8e\x83\xc9\x1eX\xbfa\xf1"
+ b"\xac\xb0\xea\xea\xd7\xd0\xab\x18\xe2\xf2\xed\xe1\xb7\xc9\x18\xcbS\xe4>"
+ b"\xc9\x95H\xe8\xcb\t\r%\xeb\xc7$.o\xf1\xf3R\x17\x1db\xbb\xd8U\xa5^\xccS"
+ b"\x16\x01\x87\xf3/\x93\xd1\xf0v\xc0r\xd7\xcc\xa2Gkz\xca\x80\x0e\xfd\xd0"
+ b"\x8b\xbb\xd2Ix\xb3\x1ey\xca-0\xe3z^\xd6\xd6\x8f_\xf1\x9dP\x9fi\xa7\xd1"
+ b"\xe8\x90\x84\xdc\xbf\xcdky\x8e\xdc\x81\x7f\xa3\xb2+\xbf\x04\xef\xd8\\"
+ b"\xc4\xdf\xe1\xb0\x01\xe9\x93\xe3Y\xf1\x1dY\xe8h\x81\xcf\xf1w\xcc\xb4\xef"
+ b" \x8b|\x04\xea\x83ej\xbe\x1f\xd4z\x9c`\xd3\x1a\x92A\x06\xe5\x8f\xa9\x13"
+ b"\t\x9e=\xfa\x1c\xe5_\x9f%v\x1bo\x11ZO\xd8\xf4\t\xddM\x16-\x04\xfc\x18<\""
+ b"CM\xddg~b\xf6\xef\x8e\x0c\xd0\xde|\xa0'\x8a\x0c\xd6x\xae!J\xa6F\x88\x15u"
+ b"\x008\x17\xbc7y\xb3\xd8u\xac_\x85\x8d\xe7\xc1@\x9c\xecqc\xa3#\xad\xf1"
+ b"\x935\xb5)_\r\xec3]\x0fo]5\xd0my\x07\x9b\xee\x81\xb5\x0f\xcfK+\x00\xc0"
+ b"\xe4b\x10\xe4\x0c\x1a \x9b\xe0\x97t\xf6\xa1\x9e\x850\xba\x0c\x9a\x8d\xc8"
+ b"\x8f\x07\xd7\xae\xc8\xf9+i\xdc\xb9k\xb0>f\x19\xb8\r\xa8\xf8\x1f$\xa5{p"
+ b"\xc6\x880\xce\xdb\xcf\xca_\x86\xac\x88h6\x8bZ%'\xd0\n\xbf\x0f\x9c\"\xba"
+ b"\xe5\x86\x9f\x0f7X=mNX[\xcc\x19FU\xc9\x860\xbc\x90a+* \xae_$\x03\x1e\xd3"
+ b"\xcd_\xa0\x9c\xde\xaf46q\xa5\xc9\x92\xd7\xca\xe3`\x9d\x85}\xb4\xff\xb3"
+ b"\x83\xfb\xb6\xca\xae`\x0bw\x7f\xfc\xd8\xacVe\x19\xc8\x17\x0bZ\xad\x88"
+ b"\xeb#\x97\x03\x13\xb1d\x0f{\x0c\x04w\x07\r\x97\xbd\xd6\xc1\xc3B:\x95\x08"
+ b"^\x10V\xaeaH\x02\xd9\xe3\n\\\x01X\xf6\x9c\x8a\x06u#%\xbe*\xa1\x18v\x85"
+ b"\xec!\t4\x00\x00\x00\x00Vj?uLU\xf3\xa6\x00\x01\xfb\x07\x81\x0f\x00\x00tw"
+ b"\x99P\xb1\xc4g\xfb\x02\x00\x00\x00\x00\x04YZ"
+)
+
+COMPRESSED_ALONE = (
+ b"]\x00\x00\x80\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x05\x14\x07bX\x19"
+ b"\xcd\xddn\x98\x15\xe4\xb4\x9do\x1d\xc4\xe5\n\x03\xcc2h\xc7\\\x86\xff\xf8"
+ b"\xe2\xfc\xe7\xd9\xfe6\xb8(\xa8wd\xc2\"u.n\x1e\xc3\xf2\x8e\x8d\x8f\x02"
+ b"\x17/\xa6=\xf0\xa2\xdf/M\x89\xbe\xde\xa7\x1cz\x18-]\xd5\xef\x13\x8frZ"
+ b"\x15\x80\x8c\xf8\x8do\xfa\x12\x9b#z/\xef\xf0\xfaF\x01\x82\xa3M\x8e\xa1t"
+ b"\xca6 BF$\xe5Q\xa4\x98\xee\xdel\xe8\x7f\xf0\x9d,bn\x0b\x13\xd4\xa8\x81"
+ b"\xe4N\xc8\x86\x153\xf5x2\xa2O\x13@Q\xa1\x00/\xa5\xd0O\x97\xdco\xae\xf7z"
+ b"\xc4\xcdS\xb6t<\x16\xf2\x9cI#\x89ud\xc66Y\xd9\xee\xe6\xce\x12]\xe5\xf0"
+ b"\xaa\x96-Pe\xade:\x04\t\x1b\xf7\xdb7\n\x86\x1fp\xc8J\xba\xf4\xf0V\xa9"
+ b"\xdc\xf0\x02%G\xf9\xdf=?\x15\x1b\xe1(\xce\x82=\xd6I\xac3\x12\x0cR\xb7"
+ b"\xae\r\xb1i\x03\x95\x01\xbd\xbe\xfa\x02s\x01P\x9d\x96X\xb12j\xc8L\xa8"
+ b"\x84b\xf8\x1epl\xeajr\xd1=\t\x03\xdd\x13\x1b3!E\xf9vV\xdaF\xf3\xd7\xb4"
+ b"\x0c\xa9P~\xec\xdeE\xe37\xf6\x1d\xc6\xbb\xddc%\xb6\x0fI\x07\xf0;\xaf\xe7"
+ b"\xa0\x8b\xa7Z\x99(\xe9\xe2\xf0o\x18>`\xe1\xaa\xa8\xd9\xa1\xb2}\xe7\x8d"
+ b"\x834T\xb6\xef\xc1\xde\xe3\x98\xbcD\x03MA@\xd8\xed\xdc\xc8\x93\x03\x1a"
+ b"\x93\x0b\x7f\x94\x12\x0b\x02Sa\x18\xc9\xc5\x9bTJE}\xf6\xc8g\x17#ZV\x01"
+ b"\xc9\x9dc\x83\x0e>0\x16\x90S\xb8/\x03y_\x18\xfa(\xd7\x0br\xa2\xb0\xba?"
+ b"\x8c\xe6\x83@\x84\xdf\x02:\xc5z\x9e\xa6\x84\xc9\xf5BeyX\x83\x1a\xf1 :\t"
+ b"\xf7\x19\xfexD\\&G\xf3\x85Y\xa2J\xf9\x0bv{\x89\xf6\xe7)A\xaf\x04o\x00"
+ b"\x075\xd3\xe0\x7f\x97\x98F\x0f?v\x93\xedVtTf\xb5\x97\x83\xed\x19\xd7\x1a"
+ b"'k\xd7\xd9\xc5\\Y\xd1\xdc\x07\x15|w\xbc\xacd\x87\x08d\xec\xa7\xf6\x82"
+ b"\xfc\xb3\x93\xeb\xb9 \x8d\xbc ,\xb3X\xb0\xd2s\xd7\xd1\xffv\x05\xdf}\xa2"
+ b"\x96\xfb%\n\xdf\xa2\x7f\x08.\xa16\n\xe0\x19\x93\x7fh\n\x1c\x8c\x0f \x11"
+ b"\xc6Bl\x95\x19U}\xe4s\xb5\x10H\xea\x86pB\xe88\x95\xbe\x8cZ\xdb\xe4\x94A"
+ b"\x92\xb9;z\xaa\xa7{\x1c5!\xc0\xaf\xc1A\xf9\xda\xf0$\xb0\x02qg\xc8\xc7/|"
+ b"\xafr\x99^\x91\x88\xbf\x03\xd9=\xd7n\xda6{>8\n\xc7:\xa9'\xba.\x0b\xe2"
+ b"\xb5\x1d\x0e\n\x9a\x8e\x06\x8f:\xdd\x82'[\xc3\"wD$\xa7w\xecq\x8c,1\x93"
+ b"\xd0,\xae2w\x93\x12$Jd\x19mg\x02\x93\x9cA\x95\x9d&\xca8i\x9c\xb0;\xe7NQ"
+ b"\x1frh\x8beL;\xb0m\xee\x07Q\x9b\xc6\xd8\x03\xb5\xdeN\xd4\xfe\x98\xd0\xdc"
+ b"\x1a[\x04\xde\x1a\xf6\x91j\xf8EOli\x8eB^\x1d\x82\x07\xb2\xb5R]\xb7\xd7"
+ b"\xe9\xa6\xc3.\xfb\xf0-\xb4e\x9b\xde\x03\x88\xc6\xc1iN\x0e\x84wbQ\xdf~"
+ b"\xe9\xa4\x884\x96kM\xbc)T\xf3\x89\x97\x0f\x143\xe7)\xa0\xb3B\x00\xa8\xaf"
+ b"\x82^\xcb\xc7..\xdb\xc7\t\x9dH\xee5\xe9#\xe6NV\x94\xcb$Kk\xe3\x7f\r\xe3t"
+ b"\x12\xcf'\xefR\x8b\xf42\xcf-LH\xac\xe5\x1f0~?SO\xeb\xc1E\x1a\x1c]\xf2"
+ b"\xc4<\x11\x02\x10Z0a*?\xe4r\xff\xfb\xff\xf6\x14nG\xead^\xd6\xef8\xb6uEI"
+ b"\x99\nV\xe2\xb3\x95\x8e\x83\xf6i!\xb5&1F\xb1DP\xf4 SO3D!w\x99_G\x7f+\x90"
+ b".\xab\xbb]\x91>\xc9#h;\x0f5J\x91K\xf4^-[\x9e\x8a\\\x94\xca\xaf\xf6\x19"
+ b"\xd4\xa1\x9b\xc4\xb8p\xa1\xae\x15\xe9r\x84\xe0\xcar.l []\x8b\xaf+0\xf2g"
+ b"\x01aKY\xdfI\xcf,\n\xe8\xf0\xe7V\x80_#\xb2\xf2\xa9\x06\x8c>w\xe2W,\xf4"
+ b"\x8c\r\xf963\xf5J\xcc2\x05=kT\xeaUti\xe5_\xce\x1b\xfa\x8dl\x02h\xef\xa8"
+ b"\xfbf\x7f\xff\xf0\x19\xeax"
+)
+
+FILTERS_RAW_1 = [{"id": lzma.FILTER_LZMA2, "preset": 3}]
+COMPRESSED_RAW_1 = (
+ b"\xe0\x07\x80\x03\xfd]\x00\x05\x14\x07bX\x19\xcd\xddn\x96cyq\xa1\xdd\xee"
+ b"\xf8\xfam\xe3'\x88\xd3\xff\xe4\x9e \xceQ\x91\xa4\x14I\xf6\xb9\x9dVL8\x15"
+ b"_\x0e\x12\xc3\xeb\xbc\xa5\xcd\nW\x1d$=R;\x1d\xf8k8\t\xb1{\xd4\xc5+\x9d"
+ b"\x87c\xe5\xef\x98\xb4\xd7S3\xcd\xcc\xd2\xed\xa4\x0em\xe5\xf4\xdd\xd0b"
+ b"\xbe4*\xaa\x0b\xc5\x08\x10\x85+\x81.\x17\xaf9\xc9b\xeaZrA\xe20\x7fs\"r"
+ b"\xdaG\x81\xde\x90cu\xa5\xdb\xa9.A\x08l\xb0<\xf6\x03\xddOi\xd0\xc5\xb4"
+ b"\xec\xecg4t6\"\xa6\xb8o\xb5?\x18^}\xb6}\x03[:\xeb\x03\xa9\n[\x89l\x19g"
+ b"\x16\xc82\xed\x0b\xfb\x86n\xa2\x857@\x93\xcd6T\xc3u\xb0\t\xf9\x1b\x918"
+ b"\xfc[\x1b\x1e4\xb3\x14\x06PCV\xa8\"\xf5\x81x~\xe9\xb5N\x9cK\x9f\xc6\xc3%"
+ b"\xc8k:{6\xe7\xf7\xbd\x05\x02\xb4\xc4\xc3\xd3\xfd\xc3\xa8\\\xfc@\xb1F_"
+ b"\xc8\x90\xd9sU\x98\xad8\x05\x07\xde7J\x8bM\xd0\xb3;X\xec\x87\xef\xae\xb3"
+ b"eO,\xb1z,d\x11y\xeejlB\x02\x1d\xf28\x1f#\x896\xce\x0b\xf0\xf5\xa9PK\x0f"
+ b"\xb3\x13P\xd8\x88\xd2\xa1\x08\x04C?\xdb\x94_\x9a\"\xe9\xe3e\x1d\xde\x9b"
+ b"\xa1\xe8>H\x98\x10;\xc5\x03#\xb5\x9d4\x01\xe7\xc5\xba%v\xa49\x97A\xe0\""
+ b"\x8c\xc22\xe3i\xc1\x9d\xab3\xdf\xbe\xfdDm7\x1b\x9d\xab\xb5\x15o:J\x92"
+ b"\xdb\x816\x17\xc2O\x99\x1b\x0e\x8d\xf3\tQ\xed\x8e\x95S/\x16M\xb2S\x04"
+ b"\x0f\xc3J\xc6\xc7\xe4\xcb\xc5\xf4\xe7d\x14\xe4=^B\xfb\xd3E\xd3\x1e\xcd"
+ b"\x91\xa5\xd0G\x8f.\xf6\xf9\x0bb&\xd9\x9f\xc2\xfdj\xa2\x9e\xc4\\\x0e\x1dC"
+ b"v\xe8\xd2\x8a?^H\xec\xae\xeb>\xfe\xb8\xab\xd4IqY\x8c\xd4K7\x11\xf4D\xd0W"
+ b"\xa5\xbe\xeaO\xbf\xd0\x04\xfdl\x10\xae5\xd4U\x19\x06\xf9{\xaa\xe0\x81"
+ b"\x0f\xcf\xa3k{\x95\xbd\x19\xa2\xf8\xe4\xa3\x08O*\xf1\xf1B-\xc7(\x0eR\xfd"
+ b"@E\x9f\xd3\x1e:\xfdV\xb7\x04Y\x94\xeb]\x83\xc4\xa5\xd7\xc0gX\x98\xcf\x0f"
+ b"\xcd3\x00]n\x17\xec\xbd\xa3Y\x86\xc5\xf3u\xf6*\xbdT\xedA$A\xd9A\xe7\x98"
+ b"\xef\x14\x02\x9a\xfdiw\xec\xa0\x87\x11\xd9%\xc5\xeb\x8a=\xae\xc0\xc4\xc6"
+ b"D\x80\x8f\xa8\xd1\xbbq\xb2\xc0\xa0\xf5Cqp\xeeL\xe3\xe5\xdc \x84\"\xe9"
+ b"\x80t\x83\x05\xba\xf1\xc5~\x93\xc9\xf0\x01c\xceix\x9d\xed\xc5)l\x16)\xd1"
+ b"\x03@l\x04\x7f\x87\xa5yn\x1b\x01D\xaa:\xd2\x96\xb4\xb3?\xb0\xf9\xce\x07"
+ b"\xeb\x81\x00\xe4\xc3\xf5%_\xae\xd4\xf9\xeb\xe2\rh\xb2#\xd67Q\x16D\x82hn"
+ b"\xd1\xa3_?q\xf0\xe2\xac\xf317\x9e\xd0_\x83|\xf1\xca\xb7\x95S\xabW\x12"
+ b"\xff\xddt\xf69L\x01\xf2|\xdaW\xda\xees\x98L\x18\xb8_\xe8$\x82\xea\xd6"
+ b"\xd1F\xd4\x0b\xcdk\x01vf\x88h\xc3\xae\xb91\xc7Q\x9f\xa5G\xd9\xcc\x1f\xe3"
+ b"5\xb1\xdcy\x7fI\x8bcw\x8e\x10rIp\x02:\x19p_\xc8v\xcea\"\xc1\xd9\x91\x03"
+ b"\xbfe\xbe\xa6\xb3\xa8\x14\x18\xc3\xabH*m}\xc2\xc1\x9a}>l%\xce\x84\x99"
+ b"\xb3d\xaf\xd3\x82\x15\xdf\xc1\xfc5fOg\x9b\xfc\x8e^&\t@\xce\x9f\x06J\xb8"
+ b"\xb5\x86\x1d\xda{\x9f\xae\xb0\xff\x02\x81r\x92z\x8cM\xb7ho\xc9^\x9c\xb6"
+ b"\x9c\xae\xd1\xc9\xf4\xdfU7\xd6\\!\xea\x0b\x94k\xb9Ud~\x98\xe7\x86\x8az"
+ b"\x10;\xe3\x1d\xe5PG\xf8\xa4\x12\x05w\x98^\xc4\xb1\xbb\xfb\xcf\xe0\x7f"
+ b"\x033Sf\x0c \xb1\xf6@\x94\xe5\xa3\xb2\xa7\x10\x9a\xc0\x14\xc3s\xb5xRD"
+ b"\xf4`W\xd9\xe5\xd3\xcf\x91\rTZ-X\xbe\xbf\xb5\xe2\xee|\x1a\xbf\xfb\x08"
+ b"\x91\xe1\xfc\x9a\x18\xa3\x8b\xd6^\x89\xf5[\xef\x87\xd1\x06\x1c7\xd6\xa2"
+ b"\t\tQ5/@S\xc05\xd2VhAK\x03VC\r\x9b\x93\xd6M\xf1xO\xaaO\xed\xb9<\x0c\xdae"
+ b"*\xd0\x07Hk6\x9fG+\xa1)\xcd\x9cl\x87\xdb\xe1\xe7\xefK}\x875\xab\xa0\x19u"
+ b"\xf6*F\xb32\x00\x00\x00"
+)
+
+FILTERS_RAW_2 = [{"id": lzma.FILTER_DELTA, "dist": 2},
+ {"id": lzma.FILTER_LZMA2,
+ "preset": lzma.PRESET_DEFAULT | lzma.PRESET_EXTREME}]
+COMPRESSED_RAW_2 = (
+ b"\xe0\x07\x80\x05\x91]\x00\x05\x14\x06-\xd4\xa8d?\xef\xbe\xafH\xee\x042"
+ b"\xcb.\xb5g\x8f\xfb\x14\xab\xa5\x9f\x025z\xa4\xdd\xd8\t[}W\xf8\x0c\x1dmH"
+ b"\xfa\x05\xfcg\xba\xe5\x01Q\x0b\x83R\xb6A\x885\xc0\xba\xee\n\x1cv~\xde:o"
+ b"\x06:J\xa7\x11Cc\xea\xf7\xe5*o\xf7\x83\\l\xbdE\x19\x1f\r\xa8\x10\xb42"
+ b"\x0caU{\xd7\xb8w\xdc\xbe\x1b\xfc8\xb4\xcc\xd38\\\xf6\x13\xf6\xe7\x98\xfa"
+ b"\xc7[\x17_9\x86%\xa8\xf8\xaa\xb8\x8dfs#\x1e=\xed<\x92\x10\\t\xff\x86\xfb"
+ b"=\x9e7\x18\x1dft\\\xb5\x01\x95Q\xc5\x19\xb38\xe0\xd4\xaa\x07\xc3\x7f\xd8"
+ b"\xa2\x00>-\xd3\x8e\xa1#\xfa\x83ArAm\xdbJ~\x93\xa3B\x82\xe0\xc7\xcc(\x08`"
+ b"WK\xad\x1b\x94kaj\x04 \xde\xfc\xe1\xed\xb0\x82\x91\xefS\x84%\x86\xfbi"
+ b"\x99X\xf1B\xe7\x90;E\xfde\x98\xda\xca\xd6T\xb4bg\xa4\n\x9aj\xd1\x83\x9e]"
+ b"\"\x7fM\xb5\x0fr\xd2\\\xa5j~P\x10GH\xbfN*Z\x10.\x81\tpE\x8a\x08\xbe1\xbd"
+ b"\xcd\xa9\xe1\x8d\x1f\x04\xf9\x0eH\xb9\xae\xd6\xc3\xc1\xa5\xa9\x95P\xdc~"
+ b"\xff\x01\x930\xa9\x04\xf6\x03\xfe\xb5JK\xc3]\xdd9\xb1\xd3\xd7F\xf5\xd1"
+ b"\x1e\xa0\x1c_\xed[\x0c\xae\xd4\x8b\x946\xeb\xbf\xbb\xe3$kS{\xb5\x80,f:Sj"
+ b"\x0f\x08z\x1c\xf5\xe8\xe6\xae\x98\xb0Q~r\x0f\xb0\x05?\xb6\x90\x19\x02&"
+ b"\xcb\x80\t\xc4\xea\x9c|x\xce\x10\x9c\xc5|\xcbdhh+\x0c'\xc5\x81\xc33\xb5"
+ b"\x14q\xd6\xc5\xe3`Z#\xdc\x8a\xab\xdd\xea\x08\xc2I\xe7\x02l{\xec\x196\x06"
+ b"\x91\x8d\xdc\xd5\xb3x\xe1hz%\xd1\xf8\xa5\xdd\x98!\x8c\x1c\xc1\x17RUa\xbb"
+ b"\x95\x0f\xe4X\xea1\x0c\xf1=R\xbe\xc60\xe3\xa4\x9a\x90bd\x97$]B\x01\xdd"
+ b"\x1f\xe3h2c\x1e\xa0L`4\xc6x\xa3Z\x8a\r\x14]T^\xd8\x89\x1b\x92\r;\xedY"
+ b"\x0c\xef\x8d9z\xf3o\xb6)f\xa9]$n\rp\x93\xd0\x10\xa4\x08\xb8\xb2\x8b\xb6"
+ b"\x8f\x80\xae;\xdcQ\xf1\xfa\x9a\x06\x8e\xa5\x0e\x8cK\x9c @\xaa:UcX\n!\xc6"
+ b"\x02\x12\xcb\x1b\"=\x16.\x1f\x176\xf2g=\xe1Wn\xe9\xe1\xd4\xf1O\xad\x15"
+ b"\x86\xe9\xa3T\xaf\xa9\xd7D\xb5\xd1W3pnt\x11\xc7VOj\xb7M\xc4i\xa1\xf1$3"
+ b"\xbb\xdc\x8af\xb0\xc5Y\r\xd1\xfb\xf2\xe7K\xe6\xc5hwO\xfe\x8c2^&\x07\xd5"
+ b"\x1fV\x19\xfd\r\x14\xd2i=yZ\xe6o\xaf\xc6\xb6\x92\x9d\xc4\r\xb3\xafw\xac%"
+ b"\xcfc\x1a\xf1`]\xf2\x1a\x9e\x808\xedm\xedQ\xb2\xfe\xe4h`[q\xae\xe0\x0f"
+ b"\xba0g\xb6\"N\xc3\xfb\xcfR\x11\xc5\x18)(\xc40\\\xa3\x02\xd9G!\xce\x1b"
+ b"\xc1\x96x\xb5\xc8z\x1f\x01\xb4\xaf\xde\xc2\xcd\x07\xe7H\xb3y\xa8M\n\\A\t"
+ b"ar\xddM\x8b\x9a\xea\x84\x9b!\xf1\x8d\xb1\xf1~\x1e\r\xa5H\xba\xf1\x84o"
+ b"\xda\x87\x01h\xe9\xa2\xbe\xbeqN\x9d\x84\x0b!WG\xda\xa1\xa5A\xb7\xc7`j"
+ b"\x15\xf2\xe9\xdd?\x015B\xd2~E\x06\x11\xe0\x91!\x05^\x80\xdd\xa8y\x15}"
+ b"\xa1)\xb1)\x81\x18\xf4\xf4\xf8\xc0\xefD\xe3\xdb2f\x1e\x12\xabu\xc9\x97"
+ b"\xcd\x1e\xa7\x0c\x02x4_6\x03\xc4$t\xf39\x94\x1d=\xcb\xbfv\\\xf5\xa3\x1d"
+ b"\x9d8jk\x95\x13)ff\xf9n\xc4\xa9\xe3\x01\xb8\xda\xfb\xab\xdfM\x99\xfb\x05"
+ b"\xe0\xe9\xb0I\xf4E\xab\xe2\x15\xa3\x035\xe7\xdeT\xee\x82p\xb4\x88\xd3"
+ b"\x893\x9c/\xc0\xd6\x8fou;\xf6\x95PR\xa9\xb2\xc1\xefFj\xe2\xa7$\xf7h\xf1"
+ b"\xdfK(\xc9c\xba7\xe8\xe3)\xdd\xb2,\x83\xfb\x84\x18.y\x18Qi\x88\xf8`h-"
+ b"\xef\xd5\xed\x8c\t\xd8\xc3^\x0f\x00\xb7\xd0[!\xafM\x9b\xd7.\x07\xd8\xfb"
+ b"\xd9\xe2-S+\xaa8,\xa0\x03\x1b \xea\xa8\x00\xc3\xab~\xd0$e\xa5\x7f\xf7"
+ b"\x95P]\x12\x19i\xd9\x7fo\x0c\xd8g^\rE\xa5\x80\x18\xc5\x01\x80\xaek`\xff~"
+ b"\xb6y\xe7+\xe5\x11^D\xa7\x85\x18\"!\xd6\xd2\xa7\xf4\x1eT\xdb\x02\xe15"
+ b"\x02Y\xbc\x174Z\xe7\x9cH\x1c\xbf\x0f\xc6\xe9f]\xcf\x8cx\xbc\xe5\x15\x94"
+ b"\xfc3\xbc\xa7TUH\xf1\x84\x1b\xf7\xa9y\xc07\x84\xf8X\xd8\xef\xfc \x1c\xd8"
+ b"( /\xf2\xb7\xec\xc1\\\x8c\xf6\x95\xa1\x03J\x83vP8\xe1\xe3\xbb~\xc24kA"
+ b"\x98y\xa1\xf2P\xe9\x9d\xc9J\xf8N\x99\xb4\xceaO\xde\x16\x1e\xc2\x19\xa7"
+ b"\x03\xd2\xe0\x8f:\x15\xf3\x84\x9e\xee\xe6e\xb8\x02q\xc7AC\x1emw\xfd\t"
+ b"\x9a\x1eu\xc1\xa9\xcaCwUP\x00\xa5\xf78L4w!\x91L2 \x87\xd0\xf2\x06\x81j"
+ b"\x80;\x03V\x06\x87\x92\xcb\x90lv@E\x8d\x8d\xa5\xa6\xe7Z[\xdf\xd6E\x03`>"
+ b"\x8f\xde\xa1bZ\x84\xd0\xa9`\x05\x0e{\x80;\xe3\xbef\x8d\x1d\xebk1.\xe3"
+ b"\xe9N\x15\xf7\xd4(\xfa\xbb\x15\xbdu\xf7\x7f\x86\xae!\x03L\x1d\xb5\xc1"
+ b"\xb9\x11\xdb\xd0\x93\xe4\x02\xe1\xd2\xcbBjc_\xe8}d\xdb\xc3\xa0Y\xbe\xc9/"
+ b"\x95\x01\xa3,\xe6bl@\x01\xdbp\xc2\xce\x14\x168\xc2q\xe3uH\x89X\xa4\xa9"
+ b"\x19\x1d\xc1}\x7fOX\x19\x9f\xdd\xbe\x85\x83\xff\x96\x1ee\x82O`CF=K\xeb$I"
+ b"\x17_\xefX\x8bJ'v\xde\x1f+\xd9.v\xf8Tv\x17\xf2\x9f5\x19\xe1\xb9\x91\xa8S"
+ b"\x86\xbd\x1a\"(\xa5x\x8dC\x03X\x81\x91\xa8\x11\xc4pS\x13\xbc\xf2'J\xae!"
+ b"\xef\xef\x84G\t\x8d\xc4\x10\x132\x00oS\x9e\xe0\xe4d\x8f\xb8y\xac\xa6\x9f"
+ b",\xb8f\x87\r\xdf\x9eE\x0f\xe1\xd0\\L\x00\xb2\xe1h\x84\xef}\x98\xa8\x11"
+ b"\xccW#\\\x83\x7fo\xbbz\x8f\x00"
+)
+
+FILTERS_RAW_3 = [{"id": lzma.FILTER_IA64, "start_offset": 0x100},
+ {"id": lzma.FILTER_LZMA2}]
+COMPRESSED_RAW_3 = (
+ b"\xe0\x07\x80\x03\xdf]\x00\x05\x14\x07bX\x19\xcd\xddn\x98\x15\xe4\xb4\x9d"
+ b"o\x1d\xc4\xe5\n\x03\xcc2h\xc7\\\x86\xff\xf8\xe2\xfc\xe7\xd9\xfe6\xb8("
+ b"\xa8wd\xc2\"u.n\x1e\xc3\xf2\x8e\x8d\x8f\x02\x17/\xa6=\xf0\xa2\xdf/M\x89"
+ b"\xbe\xde\xa7\x1cz\x18-]\xd5\xef\x13\x8frZ\x15\x80\x8c\xf8\x8do\xfa\x12"
+ b"\x9b#z/\xef\xf0\xfaF\x01\x82\xa3M\x8e\xa1t\xca6 BF$\xe5Q\xa4\x98\xee\xde"
+ b"l\xe8\x7f\xf0\x9d,bn\x0b\x13\xd4\xa8\x81\xe4N\xc8\x86\x153\xf5x2\xa2O"
+ b"\x13@Q\xa1\x00/\xa5\xd0O\x97\xdco\xae\xf7z\xc4\xcdS\xb6t<\x16\xf2\x9cI#"
+ b"\x89ud\xc66Y\xd9\xee\xe6\xce\x12]\xe5\xf0\xaa\x96-Pe\xade:\x04\t\x1b\xf7"
+ b"\xdb7\n\x86\x1fp\xc8J\xba\xf4\xf0V\xa9\xdc\xf0\x02%G\xf9\xdf=?\x15\x1b"
+ b"\xe1(\xce\x82=\xd6I\xac3\x12\x0cR\xb7\xae\r\xb1i\x03\x95\x01\xbd\xbe\xfa"
+ b"\x02s\x01P\x9d\x96X\xb12j\xc8L\xa8\x84b\xf6\xc3\xd4c-H\x93oJl\xd0iQ\xe4k"
+ b"\x84\x0b\xc1\xb7\xbc\xb1\x17\x88\xb1\xca?@\xf6\x07\xea\xe6x\xf1H12P\x0f"
+ b"\x8a\xc9\xeauw\xe3\xbe\xaai\xa9W\xd0\x80\xcd#cb5\x99\xd8]\xa9d\x0c\xbd"
+ b"\xa2\xdcWl\xedUG\xbf\x89yF\xf77\x81v\xbd5\x98\xbeh8\x18W\x08\xf0\x1b\x99"
+ b"5:\x1a?rD\x96\xa1\x04\x0f\xae\xba\x85\xeb\x9d5@\xf5\x83\xd37\x83\x8ac"
+ b"\x06\xd4\x97i\xcdt\x16S\x82k\xf6K\x01vy\x88\x91\x9b6T\xdae\r\xfd]:k\xbal"
+ b"\xa9\xbba\xc34\xf9r\xeb}r\xdb\xc7\xdb*\x8f\x03z\xdc8h\xcc\xc9\xd3\xbcl"
+ b"\xa5-\xcb\xeaK\xa2\xc5\x15\xc0\xe3\xc1\x86Z\xfb\xebL\xe13\xcf\x9c\xe3"
+ b"\x1d\xc9\xed\xc2\x06\xcc\xce!\x92\xe5\xfe\x9c^\xa59w \x9bP\xa3PK\x08d"
+ b"\xf9\xe2Z}\xa7\xbf\xed\xeb%$\x0c\x82\xb8/\xb0\x01\xa9&,\xf7qh{Q\x96)\xf2"
+ b"q\x96\xc3\x80\xb4\x12\xb0\xba\xe6o\xf4!\xb4[\xd4\x8aw\x10\xf7t\x0c\xb3"
+ b"\xd9\xd5\xc3`^\x81\x11??\\\xa4\x99\x85R\xd4\x8e\x83\xc9\x1eX\xbfa\xf1"
+ b"\xac\xb0\xea\xea\xd7\xd0\xab\x18\xe2\xf2\xed\xe1\xb7\xc9\x18\xcbS\xe4>"
+ b"\xc9\x95H\xe8\xcb\t\r%\xeb\xc7$.o\xf1\xf3R\x17\x1db\xbb\xd8U\xa5^\xccS"
+ b"\x16\x01\x87\xf3/\x93\xd1\xf0v\xc0r\xd7\xcc\xa2Gkz\xca\x80\x0e\xfd\xd0"
+ b"\x8b\xbb\xd2Ix\xb3\x1ey\xca-0\xe3z^\xd6\xd6\x8f_\xf1\x9dP\x9fi\xa7\xd1"
+ b"\xe8\x90\x84\xdc\xbf\xcdky\x8e\xdc\x81\x7f\xa3\xb2+\xbf\x04\xef\xd8\\"
+ b"\xc4\xdf\xe1\xb0\x01\xe9\x93\xe3Y\xf1\x1dY\xe8h\x81\xcf\xf1w\xcc\xb4\xef"
+ b" \x8b|\x04\xea\x83ej\xbe\x1f\xd4z\x9c`\xd3\x1a\x92A\x06\xe5\x8f\xa9\x13"
+ b"\t\x9e=\xfa\x1c\xe5_\x9f%v\x1bo\x11ZO\xd8\xf4\t\xddM\x16-\x04\xfc\x18<\""
+ b"CM\xddg~b\xf6\xef\x8e\x0c\xd0\xde|\xa0'\x8a\x0c\xd6x\xae!J\xa6F\x88\x15u"
+ b"\x008\x17\xbc7y\xb3\xd8u\xac_\x85\x8d\xe7\xc1@\x9c\xecqc\xa3#\xad\xf1"
+ b"\x935\xb5)_\r\xec3]\x0fo]5\xd0my\x07\x9b\xee\x81\xb5\x0f\xcfK+\x00\xc0"
+ b"\xe4b\x10\xe4\x0c\x1a \x9b\xe0\x97t\xf6\xa1\x9e\x850\xba\x0c\x9a\x8d\xc8"
+ b"\x8f\x07\xd7\xae\xc8\xf9+i\xdc\xb9k\xb0>f\x19\xb8\r\xa8\xf8\x1f$\xa5{p"
+ b"\xc6\x880\xce\xdb\xcf\xca_\x86\xac\x88h6\x8bZ%'\xd0\n\xbf\x0f\x9c\"\xba"
+ b"\xe5\x86\x9f\x0f7X=mNX[\xcc\x19FU\xc9\x860\xbc\x90a+* \xae_$\x03\x1e\xd3"
+ b"\xcd_\xa0\x9c\xde\xaf46q\xa5\xc9\x92\xd7\xca\xe3`\x9d\x85}\xb4\xff\xb3"
+ b"\x83\xfb\xb6\xca\xae`\x0bw\x7f\xfc\xd8\xacVe\x19\xc8\x17\x0bZ\xad\x88"
+ b"\xeb#\x97\x03\x13\xb1d\x0f{\x0c\x04w\x07\r\x97\xbd\xd6\xc1\xc3B:\x95\x08"
+ b"^\x10V\xaeaH\x02\xd9\xe3\n\\\x01X\xf6\x9c\x8a\x06u#%\xbe*\xa1\x18v\x85"
+ b"\xec!\t4\x00\x00\x00"
+)
+
+FILTERS_RAW_4 = [{"id": lzma.FILTER_DELTA, "dist": 4},
+ {"id": lzma.FILTER_X86, "start_offset": 0x40},
+ {"id": lzma.FILTER_LZMA2, "preset": 4, "lc": 2}]
+COMPRESSED_RAW_4 = (
+ b"\xe0\x07\x80\x06\x0e\\\x00\x05\x14\x07bW\xaah\xdd\x10\xdc'\xd6\x90,\xc6v"
+ b"Jq \x14l\xb7\x83xB\x0b\x97f=&fx\xba\n>Tn\xbf\x8f\xfb\x1dF\xca\xc3v_\xca?"
+ b"\xfbV<\x92#\xd4w\xa6\x8a\xeb\xf6\x03\xc0\x01\x94\xd8\x9e\x13\x12\x98\xd1"
+ b"*\xfa]c\xe8\x1e~\xaf\xb5]Eg\xfb\x9e\x01\"8\xb2\x90\x06=~\xe9\x91W\xcd"
+ b"\xecD\x12\xc7\xfa\xe1\x91\x06\xc7\x99\xb9\xe3\x901\x87\x19u\x0f\x869\xff"
+ b"\xc1\xb0hw|\xb0\xdcl\xcck\xb16o7\x85\xee{Y_b\xbf\xbc$\xf3=\x8d\x8bw\xe5Z"
+ b"\x08@\xc4kmE\xad\xfb\xf6*\xd8\xad\xa1\xfb\xc5{\xdej,)\x1emB\x1f<\xaeca"
+ b"\x80(\xee\x07 \xdf\xe9\xf8\xeb\x0e-\x97\x86\x90c\xf9\xea'B\xf7`\xd7\xb0"
+ b"\x92\xbd\xa0\x82]\xbd\x0e\x0eB\x19\xdc\x96\xc6\x19\xd86D\xf0\xd5\x831"
+ b"\x03\xb7\x1c\xf7&5\x1a\x8f PZ&j\xf8\x98\x1bo\xcc\x86\x9bS\xd3\xa5\xcdu"
+ b"\xf9$\xcc\x97o\xe5V~\xfb\x97\xb5\x0b\x17\x9c\xfdxW\x10\xfep4\x80\xdaHDY"
+ b"\xfa)\xfet\xb5\"\xd4\xd3F\x81\xf4\x13\x1f\xec\xdf\xa5\x13\xfc\"\x91x\xb7"
+ b"\x99\xce\xc8\x92\n\xeb[\x10l*Y\xd8\xb1@\x06\xc8o\x8d7r\xebu\xfd5\x0e\x7f"
+ b"\xf1$U{\t}\x1fQ\xcfxN\x9d\x9fXX\xe9`\x83\xc1\x06\xf4\x87v-f\x11\xdb/\\"
+ b"\x06\xff\xd7)B\xf3g\x06\x88#2\x1eB244\x7f4q\t\xc893?mPX\x95\xa6a\xfb)d"
+ b"\x9b\xfc\x98\x9aj\x04\xae\x9b\x9d\x19w\xba\xf92\xfaA\x11\\\x17\x97C3\xa4"
+ b"\xbc!\x88\xcdo[\xec:\x030\x91.\x85\xe0@\\4\x16\x12\x9d\xcaJv\x97\xb04"
+ b"\xack\xcbkf\xa3ss\xfc\x16^\x8ce\x85a\xa5=&\xecr\xb3p\xd1E\xd5\x80y\xc7"
+ b"\xda\xf6\xfek\xbcT\xbfH\xee\x15o\xc5\x8c\x830\xec\x1d\x01\xae\x0c-e\\"
+ b"\x91\x90\x94\xb2\xf8\x88\x91\xe8\x0b\xae\xa7>\x98\xf6\x9ck\xd2\xc6\x08"
+ b"\xe6\xab\t\x98\xf2!\xa0\x8c^\xacqA\x99<\x1cEG\x97\xc8\xf1\xb6\xb9\x82"
+ b"\x8d\xf7\x08s\x98a\xff\xe3\xcc\x92\x0e\xd2\xb6U\xd7\xd9\x86\x7fa\xe5\x1c"
+ b"\x8dTG@\t\x1e\x0e7*\xfc\xde\xbc]6N\xf7\xf1\x84\x9e\x9f\xcf\xe9\x1e\xb5'"
+ b"\xf4<\xdf\x99sq\xd0\x9d\xbd\x99\x0b\xb4%p4\xbf{\xbb\x8a\xd2\x0b\xbc=M"
+ b"\x94H:\xf5\xa8\xd6\xa4\xc90\xc2D\xb9\xd3\xa8\xb0S\x87 `\xa2\xeb\xf3W\xce"
+ b" 7\xf9N#\r\xe6\xbe\t\x9d\xe7\x811\xf9\x10\xc1\xc2\x14\xf6\xfc\xcba\xb7"
+ b"\xb1\x7f\x95l\xe4\tjA\xec:\x10\xe5\xfe\xc2\\=D\xe2\x0c\x0b3]\xf7\xc1\xf7"
+ b"\xbceZ\xb1A\xea\x16\xe5\xfddgFQ\xed\xaf\x04\xa3\xd3\xf8\xa2q\x19B\xd4r"
+ b"\xc5\x0c\x9a\x14\x94\xea\x91\xc4o\xe4\xbb\xb4\x99\xf4@\xd1\xe6\x0c\xe3"
+ b"\xc6d\xa0Q\n\xf2/\xd8\xb8S5\x8a\x18:\xb5g\xac\x95D\xce\x17\x07\xd4z\xda"
+ b"\x90\xe65\x07\x19H!\t\xfdu\x16\x8e\x0eR\x19\xf4\x8cl\x0c\xf9Q\xf1\x80"
+ b"\xe3\xbf\xd7O\xf8\x8c\x18\x0b\x9c\xf1\x1fb\xe1\tR\xb2\xf1\xe1A\xea \xcf-"
+ b"IGE\xf1\x14\x98$\x83\x15\xc9\xd8j\xbf\x19\x0f\xd5\xd1\xaa\xb3\xf3\xa5I2s"
+ b"\x8d\x145\xca\xd5\xd93\x9c\xb8D0\xe6\xaa%\xd0\xc0P}JO^h\x8e\x08\xadlV."
+ b"\x18\x88\x13\x05o\xb0\x07\xeaw\xe0\xb6\xa4\xd5*\xe4r\xef\x07G+\xc1\xbei["
+ b"w\xe8\xab@_\xef\x15y\xe5\x12\xc9W\x1b.\xad\x85-\xc2\xf7\xe3mU6g\x8eSA"
+ b"\x01(\xd3\xdb\x16\x13=\xde\x92\xf9,D\xb8\x8a\xb2\xb4\xc9\xc3\xefnE\xe8\\"
+ b"\xa6\xe2Y\xd2\xcf\xcb\x8c\xb6\xd5\xe9\x1d\x1e\x9a\x8b~\xe2\xa6\rE\x84uV"
+ b"\xed\xc6\x99\xddm<\x10[\x0fu\x1f\xc1\x1d1\n\xcfw\xb2%!\xf0[\xce\x87\x83B"
+ b"\x08\xaa,\x08%d\xcef\x94\"\xd9g.\xc83\xcbXY+4\xec\x85qA\n\x1d=9\xf0*\xb1"
+ b"\x1f/\xf3s\xd61b\x7f@\xfb\x9d\xe3FQ\\\xbd\x82\x1e\x00\xf3\xce\xd3\xe1"
+ b"\xca,E\xfd7[\xab\xb6\xb7\xac!mA}\xbd\x9d3R5\x9cF\xabH\xeb\x92)cc\x13\xd0"
+ b"\xbd\xee\xe9n{\x1dIJB\xa5\xeb\x11\xe8`w&`\x8b}@Oxe\t\x8a\x07\x02\x95\xf2"
+ b"\xed\xda|\xb1e\xbe\xaa\xbbg\x19@\xe1Y\x878\x84\x0f\x8c\xe3\xc98\xf2\x9e"
+ b"\xd5N\xb5J\xef\xab!\xe2\x8dq\xe1\xe5q\xc5\xee\x11W\xb7\xe4k*\x027\xa0"
+ b"\xa3J\xf4\xd8m\xd0q\x94\xcb\x07\n:\xb6`.\xe4\x9c\x15+\xc0)\xde\x80X\xd4"
+ b"\xcfQm\x01\xc2cP\x1cA\x85'\xc9\xac\x8b\xe6\xb2)\xe6\x84t\x1c\x92\xe4Z"
+ b"\x1cR\xb0\x9e\x96\xd1\xfb\x1c\xa6\x8b\xcb`\x10\x12]\xf2gR\x9bFT\xe0\xc8H"
+ b"S\xfb\xac<\x04\xc7\xc1\xe8\xedP\xf4\x16\xdb\xc0\xd7e\xc2\x17J^\x1f\xab"
+ b"\xff[\x08\x19\xb4\xf5\xfb\x19\xb4\x04\xe5c~']\xcb\xc2A\xec\x90\xd0\xed"
+ b"\x06,\xc5K{\x86\x03\xb1\xcdMx\xdeQ\x8c3\xf9\x8a\xea=\x89\xaba\xd2\xc89a"
+ b"\xd72\xf0\xc3\x19\x8a\xdfs\xd4\xfd\xbb\x81b\xeaE\"\xd8\xf4d\x0cD\xf7IJ!"
+ b"\xe5d\xbbG\xe9\xcam\xaa\x0f_r\x95\x91NBq\xcaP\xce\xa7\xa9\xb5\x10\x94eP!"
+ b"|\x856\xcd\xbfIir\xb8e\x9bjP\x97q\xabwS7\x1a\x0ehM\xe7\xca\x86?\xdeP}y~"
+ b"\x0f\x95I\xfc\x13\xe1<Q\x1b\x868\x1d\x11\xdf\x94\xf4\x82>r\xa9k\x88\xcb"
+ b"\xfd\xc3v\xe2\xb9\x8a\x02\x8eq\x92I\xf8\xf6\xf1\x03s\x9b\xb8\xe3\"\xe3"
+ b"\xa9\xa5>D\xb8\x96;\xe7\x92\xd133\xe8\xdd'e\xc9.\xdc;\x17\x1f\xf5H\x13q"
+ b"\xa4W\x0c\xdb~\x98\x01\xeb\xdf\xe32\x13\x0f\xddx\n6\xa0\t\x10\xb6\xbb"
+ b"\xb0\xc3\x18\xb6;\x9fj[\xd9\xd5\xc9\x06\x8a\x87\xcd\xe5\xee\xfc\x9c-%@"
+ b"\xee\xe0\xeb\xd2\xe3\xe8\xfb\xc0\x122\\\xc7\xaf\xc2\xa1Oth\xb3\x8f\x82"
+ b"\xb3\x18\xa8\x07\xd5\xee_\xbe\xe0\x1cA\x1e_\r\x9a\xb0\x17W&\xa2D\x91\x94"
+ b"\x1a\xb2\xef\xf2\xdc\x85;X\xb0,\xeb>-7S\xe5\xca\x07)\x1fp\x7f\xcaQBL\xca"
+ b"\xf3\xb9d\xfc\xb5su\xb0\xc8\x95\x90\xeb*)\xa0v\xe4\x9a{FW\xf4l\xde\xcdj"
+ b"\x00"
+)
+
+
+def test_main():
+ run_unittest(
+ CompressorDecompressorTestCase,
+ CompressDecompressFunctionTestCase,
+ FileTestCase,
+ MiscellaneousTestCase,
+ )
+
+if __name__ == "__main__":
+ test_main()
diff --git a/Lib/test/test_mailbox.py b/Lib/test/test_mailbox.py
index 8f76e18..54963a9 100644
--- a/Lib/test/test_mailbox.py
+++ b/Lib/test/test_mailbox.py
@@ -91,14 +91,14 @@ class TestMailbox(TestBase):
""")
def test_add_invalid_8bit_bytes_header(self):
- key = self._box.add(self._nonascii_msg.encode('latin1'))
+ key = self._box.add(self._nonascii_msg.encode('latin-1'))
self.assertEqual(len(self._box), 1)
self.assertEqual(self._box.get_bytes(key),
- self._nonascii_msg.encode('latin1'))
+ self._nonascii_msg.encode('latin-1'))
def test_invalid_nonascii_header_as_string(self):
subj = self._nonascii_msg.splitlines()[1]
- key = self._box.add(subj.encode('latin1'))
+ key = self._box.add(subj.encode('latin-1'))
self.assertEqual(self._box.get_string(key),
'Subject: =?unknown-8bit?b?RmFsaW5hcHThciBo4Xpob3pzeuFsbO104XNz'
'YWwuIE3hciByZW5kZWx06Ww/?=\n\n')
@@ -925,8 +925,7 @@ class TestMaildir(TestMailbox):
# the mtime and should cause a re-read. Note that "sleep
# emulation" is still in effect, as skewfactor is -3.
filename = os.path.join(self._path, 'cur', 'stray-file')
- f = open(filename, 'w')
- f.close()
+ support.create_empty_file(filename)
os.unlink(filename)
self._box._refresh()
self.assertTrue(refreshed())
diff --git a/Lib/test/test_mailcap.py b/Lib/test/test_mailcap.py
new file mode 100644
index 0000000..a4cd09c
--- /dev/null
+++ b/Lib/test/test_mailcap.py
@@ -0,0 +1,221 @@
+import mailcap
+import os
+import shutil
+import test.support
+import unittest
+
+# Location of mailcap file
+MAILCAPFILE = test.support.findfile("mailcap.txt")
+
+# Dict to act as mock mailcap entry for this test
+# The keys and values should match the contents of MAILCAPFILE
+MAILCAPDICT = {
+ 'application/x-movie':
+ [{'compose': 'moviemaker %s',
+ 'x11-bitmap': '"/usr/lib/Zmail/bitmaps/movie.xbm"',
+ 'description': '"Movie"',
+ 'view': 'movieplayer %s'}],
+ 'application/*':
+ [{'copiousoutput': '',
+ 'view': 'echo "This is \\"%t\\" but is 50 \\% Greek to me" \\; cat %s'}],
+ 'audio/basic':
+ [{'edit': 'audiocompose %s',
+ 'compose': 'audiocompose %s',
+ 'description': '"An audio fragment"',
+ 'view': 'showaudio %s'}],
+ 'video/mpeg':
+ [{'view': 'mpeg_play %s'}],
+ 'application/postscript':
+ [{'needsterminal': '', 'view': 'ps-to-terminal %s'},
+ {'compose': 'idraw %s', 'view': 'ps-to-terminal %s'}],
+ 'application/x-dvi':
+ [{'view': 'xdvi %s'}],
+ 'message/external-body':
+ [{'composetyped': 'extcompose %s',
+ 'description': '"A reference to data stored in an external location"',
+ 'needsterminal': '',
+ 'view': 'showexternal %s %{access-type} %{name} %{site} %{directory} %{mode} %{server}'}],
+ 'text/richtext':
+ [{'test': 'test "`echo %{charset} | tr \'[A-Z]\' \'[a-z]\'`" = iso-8859-8',
+ 'copiousoutput': '',
+ 'view': 'shownonascii iso-8859-8 -e richtext -p %s'}],
+ 'image/x-xwindowdump':
+ [{'view': 'display %s'}],
+ 'audio/*':
+ [{'view': '/usr/local/bin/showaudio %t'}],
+ 'video/*':
+ [{'view': 'animate %s'}],
+ 'application/frame':
+ [{'print': '"cat %s | lp"', 'view': 'showframe %s'}],
+ 'image/rgb':
+ [{'view': 'display %s'}]
+}
+
+
+class HelperFunctionTest(unittest.TestCase):
+
+ def test_listmailcapfiles(self):
+ # The return value for listmailcapfiles() will vary by system.
+ # So verify only that listmailcapfiles() returns a list, and that
+ # every entry in it is a string.
+ mcfiles = mailcap.listmailcapfiles()
+ self.assertIsInstance(mcfiles, list)
+ for m in mcfiles:
+ self.assertIsInstance(m, str)
+ with test.support.EnvironmentVarGuard() as env:
+ # According to RFC 1524, if MAILCAPS env variable exists, use that
+ # and only that.
+ if "MAILCAPS" in env:
+ env_mailcaps = env["MAILCAPS"].split(os.pathsep)
+ else:
+ env_mailcaps = ["/testdir1/.mailcap", "/testdir2/mailcap"]
+ env["MAILCAPS"] = os.pathsep.join(env_mailcaps)
+ mcfiles = mailcap.listmailcapfiles()
+ self.assertEqual(env_mailcaps, mcfiles)
+
+ def test_readmailcapfile(self):
+ # Test readmailcapfile() using test file. It should match MAILCAPDICT.
+ with open(MAILCAPFILE, 'r') as mcf:
+ d = mailcap.readmailcapfile(mcf)
+ self.assertDictEqual(d, MAILCAPDICT)
+
+ def test_lookup(self):
+ # Test without key
+ expected = [{'view': 'mpeg_play %s'}, {'view': 'animate %s'}]
+ actual = mailcap.lookup(MAILCAPDICT, 'video/mpeg')
+ self.assertListEqual(expected, actual)
+
+ # Test with key
+ key = 'compose'
+ expected = [{'edit': 'audiocompose %s',
+ 'compose': 'audiocompose %s',
+ 'description': '"An audio fragment"',
+ 'view': 'showaudio %s'}]
+ actual = mailcap.lookup(MAILCAPDICT, 'audio/basic', key)
+ self.assertListEqual(expected, actual)
+
+ def test_subst(self):
+ plist = ['id=1', 'number=2', 'total=3']
+ # test case: ([field, MIMEtype, filename, plist=[]], <expected string>)
+ test_cases = [
+ (["", "audio/*", "foo.txt"], ""),
+ (["echo foo", "audio/*", "foo.txt"], "echo foo"),
+ (["echo %s", "audio/*", "foo.txt"], "echo foo.txt"),
+ (["echo %t", "audio/*", "foo.txt"], "echo audio/*"),
+ (["echo \%t", "audio/*", "foo.txt"], "echo %t"),
+ (["echo foo", "audio/*", "foo.txt", plist], "echo foo"),
+ (["echo %{total}", "audio/*", "foo.txt", plist], "echo 3")
+ ]
+ for tc in test_cases:
+ self.assertEqual(mailcap.subst(*tc[0]), tc[1])
+
+
+class GetcapsTest(unittest.TestCase):
+
+ def test_mock_getcaps(self):
+ # Test mailcap.getcaps() using mock mailcap file in this dir.
+ # Temporarily override any existing system mailcap file by pointing the
+ # MAILCAPS environment variable to our mock file.
+ with test.support.EnvironmentVarGuard() as env:
+ env["MAILCAPS"] = MAILCAPFILE
+ caps = mailcap.getcaps()
+ self.assertDictEqual(caps, MAILCAPDICT)
+
+ def test_system_mailcap(self):
+ # Test mailcap.getcaps() with mailcap file(s) on system, if any.
+ caps = mailcap.getcaps()
+ self.assertIsInstance(caps, dict)
+ mailcapfiles = mailcap.listmailcapfiles()
+ existingmcfiles = [mcf for mcf in mailcapfiles if os.path.exists(mcf)]
+ if existingmcfiles:
+ # At least 1 mailcap file exists, so test that.
+ for (k, v) in caps.items():
+ self.assertIsInstance(k, str)
+ self.assertIsInstance(v, list)
+ for e in v:
+ self.assertIsInstance(e, dict)
+ else:
+ # No mailcap files on system. getcaps() should return empty dict.
+ self.assertEqual({}, caps)
+
+
+class FindmatchTest(unittest.TestCase):
+
+ def test_findmatch(self):
+
+ # default findmatch arguments
+ c = MAILCAPDICT
+ fname = "foo.txt"
+ plist = ["access-type=default", "name=john", "site=python.org",
+ "directory=/tmp", "mode=foo", "server=bar"]
+ audio_basic_entry = {
+ 'edit': 'audiocompose %s',
+ 'compose': 'audiocompose %s',
+ 'description': '"An audio fragment"',
+ 'view': 'showaudio %s'
+ }
+ audio_entry = {"view": "/usr/local/bin/showaudio %t"}
+ video_entry = {'view': 'animate %s'}
+ message_entry = {
+ 'composetyped': 'extcompose %s',
+ 'description': '"A reference to data stored in an external location"', 'needsterminal': '',
+ 'view': 'showexternal %s %{access-type} %{name} %{site} %{directory} %{mode} %{server}'
+ }
+
+ # test case: (findmatch args, findmatch keyword args, expected output)
+ # positional args: caps, MIMEtype
+ # keyword args: key="view", filename="/dev/null", plist=[]
+ # output: (command line, mailcap entry)
+ cases = [
+ ([{}, "video/mpeg"], {}, (None, None)),
+ ([c, "foo/bar"], {}, (None, None)),
+ ([c, "video/mpeg"], {}, ('mpeg_play /dev/null', {'view': 'mpeg_play %s'})),
+ ([c, "audio/basic", "edit"], {}, ("audiocompose /dev/null", audio_basic_entry)),
+ ([c, "audio/basic", "compose"], {}, ("audiocompose /dev/null", audio_basic_entry)),
+ ([c, "audio/basic", "description"], {}, ('"An audio fragment"', audio_basic_entry)),
+ ([c, "audio/basic", "foobar"], {}, (None, None)),
+ ([c, "video/*"], {"filename": fname}, ("animate %s" % fname, video_entry)),
+ ([c, "audio/basic", "compose"],
+ {"filename": fname},
+ ("audiocompose %s" % fname, audio_basic_entry)),
+ ([c, "audio/basic"],
+ {"key": "description", "filename": fname},
+ ('"An audio fragment"', audio_basic_entry)),
+ ([c, "audio/*"],
+ {"filename": fname},
+ ("/usr/local/bin/showaudio audio/*", audio_entry)),
+ ([c, "message/external-body"],
+ {"plist": plist},
+ ("showexternal /dev/null default john python.org /tmp foo bar", message_entry))
+ ]
+ self._run_cases(cases)
+
+ @unittest.skipUnless(os.name == "posix", "Requires 'test' command on system")
+ def test_test(self):
+ # findmatch() will automatically check any "test" conditions and skip
+ # the entry if the check fails.
+ caps = {"test/pass": [{"test": "test 1 -eq 1"}],
+ "test/fail": [{"test": "test 1 -eq 0"}]}
+ # test case: (findmatch args, findmatch keyword args, expected output)
+ # positional args: caps, MIMEtype, key ("test")
+ # keyword args: N/A
+ # output: (command line, mailcap entry)
+ cases = [
+ # findmatch will return the mailcap entry for test/pass because it evaluates to true
+ ([caps, "test/pass", "test"], {}, ("test 1 -eq 1", {"test": "test 1 -eq 1"})),
+ # findmatch will return None because test/fail evaluates to false
+ ([caps, "test/fail", "test"], {}, (None, None))
+ ]
+ self._run_cases(cases)
+
+ def _run_cases(self, cases):
+ for c in cases:
+ self.assertEqual(mailcap.findmatch(*c[0], **c[1]), c[2])
+
+
+def test_main():
+ test.support.run_unittest(HelperFunctionTest, GetcapsTest, FindmatchTest)
+
+
+if __name__ == '__main__':
+ test_main()
diff --git a/Lib/test/test_marshal.py b/Lib/test/test_marshal.py
index 96a70ec..83c348c 100644
--- a/Lib/test/test_marshal.py
+++ b/Lib/test/test_marshal.py
@@ -6,6 +6,7 @@ import marshal
import sys
import unittest
import os
+import types
class HelperMixin:
def helper(self, sample, *extra):
@@ -114,6 +115,22 @@ class CodeTestCase(unittest.TestCase):
codes = (ExceptionTestCase.test_exceptions.__code__,) * count
marshal.loads(marshal.dumps(codes))
+ def test_different_filenames(self):
+ co1 = compile("x", "f1", "exec")
+ co2 = compile("y", "f2", "exec")
+ co1, co2 = marshal.loads(marshal.dumps((co1, co2)))
+ self.assertEqual(co1.co_filename, "f1")
+ self.assertEqual(co2.co_filename, "f2")
+
+ @support.cpython_only
+ def test_same_filename_used(self):
+ s = """def f(): pass\ndef g(): pass"""
+ co = compile(s, "myfile", "exec")
+ co = marshal.loads(marshal.dumps(co))
+ for obj in co.co_consts:
+ if isinstance(obj, types.CodeType):
+ self.assertIs(co.co_filename, obj.co_filename)
+
class ContainerTestCase(unittest.TestCase, HelperMixin):
d = {'astring': 'foo@bar.baz.spam',
'afloat': 7283.43,
diff --git a/Lib/test/test_math.py b/Lib/test/test_math.py
index dddc889..715003a 100644
--- a/Lib/test/test_math.py
+++ b/Lib/test/test_math.py
@@ -2,11 +2,12 @@
# XXXX Should not do tests around zero only
from test.support import run_unittest, verbose, requires_IEEE_754
+from test import support
import unittest
import math
import os
+import platform
import sys
-import random
import struct
import sysconfig
@@ -457,12 +458,12 @@ class MathTests(unittest.TestCase):
def testFmod(self):
self.assertRaises(TypeError, math.fmod)
- self.ftest('fmod(10,1)', math.fmod(10,1), 0)
- self.ftest('fmod(10,0.5)', math.fmod(10,0.5), 0)
- self.ftest('fmod(10,1.5)', math.fmod(10,1.5), 1)
- self.ftest('fmod(-10,1)', math.fmod(-10,1), 0)
- self.ftest('fmod(-10,0.5)', math.fmod(-10,0.5), 0)
- self.ftest('fmod(-10,1.5)', math.fmod(-10,1.5), -1)
+ self.ftest('fmod(10, 1)', math.fmod(10, 1), 0.0)
+ self.ftest('fmod(10, 0.5)', math.fmod(10, 0.5), 0.0)
+ self.ftest('fmod(10, 1.5)', math.fmod(10, 1.5), 1.0)
+ self.ftest('fmod(-10, 1)', math.fmod(-10, 1), -0.0)
+ self.ftest('fmod(-10, 0.5)', math.fmod(-10, 0.5), -0.0)
+ self.ftest('fmod(-10, 1.5)', math.fmod(-10, 1.5), -1.0)
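+ # fmod() returns a result with the sign of its first argument, which
+ # is why the expected values for a negative x are signed zeros.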
self.assertTrue(math.isnan(math.fmod(NAN, 1.)))
self.assertTrue(math.isnan(math.fmod(1., NAN)))
self.assertTrue(math.isnan(math.fmod(NAN, NAN)))
@@ -650,6 +651,33 @@ class MathTests(unittest.TestCase):
n= 2**90
self.assertAlmostEqual(math.log1p(n), math.log1p(float(n)))
+ @requires_IEEE_754
+ def testLog2(self):
+ self.assertRaises(TypeError, math.log2)
+
+ # Check some integer values
+ self.assertEqual(math.log2(1), 0.0)
+ self.assertEqual(math.log2(2), 1.0)
+ self.assertEqual(math.log2(4), 2.0)
+
+ # Large integer values
+ self.assertEqual(math.log2(2**1023), 1023.0)
+ self.assertEqual(math.log2(2**1024), 1024.0)
+ self.assertEqual(math.log2(2**2000), 2000.0)
+
+ self.assertRaises(ValueError, math.log2, -1.5)
+ self.assertRaises(ValueError, math.log2, NINF)
+ self.assertTrue(math.isnan(math.log2(NAN)))
+
+ @requires_IEEE_754
+ # log2() is not accurate enough on Mac OS X Tiger (10.4)
+ @support.requires_mac_ver(10, 5)
+ def testLog2Exact(self):
+ # Check that we get exact equality for log2 of powers of 2.
+ actual = [math.log2(math.ldexp(1.0, n)) for n in range(-1074, 1024)]
+ expected = [float(n) for n in range(-1074, 1024)]
+ self.assertEqual(actual, expected)
+
def testLog10(self):
self.assertRaises(TypeError, math.log10)
self.ftest('log10(0.1)', math.log10(0.1), -1)
@@ -1010,7 +1038,6 @@ class MathTests(unittest.TestCase):
@requires_IEEE_754
def test_mtestfile(self):
- ALLOWED_ERROR = 20 # permitted error, in ulps
fail_fmt = "{}:{}({!r}): expected {!r}, got {!r}"
failures = []
diff --git a/Lib/test/test_memoryview.py b/Lib/test/test_memoryview.py
index 0bfddd9..8809930 100644
--- a/Lib/test/test_memoryview.py
+++ b/Lib/test/test_memoryview.py
@@ -24,15 +24,14 @@ class AbstractMemoryTests:
return filter(None, [self.ro_type, self.rw_type])
def check_getitem_with_type(self, tp):
- item = self.getitem_type
b = tp(self._source)
oldrefcount = sys.getrefcount(b)
m = self._view(b)
- self.assertEqual(m[0], item(b"a"))
- self.assertIsInstance(m[0], bytes)
- self.assertEqual(m[5], item(b"f"))
- self.assertEqual(m[-1], item(b"f"))
- self.assertEqual(m[-6], item(b"a"))
+ self.assertEqual(m[0], ord(b"a"))
+ self.assertIsInstance(m[0], int)
+ self.assertEqual(m[5], ord(b"f"))
+ self.assertEqual(m[-1], ord(b"f"))
+ self.assertEqual(m[-6], ord(b"a"))
# Bounds checking
self.assertRaises(IndexError, lambda: m[6])
self.assertRaises(IndexError, lambda: m[-7])
@@ -76,7 +75,9 @@ class AbstractMemoryTests:
b = self.rw_type(self._source)
oldrefcount = sys.getrefcount(b)
m = self._view(b)
- m[0] = tp(b"0")
+ m[0] = ord(b'1')
+ self._check_contents(tp, b, b"1bcdef")
+ m[0:1] = tp(b"0")
self._check_contents(tp, b, b"0bcdef")
m[1:3] = tp(b"12")
self._check_contents(tp, b, b"012def")
@@ -102,10 +103,17 @@ class AbstractMemoryTests:
# Wrong index/slice types
self.assertRaises(TypeError, setitem, 0.0, b"a")
self.assertRaises(TypeError, setitem, (0,), b"a")
+ self.assertRaises(TypeError, setitem, (slice(0,1,1), 0), b"a")
+ self.assertRaises(TypeError, setitem, (0, slice(0,1,1)), b"a")
+ self.assertRaises(TypeError, setitem, (0,), b"a")
self.assertRaises(TypeError, setitem, "a", b"a")
+ # Not implemented: multidimensional slices
+ slices = (slice(0,1,1), slice(0,1,2))
+ self.assertRaises(NotImplementedError, setitem, slices, b"a")
# Trying to resize the memory object
- self.assertRaises(ValueError, setitem, 0, b"")
- self.assertRaises(ValueError, setitem, 0, b"ab")
+ exc = ValueError if m.format == 'c' else TypeError
+ self.assertRaises(exc, setitem, 0, b"")
+ self.assertRaises(exc, setitem, 0, b"ab")
self.assertRaises(ValueError, setitem, slice(1,1), b"a")
self.assertRaises(ValueError, setitem, slice(0,2), b"a")
@@ -175,7 +183,7 @@ class AbstractMemoryTests:
self.assertEqual(m.shape, (6,))
self.assertEqual(len(m), 6)
self.assertEqual(m.strides, (self.itemsize,))
- self.assertEqual(m.suboffsets, None)
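+ # "No suboffsets" is now reported as an empty tuple rather than None.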
+ self.assertEqual(m.suboffsets, ())
return m
def test_attributes_readonly(self):
@@ -209,12 +217,16 @@ class AbstractMemoryTests:
# If tp is a factory rather than a plain type, skip
continue
+ class MyView():
+ def __init__(self, base):
+ self.m = memoryview(base)
class MySource(tp):
pass
class MyObject:
pass
- # Create a reference cycle through a memoryview object
+ # Create a reference cycle through a memoryview object.
+ # This exercises mbuf_clear().
b = MySource(tp(b'abc'))
m = self._view(b)
o = MyObject()
@@ -226,6 +238,17 @@ class AbstractMemoryTests:
gc.collect()
self.assertTrue(wr() is None, wr())
+ # This exercises memory_clear().
+ m = MyView(tp(b'abc'))
+ o = MyObject()
+ m.x = m
+ m.o = o
+ wr = weakref.ref(o)
+ m = o = None
+ # The cycle must be broken
+ gc.collect()
+ self.assertTrue(wr() is None, wr())
+
def _check_released(self, m, tp):
check = self.assertRaisesRegex(ValueError, "released")
with check: bytes(m)
@@ -283,6 +306,36 @@ class AbstractMemoryTests:
i = io.BytesIO(b'ZZZZ')
self.assertRaises(TypeError, i.readinto, m)
+ def test_getbuf_fail(self):
+ self.assertRaises(TypeError, self._view, {})
+
+ def test_hash(self):
+ # Memoryviews of readonly (hashable) types are hashable, and they
+ # hash as hash(obj.tobytes()).
+ tp = self.ro_type
+ if tp is None:
+ self.skipTest("no read-only type to test")
+ b = tp(self._source)
+ m = self._view(b)
+ self.assertEqual(hash(m), hash(b"abcdef"))
+ # Releasing the memoryview keeps the stored hash value (as with weakrefs)
+ m.release()
+ self.assertEqual(hash(m), hash(b"abcdef"))
+ # Hashing a memoryview for the first time after it is released
+ # results in an error (as with weakrefs).
+ m = self._view(b)
+ m.release()
+ self.assertRaises(ValueError, hash, m)
+
+ def test_hash_writable(self):
+ # Memoryviews of writable types are unhashable
+ tp = self.rw_type
+ if tp is None:
+ self.skipTest("no writable type to test")
+ b = tp(self._source)
+ m = self._view(b)
+ self.assertRaises(ValueError, hash, m)
+
# Variations on source objects for the buffer: bytes-like objects, then arrays
# with itemsize > 1.
# NOTE: support for multi-dimensional objects is unimplemented.
diff --git a/Lib/test/test_metaclass.py b/Lib/test/test_metaclass.py
index 219ab99..e6fe20a 100644
--- a/Lib/test/test_metaclass.py
+++ b/Lib/test/test_metaclass.py
@@ -159,6 +159,7 @@ Use a __prepare__ method that returns an instrumented dict.
... bar = 123
...
d['__module__'] = 'test.test_metaclass'
+ d['__qualname__'] = 'C'
d['foo'] = 4
d['foo'] = 42
d['bar'] = 123
@@ -177,12 +178,12 @@ Use a metaclass that doesn't derive from type.
... b = 24
...
meta: C ()
- ns: [('__module__', 'test.test_metaclass'), ('a', 42), ('b', 24)]
+ ns: [('__module__', 'test.test_metaclass'), ('__qualname__', 'C'), ('a', 42), ('b', 24)]
kw: []
>>> type(C) is dict
True
>>> print(sorted(C.items()))
- [('__module__', 'test.test_metaclass'), ('a', 42), ('b', 24)]
+ [('__module__', 'test.test_metaclass'), ('__qualname__', 'C'), ('a', 42), ('b', 24)]
>>>
And again, with a __prepare__ attribute.
@@ -199,11 +200,12 @@ And again, with a __prepare__ attribute.
...
prepare: C () [('other', 'booh')]
d['__module__'] = 'test.test_metaclass'
+ d['__qualname__'] = 'C'
d['a'] = 1
d['a'] = 2
d['b'] = 3
meta: C ()
- ns: [('__module__', 'test.test_metaclass'), ('a', 2), ('b', 3)]
+ ns: [('__module__', 'test.test_metaclass'), ('__qualname__', 'C'), ('a', 2), ('b', 3)]
kw: [('other', 'booh')]
>>>
@@ -246,7 +248,13 @@ Test failures in looking up the __prepare__ method work.
"""
-__test__ = {'doctests' : doctests}
+import sys
+
+# Trace function introduces __locals__ which causes various tests to fail.
+if hasattr(sys, 'gettrace') and sys.gettrace():
+ __test__ = {}
+else:
+ __test__ = {'doctests' : doctests}
def test_main(verbose=False):
from test import support
diff --git a/Lib/test/test_minidom.py b/Lib/test/test_minidom.py
index 4a69b00..cc4c95b 100644
--- a/Lib/test/test_minidom.py
+++ b/Lib/test/test_minidom.py
@@ -4,9 +4,7 @@ import pickle
from test.support import verbose, run_unittest, findfile
import unittest
-import xml.dom
import xml.dom.minidom
-import xml.parsers.expat
from xml.dom.minidom import parse, Node, Document, parseString
from xml.dom.minidom import getDOMImplementation
@@ -14,7 +12,6 @@ from xml.dom.minidom import getDOMImplementation
tstfile = findfile("test.xml", subdir="xmltestdata")
-
# The tests of DocumentType importing use these helpers to construct
# the documents to work with, since not all DOM builders actually
# create the DocumentType nodes.
@@ -50,7 +47,7 @@ class MinidomTest(unittest.TestCase):
def checkWholeText(self, node, s):
t = node.wholeText
- self.confirm(t == s, "looking for %s, found %s" % (repr(s), repr(t)))
+ self.confirm(t == s, "looking for %r, found %r" % (s, t))
def testParseFromFile(self):
with open(tstfile) as file:
@@ -282,6 +279,7 @@ class MinidomTest(unittest.TestCase):
child.setAttribute("def", "ghi")
self.confirm(len(child.attributes) == 1)
+ self.assertRaises(xml.dom.NotFoundErr, child.removeAttribute, "foo")
child.removeAttribute("def")
self.confirm(len(child.attributes) == 0)
dom.unlink()
@@ -293,6 +291,8 @@ class MinidomTest(unittest.TestCase):
child.setAttributeNS("http://www.w3.org", "xmlns:python",
"http://www.python.org")
child.setAttributeNS("http://www.python.org", "python:abcattr", "foo")
+ self.assertRaises(xml.dom.NotFoundErr, child.removeAttributeNS,
+ "foo", "http://www.python.org")
self.confirm(len(child.attributes) == 2)
child.removeAttributeNS("http://www.python.org", "abcattr")
self.confirm(len(child.attributes) == 1)
@@ -304,11 +304,23 @@ class MinidomTest(unittest.TestCase):
child.setAttribute("spam", "jam")
self.confirm(len(child.attributes) == 1)
node = child.getAttributeNode("spam")
+ self.assertRaises(xml.dom.NotFoundErr, child.removeAttributeNode,
+ None)
child.removeAttributeNode(node)
self.confirm(len(child.attributes) == 0
and child.getAttributeNode("spam") is None)
+ dom2 = Document()
+ child2 = dom2.appendChild(dom.createElement("foo"))
+ self.assertRaises(xml.dom.NotFoundErr, child.removeAttributeNode,
+ node)
dom.unlink()
+ def testHasAttribute(self):
+ dom = Document()
+ child = dom.appendChild(dom.createElement("foo"))
+ child.setAttribute("spam", "jam")
+ self.confirm(child.hasAttribute("spam"))
+
def testChangeAttr(self):
dom = parseString("<abc/>")
el = dom.documentElement
@@ -592,7 +604,13 @@ class MinidomTest(unittest.TestCase):
def testFirstChild(self): pass
- def testHasChildNodes(self): pass
+ def testHasChildNodes(self):
+ dom = parseString("<doc><foo/></doc>")
+ doc = dom.documentElement
+ self.assertTrue(dom.hasChildNodes())
+ dom2 = parseString("<doc/>")
+ doc2 = dom2.documentElement
+ self.assertFalse(doc2.hasChildNodes())
def _testCloneElementCopiesAttributes(self, e1, e2, test):
attrs1 = e1.attributes
@@ -1041,41 +1059,6 @@ class MinidomTest(unittest.TestCase):
"test NodeList.item()")
doc.unlink()
- def testSAX2DOM(self):
- from xml.dom import pulldom
-
- sax2dom = pulldom.SAX2DOM()
- sax2dom.startDocument()
- sax2dom.startElement("doc", {})
- sax2dom.characters("text")
- sax2dom.startElement("subelm", {})
- sax2dom.characters("text")
- sax2dom.endElement("subelm")
- sax2dom.characters("text")
- sax2dom.endElement("doc")
- sax2dom.endDocument()
-
- doc = sax2dom.document
- root = doc.documentElement
- (text1, elm1, text2) = root.childNodes
- text3 = elm1.childNodes[0]
-
- self.confirm(text1.previousSibling is None and
- text1.nextSibling is elm1 and
- elm1.previousSibling is text1 and
- elm1.nextSibling is text2 and
- text2.previousSibling is elm1 and
- text2.nextSibling is None and
- text3.previousSibling is None and
- text3.nextSibling is None, "testSAX2DOM - siblings")
-
- self.confirm(root.parentNode is doc and
- text1.parentNode is root and
- elm1.parentNode is root and
- text2.parentNode is root and
- text3.parentNode is elm1, "testSAX2DOM - parents")
- doc.unlink()
-
def testEncodings(self):
doc = parseString('<foo>&#x20ac;</foo>')
self.assertEqual(doc.toxml(),
@@ -1522,12 +1505,21 @@ class MinidomTest(unittest.TestCase):
doc.appendChild(doc.createComment("foo--bar"))
self.assertRaises(ValueError, doc.toxml)
+
def testEmptyXMLNSValue(self):
doc = parseString("<element xmlns=''>\n"
"<foo/>\n</element>")
doc2 = parseString(doc.toxml())
self.confirm(doc2.namespaceURI == xml.dom.EMPTY_NAMESPACE)
+ def testDocRemoveChild(self):
+ doc = parse(tstfile)
+ title_tag = doc.documentElement.getElementsByTagName("TITLE")[0]
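+ # TITLE is a descendant, not a direct child, of the document, so
+ # removeChild() on the document must raise NotFoundErr.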
+ self.assertRaises(xml.dom.NotFoundErr, doc.removeChild, title_tag)
+ num_children_before = len(doc.childNodes)
+ doc.removeChild(doc.childNodes[0])
+ num_children_after = len(doc.childNodes)
+ self.assertTrue(num_children_after == num_children_before - 1)
def test_main():
run_unittest(MinidomTest)
diff --git a/Lib/test/test_mmap.py b/Lib/test/test_mmap.py
index 0e18aab..2230028 100644
--- a/Lib/test/test_mmap.py
+++ b/Lib/test/test_mmap.py
@@ -417,6 +417,35 @@ class MmapTests(unittest.TestCase):
m[x] = b
self.assertEqual(m[x], b)
+ def test_read_all(self):
+ m = mmap.mmap(-1, 16)
+ self.addCleanup(m.close)
+
+ # With no parameters, or None or a negative argument, reads all
+ m.write(bytes(range(16)))
+ m.seek(0)
+ self.assertEqual(m.read(), bytes(range(16)))
+ m.seek(8)
+ self.assertEqual(m.read(), bytes(range(8, 16)))
+ m.seek(16)
+ self.assertEqual(m.read(), b'')
+ m.seek(3)
+ self.assertEqual(m.read(None), bytes(range(3, 16)))
+ m.seek(4)
+ self.assertEqual(m.read(-1), bytes(range(4, 16)))
+ m.seek(5)
+ self.assertEqual(m.read(-2), bytes(range(5, 16)))
+ m.seek(9)
+ self.assertEqual(m.read(-42), bytes(range(9, 16)))
+
+ def test_read_invalid_arg(self):
+ m = mmap.mmap(-1, 16)
+ self.addCleanup(m.close)
+
+ self.assertRaises(TypeError, m.read, 'foo')
+ self.assertRaises(TypeError, m.read, 5.5)
+ self.assertRaises(TypeError, m.read, [1, 2, 3])
+
def test_extended_getslice(self):
# Test extended slicing by comparing with list slicing.
s = bytes(reversed(range(256)))
@@ -534,8 +563,7 @@ class MmapTests(unittest.TestCase):
f.close()
def test_error(self):
- self.assertTrue(issubclass(mmap.error, EnvironmentError))
- self.assertIn("mmap.error", str(mmap.error))
+ self.assertIs(mmap.error, OSError)
def test_io_methods(self):
data = b"0123456789"
diff --git a/Lib/test/test_module.py b/Lib/test/test_module.py
index 15836ca..5617789 100644
--- a/Lib/test/test_module.py
+++ b/Lib/test/test_module.py
@@ -70,9 +70,11 @@ class ModuleTests(unittest.TestCase):
m = ModuleType("foo")
m.destroyed = destroyed
s = """class A:
- def __del__(self, destroyed=destroyed):
- destroyed.append(1)
-a = A()"""
+ def __init__(self, l):
+ self.l = l
+ def __del__(self):
+ self.l.append(1)
+a = A(destroyed)"""
exec(s, m.__dict__)
del m
gc_collect()
diff --git a/Lib/test/test_modulefinder.py b/Lib/test/test_modulefinder.py
index a184217..c5fc320 100644
--- a/Lib/test/test_modulefinder.py
+++ b/Lib/test/test_modulefinder.py
@@ -1,7 +1,7 @@
-import __future__
import os
+import errno
+import shutil
import unittest
-import distutils.dir_util
import tempfile
from test import support
@@ -9,7 +9,7 @@ from test import support
import modulefinder
TEST_DIR = tempfile.mkdtemp()
-TEST_PATH = [TEST_DIR, os.path.dirname(__future__.__file__)]
+TEST_PATH = [TEST_DIR, os.path.dirname(tempfile.__file__)]
# Each test description is a list of 5 items:
#
@@ -196,12 +196,17 @@ a/module.py
from . import bar
"""]
+
def open_file(path):
- ##print "#", os.path.abspath(path)
dirname = os.path.dirname(path)
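+ # Tolerate an already-existing directory, as the removed
+ # distutils.dir_util.mkpath() call did.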
- distutils.dir_util.mkpath(dirname)
+ try:
+ os.makedirs(dirname)
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
return open(path, "w")
+
def create_package(source):
ofi = None
try:
@@ -216,6 +221,7 @@ def create_package(source):
if ofi:
ofi.close()
+
class ModuleFinderTest(unittest.TestCase):
def _do_test(self, info, report=False):
import_this, modules, missing, maybe_missing, source = info
@@ -234,19 +240,17 @@ class ModuleFinderTest(unittest.TestCase):
## import traceback; traceback.print_exc()
## sys.path = opath
## return
- modules = set(modules)
- found = set(mf.modules.keys())
- more = list(found - modules)
- less = list(modules - found)
+ modules = sorted(set(modules))
+ found = sorted(mf.modules)
# check if we found what we expected, not more, not less
- self.assertEqual((more, less), ([], []))
+ self.assertEqual(found, modules)
# check for missing and maybe missing modules
bad, maybe = mf.any_missing_maybe()
self.assertEqual(bad, missing)
self.assertEqual(maybe, maybe_missing)
finally:
- distutils.dir_util.remove_tree(TEST_DIR)
+ shutil.rmtree(TEST_DIR)
def test_package(self):
self._do_test(package_test)
@@ -254,25 +258,23 @@ class ModuleFinderTest(unittest.TestCase):
def test_maybe(self):
self._do_test(maybe_test)
- if getattr(__future__, "absolute_import", None):
+ def test_maybe_new(self):
+ self._do_test(maybe_test_new)
- def test_maybe_new(self):
- self._do_test(maybe_test_new)
+ def test_absolute_imports(self):
+ self._do_test(absolute_import_test)
- def test_absolute_imports(self):
- self._do_test(absolute_import_test)
+ def test_relative_imports(self):
+ self._do_test(relative_import_test)
- def test_relative_imports(self):
- self._do_test(relative_import_test)
+ def test_relative_imports_2(self):
+ self._do_test(relative_import_test_2)
- def test_relative_imports_2(self):
- self._do_test(relative_import_test_2)
+ def test_relative_imports_3(self):
+ self._do_test(relative_import_test_3)
- def test_relative_imports_3(self):
- self._do_test(relative_import_test_3)
def test_main():
- distutils.log.set_threshold(distutils.log.WARN)
support.run_unittest(ModuleFinderTest)
if __name__ == "__main__":
diff --git a/Lib/test/test_multibytecodec.py b/Lib/test/test_multibytecodec.py
index 86c68dc..feb7bd5 100644
--- a/Lib/test/test_multibytecodec.py
+++ b/Lib/test/test_multibytecodec.py
@@ -247,20 +247,16 @@ class Test_ISO2022(unittest.TestCase):
self.assertFalse(any(x > 0x80 for x in e))
def test_bug1572832(self):
- if sys.maxunicode >= 0x10000:
- myunichr = chr
- else:
- myunichr = lambda x: chr(0xD7C0+(x>>10)) + chr(0xDC00+(x&0x3FF))
-
for x in range(0x10000, 0x110000):
# Any ISO 2022 codec will cause the segfault
- myunichr(x).encode('iso_2022_jp', 'ignore')
+ chr(x).encode('iso_2022_jp', 'ignore')
class TestStateful(unittest.TestCase):
text = '\u4E16\u4E16'
encoding = 'iso-2022-jp'
expected = b'\x1b$B@$@$'
- expected_reset = b'\x1b$B@$@$\x1b(B'
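+ # Stateful codecs such as iso-2022-jp must emit an escape sequence
+ # shifting back to ASCII at the end of a stream; that suffix is the
+ # "reset" below.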
+ reset = b'\x1b(B'
+ expected_reset = expected + reset
def test_encode(self):
self.assertEqual(self.text.encode(self.encoding), self.expected_reset)
@@ -271,6 +267,8 @@ class TestStateful(unittest.TestCase):
encoder.encode(char)
for char in self.text)
self.assertEqual(output, self.expected)
+ self.assertEqual(encoder.encode('', final=True), self.reset)
+ self.assertEqual(encoder.encode('', final=True), b'')
def test_incrementalencoder_final(self):
encoder = codecs.getincrementalencoder(self.encoding)()
@@ -279,12 +277,14 @@ class TestStateful(unittest.TestCase):
encoder.encode(char, index == last_index)
for index, char in enumerate(self.text))
self.assertEqual(output, self.expected_reset)
+ self.assertEqual(encoder.encode('', final=True), b'')
class TestHZStateful(TestStateful):
text = '\u804a\u804a'
encoding = 'hz'
expected = b'~{ADAD'
- expected_reset = b'~{ADAD~}'
+ reset = b'~}'
+ expected_reset = expected + reset
def test_main():
support.run_unittest(__name__)
diff --git a/Lib/test/test_multibytecodec_support.py b/Lib/test/test_multibytecodec_support.py
index ef63b69..ac3b92b 100644
--- a/Lib/test/test_multibytecodec_support.py
+++ b/Lib/test/test_multibytecodec_support.py
@@ -264,21 +264,6 @@ class TestBase:
self.assertEqual(ostream.getvalue(), self.tstring[0])
-if len('\U00012345') == 2: # ucs2 build
- _unichr = chr
- def chr(v):
- if v >= 0x10000:
- return _unichr(0xd800 + ((v - 0x10000) >> 10)) + \
- _unichr(0xdc00 + ((v - 0x10000) & 0x3ff))
- else:
- return _unichr(v)
- _ord = ord
- def ord(c):
- if len(c) == 2:
- return 0x10000 + ((_ord(c[0]) - 0xd800) << 10) + \
- (ord(c[1]) - 0xdc00)
- else:
- return _ord(c)
class TestBase_Mapping(unittest.TestCase):
pass_enctest = []
diff --git a/Lib/test/test_multiprocessing.py b/Lib/test/test_multiprocessing.py
index 8edb420..d65cf6e 100644
--- a/Lib/test/test_multiprocessing.py
+++ b/Lib/test/test_multiprocessing.py
@@ -8,6 +8,7 @@ import unittest
import queue as pyqueue
import time
import io
+import itertools
import sys
import os
import gc
@@ -82,6 +83,23 @@ HAVE_GETVALUE = not getattr(_multiprocessing,
'HAVE_BROKEN_SEM_GETVALUE', False)
WIN32 = (sys.platform == "win32")
+if WIN32:
+ from _subprocess import WaitForSingleObject, INFINITE, WAIT_OBJECT_0
+
+ def wait_for_handle(handle, timeout):
+ if timeout is None or timeout < 0.0:
+ timeout = INFINITE
+ else:
+ timeout = int(1000 * timeout)
+ return WaitForSingleObject(handle, timeout) == WAIT_OBJECT_0
+else:
+ from select import select
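+ # util._eintr_retry() re-issues select() if it is interrupted by a
+ # signal (EINTR).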
+ _select = util._eintr_retry(select)
+
+ def wait_for_handle(handle, timeout):
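+ # A negative timeout means "wait forever"; passing None makes
+ # select() block indefinitely.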
+ if timeout is not None and timeout < 0.0:
+ timeout = None
+ return handle in _select([handle], [], [], timeout)[0]
try:
MAXFD = os.sysconf("SC_OPEN_MAX")
@@ -196,6 +214,18 @@ class _TestProcess(BaseTestCase):
self.assertEqual(current.ident, os.getpid())
self.assertEqual(current.exitcode, None)
+ def test_daemon_argument(self):
+ if self.TYPE == "threads":
+ return
+
+ # By default uses the current process's daemon flag.
+ proc0 = self.Process(target=self._test)
+ self.assertEqual(proc0.daemon, self.current_process().daemon)
+ proc1 = self.Process(target=self._test, daemon=True)
+ self.assertTrue(proc1.daemon)
+ proc2 = self.Process(target=self._test, daemon=False)
+ self.assertFalse(proc2.daemon)
+
@classmethod
def _test(cls, q, *args, **kwds):
current = cls.current_process()
@@ -328,6 +358,26 @@ class _TestProcess(BaseTestCase):
]
self.assertEqual(result, expected)
+ @classmethod
+ def _test_sentinel(cls, event):
+ event.wait(10.0)
+
+ def test_sentinel(self):
+ if self.TYPE == "threads":
+ return
+ event = self.Event()
+ p = self.Process(target=self._test_sentinel, args=(event,))
+ with self.assertRaises(ValueError):
+ p.sentinel
+ p.start()
+ self.addCleanup(p.join)
+ sentinel = p.sentinel
+ self.assertIsInstance(sentinel, int)
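+ # The sentinel becomes ready only when the process exits, so it
+ # must not fire while the child is still blocked on the event.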
+ self.assertFalse(wait_for_handle(sentinel, timeout=0.0))
+ event.set()
+ p.join()
+ self.assertTrue(wait_for_handle(sentinel, timeout=DELTA))
+
#
#
#
@@ -1099,6 +1149,9 @@ def sqr(x, wait=0.0):
time.sleep(wait)
return x*x
+def mul(x, y):
+ return x*y
+
class _TestPool(BaseTestCase):
def test_apply(self):
@@ -1112,6 +1165,20 @@ class _TestPool(BaseTestCase):
self.assertEqual(pmap(sqr, list(range(100)), chunksize=20),
list(map(sqr, list(range(100)))))
+ def test_starmap(self):
+ psmap = self.pool.starmap
+ tuples = list(zip(range(10), range(9, -1, -1)))
+ self.assertEqual(psmap(mul, tuples),
+ list(itertools.starmap(mul, tuples)))
+ tuples = list(zip(range(100), range(99, -1, -1)))
+ self.assertEqual(psmap(mul, tuples, chunksize=20),
+ list(itertools.starmap(mul, tuples)))
+
+ def test_starmap_async(self):
+ tuples = list(zip(range(100), range(99, -1, -1)))
+ self.assertEqual(self.pool.starmap_async(mul, tuples).get(),
+ list(itertools.starmap(mul, tuples)))
+
def test_map_chunksize(self):
try:
self.pool.map_async(sqr, [], chunksize=1).get(timeout=TIMEOUT1)
@@ -1712,6 +1779,17 @@ class _TestConnection(BaseTestCase):
self.assertRaises(RuntimeError, reduction.recv_handle, conn)
p.join()
+class _TestListener(BaseTestCase):
+
+ ALLOWED_TYPES = ('processes',)
+
+ def test_multiple_bind(self):
+ for family in self.connection.families:
+ l = self.connection.Listener(family=family)
+ self.addCleanup(l.close)
+ self.assertRaises(OSError, self.connection.Listener,
+ l.address, family)
+
class _TestListenerClient(BaseTestCase):
ALLOWED_TYPES = ('processes', 'threads')
@@ -1732,6 +1810,85 @@ class _TestListenerClient(BaseTestCase):
self.assertEqual(conn.recv(), 'hello')
p.join()
l.close()
+
+class _TestPoll(BaseTestCase):
+
+ ALLOWED_TYPES = ('processes', 'threads')
+
+ def test_empty_string(self):
+ a, b = self.Pipe()
+ self.assertEqual(a.poll(), False)
+ b.send_bytes(b'')
+ self.assertEqual(a.poll(), True)
+ self.assertEqual(a.poll(), True)
+
+ @classmethod
+ def _child_strings(cls, conn, strings):
+ for s in strings:
+ time.sleep(0.1)
+ conn.send_bytes(s)
+ conn.close()
+
+ def test_strings(self):
+ strings = (b'hello', b'', b'a', b'b', b'', b'bye', b'', b'lop')
+ a, b = self.Pipe()
+ p = self.Process(target=self._child_strings, args=(b, strings))
+ p.start()
+
+ for s in strings:
+ for i in range(200):
+ if a.poll(0.01):
+ break
+ x = a.recv_bytes()
+ self.assertEqual(s, x)
+
+ p.join()
+
+ @classmethod
+ def _child_boundaries(cls, r):
+ # Polling may "pull" a message in to the child process, but we
+ # don't want it to pull only part of a message, as that would
+ # corrupt the pipe for any other processes which might later
+ # read from it.
+ r.poll(5)
+
+ def test_boundaries(self):
+ r, w = self.Pipe(False)
+ p = self.Process(target=self._child_boundaries, args=(r,))
+ p.start()
+ time.sleep(2)
+ L = [b"first", b"second"]
+ for obj in L:
+ w.send_bytes(obj)
+ w.close()
+ p.join()
+ self.assertIn(r.recv_bytes(), L)
+
+ @classmethod
+ def _child_dont_merge(cls, b):
+ b.send_bytes(b'a')
+ b.send_bytes(b'b')
+ b.send_bytes(b'cd')
+
+ def test_dont_merge(self):
+ a, b = self.Pipe()
+ self.assertEqual(a.poll(0.0), False)
+ self.assertEqual(a.poll(0.1), False)
+
+ p = self.Process(target=self._child_dont_merge, args=(b,))
+ p.start()
+
+ self.assertEqual(a.recv_bytes(), b'a')
+ self.assertEqual(a.poll(1.0), True)
+ self.assertEqual(a.poll(1.0), True)
+ self.assertEqual(a.recv_bytes(), b'b')
+ self.assertEqual(a.poll(1.0), True)
+ self.assertEqual(a.poll(1.0), True)
+ self.assertEqual(a.poll(0.0), True)
+ self.assertEqual(a.recv_bytes(), b'cd')
+
+ p.join()
+
#
# Test of sending connection and socket objects between processes
#
@@ -2114,9 +2271,15 @@ class TestInvalidHandle(unittest.TestCase):
@unittest.skipIf(WIN32, "skipped on Windows")
def test_invalid_handles(self):
- conn = _multiprocessing.Connection(44977608)
- self.assertRaises(IOError, conn.poll)
- self.assertRaises(IOError, _multiprocessing.Connection, -1)
+ conn = multiprocessing.connection.Connection(44977608)
+ try:
+ self.assertRaises((ValueError, IOError), conn.poll)
+ finally:
+ # Hack private attribute _handle to avoid printing an error
+ # in conn.__del__
+ conn._handle = None
+ self.assertRaises((ValueError, IOError),
+ multiprocessing.connection.Connection, -1)
#
# Functions used to create test cases from the base ones in this module
@@ -2319,8 +2482,164 @@ class TestStdinBadfiledescriptor(unittest.TestCase):
flike.flush()
assert sio.getvalue() == 'foo'
+
+class TestWait(unittest.TestCase):
+
+ @classmethod
+ def _child_test_wait(cls, w, slow):
+ for i in range(10):
+ if slow:
+ time.sleep(random.random()*0.1)
+ w.send((i, os.getpid()))
+ w.close()
+
+ def test_wait(self, slow=False):
+ from multiprocessing.connection import wait
+ readers = []
+ procs = []
+ messages = []
+
+ for i in range(4):
+ r, w = multiprocessing.Pipe(duplex=False)
+ p = multiprocessing.Process(target=self._child_test_wait, args=(w, slow))
+ p.daemon = True
+ p.start()
+ w.close()
+ readers.append(r)
+ procs.append(p)
+ self.addCleanup(p.join)
+
+ while readers:
+ for r in wait(readers):
+ try:
+ msg = r.recv()
+ except EOFError:
+ readers.remove(r)
+ r.close()
+ else:
+ messages.append(msg)
+
+ messages.sort()
+ expected = sorted((i, p.pid) for i in range(10) for p in procs)
+ self.assertEqual(messages, expected)
+
+ @classmethod
+ def _child_test_wait_socket(cls, address, slow):
+ s = socket.socket()
+ s.connect(address)
+ for i in range(10):
+ if slow:
+ time.sleep(random.random()*0.1)
+ s.sendall(('%s\n' % i).encode('ascii'))
+ s.close()
+
+ def test_wait_socket(self, slow=False):
+ from multiprocessing.connection import wait
+ l = socket.socket()
+ l.bind(('', 0))
+ l.listen(4)
+ addr = ('localhost', l.getsockname()[1])
+ readers = []
+ procs = []
+ dic = {}
+
+ for i in range(4):
+ p = multiprocessing.Process(target=self._child_test_wait_socket,
+ args=(addr, slow))
+ p.daemon = True
+ p.start()
+ procs.append(p)
+ self.addCleanup(p.join)
+
+ for i in range(4):
+ r, _ = l.accept()
+ readers.append(r)
+ dic[r] = []
+ l.close()
+
+ while readers:
+ for r in wait(readers):
+ msg = r.recv(32)
+ if not msg:
+ readers.remove(r)
+ r.close()
+ else:
+ dic[r].append(msg)
+
+ expected = ''.join('%s\n' % i for i in range(10)).encode('ascii')
+ for v in dic.values():
+ self.assertEqual(b''.join(v), expected)
+
+ def test_wait_slow(self):
+ self.test_wait(True)
+
+ def test_wait_socket_slow(self):
+ self.test_wait_socket(True)
+
+ def test_wait_timeout(self):
+ from multiprocessing.connection import wait
+
+ expected = 1
+ a, b = multiprocessing.Pipe()
+
+ start = time.time()
+ res = wait([a, b], 1)
+ delta = time.time() - start
+
+ self.assertEqual(res, [])
+ self.assertLess(delta, expected + 0.5)
+ self.assertGreater(delta, expected - 0.5)
+
+ b.send(None)
+
+ start = time.time()
+ res = wait([a, b], 1)
+ delta = time.time() - start
+
+ self.assertEqual(res, [a])
+ self.assertLess(delta, 0.4)
+
+ def test_wait_integer(self):
+ from multiprocessing.connection import wait
+
+ expected = 5
+ a, b = multiprocessing.Pipe()
+ p = multiprocessing.Process(target=time.sleep, args=(expected,))
+
+ p.start()
+ self.assertIsInstance(p.sentinel, int)
+
+ start = time.time()
+ res = wait([a, p.sentinel, b], expected + 20)
+ delta = time.time() - start
+
+ self.assertEqual(res, [p.sentinel])
+ self.assertLess(delta, expected + 2)
+ self.assertGreater(delta, expected - 2)
+
+ a.send(None)
+
+ start = time.time()
+ res = wait([a, p.sentinel, b], 20)
+ delta = time.time() - start
+
+ self.assertEqual(res, [p.sentinel, b])
+ self.assertLess(delta, 0.4)
+
+ b.send(None)
+
+ start = time.time()
+ res = wait([a, p.sentinel, b], 20)
+ delta = time.time() - start
+
+ self.assertEqual(res, [a, p.sentinel, b])
+ self.assertLess(delta, 0.4)
+
+ p.join()
+
+
testcases_other = [OtherTest, TestInvalidHandle, TestInitializers,
- TestStdinBadfiledescriptor]
+ TestStdinBadfiledescriptor, TestWait]
#
#
diff --git a/Lib/test/test_mutants.py b/Lib/test/test_mutants.py
deleted file mode 100644
index b43fa47..0000000
--- a/Lib/test/test_mutants.py
+++ /dev/null
@@ -1,291 +0,0 @@
-from test.support import verbose, TESTFN
-import random
-import os
-
-# From SF bug #422121: Insecurities in dict comparison.
-
-# Safety of code doing comparisons has been an historical Python weak spot.
-# The problem is that comparison of structures written in C *naturally*
-# wants to hold on to things like the size of the container, or "the
-# biggest" containee so far, across a traversal of the container; but
-# code to do containee comparisons can call back into Python and mutate
-# the container in arbitrary ways while the C loop is in midstream. If the
-# C code isn't extremely paranoid about digging things out of memory on
-# each trip, and artificially boosting refcounts for the duration, anything
-# from infinite loops to OS crashes can result (yes, I use Windows <wink>).
-#
-# The other problem is that code designed to provoke a weakness is usually
-# white-box code, and so catches only the particular vulnerabilities the
-# author knew to protect against. For example, Python's list.sort() code
-# went thru many iterations as one "new" vulnerability after another was
-# discovered.
-#
-# So the dict comparison test here uses a black-box approach instead,
-# generating dicts of various sizes at random, and performing random
-# mutations on them at random times. This proved very effective,
-# triggering at least six distinct failure modes the first 20 times I
-# ran it. Indeed, at the start, the driver never got beyond 6 iterations
-# before the test died.
-
-# The dicts are global to make it easy to mutate tham from within functions.
-dict1 = {}
-dict2 = {}
-
-# The current set of keys in dict1 and dict2. These are materialized as
-# lists to make it easy to pick a dict key at random.
-dict1keys = []
-dict2keys = []
-
-# Global flag telling maybe_mutate() whether to *consider* mutating.
-mutate = 0
-
-# If global mutate is true, consider mutating a dict. May or may not
-# mutate a dict even if mutate is true. If it does decide to mutate a
-# dict, it picks one of {dict1, dict2} at random, and deletes a random
-# entry from it; or, more rarely, adds a random element.
-
-def maybe_mutate():
- global mutate
- if not mutate:
- return
- if random.random() < 0.5:
- return
-
- if random.random() < 0.5:
- target, keys = dict1, dict1keys
- else:
- target, keys = dict2, dict2keys
-
- if random.random() < 0.2:
- # Insert a new key.
- mutate = 0 # disable mutation until key inserted
- while 1:
- newkey = Horrid(random.randrange(100))
- if newkey not in target:
- break
- target[newkey] = Horrid(random.randrange(100))
- keys.append(newkey)
- mutate = 1
-
- elif keys:
- # Delete a key at random.
- mutate = 0 # disable mutation until key deleted
- i = random.randrange(len(keys))
- key = keys[i]
- del target[key]
- del keys[i]
- mutate = 1
-
-# A horrid class that triggers random mutations of dict1 and dict2 when
-# instances are compared.
-
-class Horrid:
- def __init__(self, i):
- # Comparison outcomes are determined by the value of i.
- self.i = i
-
- # An artificial hashcode is selected at random so that we don't
- # have any systematic relationship between comparison outcomes
- # (based on self.i and other.i) and relative position within the
- # hash vector (based on hashcode).
- # XXX This is no longer effective.
- ##self.hashcode = random.randrange(1000000000)
-
- def __hash__(self):
- return 42
- return self.hashcode
-
- def __eq__(self, other):
- maybe_mutate() # The point of the test.
- return self.i == other.i
-
- def __ne__(self, other):
- raise RuntimeError("I didn't expect some kind of Spanish inquisition!")
-
- __lt__ = __le__ = __gt__ = __ge__ = __ne__
-
- def __repr__(self):
- return "Horrid(%d)" % self.i
-
-# Fill dict d with numentries (Horrid(i), Horrid(j)) key-value pairs,
-# where i and j are selected at random from the candidates list.
-# Return d.keys() after filling.
-
-def fill_dict(d, candidates, numentries):
- d.clear()
- for i in range(numentries):
- d[Horrid(random.choice(candidates))] = \
- Horrid(random.choice(candidates))
- return list(d.keys())
-
-# Test one pair of randomly generated dicts, each with n entries.
-# Note that dict comparison is trivial if they don't have the same number
-# of entires (then the "shorter" dict is instantly considered to be the
-# smaller one, without even looking at the entries).
-
-def test_one(n):
- global mutate, dict1, dict2, dict1keys, dict2keys
-
- # Fill the dicts without mutating them.
- mutate = 0
- dict1keys = fill_dict(dict1, range(n), n)
- dict2keys = fill_dict(dict2, range(n), n)
-
- # Enable mutation, then compare the dicts so long as they have the
- # same size.
- mutate = 1
- if verbose:
- print("trying w/ lengths", len(dict1), len(dict2), end=' ')
- while dict1 and len(dict1) == len(dict2):
- if verbose:
- print(".", end=' ')
- c = dict1 == dict2
- if verbose:
- print()
-
-# Run test_one n times. At the start (before the bugs were fixed), 20
-# consecutive runs of this test each blew up on or before the sixth time
-# test_one was run. So n doesn't have to be large to get an interesting
-# test.
-# OTOH, calling with large n is also interesting, to ensure that the fixed
-# code doesn't hold on to refcounts *too* long (in which case memory would
-# leak).
-
-def test(n):
- for i in range(n):
- test_one(random.randrange(1, 100))
-
-# See last comment block for clues about good values for n.
-test(100)
-
-##########################################################################
-# Another segfault bug, distilled by Michael Hudson from a c.l.py post.
-
-class Child:
- def __init__(self, parent):
- self.__dict__['parent'] = parent
- def __getattr__(self, attr):
- self.parent.a = 1
- self.parent.b = 1
- self.parent.c = 1
- self.parent.d = 1
- self.parent.e = 1
- self.parent.f = 1
- self.parent.g = 1
- self.parent.h = 1
- self.parent.i = 1
- return getattr(self.parent, attr)
-
-class Parent:
- def __init__(self):
- self.a = Child(self)
-
-# Hard to say what this will print! May vary from time to time. But
-# we're specifically trying to test the tp_print slot here, and this is
-# the clearest way to do it. We print the result to a temp file so that
-# the expected-output file doesn't need to change.
-
-f = open(TESTFN, "w")
-print(Parent().__dict__, file=f)
-f.close()
-os.unlink(TESTFN)
-
-##########################################################################
-# And another core-dumper from Michael Hudson.
-
-dict = {}
-
-# Force dict to malloc its table.
-for i in range(1, 10):
- dict[i] = i
-
-f = open(TESTFN, "w")
-
-class Machiavelli:
- def __repr__(self):
- dict.clear()
-
- # Michael sez: "doesn't crash without this. don't know why."
- # Tim sez: "luck of the draw; crashes with or without for me."
- print(file=f)
-
- return repr("machiavelli")
-
- def __hash__(self):
- return 0
-
-dict[Machiavelli()] = Machiavelli()
-
-print(str(dict), file=f)
-f.close()
-os.unlink(TESTFN)
-del f, dict
-
-
-##########################################################################
-# And another core-dumper from Michael Hudson.
-
-dict = {}
-
-# let's force dict to malloc its table
-for i in range(1, 10):
- dict[i] = i
-
-class Machiavelli2:
- def __eq__(self, other):
- dict.clear()
- return 1
-
- def __hash__(self):
- return 0
-
-dict[Machiavelli2()] = Machiavelli2()
-
-try:
- dict[Machiavelli2()]
-except KeyError:
- pass
-
-del dict
-
-##########################################################################
-# And another core-dumper from Michael Hudson.
-
-dict = {}
-
-# let's force dict to malloc its table
-for i in range(1, 10):
- dict[i] = i
-
-class Machiavelli3:
- def __init__(self, id):
- self.id = id
-
- def __eq__(self, other):
- if self.id == other.id:
- dict.clear()
- return 1
- else:
- return 0
-
- def __repr__(self):
- return "%s(%s)"%(self.__class__.__name__, self.id)
-
- def __hash__(self):
- return 0
-
-dict[Machiavelli3(1)] = Machiavelli3(0)
-dict[Machiavelli3(2)] = Machiavelli3(0)
-
-f = open(TESTFN, "w")
-try:
- try:
- print(dict[Machiavelli3(2)], file=f)
- except KeyError:
- pass
-finally:
- f.close()
- os.unlink(TESTFN)
-
-del dict
-del dict1, dict2, dict1keys, dict2keys
diff --git a/Lib/test/test_nntplib.py b/Lib/test/test_nntplib.py
index cc490f1..c5146c1 100644
--- a/Lib/test/test_nntplib.py
+++ b/Lib/test/test_nntplib.py
@@ -1,4 +1,5 @@
import io
+import socket
import datetime
import textwrap
import unittest
@@ -251,6 +252,26 @@ class NetworkedNNTPTestsMixin:
# value
setattr(cls, name, wrap_meth(meth))
+ def test_with_statement(self):
+ def is_connected():
+ if not hasattr(server, 'file'):
+ return False
+ try:
+ server.help()
+ except (socket.error, EOFError):
+ return False
+ return True
+
+ with self.NNTP_CLASS(self.NNTP_HOST, timeout=TIMEOUT, usenetrc=False) as server:
+ self.assertTrue(is_connected())
+ self.assertTrue(server.help())
+ self.assertFalse(is_connected())
+
+ with self.NNTP_CLASS(self.NNTP_HOST, timeout=TIMEOUT, usenetrc=False) as server:
+ server.quit()
+ self.assertFalse(is_connected())
+
+
NetworkedNNTPTestsMixin.wrap_methods()
@@ -888,7 +909,7 @@ class NNTPv1v2TestsMixin:
def _check_article_body(self, lines):
self.assertEqual(len(lines), 4)
- self.assertEqual(lines[-1].decode('utf8'), "-- Signed by André.")
+ self.assertEqual(lines[-1].decode('utf-8'), "-- Signed by André.")
self.assertEqual(lines[-2], b"")
self.assertEqual(lines[-3], b".Here is a dot-starting line.")
self.assertEqual(lines[-4], b"This is just a test article.")
@@ -1127,12 +1148,12 @@ class NNTPv1v2TestsMixin:
self.assertEqual(resp, success_resp)
# With an iterable of terminated lines
def iterlines(b):
- return iter(b.splitlines(True))
+ return iter(b.splitlines(keepends=True))
resp = self._check_post_ihave_sub(func, *args, file_factory=iterlines)
self.assertEqual(resp, success_resp)
# With an iterable of non-terminated lines
def iterlines(b):
- return iter(b.splitlines(False))
+ return iter(b.splitlines(keepends=False))
resp = self._check_post_ihave_sub(func, *args, file_factory=iterlines)
self.assertEqual(resp, success_resp)
diff --git a/Lib/test/test_ntpath.py b/Lib/test/test_ntpath.py
index 6464950..2c9dab9 100644
--- a/Lib/test/test_ntpath.py
+++ b/Lib/test/test_ntpath.py
@@ -1,10 +1,11 @@
import ntpath
import os
import sys
+import unittest
+import warnings
from test.support import TestFailed
from test import support, test_genericpath
from tempfile import TemporaryFile
-import unittest
def tester(fn, wantResult):
@@ -21,7 +22,9 @@ def tester(fn, wantResult):
fn = fn.replace('["', '[b"')
fn = fn.replace(", '", ", b'")
fn = fn.replace(', "', ', b"')
- gotResult = eval(fn)
+ with warnings.catch_warnings():
+ warnings.simplefilter("ignore", DeprecationWarning)
+ gotResult = eval(fn)
if isinstance(wantResult, str):
wantResult = wantResult.encode('ascii')
elif isinstance(wantResult, tuple):
diff --git a/Lib/test/test_optparse.py b/Lib/test/test_optparse.py
index 7b95612..d1ae757 100644
--- a/Lib/test/test_optparse.py
+++ b/Lib/test/test_optparse.py
@@ -631,7 +631,7 @@ class TestStandard(BaseTest):
option_list=options)
def test_required_value(self):
- self.assertParseFail(["-a"], "-a option requires an argument")
+ self.assertParseFail(["-a"], "-a option requires 1 argument")
def test_invalid_integer(self):
self.assertParseFail(["-b", "5x"],
@@ -1023,7 +1023,7 @@ class TestExtendAddTypes(BaseTest):
TYPE_CHECKER["file"] = check_file
def test_filetype_ok(self):
- open(support.TESTFN, "w").close()
+ support.create_empty_file(support.TESTFN)
self.assertParseOK(["--file", support.TESTFN, "-afoo"],
{'file': support.TESTFN, 'a': 'foo'},
[])
diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py
index 8bc8ba9..d259515 100644
--- a/Lib/test/test_os.py
+++ b/Lib/test/test_os.py
@@ -14,19 +14,38 @@ import shutil
from test import support
import contextlib
import mmap
+import platform
+import re
import uuid
+import asyncore
+import asynchat
+import socket
+import itertools
+import stat
+import locale
+import codecs
+try:
+ import threading
+except ImportError:
+ threading = None
from test.script_helper import assert_python_ok
+os.stat_float_times(True)
+st = os.stat(__file__)
+stat_supports_subsecond = (
+ # check if float and int timestamps are different
+ (st.st_atime != st[7])
+ or (st.st_mtime != st[8])
+ or (st.st_ctime != st[9]))
+
# Detect whether we're on a Linux system that uses the (now outdated
# and unmaintained) linuxthreads threading library. There's an issue
# when combining linuxthreads with a failed execv call: see
# http://bugs.python.org/issue4970.
-if (hasattr(os, "confstr_names") and
- "CS_GNU_LIBPTHREAD_VERSION" in os.confstr_names):
- libpthread = os.confstr("CS_GNU_LIBPTHREAD_VERSION")
- USING_LINUXTHREADS= libpthread.startswith("linuxthreads")
+if hasattr(sys, 'thread_info') and sys.thread_info.version:
+ USING_LINUXTHREADS = sys.thread_info.version.startswith("linuxthreads")
else:
- USING_LINUXTHREADS= False
+ USING_LINUXTHREADS = False
# Tests creating TESTFN
class FileTests(unittest.TestCase):
@@ -123,6 +142,18 @@ class FileTests(unittest.TestCase):
self.fdopen_helper('r')
self.fdopen_helper('r', 100)
+ def test_replace(self):
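+ # os.replace() overwrites an existing destination, unlike
+ # os.rename() on Windows.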
+ TESTFN2 = support.TESTFN + ".2"
+ with open(support.TESTFN, 'w') as f:
+ f.write("1")
+ with open(TESTFN2, 'w') as f:
+ f.write("2")
+ self.addCleanup(os.unlink, TESTFN2)
+ os.replace(support.TESTFN, TESTFN2)
+ self.assertRaises(FileNotFoundError, os.stat, support.TESTFN)
+ with open(TESTFN2, 'r') as f:
+ self.assertEqual(f.read(), "1")
+
# Test attributes on return values from os.*stat* family.
class StatAttributeTests(unittest.TestCase):
@@ -141,7 +172,6 @@ class StatAttributeTests(unittest.TestCase):
if not hasattr(os, "stat"):
return
- import stat
result = os.stat(fname)
# Make sure direct access works
@@ -207,7 +237,9 @@ class StatAttributeTests(unittest.TestCase):
fname = self.fname.encode(sys.getfilesystemencoding())
except UnicodeEncodeError:
self.skipTest("cannot encode %a for the filesystem" % self.fname)
- self.check_stat_attributes(fname)
+ with warnings.catch_warnings():
+ warnings.simplefilter("ignore", DeprecationWarning)
+ self.check_stat_attributes(fname)
def test_statvfs_attributes(self):
if not hasattr(os, "statvfs"):
@@ -264,6 +296,105 @@ class StatAttributeTests(unittest.TestCase):
st2 = os.stat(support.TESTFN)
self.assertEqual(st2.st_mtime, int(st.st_mtime-delta))
+ def test_utime_noargs(self):
+ # Issue #13327 removed the requirement to pass None as the
+ # second argument. Check that the previous methods of passing
+ # a time tuple or None work in addition to no argument.
+ st = os.stat(support.TESTFN)
+ # Doesn't set anything new, but sets the time tuple way
+ os.utime(support.TESTFN, (st.st_atime, st.st_mtime))
+ # Set to the current time in the old explicit way.
+ os.utime(support.TESTFN, None)
+ st1 = os.stat(support.TESTFN)
+ # Set to the current time in the new way
+ os.utime(support.TESTFN)
+ st2 = os.stat(support.TESTFN)
+ self.assertAlmostEqual(st1.st_mtime, st2.st_mtime, delta=10)
+
+ @unittest.skipUnless(stat_supports_subsecond,
+ "os.stat() doesn't has a subsecond resolution")
+ def _test_utime_subsecond(self, set_time_func):
+ asec, amsec = 1, 901
+ atime = asec + amsec * 1e-3
+ msec, mmsec = 2, 901
+ mtime = msec + mmsec * 1e-3
+ filename = self.fname
+ os.utime(filename, (0, 0))
+ set_time_func(filename, atime, mtime)
+ os.stat_float_times(True)
+ st = os.stat(filename)
+ self.assertAlmostEqual(st.st_atime, atime, places=3)
+ self.assertAlmostEqual(st.st_mtime, mtime, places=3)
+
+ def test_utime_subsecond(self):
+ def set_time(filename, atime, mtime):
+ os.utime(filename, (atime, mtime))
+ self._test_utime_subsecond(set_time)
+
+ @unittest.skipUnless(hasattr(os, 'futimes'),
+ "os.futimes required for this test.")
+ def test_futimes_subsecond(self):
+ def set_time(filename, atime, mtime):
+ with open(filename, "wb") as f:
+ os.futimes(f.fileno(), (atime, mtime))
+ self._test_utime_subsecond(set_time)
+
+ @unittest.skipUnless(hasattr(os, 'futimens'),
+ "os.futimens required for this test.")
+ def test_futimens_subsecond(self):
+ def set_time(filename, atime, mtime):
+ with open(filename, "wb") as f:
+ asec, ansec = divmod(atime, 1.0)
+ asec = int(asec)
+ ansec = int(ansec * 1e9)
+ msec, mnsec = divmod(mtime, 1.0)
+ msec = int(msec)
+ mnsec = int(mnsec * 1e9)
+ os.futimens(f.fileno(),
+ (asec, ansec),
+ (msec, mnsec))
+ self._test_utime_subsecond(set_time)
+
+ @unittest.skipUnless(hasattr(os, 'futimesat'),
+ "os.futimesat required for this test.")
+ def test_futimesat_subsecond(self):
+ def set_time(filename, atime, mtime):
+ dirname = os.path.dirname(filename)
+ dirfd = os.open(dirname, os.O_RDONLY)
+ try:
+ os.futimesat(dirfd, os.path.basename(filename),
+ (atime, mtime))
+ finally:
+ os.close(dirfd)
+ self._test_utime_subsecond(set_time)
+
+ @unittest.skipUnless(hasattr(os, 'lutimes'),
+ "os.lutimes required for this test.")
+ def test_lutimes_subsecond(self):
+ def set_time(filename, atime, mtime):
+ os.lutimes(filename, (atime, mtime))
+ self._test_utime_subsecond(set_time)
+
+ @unittest.skipUnless(hasattr(os, 'utimensat'),
+ "os.utimensat required for this test.")
+ def test_utimensat_subsecond(self):
+ def set_time(filename, atime, mtime):
+ dirname = os.path.dirname(filename)
+ dirfd = os.open(dirname, os.O_RDONLY)
+ try:
+ asec, ansec = divmod(atime, 1.0)
+ asec = int(asec)
+ ansec = int(ansec * 1e9)
+ msec, mnsec = divmod(mtime, 1.0)
+ msec = int(msec)
+ mnsec = int(mnsec * 1e9)
+ os.utimensat(dirfd, os.path.basename(filename),
+ (asec, ansec),
+ (msec, mnsec))
+ finally:
+ os.close(dirfd)
+ self._test_utime_subsecond(set_time)
+
# Restrict test to Win32, since there is no guarantee other
# systems support centiseconds
if sys.platform == 'win32':
@@ -426,8 +557,8 @@ class EnvironTests(mapping_tests.BasicTestMappingProtocol):
# On FreeBSD < 7 and OS X < 10.6, unsetenv() doesn't return a value (issue
# #13415).
- @unittest.skipIf(sys.platform.startswith(('freebsd', 'darwin')),
- "due to known OS bug: see issue #13415")
+ @support.requires_freebsd_version(7)
+ @support.requires_mac_ver(10, 6)
def test_unset_error(self):
if sys.platform == "win32":
# an environment variable is limited to 32,767 characters
@@ -441,7 +572,7 @@ class EnvironTests(mapping_tests.BasicTestMappingProtocol):
class WalkTests(unittest.TestCase):
"""Tests for os.walk()."""
- def test_traversal(self):
+ def setUp(self):
import os
from os.path import join
@@ -551,6 +682,60 @@ class WalkTests(unittest.TestCase):
os.remove(dirname)
os.rmdir(support.TESTFN)
+
+@unittest.skipUnless(hasattr(os, 'fwalk'), "Test needs os.fwalk()")
+class FwalkTests(WalkTests):
+ """Tests for os.fwalk()."""
+
+ def test_compare_to_walk(self):
+ # compare with walk() results
+ for topdown, followlinks in itertools.product((True, False), repeat=2):
+ args = support.TESTFN, topdown, None, followlinks
+ expected = {}
+ for root, dirs, files in os.walk(*args):
+ expected[root] = (set(dirs), set(files))
+
+ for root, dirs, files, rootfd in os.fwalk(*args):
+ self.assertIn(root, expected)
+ self.assertEqual(expected[root], (set(dirs), set(files)))
+
+ def test_dir_fd(self):
+ # check returned file descriptors
+ for topdown, followlinks in itertools.product((True, False), repeat=2):
+ args = support.TESTFN, topdown, None, followlinks
+ for root, dirs, files, rootfd in os.fwalk(*args):
+ # check that the FD is valid
+ os.fstat(rootfd)
+ # check that flistdir() returns consistent information
+ self.assertEqual(set(os.flistdir(rootfd)), set(dirs) | set(files))
+
+ def test_fd_leak(self):
+ # Since we're opening a lot of FDs, we must be careful to avoid leaks:
+ # we check both that calling fwalk() a large number of times doesn't
+ # yield EMFILE, and that the minimum allocated FD hasn't changed.
+ minfd = os.dup(1)
+ os.close(minfd)
+ for i in range(256):
+ for x in os.fwalk(support.TESTFN):
+ pass
+ newfd = os.dup(1)
+ self.addCleanup(os.close, newfd)
+ self.assertEqual(newfd, minfd)
+
+ def tearDown(self):
+ # cleanup
+ for root, dirs, files, rootfd in os.fwalk(support.TESTFN, topdown=False):
+ for name in files:
+ os.unlinkat(rootfd, name)
+ for name in dirs:
+ st = os.fstatat(rootfd, name, os.AT_SYMLINK_NOFOLLOW)
+ if stat.S_ISDIR(st.st_mode):
+ os.unlinkat(rootfd, name, os.AT_REMOVEDIR)
+ else:
+ os.unlinkat(rootfd, name)
+ os.rmdir(support.TESTFN)
+
+
class MakedirTests(unittest.TestCase):
def setUp(self):
os.mkdir(support.TESTFN)
@@ -855,7 +1040,9 @@ class LinkTests(unittest.TestCase):
with open(file1, "w") as f1:
f1.write("test")
- os.link(file1, file2)
+ with warnings.catch_warnings():
+ warnings.simplefilter("ignore", DeprecationWarning)
+ os.link(file1, file2)
with open(file1, "r") as f1, open(file2, "r") as f2:
self.assertTrue(os.path.sameopenfile(f1.fileno(), f2.fileno()))
@@ -958,8 +1145,7 @@ if sys.platform != 'win32':
os.mkdir(self.dir)
try:
for fn in bytesfn:
- f = open(os.path.join(self.bdir, fn), "w")
- f.close()
+ support.create_empty_file(os.path.join(self.bdir, fn))
fn = os.fsdecode(fn)
if fn in self.unicodefn:
raise ValueError("duplicate filename")
@@ -1178,8 +1364,10 @@ class Win32SymlinkTests(unittest.TestCase):
self.assertNotEqual(os.lstat(link), os.stat(link))
bytes_link = os.fsencode(link)
- self.assertEqual(os.stat(bytes_link), os.stat(target))
- self.assertNotEqual(os.lstat(bytes_link), os.stat(bytes_link))
+ with warnings.catch_warnings():
+ warnings.simplefilter("ignore", DeprecationWarning)
+ self.assertEqual(os.stat(bytes_link), os.stat(target))
+ self.assertNotEqual(os.lstat(bytes_link), os.stat(bytes_link))
def test_12084(self):
level1 = os.path.abspath(support.TESTFN)
@@ -1238,6 +1426,22 @@ class FSEncodingTests(unittest.TestCase):
self.assertEqual(os.fsdecode(bytesfn), fn)
+
+class DeviceEncodingTests(unittest.TestCase):
+
+ def test_bad_fd(self):
+ # Return None when an fd doesn't actually exist.
+ self.assertIsNone(os.device_encoding(123456))
+
+ @unittest.skipUnless(os.isatty(0) and (sys.platform.startswith('win') or
+ (hasattr(locale, 'nl_langinfo') and hasattr(locale, 'CODESET'))),
+ 'test requires a tty and either Windows or nl_langinfo(CODESET)')
+ def test_device_encoding(self):
+ encoding = os.device_encoding(0)
+ self.assertIsNotNone(encoding)
+ self.assertTrue(codecs.lookup(encoding))
+
+
class PidTests(unittest.TestCase):
@unittest.skipUnless(hasattr(os, 'getppid'), "test needs os.getppid")
def test_getppid(self):
@@ -1259,12 +1463,473 @@ class LoginTests(unittest.TestCase):
self.assertNotEqual(len(user_name), 0)
+@unittest.skipUnless(hasattr(os, 'getpriority') and hasattr(os, 'setpriority'),
+ "needs os.getpriority and os.setpriority")
+class ProgramPriorityTests(unittest.TestCase):
+ """Tests for os.getpriority() and os.setpriority()."""
+
+ def test_set_get_priority(self):
+
+ base = os.getpriority(os.PRIO_PROCESS, os.getpid())
+ os.setpriority(os.PRIO_PROCESS, os.getpid(), base + 1)
+ try:
+ new_prio = os.getpriority(os.PRIO_PROCESS, os.getpid())
+ if base >= 19 and new_prio <= 19:
+ raise unittest.SkipTest(
+ "unable to reliably test setpriority at current nice level of %s" % base)
+ else:
+ self.assertEqual(new_prio, base + 1)
+ finally:
+ try:
+ os.setpriority(os.PRIO_PROCESS, os.getpid(), base)
+ except OSError as err:
+ if err.errno != errno.EACCES:
+ raise
+
+
+if threading is not None:
+ class SendfileTestServer(asyncore.dispatcher, threading.Thread):
+
+ class Handler(asynchat.async_chat):
+
+ def __init__(self, conn):
+ asynchat.async_chat.__init__(self, conn)
+ self.in_buffer = []
+ self.closed = False
+ self.push(b"220 ready\r\n")
+
+ def handle_read(self):
+ data = self.recv(4096)
+ self.in_buffer.append(data)
+
+ def get_data(self):
+ return b''.join(self.in_buffer)
+
+ def handle_close(self):
+ self.close()
+ self.closed = True
+
+ def handle_error(self):
+ raise
+
+ def __init__(self, address):
+ threading.Thread.__init__(self)
+ asyncore.dispatcher.__init__(self)
+ self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
+ self.bind(address)
+ self.listen(5)
+ self.host, self.port = self.socket.getsockname()[:2]
+ self.handler_instance = None
+ self._active = False
+ self._active_lock = threading.Lock()
+
+ # --- public API
+
+ @property
+ def running(self):
+ return self._active
+
+ def start(self):
+ assert not self.running
+ self.__flag = threading.Event()
+ threading.Thread.start(self)
+ self.__flag.wait()
+
+ def stop(self):
+ assert self.running
+ self._active = False
+ self.join()
+
+ def wait(self):
+ # wait for handler connection to be closed, then stop the server
+ while not getattr(self.handler_instance, "closed", False):
+ time.sleep(0.001)
+ self.stop()
+
+ # --- internals
+
+ def run(self):
+ self._active = True
+ self.__flag.set()
+ while self._active and asyncore.socket_map:
+ self._active_lock.acquire()
+ asyncore.loop(timeout=0.001, count=1)
+ self._active_lock.release()
+ asyncore.close_all()
+
+ def handle_accept(self):
+ conn, addr = self.accept()
+ self.handler_instance = self.Handler(conn)
+
+ def handle_connect(self):
+ self.close()
+ handle_read = handle_connect
+
+ def writable(self):
+ return 0
+
+ def handle_error(self):
+ raise
+
+
+@unittest.skipUnless(threading is not None, "test needs threading module")
+@unittest.skipUnless(hasattr(os, 'sendfile'), "test needs os.sendfile()")
+class TestSendfile(unittest.TestCase):
+
+ DATA = b"12345abcde" * 16 * 1024 # 160 KB
+ SUPPORT_HEADERS_TRAILERS = not sys.platform.startswith("linux") and \
+ not sys.platform.startswith("solaris") and \
+ not sys.platform.startswith("sunos")
+
+ @classmethod
+ def setUpClass(cls):
+ with open(support.TESTFN, "wb") as f:
+ f.write(cls.DATA)
+
+ @classmethod
+ def tearDownClass(cls):
+ support.unlink(support.TESTFN)
+
+ def setUp(self):
+ self.server = SendfileTestServer((support.HOST, 0))
+ self.server.start()
+ self.client = socket.socket()
+ self.client.connect((self.server.host, self.server.port))
+ self.client.settimeout(1)
+ # synchronize by waiting for "220 ready" response
+ self.client.recv(1024)
+ self.sockno = self.client.fileno()
+ self.file = open(support.TESTFN, 'rb')
+ self.fileno = self.file.fileno()
+
+ def tearDown(self):
+ self.file.close()
+ self.client.close()
+ if self.server.running:
+ self.server.stop()
+
+ def sendfile_wrapper(self, sock, file, offset, nbytes, headers=[], trailers=[]):
+ """A higher level wrapper representing how an application is
+ supposed to use sendfile().
+ """
+ while 1:
+ try:
+ if self.SUPPORT_HEADERS_TRAILERS:
+ return os.sendfile(sock, file, offset, nbytes, headers,
+ trailers)
+ else:
+ return os.sendfile(sock, file, offset, nbytes)
+ except OSError as err:
+ if err.errno == errno.ECONNRESET:
+ # disconnected
+ raise
+ elif err.errno in (errno.EAGAIN, errno.EBUSY):
+                    # retry sending the data
+ continue
+ else:
+ raise
+
+ def test_send_whole_file(self):
+ # normal send
+ total_sent = 0
+ offset = 0
+ nbytes = 4096
+ while total_sent < len(self.DATA):
+ sent = self.sendfile_wrapper(self.sockno, self.fileno, offset, nbytes)
+ if sent == 0:
+ break
+ offset += sent
+ total_sent += sent
+ self.assertTrue(sent <= nbytes)
+ self.assertEqual(offset, total_sent)
+
+ self.assertEqual(total_sent, len(self.DATA))
+ self.client.shutdown(socket.SHUT_RDWR)
+ self.client.close()
+ self.server.wait()
+ data = self.server.handler_instance.get_data()
+ self.assertEqual(len(data), len(self.DATA))
+ self.assertEqual(data, self.DATA)
+
+ def test_send_at_certain_offset(self):
+ # start sending a file at a certain offset
+ total_sent = 0
+ offset = len(self.DATA) // 2
+ must_send = len(self.DATA) - offset
+ nbytes = 4096
+ while total_sent < must_send:
+ sent = self.sendfile_wrapper(self.sockno, self.fileno, offset, nbytes)
+ if sent == 0:
+ break
+ offset += sent
+ total_sent += sent
+ self.assertTrue(sent <= nbytes)
+
+ self.client.shutdown(socket.SHUT_RDWR)
+ self.client.close()
+ self.server.wait()
+ data = self.server.handler_instance.get_data()
+ expected = self.DATA[len(self.DATA) // 2:]
+ self.assertEqual(total_sent, len(expected))
+ self.assertEqual(len(data), len(expected))
+ self.assertEqual(data, expected)
+
+ def test_offset_overflow(self):
+ # specify an offset > file size
+ offset = len(self.DATA) + 4096
+ try:
+ sent = os.sendfile(self.sockno, self.fileno, offset, 4096)
+ except OSError as e:
+ # Solaris can raise EINVAL if offset >= file length, ignore.
+ if e.errno != errno.EINVAL:
+ raise
+ else:
+ self.assertEqual(sent, 0)
+ self.client.shutdown(socket.SHUT_RDWR)
+ self.client.close()
+ self.server.wait()
+ data = self.server.handler_instance.get_data()
+ self.assertEqual(data, b'')
+
+ def test_invalid_offset(self):
+ with self.assertRaises(OSError) as cm:
+ os.sendfile(self.sockno, self.fileno, -1, 4096)
+ self.assertEqual(cm.exception.errno, errno.EINVAL)
+
+ # --- headers / trailers tests
+
+ if SUPPORT_HEADERS_TRAILERS:
+
+ def test_headers(self):
+ total_sent = 0
+ sent = os.sendfile(self.sockno, self.fileno, 0, 4096,
+ headers=[b"x" * 512])
+ total_sent += sent
+ offset = 4096
+ nbytes = 4096
+ while 1:
+ sent = self.sendfile_wrapper(self.sockno, self.fileno,
+ offset, nbytes)
+ if sent == 0:
+ break
+ total_sent += sent
+ offset += sent
+
+ expected_data = b"x" * 512 + self.DATA
+ self.assertEqual(total_sent, len(expected_data))
+ self.client.close()
+ self.server.wait()
+ data = self.server.handler_instance.get_data()
+ self.assertEqual(hash(data), hash(expected_data))
+
+ def test_trailers(self):
+ TESTFN2 = support.TESTFN + "2"
+ with open(TESTFN2, 'wb') as f:
+ f.write(b"abcde")
+            with open(TESTFN2, 'rb') as f:
+ self.addCleanup(os.remove, TESTFN2)
+ os.sendfile(self.sockno, f.fileno(), 0, 4096,
+ trailers=[b"12345"])
+ self.client.close()
+ self.server.wait()
+ data = self.server.handler_instance.get_data()
+ self.assertEqual(data, b"abcde12345")
+
+ if hasattr(os, "SF_NODISKIO"):
+ def test_flags(self):
+ try:
+ os.sendfile(self.sockno, self.fileno, 0, 4096,
+ flags=os.SF_NODISKIO)
+ except OSError as err:
+ if err.errno not in (errno.EBUSY, errno.EAGAIN):
+ raise
+
+
+def supports_extended_attributes():
+ if not hasattr(os, "setxattr"):
+ return False
+ try:
+ with open(support.TESTFN, "wb") as fp:
+ try:
+ os.fsetxattr(fp.fileno(), b"user.test", b"")
+ except OSError as e:
+ if e.errno != errno.ENOTSUP:
+ raise
+ return False
+ finally:
+ support.unlink(support.TESTFN)
+ # Kernels < 2.6.39 don't respect setxattr flags.
+ kernel_version = platform.release()
+    m = re.match(r"2\.6\.(\d{1,2})", kernel_version)
+ return m is None or int(m.group(1)) >= 39
+
+
+@unittest.skipUnless(supports_extended_attributes(),
+ "no non-broken extended attribute support")
+class ExtendedAttributeTests(unittest.TestCase):
+
+ def tearDown(self):
+ support.unlink(support.TESTFN)
+
+ def _check_xattrs_str(self, s, getxattr, setxattr, removexattr, listxattr):
+ fn = support.TESTFN
+ open(fn, "wb").close()
+ with self.assertRaises(OSError) as cm:
+ getxattr(fn, s("user.test"))
+ self.assertEqual(cm.exception.errno, errno.ENODATA)
+ init_xattr = listxattr(fn)
+ self.assertIsInstance(init_xattr, list)
+ setxattr(fn, s("user.test"), b"")
+ xattr = set(init_xattr)
+ xattr.add("user.test")
+ self.assertEqual(set(listxattr(fn)), xattr)
+ self.assertEqual(getxattr(fn, b"user.test"), b"")
+ setxattr(fn, s("user.test"), b"hello", os.XATTR_REPLACE)
+ self.assertEqual(getxattr(fn, b"user.test"), b"hello")
+ with self.assertRaises(OSError) as cm:
+ setxattr(fn, s("user.test"), b"bye", os.XATTR_CREATE)
+ self.assertEqual(cm.exception.errno, errno.EEXIST)
+ with self.assertRaises(OSError) as cm:
+ setxattr(fn, s("user.test2"), b"bye", os.XATTR_REPLACE)
+ self.assertEqual(cm.exception.errno, errno.ENODATA)
+ setxattr(fn, s("user.test2"), b"foo", os.XATTR_CREATE)
+ xattr.add("user.test2")
+ self.assertEqual(set(listxattr(fn)), xattr)
+ removexattr(fn, s("user.test"))
+ with self.assertRaises(OSError) as cm:
+ getxattr(fn, s("user.test"))
+ self.assertEqual(cm.exception.errno, errno.ENODATA)
+ xattr.remove("user.test")
+ self.assertEqual(set(listxattr(fn)), xattr)
+ self.assertEqual(getxattr(fn, s("user.test2")), b"foo")
+ setxattr(fn, s("user.test"), b"a"*1024)
+ self.assertEqual(getxattr(fn, s("user.test")), b"a"*1024)
+ removexattr(fn, s("user.test"))
+ many = sorted("user.test{}".format(i) for i in range(100))
+ for thing in many:
+ setxattr(fn, thing, b"x")
+ self.assertEqual(set(listxattr(fn)), set(init_xattr) | set(many))
+
+ def _check_xattrs(self, *args):
+ def make_bytes(s):
+ return bytes(s, "ascii")
+ self._check_xattrs_str(str, *args)
+ support.unlink(support.TESTFN)
+ self._check_xattrs_str(make_bytes, *args)
+
+ def test_simple(self):
+ self._check_xattrs(os.getxattr, os.setxattr, os.removexattr,
+ os.listxattr)
+
+ def test_lpath(self):
+ self._check_xattrs(os.lgetxattr, os.lsetxattr, os.lremovexattr,
+ os.llistxattr)
+
+ def test_fds(self):
+ def getxattr(path, *args):
+ with open(path, "rb") as fp:
+ return os.fgetxattr(fp.fileno(), *args)
+ def setxattr(path, *args):
+ with open(path, "wb") as fp:
+ os.fsetxattr(fp.fileno(), *args)
+ def removexattr(path, *args):
+ with open(path, "wb") as fp:
+ os.fremovexattr(fp.fileno(), *args)
+ def listxattr(path, *args):
+ with open(path, "rb") as fp:
+ return os.flistxattr(fp.fileno(), *args)
+ self._check_xattrs(getxattr, setxattr, removexattr, listxattr)
+
+
+@unittest.skipUnless(sys.platform == "win32", "Win32 specific tests")
+class Win32DeprecatedBytesAPI(unittest.TestCase):
+ def test_deprecated(self):
+ import nt
+ filename = os.fsencode(support.TESTFN)
+ with warnings.catch_warnings():
+ warnings.simplefilter("error", DeprecationWarning)
+ for func, *args in (
+ (nt._getfullpathname, filename),
+ (nt._isdir, filename),
+ (os.access, filename, os.R_OK),
+ (os.chdir, filename),
+ (os.chmod, filename, 0o777),
+ (os.getcwdb,),
+ (os.link, filename, filename),
+ (os.listdir, filename),
+ (os.lstat, filename),
+ (os.mkdir, filename),
+ (os.open, filename, os.O_RDONLY),
+ (os.rename, filename, filename),
+ (os.rmdir, filename),
+ (os.startfile, filename),
+ (os.stat, filename),
+ (os.unlink, filename),
+ (os.utime, filename),
+ ):
+ self.assertRaises(DeprecationWarning, func, *args)
+
+ @support.skip_unless_symlink
+ def test_symlink(self):
+ filename = os.fsencode(support.TESTFN)
+ with warnings.catch_warnings():
+ warnings.simplefilter("error", DeprecationWarning)
+ self.assertRaises(DeprecationWarning,
+ os.symlink, filename, filename)
+
+
+@unittest.skipUnless(hasattr(os, 'get_terminal_size'), "requires os.get_terminal_size")
+class TermsizeTests(unittest.TestCase):
+ def test_does_not_crash(self):
+ """Check if get_terminal_size() returns a meaningful value.
+
+ There's no easy portable way to actually check the size of the
+ terminal, so let's check if it returns something sensible instead.
+ """
+ try:
+ size = os.get_terminal_size()
+ except OSError as e:
+ if sys.platform == "win32" or e.errno in (errno.EINVAL, errno.ENOTTY):
+                # Under win32 a generic OSError can be raised if the
+ # handle cannot be retrieved
+ self.skipTest("failed to query terminal size")
+ raise
+
+ self.assertGreaterEqual(size.columns, 0)
+ self.assertGreaterEqual(size.lines, 0)
+
+ def test_stty_match(self):
+ """Check if stty returns the same results
+
+ stty actually tests stdin, so get_terminal_size is invoked on
+ stdin explicitly. If stty succeeded, then get_terminal_size()
+ should work too.
+ """
+ try:
+ size = subprocess.check_output(['stty', 'size']).decode().split()
+ except (FileNotFoundError, subprocess.CalledProcessError):
+ self.skipTest("stty invocation failed")
+ expected = (int(size[1]), int(size[0])) # reversed order
+
+ try:
+ actual = os.get_terminal_size(sys.__stdin__.fileno())
+ except OSError as e:
+ if sys.platform == "win32" or e.errno in (errno.EINVAL, errno.ENOTTY):
+                # Under win32 a generic OSError can be raised if the
+ # handle cannot be retrieved
+ self.skipTest("failed to query terminal size")
+ raise
+ self.assertEqual(expected, actual)
+
+
+@support.reap_threads
def test_main():
support.run_unittest(
FileTests,
StatAttributeTests,
EnvironTests,
WalkTests,
+ FwalkTests,
MakedirTests,
DevNullTests,
URandomTests,
@@ -1276,9 +1941,15 @@ def test_main():
Win32KillTests,
Win32SymlinkTests,
FSEncodingTests,
+ DeviceEncodingTests,
PidTests,
LoginTests,
LinkTests,
+ TestSendfile,
+ ProgramPriorityTests,
+ ExtendedAttributeTests,
+ Win32DeprecatedBytesAPI,
+ TermsizeTests,
)
if __name__ == "__main__":
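
The sendfile_wrapper() helper above encodes the usage pattern os.sendfile()
expects of applications: retry on EAGAIN/EBUSY, propagate every other error.
The same loop as a standalone sketch, assuming a connected blocking TCP
socket and a platform that provides os.sendfile():

    import errno
    import os

    def send_file(sock, path):
        """Send the whole file at `path` over `sock`; return bytes sent."""
        total = 0
        with open(path, "rb") as f:
            fd = f.fileno()
            size = os.fstat(fd).st_size
            while total < size:
                try:
                    sent = os.sendfile(sock.fileno(), fd, total, 65536)
                except OSError as err:
                    if err.errno in (errno.EAGAIN, errno.EBUSY):
                        continue  # kernel not ready, retry the same offset
                    raise
                if sent == 0:
                    break  # unexpected end of file
                total += sent
        return total
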
diff --git a/Lib/test/test_ossaudiodev.py b/Lib/test/test_ossaudiodev.py
index 9cb89d6..3908a05 100644
--- a/Lib/test/test_ossaudiodev.py
+++ b/Lib/test/test_ossaudiodev.py
@@ -170,6 +170,22 @@ class OSSAudioDevTests(unittest.TestCase):
pass
self.assertTrue(dsp.closed)
+ def test_on_closed(self):
+ dsp = ossaudiodev.open('w')
+ dsp.close()
+ self.assertRaises(ValueError, dsp.fileno)
+ self.assertRaises(ValueError, dsp.read, 1)
+ self.assertRaises(ValueError, dsp.write, b'x')
+ self.assertRaises(ValueError, dsp.writeall, b'x')
+ self.assertRaises(ValueError, dsp.bufsize)
+ self.assertRaises(ValueError, dsp.obufcount)
+ self.assertRaises(ValueError, dsp.obuffree)
+ self.assertRaises(ValueError, dsp.getptr)
+
+ mixer = ossaudiodev.openmixer()
+ mixer.close()
+ self.assertRaises(ValueError, mixer.fileno)
def test_main():
try:
diff --git a/Lib/test/test_osx_env.py b/Lib/test/test_osx_env.py
index 8b3df37..24ec2b4 100644
--- a/Lib/test/test_osx_env.py
+++ b/Lib/test/test_osx_env.py
@@ -5,6 +5,7 @@ Test suite for OS X interpreter environment variables.
from test.support import EnvironmentVarGuard, run_unittest
import subprocess
import sys
+import sysconfig
import unittest
class OSXEnvironmentVariableTestCase(unittest.TestCase):
@@ -27,8 +28,6 @@ class OSXEnvironmentVariableTestCase(unittest.TestCase):
self._check_sys('PYTHONEXECUTABLE', '==', 'sys.executable')
def test_main():
- from distutils import sysconfig
-
if sys.platform == 'darwin' and sysconfig.get_config_var('WITH_NEXT_FRAMEWORK'):
run_unittest(OSXEnvironmentVariableTestCase)
diff --git a/Lib/test/test_packaging.py b/Lib/test/test_packaging.py
new file mode 100644
index 0000000..250d661
--- /dev/null
+++ b/Lib/test/test_packaging.py
@@ -0,0 +1,5 @@
+import sys
+from packaging.tests.__main__ import test_main
+
+if __name__ == '__main__':
+ test_main()
diff --git a/Lib/test/test_parser.py b/Lib/test/test_parser.py
index 020acd5..b6f81fb 100644
--- a/Lib/test/test_parser.py
+++ b/Lib/test/test_parser.py
@@ -50,6 +50,10 @@ class RoundtripLegalSyntaxTestCase(unittest.TestCase):
self.check_suite("def f(): (yield 1)*2")
self.check_suite("def f(): return; yield 1")
self.check_suite("def f(): yield 1; return")
+ self.check_suite("def f(): yield from 1")
+ self.check_suite("def f(): x = yield from 1")
+ self.check_suite("def f(): f((yield from 1))")
+ self.check_suite("def f(): yield 1; return 1")
self.check_suite("def f():\n"
" for x in range(30):\n"
" yield x\n")
@@ -614,6 +618,12 @@ class STObjectTestCase(unittest.TestCase):
# XXX tests for pickling and unpickling of ST objects should go here
+class OtherParserCase(unittest.TestCase):
+
+ def test_two_args_to_expr(self):
+ # See bug #12264
+ with self.assertRaises(TypeError):
+ parser.expr("a", "b")
def test_main():
support.run_unittest(
@@ -622,6 +632,7 @@ def test_main():
CompileTestCase,
ParserStackLimitTestCase,
STObjectTestCase,
+ OtherParserCase,
)
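
OtherParserCase pins down issue #12264: parser.expr() must reject a second
positional argument with a clean TypeError instead of crashing. For
reference, the supported call shape (a sketch, mirroring compile()'s eval
mode):

    import parser

    st = parser.expr("a + 1")       # exactly one source-string argument
    code = parser.compilest(st)     # syntax tree -> code object
    assert eval(code, {"a": 2}) == 3
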
diff --git a/Lib/test/test_pdb.py b/Lib/test/test_pdb.py
index d861df5..c197aff 100644
--- a/Lib/test/test_pdb.py
+++ b/Lib/test/test_pdb.py
@@ -20,9 +20,12 @@ class PdbTestInput(object):
def __enter__(self):
self.real_stdin = sys.stdin
sys.stdin = _FakeInput(self.input)
+ self.orig_trace = sys.gettrace() if hasattr(sys, 'gettrace') else None
def __exit__(self, *exc):
sys.stdin = self.real_stdin
+ if self.orig_trace:
+ sys.settrace(self.orig_trace)
def test_pdb_displayhook():
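
The PdbTestInput change saves whatever trace function is active on entry and
reinstates it on exit, so a pdb doctest no longer clobbers hooks installed by
coverage tools or the test runner. The save/restore idiom in isolation:

    import sys

    def tracer(frame, event, arg):
        return tracer                # keep tracing nested frames

    saved = sys.gettrace()           # may be None
    sys.settrace(tracer)
    try:
        pass                         # code to trace goes here
    finally:
        sys.settrace(saved)          # restore the previous tracer, if any
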
diff --git a/Lib/test/test_peepholer.py b/Lib/test/test_peepholer.py
index 53719d3..1e782cf 100644
--- a/Lib/test/test_peepholer.py
+++ b/Lib/test/test_peepholer.py
@@ -3,13 +3,16 @@ import re
import sys
from io import StringIO
import unittest
+from math import copysign
def disassemble(func):
f = StringIO()
tmp = sys.stdout
sys.stdout = f
- dis.dis(func)
- sys.stdout = tmp
+ try:
+ dis.dis(func)
+ finally:
+ sys.stdout = tmp
result = f.getvalue()
f.close()
return result
@@ -17,6 +20,7 @@ def disassemble(func):
def dis_single(line):
return disassemble(compile(line, '', 'single'))
+
class TestTranforms(unittest.TestCase):
def test_unot(self):
@@ -99,6 +103,12 @@ class TestTranforms(unittest.TestCase):
self.assertIn(elem, asm)
self.assertNotIn('BUILD_TUPLE', asm)
+ # Long tuples should be folded too.
+ asm = dis_single(repr(tuple(range(10000))))
+ # One LOAD_CONST for the tuple, one for the None return value
+ self.assertEqual(asm.count('LOAD_CONST'), 2)
+ self.assertNotIn('BUILD_TUPLE', asm)
+
# Bug 1053819: Tuple of constants misidentified when presented with:
# . . . opcode_with_arg 100 unary_opcode BUILD_TUPLE 1 . . .
# The following would segfault upon compilation
@@ -208,14 +218,13 @@ class TestTranforms(unittest.TestCase):
# out of range
asm = dis_single('"fuu"[10]')
self.assertIn('BINARY_SUBSCR', asm)
- # non-BMP char (see #5057)
- asm = dis_single('"\U00012345"[0]')
- self.assertIn('BINARY_SUBSCR', asm)
-
def test_folding_of_unaryops_on_constants(self):
for line, elem in (
('-0.5', '(-0.5)'), # unary negative
+ ('-0.0', '(-0.0)'), # -0.0
+ ('-(1.0-1.0)','(-0.0)'), # -0.0 after folding
+ ('-0', '(0)'), # -0
('~-2', '(1)'), # unary invert
('+1', '(1)'), # unary positive
):
@@ -223,6 +232,13 @@ class TestTranforms(unittest.TestCase):
self.assertIn(elem, asm, asm)
self.assertNotIn('UNARY_', asm)
+ # Check that -0.0 works after marshaling
+ def negzero():
+ return -(1.0-1.0)
+
+ self.assertNotIn('UNARY_', disassemble(negzero))
+ self.assertTrue(copysign(1.0, negzero()) < 0)
+
# Verify that unfoldables are skipped
for line, elem in (
('-"abc"', "('abc')"), # unary negative
@@ -285,6 +301,25 @@ class TestTranforms(unittest.TestCase):
asm = disassemble(f)
self.assertNotIn('BINARY_ADD', asm)
+ def test_constant_folding(self):
+ # Issue #11244: aggressive constant folding.
+ exprs = [
+ "3 * -5",
+ "-3 * 5",
+ "2 * (3 * 4)",
+ "(2 * 3) * 4",
+ "(-1, 2, 3)",
+ "(1, -2, 3)",
+ "(1, 2, -3)",
+ "(1, 2, -3) * 6",
+ "lambda x: x in {(3 * -5) + (-1 - 6), (1, -2, 3) * 2, None}",
+ ]
+ for e in exprs:
+ asm = dis_single(e)
+ self.assertNotIn('UNARY_', asm, e)
+ self.assertNotIn('BINARY_', asm, e)
+ self.assertNotIn('BUILD_', asm, e)
+
class TestBuglets(unittest.TestCase):
def test_bug_11510(self):
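
These assertions detect folding by scanning the disassembly text. The same
effect is easy to observe interactively; a sketch (opcode names vary across
CPython versions, but a fully folded expression leaves one constant and no
BINARY_* instructions):

    import dis

    code = compile("2 * (3 * 4)", "<folded>", "eval")
    dis.dis(code)                  # a single LOAD_CONST 24, then the return
    assert 24 in code.co_consts    # holds on CPython with folding enabled
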
diff --git a/Lib/test/test_pep277.py b/Lib/test/test_pep277.py
index 6d891e5..4b16cbb 100644
--- a/Lib/test/test_pep277.py
+++ b/Lib/test/test_pep277.py
@@ -1,6 +1,9 @@
# Test the Unicode versions of normal file functions
# open, os.open, os.stat. os.listdir, os.rename, os.remove, os.mkdir, os.chdir, os.rmdir
-import sys, os, unittest
+import os
+import sys
+import unittest
+import warnings
from unicodedata import normalize
from test import support
@@ -38,8 +41,8 @@ if sys.platform != 'darwin':
'17_\u2001\u2001\u2001A',
'18_\u2003\u2003\u2003A', # == NFC('\u2001\u2001\u2001A')
'19_\u0020\u0020\u0020A', # '\u0020' == ' ' == NFKC('\u2000') ==
- # NFKC('\u2001') == NFKC('\u2003')
-])
+ # NFKC('\u2001') == NFKC('\u2003')
+ ])
# Is it Unicode-friendly?
@@ -71,7 +74,7 @@ class UnicodeFileTests(unittest.TestCase):
def setUp(self):
try:
os.mkdir(support.TESTFN)
- except OSError:
+ except FileExistsError:
pass
files = set()
for name in self.files:
@@ -90,15 +93,17 @@ class UnicodeFileTests(unittest.TestCase):
return normalize(self.normal_form, s)
return s
- def _apply_failure(self, fn, filename, expected_exception,
- check_fn_in_exception = True):
+ def _apply_failure(self, fn, filename,
+ expected_exception=FileNotFoundError,
+ check_filename=True):
with self.assertRaises(expected_exception) as c:
fn(filename)
exc_filename = c.exception.filename
- # the "filename" exception attribute may be encoded
- if isinstance(exc_filename, bytes):
- filename = filename.encode(sys.getfilesystemencoding())
- if check_fn_in_exception:
+ # listdir may append a wildcard to the filename
+ if fn is os.listdir and sys.platform == 'win32':
+ exc_filename, _, wildcard = exc_filename.rpartition(os.sep)
+ self.assertEqual(wildcard, '*.*')
+ if check_filename:
             self.assertEqual(exc_filename, filename, "Function %s(%a) failed "
"with bad filename in the exception: %a" %
(fn.__name__, filename, exc_filename))
@@ -107,13 +112,18 @@ class UnicodeFileTests(unittest.TestCase):
# Pass non-existing Unicode filenames all over the place.
for name in self.files:
name = "not_" + name
- self._apply_failure(open, name, IOError)
- self._apply_failure(os.stat, name, OSError)
- self._apply_failure(os.chdir, name, OSError)
- self._apply_failure(os.rmdir, name, OSError)
- self._apply_failure(os.remove, name, OSError)
- # listdir may append a wildcard to the filename, so dont check
- self._apply_failure(os.listdir, name, OSError, False)
+ self._apply_failure(open, name)
+ self._apply_failure(os.stat, name)
+ self._apply_failure(os.chdir, name)
+ self._apply_failure(os.rmdir, name)
+ self._apply_failure(os.remove, name)
+ self._apply_failure(os.listdir, name)
+
+ if sys.platform == 'win32':
+ # Windows is lunatic. Issue #13366.
+ _listdir_failure = NotADirectoryError, FileNotFoundError
+ else:
+ _listdir_failure = NotADirectoryError
def test_open(self):
for name in self.files:
@@ -121,12 +131,13 @@ class UnicodeFileTests(unittest.TestCase):
f.write((name+'\n').encode("utf-8"))
f.close()
os.stat(name)
+ self._apply_failure(os.listdir, name, self._listdir_failure)
# Skip the test on darwin, because darwin does normalize the filename to
# NFD (a variant of Unicode NFD form). Normalize the filename to NFC, NFKC,
# NFKD in Python is useless, because darwin will normalize it later and so
# open(), os.stat(), etc. don't raise any exception.
- @unittest.skipIf(sys.platform == 'darwin', 'irrevelant test on Mac OS X')
+ @unittest.skipIf(sys.platform == 'darwin', 'irrelevant test on Mac OS X')
def test_normalize(self):
files = set(self.files)
others = set()
@@ -134,21 +145,22 @@ class UnicodeFileTests(unittest.TestCase):
others |= set(normalize(nf, file) for file in files)
others -= files
for name in others:
- self._apply_failure(open, name, IOError)
- self._apply_failure(os.stat, name, OSError)
- self._apply_failure(os.chdir, name, OSError)
- self._apply_failure(os.rmdir, name, OSError)
- self._apply_failure(os.remove, name, OSError)
- # listdir may append a wildcard to the filename, so dont check
- self._apply_failure(os.listdir, name, OSError, False)
+ self._apply_failure(open, name)
+ self._apply_failure(os.stat, name)
+ self._apply_failure(os.chdir, name)
+ self._apply_failure(os.rmdir, name)
+ self._apply_failure(os.remove, name)
+ self._apply_failure(os.listdir, name)
# Skip the test on darwin, because darwin uses a normalization different
# than Python NFD normalization: filenames are different even if we use
# Python NFD normalization.
- @unittest.skipIf(sys.platform == 'darwin', 'irrevelant test on Mac OS X')
+ @unittest.skipIf(sys.platform == 'darwin', 'irrelevant test on Mac OS X')
def test_listdir(self):
sf0 = set(self.files)
- f1 = os.listdir(support.TESTFN.encode(sys.getfilesystemencoding()))
+ with warnings.catch_warnings():
+ warnings.simplefilter("ignore", DeprecationWarning)
+ f1 = os.listdir(support.TESTFN.encode(sys.getfilesystemencoding()))
f2 = os.listdir(support.TESTFN)
sf2 = set(os.path.join(support.TESTFN, f) for f in f2)
self.assertEqual(sf0, sf2, "%a != %a" % (sf0, sf2))
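
The normalization tests rest on the fact that NFC and NFD spellings of the
same text are distinct strings, so a filename created in one form is not
found under the other (except on darwin, which normalizes on its own). In
miniature:

    from unicodedata import normalize

    name = '\u00e9'                              # precomposed e-acute (NFC)
    decomposed = normalize('NFD', name)          # 'e' + U+0301 combining acute
    assert name != decomposed                    # different code points
    assert normalize('NFC', decomposed) == name  # but the same text
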
diff --git a/Lib/test/test_pep292.py b/Lib/test/test_pep292.py
index 119c7ea..6da8d2e 100644
--- a/Lib/test/test_pep292.py
+++ b/Lib/test/test_pep292.py
@@ -42,19 +42,6 @@ class TestTemplate(unittest.TestCase):
s = Template('$who likes $$')
eq(s.substitute(dict(who='tim', what='ham')), 'tim likes $')
- def test_invalid(self):
- class MyPattern(Template):
- pattern = r"""
- (?:
- (?P<invalid>) |
- (?P<escaped>%(delim)s) |
- @(?P<named>%(id)s) |
- @{(?P<braced>%(id)s)}
- )
- """
- s = MyPattern('$')
- self.assertRaises(ValueError, s.substitute, dict())
-
def test_percents(self):
eq = self.assertEqual
s = Template('%(foo)s $foo ${foo}')
@@ -172,6 +159,26 @@ class TestTemplate(unittest.TestCase):
val = t.safe_substitute({'location': 'Cleveland'})
self.assertEqual(val, 'PyCon in Cleveland')
+ def test_invalid_with_no_lines(self):
+        # The error formatting for invalid templates has a
+        # special case for when no text precedes the invalid
+        # match; the default pattern can never trigger it
+        # (there is always at least a '$'), so we craft a
+        # pattern that is invalid with no leading data.
+ class MyTemplate(Template):
+ pattern = r"""
+ (?P<invalid>) |
+ unreachable(
+ (?P<named>) |
+ (?P<braced>) |
+ (?P<escaped>)
+ )
+ """
+ s = MyTemplate('')
+ with self.assertRaises(ValueError) as err:
+ s.substitute({})
+ self.assertIn('line 1, col 1', str(err.exception))
+
def test_unicode_values(self):
s = Template('$who likes $what')
d = dict(who='t\xffm', what='f\xfe\fed')
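
Both the removed test_invalid and the new test_invalid_with_no_lines work by
swapping in a hand-written `pattern`, the escape hatch for exotic syntaxes.
The everyday customization point is overriding class attributes such as
`delimiter`, from which Template rebuilds its regex automatically; a sketch:

    from string import Template

    class AtTemplate(Template):
        delimiter = '@'     # the class pattern is regenerated from this

    t = AtTemplate('@greeting, $plain @who!')
    assert t.substitute(greeting='Hello', who='world') == 'Hello, $plain world!'
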
diff --git a/Lib/test/test_pep3131.py b/Lib/test/test_pep3131.py
index df0f64d..2e6b90a 100644
--- a/Lib/test/test_pep3131.py
+++ b/Lib/test/test_pep3131.py
@@ -17,12 +17,7 @@ class PEP3131Test(unittest.TestCase):
def test_non_bmp_normalized(self):
𝔘𝔫𝔦𝔠𝔬𝔡𝔢 = 1
- # On wide builds, this is normalized, but on narrow ones it is not. See
- # #12746.
- try:
- self.assertIn("𝔘𝔫𝔦𝔠𝔬𝔡𝔢", dir())
- except AssertionError:
- raise unittest.case._ExpectedFailure(sys.exc_info())
+ self.assertIn("Unicode", dir())
def test_invalid(self):
try:
diff --git a/Lib/test/test_pep3151.py b/Lib/test/test_pep3151.py
new file mode 100644
index 0000000..8af9e0c
--- /dev/null
+++ b/Lib/test/test_pep3151.py
@@ -0,0 +1,201 @@
+import builtins
+import os
+import select
+import socket
+import sys
+import unittest
+import errno
+from errno import EEXIST
+
+from test import support
+
+class SubOSError(OSError):
+ pass
+
+class SubOSErrorWithInit(OSError):
+ def __init__(self, message, bar):
+ self.bar = bar
+ super().__init__(message)
+
+class SubOSErrorWithNew(OSError):
+ def __new__(cls, message, baz):
+ self = super().__new__(cls, message)
+ self.baz = baz
+ return self
+
+class SubOSErrorCombinedInitFirst(SubOSErrorWithInit, SubOSErrorWithNew):
+ pass
+
+class SubOSErrorCombinedNewFirst(SubOSErrorWithNew, SubOSErrorWithInit):
+ pass
+
+
+class HierarchyTest(unittest.TestCase):
+
+ def test_builtin_errors(self):
+ self.assertEqual(OSError.__name__, 'OSError')
+ self.assertIs(IOError, OSError)
+ self.assertIs(EnvironmentError, OSError)
+
+ def test_socket_errors(self):
+ self.assertIs(socket.error, IOError)
+ self.assertIs(socket.gaierror.__base__, OSError)
+ self.assertIs(socket.herror.__base__, OSError)
+ self.assertIs(socket.timeout.__base__, OSError)
+
+ def test_select_error(self):
+ self.assertIs(select.error, OSError)
+
+ # mmap.error is tested in test_mmap
+
+ _pep_map = """
+ +-- BlockingIOError EAGAIN, EALREADY, EWOULDBLOCK, EINPROGRESS
+ +-- ChildProcessError ECHILD
+ +-- ConnectionError
+ +-- BrokenPipeError EPIPE, ESHUTDOWN
+ +-- ConnectionAbortedError ECONNABORTED
+ +-- ConnectionRefusedError ECONNREFUSED
+ +-- ConnectionResetError ECONNRESET
+ +-- FileExistsError EEXIST
+ +-- FileNotFoundError ENOENT
+ +-- InterruptedError EINTR
+ +-- IsADirectoryError EISDIR
+ +-- NotADirectoryError ENOTDIR
+ +-- PermissionError EACCES, EPERM
+ +-- ProcessLookupError ESRCH
+ +-- TimeoutError ETIMEDOUT
+ """
+ def _make_map(s):
+ _map = {}
+ for line in s.splitlines():
+ line = line.strip('+- ')
+ if not line:
+ continue
+ excname, _, errnames = line.partition(' ')
+ for errname in filter(None, errnames.strip().split(', ')):
+ _map[getattr(errno, errname)] = getattr(builtins, excname)
+ return _map
+ _map = _make_map(_pep_map)
+
+ def test_errno_mapping(self):
+ # The OSError constructor maps errnos to subclasses
+ # A sample test for the basic functionality
+        e = OSError(EEXIST, "File exists")
+ self.assertIs(type(e), FileExistsError)
+ # Exhaustive testing
+ for errcode, exc in self._map.items():
+ e = OSError(errcode, "Some message")
+ self.assertIs(type(e), exc)
+ othercodes = set(errno.errorcode) - set(self._map)
+ for errcode in othercodes:
+ e = OSError(errcode, "Some message")
+ self.assertIs(type(e), OSError)
+
+ def test_try_except(self):
+ filename = "some_hopefully_non_existing_file"
+
+ # This checks that try .. except checks the concrete exception
+ # (FileNotFoundError) and not the base type specified when
+ # PyErr_SetFromErrnoWithFilenameObject was called.
+ # (it is therefore deliberate that it doesn't use assertRaises)
+ try:
+ open(filename)
+ except FileNotFoundError:
+ pass
+ else:
+ self.fail("should have raised a FileNotFoundError")
+
+ # Another test for PyErr_SetExcFromWindowsErrWithFilenameObject()
+ self.assertFalse(os.path.exists(filename))
+ try:
+ os.unlink(filename)
+ except FileNotFoundError:
+ pass
+ else:
+ self.fail("should have raised a FileNotFoundError")
+
+
+class AttributesTest(unittest.TestCase):
+
+ def test_windows_error(self):
+ if os.name == "nt":
+ self.assertIn('winerror', dir(OSError))
+ else:
+ self.assertNotIn('winerror', dir(OSError))
+
+ def test_posix_error(self):
+ e = OSError(EEXIST, "File already exists", "foo.txt")
+ self.assertEqual(e.errno, EEXIST)
+ self.assertEqual(e.args[0], EEXIST)
+ self.assertEqual(e.strerror, "File already exists")
+ self.assertEqual(e.filename, "foo.txt")
+ if os.name == "nt":
+ self.assertEqual(e.winerror, None)
+
+ @unittest.skipUnless(os.name == "nt", "Windows-specific test")
+ def test_errno_translation(self):
+ # ERROR_ALREADY_EXISTS (183) -> EEXIST
+ e = OSError(0, "File already exists", "foo.txt", 183)
+ self.assertEqual(e.winerror, 183)
+ self.assertEqual(e.errno, EEXIST)
+ self.assertEqual(e.args[0], EEXIST)
+ self.assertEqual(e.strerror, "File already exists")
+ self.assertEqual(e.filename, "foo.txt")
+
+ def test_blockingioerror(self):
+ args = ("a", "b", "c", "d", "e")
+ for n in range(6):
+ e = BlockingIOError(*args[:n])
+ with self.assertRaises(AttributeError):
+ e.characters_written
+ e = BlockingIOError("a", "b", 3)
+ self.assertEqual(e.characters_written, 3)
+ e.characters_written = 5
+ self.assertEqual(e.characters_written, 5)
+
+ # XXX VMSError not tested
+
+
+class ExplicitSubclassingTest(unittest.TestCase):
+
+ def test_errno_mapping(self):
+ # When constructing an OSError subclass, errno mapping isn't done
+        e = SubOSError(EEXIST, "File exists")
+ self.assertIs(type(e), SubOSError)
+
+    def test_init_overridden(self):
+ e = SubOSErrorWithInit("some message", "baz")
+ self.assertEqual(e.bar, "baz")
+ self.assertEqual(e.args, ("some message",))
+
+ def test_init_kwdargs(self):
+ e = SubOSErrorWithInit("some message", bar="baz")
+ self.assertEqual(e.bar, "baz")
+ self.assertEqual(e.args, ("some message",))
+
+    def test_new_overridden(self):
+ e = SubOSErrorWithNew("some message", "baz")
+ self.assertEqual(e.baz, "baz")
+ self.assertEqual(e.args, ("some message",))
+
+ def test_new_kwdargs(self):
+ e = SubOSErrorWithNew("some message", baz="baz")
+ self.assertEqual(e.baz, "baz")
+ self.assertEqual(e.args, ("some message",))
+
+    def test_init_new_overridden(self):
+ e = SubOSErrorCombinedInitFirst("some message", "baz")
+ self.assertEqual(e.bar, "baz")
+ self.assertEqual(e.baz, "baz")
+ self.assertEqual(e.args, ("some message",))
+ e = SubOSErrorCombinedNewFirst("some message", "baz")
+ self.assertEqual(e.bar, "baz")
+ self.assertEqual(e.baz, "baz")
+ self.assertEqual(e.args, ("some message",))
+
+
+def test_main():
+ support.run_unittest(__name__)
+
+if __name__=="__main__":
+ test_main()
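
HierarchyTest.test_errno_mapping depends on the heart of PEP 3151: the
OSError constructor dispatches to the subclass matching its errno argument.
The observable behaviour, using errno values from the table embedded above:

    import errno

    assert type(OSError(errno.ENOENT, "No such file")) is FileNotFoundError
    assert type(OSError(errno.EEXIST, "File exists")) is FileExistsError
    assert type(OSError(errno.EIO, "I/O error")) is OSError  # unmapped errno

    try:
        raise OSError(errno.EACCES, "Permission denied")
    except PermissionError as exc:   # catchable through the subclass
        assert exc.errno == errno.EACCES
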
diff --git a/Lib/test/test_pep380.py b/Lib/test/test_pep380.py
new file mode 100644
index 0000000..658bcb9
--- /dev/null
+++ b/Lib/test/test_pep380.py
@@ -0,0 +1,951 @@
+# -*- coding: utf-8 -*-
+
+"""
+Test suite for PEP 380 implementation
+
+adapted from original tests written by Greg Ewing
+see <http://www.cosc.canterbury.ac.nz/greg.ewing/python/yield-from/YieldFrom-Python3.1.2-rev5.zip>
+"""
+
+import unittest
+import io
+import sys
+import inspect
+import parser
+
+from test.support import captured_stderr
+
+class TestPEP380Operation(unittest.TestCase):
+ """
+ Test semantics.
+ """
+
+ def test_delegation_of_initial_next_to_subgenerator(self):
+ """
+ Test delegation of initial next() call to subgenerator
+ """
+ trace = []
+ def g1():
+ trace.append("Starting g1")
+ yield from g2()
+ trace.append("Finishing g1")
+ def g2():
+ trace.append("Starting g2")
+ yield 42
+ trace.append("Finishing g2")
+ for x in g1():
+ trace.append("Yielded %s" % (x,))
+ self.assertEqual(trace,[
+ "Starting g1",
+ "Starting g2",
+ "Yielded 42",
+ "Finishing g2",
+ "Finishing g1",
+ ])
+
+ def test_raising_exception_in_initial_next_call(self):
+ """
+ Test raising exception in initial next() call
+ """
+ trace = []
+ def g1():
+ try:
+ trace.append("Starting g1")
+ yield from g2()
+ finally:
+ trace.append("Finishing g1")
+ def g2():
+ try:
+ trace.append("Starting g2")
+ raise ValueError("spanish inquisition occurred")
+ finally:
+ trace.append("Finishing g2")
+ try:
+ for x in g1():
+ trace.append("Yielded %s" % (x,))
+ except ValueError as e:
+ self.assertEqual(e.args[0], "spanish inquisition occurred")
+ else:
+ self.fail("subgenerator failed to raise ValueError")
+ self.assertEqual(trace,[
+ "Starting g1",
+ "Starting g2",
+ "Finishing g2",
+ "Finishing g1",
+ ])
+
+ def test_delegation_of_next_call_to_subgenerator(self):
+ """
+ Test delegation of next() call to subgenerator
+ """
+ trace = []
+ def g1():
+ trace.append("Starting g1")
+ yield "g1 ham"
+ yield from g2()
+ yield "g1 eggs"
+ trace.append("Finishing g1")
+ def g2():
+ trace.append("Starting g2")
+ yield "g2 spam"
+ yield "g2 more spam"
+ trace.append("Finishing g2")
+ for x in g1():
+ trace.append("Yielded %s" % (x,))
+ self.assertEqual(trace,[
+ "Starting g1",
+ "Yielded g1 ham",
+ "Starting g2",
+ "Yielded g2 spam",
+ "Yielded g2 more spam",
+ "Finishing g2",
+ "Yielded g1 eggs",
+ "Finishing g1",
+ ])
+
+ def test_raising_exception_in_delegated_next_call(self):
+ """
+ Test raising exception in delegated next() call
+ """
+ trace = []
+ def g1():
+ try:
+ trace.append("Starting g1")
+ yield "g1 ham"
+ yield from g2()
+ yield "g1 eggs"
+ finally:
+ trace.append("Finishing g1")
+ def g2():
+ try:
+ trace.append("Starting g2")
+ yield "g2 spam"
+ raise ValueError("hovercraft is full of eels")
+ yield "g2 more spam"
+ finally:
+ trace.append("Finishing g2")
+ try:
+ for x in g1():
+ trace.append("Yielded %s" % (x,))
+ except ValueError as e:
+ self.assertEqual(e.args[0], "hovercraft is full of eels")
+ else:
+ self.fail("subgenerator failed to raise ValueError")
+ self.assertEqual(trace,[
+ "Starting g1",
+ "Yielded g1 ham",
+ "Starting g2",
+ "Yielded g2 spam",
+ "Finishing g2",
+ "Finishing g1",
+ ])
+
+ def test_delegation_of_send(self):
+ """
+ Test delegation of send()
+ """
+ trace = []
+ def g1():
+ trace.append("Starting g1")
+ x = yield "g1 ham"
+ trace.append("g1 received %s" % (x,))
+ yield from g2()
+ x = yield "g1 eggs"
+ trace.append("g1 received %s" % (x,))
+ trace.append("Finishing g1")
+ def g2():
+ trace.append("Starting g2")
+ x = yield "g2 spam"
+ trace.append("g2 received %s" % (x,))
+ x = yield "g2 more spam"
+ trace.append("g2 received %s" % (x,))
+ trace.append("Finishing g2")
+ g = g1()
+ y = next(g)
+ x = 1
+ try:
+ while 1:
+ y = g.send(x)
+ trace.append("Yielded %s" % (y,))
+ x += 1
+ except StopIteration:
+ pass
+ self.assertEqual(trace,[
+ "Starting g1",
+ "g1 received 1",
+ "Starting g2",
+ "Yielded g2 spam",
+ "g2 received 2",
+ "Yielded g2 more spam",
+ "g2 received 3",
+ "Finishing g2",
+ "Yielded g1 eggs",
+ "g1 received 4",
+ "Finishing g1",
+ ])
+
+ def test_handling_exception_while_delegating_send(self):
+ """
+ Test handling exception while delegating 'send'
+ """
+ trace = []
+ def g1():
+ trace.append("Starting g1")
+ x = yield "g1 ham"
+ trace.append("g1 received %s" % (x,))
+ yield from g2()
+ x = yield "g1 eggs"
+ trace.append("g1 received %s" % (x,))
+ trace.append("Finishing g1")
+ def g2():
+ trace.append("Starting g2")
+ x = yield "g2 spam"
+ trace.append("g2 received %s" % (x,))
+ raise ValueError("hovercraft is full of eels")
+ x = yield "g2 more spam"
+ trace.append("g2 received %s" % (x,))
+ trace.append("Finishing g2")
+ def run():
+ g = g1()
+ y = next(g)
+ x = 1
+ try:
+ while 1:
+ y = g.send(x)
+ trace.append("Yielded %s" % (y,))
+ x += 1
+ except StopIteration:
+ trace.append("StopIteration")
+ self.assertRaises(ValueError,run)
+ self.assertEqual(trace,[
+ "Starting g1",
+ "g1 received 1",
+ "Starting g2",
+ "Yielded g2 spam",
+ "g2 received 2",
+ ])
+
+ def test_delegating_close(self):
+ """
+ Test delegating 'close'
+ """
+ trace = []
+ def g1():
+ try:
+ trace.append("Starting g1")
+ yield "g1 ham"
+ yield from g2()
+ yield "g1 eggs"
+ finally:
+ trace.append("Finishing g1")
+ def g2():
+ try:
+ trace.append("Starting g2")
+ yield "g2 spam"
+ yield "g2 more spam"
+ finally:
+ trace.append("Finishing g2")
+ g = g1()
+ for i in range(2):
+ x = next(g)
+ trace.append("Yielded %s" % (x,))
+ g.close()
+ self.assertEqual(trace,[
+ "Starting g1",
+ "Yielded g1 ham",
+ "Starting g2",
+ "Yielded g2 spam",
+ "Finishing g2",
+ "Finishing g1"
+ ])
+
+    def test_handling_exception_while_delegating_close(self):
+ """
+ Test handling exception while delegating 'close'
+ """
+ trace = []
+ def g1():
+ try:
+ trace.append("Starting g1")
+ yield "g1 ham"
+ yield from g2()
+ yield "g1 eggs"
+ finally:
+ trace.append("Finishing g1")
+ def g2():
+ try:
+ trace.append("Starting g2")
+ yield "g2 spam"
+ yield "g2 more spam"
+ finally:
+ trace.append("Finishing g2")
+ raise ValueError("nybbles have exploded with delight")
+ try:
+ g = g1()
+ for i in range(2):
+ x = next(g)
+ trace.append("Yielded %s" % (x,))
+ g.close()
+ except ValueError as e:
+ self.assertEqual(e.args[0], "nybbles have exploded with delight")
+ self.assertIsInstance(e.__context__, GeneratorExit)
+ else:
+ self.fail("subgenerator failed to raise ValueError")
+ self.assertEqual(trace,[
+ "Starting g1",
+ "Yielded g1 ham",
+ "Starting g2",
+ "Yielded g2 spam",
+ "Finishing g2",
+ "Finishing g1",
+ ])
+
+ def test_delegating_throw(self):
+ """
+ Test delegating 'throw'
+ """
+ trace = []
+ def g1():
+ try:
+ trace.append("Starting g1")
+ yield "g1 ham"
+ yield from g2()
+ yield "g1 eggs"
+ finally:
+ trace.append("Finishing g1")
+ def g2():
+ try:
+ trace.append("Starting g2")
+ yield "g2 spam"
+ yield "g2 more spam"
+ finally:
+ trace.append("Finishing g2")
+ try:
+ g = g1()
+ for i in range(2):
+ x = next(g)
+ trace.append("Yielded %s" % (x,))
+ e = ValueError("tomato ejected")
+ g.throw(e)
+ except ValueError as e:
+ self.assertEqual(e.args[0], "tomato ejected")
+ else:
+ self.fail("subgenerator failed to raise ValueError")
+ self.assertEqual(trace,[
+ "Starting g1",
+ "Yielded g1 ham",
+ "Starting g2",
+ "Yielded g2 spam",
+ "Finishing g2",
+ "Finishing g1",
+ ])
+
+ def test_value_attribute_of_StopIteration_exception(self):
+ """
+ Test 'value' attribute of StopIteration exception
+ """
+ trace = []
+ def pex(e):
+ trace.append("%s: %s" % (e.__class__.__name__, e))
+ trace.append("value = %s" % (e.value,))
+ e = StopIteration()
+ pex(e)
+ e = StopIteration("spam")
+ pex(e)
+ e.value = "eggs"
+ pex(e)
+ self.assertEqual(trace,[
+ "StopIteration: ",
+ "value = None",
+ "StopIteration: spam",
+ "value = spam",
+ "StopIteration: spam",
+ "value = eggs",
+ ])
+
+
+ def test_exception_value_crash(self):
+ # There used to be a refcount error when the return value
+        # stored in the StopIteration had a refcount of 1.
+ def g1():
+ yield from g2()
+ def g2():
+ yield "g2"
+ return [42]
+ self.assertEqual(list(g1()), ["g2"])
+
+
+ def test_generator_return_value(self):
+ """
+ Test generator return value
+ """
+ trace = []
+ def g1():
+ trace.append("Starting g1")
+ yield "g1 ham"
+ ret = yield from g2()
+ trace.append("g2 returned %s" % (ret,))
+ ret = yield from g2(42)
+ trace.append("g2 returned %s" % (ret,))
+ yield "g1 eggs"
+ trace.append("Finishing g1")
+ def g2(v = None):
+ trace.append("Starting g2")
+ yield "g2 spam"
+ yield "g2 more spam"
+ trace.append("Finishing g2")
+ if v:
+ return v
+ for x in g1():
+ trace.append("Yielded %s" % (x,))
+ self.assertEqual(trace,[
+ "Starting g1",
+ "Yielded g1 ham",
+ "Starting g2",
+ "Yielded g2 spam",
+ "Yielded g2 more spam",
+ "Finishing g2",
+ "g2 returned None",
+ "Starting g2",
+ "Yielded g2 spam",
+ "Yielded g2 more spam",
+ "Finishing g2",
+ "g2 returned 42",
+ "Yielded g1 eggs",
+ "Finishing g1",
+ ])
+
+ def test_delegation_of_next_to_non_generator(self):
+ """
+ Test delegation of next() to non-generator
+ """
+ trace = []
+ def g():
+ yield from range(3)
+ for x in g():
+ trace.append("Yielded %s" % (x,))
+ self.assertEqual(trace,[
+ "Yielded 0",
+ "Yielded 1",
+ "Yielded 2",
+ ])
+
+
+ def test_conversion_of_sendNone_to_next(self):
+ """
+ Test conversion of send(None) to next()
+ """
+ trace = []
+ def g():
+ yield from range(3)
+ gi = g()
+ for x in range(3):
+ y = gi.send(None)
+ trace.append("Yielded: %s" % (y,))
+ self.assertEqual(trace,[
+ "Yielded: 0",
+ "Yielded: 1",
+ "Yielded: 2",
+ ])
+
+ def test_delegation_of_close_to_non_generator(self):
+ """
+ Test delegation of close() to non-generator
+ """
+ trace = []
+ def g():
+ try:
+ trace.append("starting g")
+ yield from range(3)
+ trace.append("g should not be here")
+ finally:
+ trace.append("finishing g")
+ gi = g()
+ next(gi)
+ with captured_stderr() as output:
+ gi.close()
+ self.assertEqual(output.getvalue(), '')
+ self.assertEqual(trace,[
+ "starting g",
+ "finishing g",
+ ])
+
+ def test_delegating_throw_to_non_generator(self):
+ """
+ Test delegating 'throw' to non-generator
+ """
+ trace = []
+ def g():
+ try:
+ trace.append("Starting g")
+ yield from range(10)
+ finally:
+ trace.append("Finishing g")
+ try:
+ gi = g()
+ for i in range(5):
+ x = next(gi)
+ trace.append("Yielded %s" % (x,))
+ e = ValueError("tomato ejected")
+ gi.throw(e)
+ except ValueError as e:
+ self.assertEqual(e.args[0],"tomato ejected")
+ else:
+ self.fail("subgenerator failed to raise ValueError")
+ self.assertEqual(trace,[
+ "Starting g",
+ "Yielded 0",
+ "Yielded 1",
+ "Yielded 2",
+ "Yielded 3",
+ "Yielded 4",
+ "Finishing g",
+ ])
+
+ def test_attempting_to_send_to_non_generator(self):
+ """
+ Test attempting to send to non-generator
+ """
+ trace = []
+ def g():
+ try:
+ trace.append("starting g")
+ yield from range(3)
+ trace.append("g should not be here")
+ finally:
+ trace.append("finishing g")
+ try:
+ gi = g()
+ next(gi)
+ for x in range(3):
+ y = gi.send(42)
+                trace.append("Should not have yielded: %s" % (y,))
+ except AttributeError as e:
+ self.assertIn("send", e.args[0])
+ else:
+ self.fail("was able to send into non-generator")
+ self.assertEqual(trace,[
+ "starting g",
+ "finishing g",
+ ])
+
+ def test_broken_getattr_handling(self):
+ """
+ Test subiterator with a broken getattr implementation
+ """
+ class Broken:
+ def __iter__(self):
+ return self
+ def __next__(self):
+ return 1
+ def __getattr__(self, attr):
+ 1/0
+
+ def g():
+ yield from Broken()
+
+ with self.assertRaises(ZeroDivisionError):
+ gi = g()
+ self.assertEqual(next(gi), 1)
+ gi.send(1)
+
+ with self.assertRaises(ZeroDivisionError):
+ gi = g()
+ self.assertEqual(next(gi), 1)
+ gi.throw(AttributeError)
+
+ with captured_stderr() as output:
+ gi = g()
+ self.assertEqual(next(gi), 1)
+ gi.close()
+ self.assertIn('ZeroDivisionError', output.getvalue())
+
+ def test_exception_in_initial_next_call(self):
+ """
+ Test exception in initial next() call
+ """
+ trace = []
+ def g1():
+ trace.append("g1 about to yield from g2")
+ yield from g2()
+ trace.append("g1 should not be here")
+ def g2():
+ yield 1/0
+ def run():
+ gi = g1()
+ next(gi)
+ self.assertRaises(ZeroDivisionError,run)
+ self.assertEqual(trace,[
+ "g1 about to yield from g2"
+ ])
+
+ def test_attempted_yield_from_loop(self):
+ """
+ Test attempted yield-from loop
+ """
+ trace = []
+ def g1():
+ trace.append("g1: starting")
+ yield "y1"
+ trace.append("g1: about to yield from g2")
+ yield from g2()
+ trace.append("g1 should not be here")
+
+ def g2():
+ trace.append("g2: starting")
+ yield "y2"
+ trace.append("g2: about to yield from g1")
+ yield from gi
+ trace.append("g2 should not be here")
+ try:
+ gi = g1()
+ for y in gi:
+ trace.append("Yielded: %s" % (y,))
+ except ValueError as e:
+ self.assertEqual(e.args[0],"generator already executing")
+ else:
+ self.fail("subgenerator didn't raise ValueError")
+ self.assertEqual(trace,[
+ "g1: starting",
+ "Yielded: y1",
+ "g1: about to yield from g2",
+ "g2: starting",
+ "Yielded: y2",
+ "g2: about to yield from g1",
+ ])
+
+ def test_returning_value_from_delegated_throw(self):
+ """
+ Test returning value from delegated 'throw'
+ """
+ trace = []
+ def g1():
+ try:
+ trace.append("Starting g1")
+ yield "g1 ham"
+ yield from g2()
+ yield "g1 eggs"
+ finally:
+ trace.append("Finishing g1")
+ def g2():
+ try:
+ trace.append("Starting g2")
+ yield "g2 spam"
+ yield "g2 more spam"
+ except LunchError:
+ trace.append("Caught LunchError in g2")
+ yield "g2 lunch saved"
+ yield "g2 yet more spam"
+ class LunchError(Exception):
+ pass
+ g = g1()
+ for i in range(2):
+ x = next(g)
+ trace.append("Yielded %s" % (x,))
+ e = LunchError("tomato ejected")
+ g.throw(e)
+ for x in g:
+ trace.append("Yielded %s" % (x,))
+ self.assertEqual(trace,[
+ "Starting g1",
+ "Yielded g1 ham",
+ "Starting g2",
+ "Yielded g2 spam",
+ "Caught LunchError in g2",
+ "Yielded g2 yet more spam",
+ "Yielded g1 eggs",
+ "Finishing g1",
+ ])
+
+ def test_next_and_return_with_value(self):
+ """
+ Test next and return with value
+ """
+ trace = []
+ def f(r):
+ gi = g(r)
+ next(gi)
+ try:
+ trace.append("f resuming g")
+ next(gi)
+ trace.append("f SHOULD NOT BE HERE")
+ except StopIteration as e:
+ trace.append("f caught %s" % (repr(e),))
+ def g(r):
+ trace.append("g starting")
+ yield
+ trace.append("g returning %s" % (r,))
+ return r
+ f(None)
+ f(42)
+ self.assertEqual(trace,[
+ "g starting",
+ "f resuming g",
+ "g returning None",
+ "f caught StopIteration()",
+ "g starting",
+ "f resuming g",
+ "g returning 42",
+ "f caught StopIteration(42,)",
+ ])
+
+ def test_send_and_return_with_value(self):
+ """
+ Test send and return with value
+ """
+ trace = []
+ def f(r):
+ gi = g(r)
+ next(gi)
+ try:
+ trace.append("f sending spam to g")
+ gi.send("spam")
+ trace.append("f SHOULD NOT BE HERE")
+ except StopIteration as e:
+ trace.append("f caught %r" % (e,))
+ def g(r):
+ trace.append("g starting")
+ x = yield
+ trace.append("g received %s" % (x,))
+ trace.append("g returning %s" % (r,))
+ return r
+ f(None)
+ f(42)
+ self.assertEqual(trace,[
+ "g starting",
+ "f sending spam to g",
+ "g received spam",
+ "g returning None",
+ "f caught StopIteration()",
+ "g starting",
+ "f sending spam to g",
+ "g received spam",
+ "g returning 42",
+ "f caught StopIteration(42,)",
+ ])
+
+ def test_catching_exception_from_subgen_and_returning(self):
+ """
+ Test catching an exception thrown into a
+ subgenerator and returning a value
+ """
+ trace = []
+ def inner():
+ try:
+ yield 1
+ except ValueError:
+ trace.append("inner caught ValueError")
+ return 2
+
+ def outer():
+ v = yield from inner()
+ trace.append("inner returned %r to outer" % v)
+ yield v
+ g = outer()
+ trace.append(next(g))
+ trace.append(g.throw(ValueError))
+ self.assertEqual(trace,[
+ 1,
+ "inner caught ValueError",
+ "inner returned 2 to outer",
+ 2,
+ ])
+
+ def test_throwing_GeneratorExit_into_subgen_that_returns(self):
+ """
+ Test throwing GeneratorExit into a subgenerator that
+ catches it and returns normally.
+ """
+ trace = []
+ def f():
+ try:
+ trace.append("Enter f")
+ yield
+ trace.append("Exit f")
+ except GeneratorExit:
+ return
+ def g():
+ trace.append("Enter g")
+ yield from f()
+ trace.append("Exit g")
+ try:
+ gi = g()
+ next(gi)
+ gi.throw(GeneratorExit)
+ except GeneratorExit:
+ pass
+ else:
+ self.fail("subgenerator failed to raise GeneratorExit")
+ self.assertEqual(trace,[
+ "Enter g",
+ "Enter f",
+ ])
+
+ def test_throwing_GeneratorExit_into_subgenerator_that_yields(self):
+ """
+ Test throwing GeneratorExit into a subgenerator that
+ catches it and yields.
+ """
+ trace = []
+ def f():
+ try:
+ trace.append("Enter f")
+ yield
+ trace.append("Exit f")
+ except GeneratorExit:
+ yield
+ def g():
+ trace.append("Enter g")
+ yield from f()
+ trace.append("Exit g")
+ try:
+ gi = g()
+ next(gi)
+ gi.throw(GeneratorExit)
+ except RuntimeError as e:
+ self.assertEqual(e.args[0], "generator ignored GeneratorExit")
+ else:
+ self.fail("subgenerator failed to raise GeneratorExit")
+ self.assertEqual(trace,[
+ "Enter g",
+ "Enter f",
+ ])
+
+ def test_throwing_GeneratorExit_into_subgen_that_raises(self):
+ """
+ Test throwing GeneratorExit into a subgenerator that
+ catches it and raises a different exception.
+ """
+ trace = []
+ def f():
+ try:
+ trace.append("Enter f")
+ yield
+ trace.append("Exit f")
+ except GeneratorExit:
+ raise ValueError("Vorpal bunny encountered")
+ def g():
+ trace.append("Enter g")
+ yield from f()
+ trace.append("Exit g")
+ try:
+ gi = g()
+ next(gi)
+ gi.throw(GeneratorExit)
+ except ValueError as e:
+ self.assertEqual(e.args[0], "Vorpal bunny encountered")
+ self.assertIsInstance(e.__context__, GeneratorExit)
+ else:
+ self.fail("subgenerator failed to raise ValueError")
+ self.assertEqual(trace,[
+ "Enter g",
+ "Enter f",
+ ])
+
+ def test_yield_from_empty(self):
+ def g():
+ yield from ()
+ self.assertRaises(StopIteration, next, g())
+
+ def test_delegating_generators_claim_to_be_running(self):
+ # Check with basic iteration
+ def one():
+ yield 0
+ yield from two()
+ yield 3
+ def two():
+ yield 1
+ try:
+ yield from g1
+ except ValueError:
+ pass
+ yield 2
+ g1 = one()
+ self.assertEqual(list(g1), [0, 1, 2, 3])
+ # Check with send
+ g1 = one()
+ res = [next(g1)]
+ try:
+ while True:
+ res.append(g1.send(42))
+ except StopIteration:
+ pass
+ self.assertEqual(res, [0, 1, 2, 3])
+ # Check with throw
+ class MyErr(Exception):
+ pass
+ def one():
+ try:
+ yield 0
+ except MyErr:
+ pass
+ yield from two()
+ try:
+ yield 3
+ except MyErr:
+ pass
+ def two():
+ try:
+ yield 1
+ except MyErr:
+ pass
+ try:
+ yield from g1
+ except ValueError:
+ pass
+ try:
+ yield 2
+ except MyErr:
+ pass
+ g1 = one()
+ res = [next(g1)]
+ try:
+ while True:
+ res.append(g1.throw(MyErr))
+ except StopIteration:
+ pass
+ # Check with close
+ class MyIt(object):
+ def __iter__(self):
+ return self
+ def __next__(self):
+ return 42
+ def close(self_):
+ self.assertTrue(g1.gi_running)
+ self.assertRaises(ValueError, next, g1)
+ def one():
+ yield from MyIt()
+ g1 = one()
+ next(g1)
+ g1.close()
+
+ def test_delegator_is_visible_to_debugger(self):
+ def call_stack():
+ return [f[3] for f in inspect.stack()]
+
+ def gen():
+ yield call_stack()
+ yield call_stack()
+ yield call_stack()
+
+ def spam(g):
+ yield from g
+
+ def eggs(g):
+ yield from g
+
+ for stack in spam(gen()):
+ self.assertTrue('spam' in stack)
+
+ for stack in spam(eggs(gen())):
+ self.assertTrue('spam' in stack and 'eggs' in stack)
+
+
+def test_main():
+ from test import support
+ test_classes = [TestPEP380Operation]
+ support.run_unittest(*test_classes)
+
+
+if __name__ == '__main__':
+ test_main()
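
Most of the suite above reduces to one contract: `res = yield from sub()`
pumps next(), send() and throw() through to the subgenerator, and the
subgenerator's `return value` becomes the value of the expression, delivered
via StopIteration.value. The classic averager shows it compactly:

    def averager():
        total, count = 0, 0
        while True:
            value = yield
            if value is None:
                return total / count      # becomes StopIteration.value
            total += value
            count += 1

    def grouper(results, key):
        results[key] = yield from averager()   # captures the return value

    results = {}
    g = grouper(results, "sizes")
    next(g)                  # prime the delegating generator
    for v in (1, 2, 3):
        g.send(v)            # forwarded into averager()
    try:
        g.send(None)         # terminate the subgenerator
    except StopIteration:
        pass                 # grouper itself is now exhausted
    assert results["sizes"] == 2.0
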
diff --git a/Lib/test/test_pickle.py b/Lib/test/test_pickle.py
index 9da2cae..f52d4bd 100644
--- a/Lib/test/test_pickle.py
+++ b/Lib/test/test_pickle.py
@@ -1,5 +1,6 @@
import pickle
import io
+import collections
from test import support
@@ -7,6 +8,7 @@ from test.pickletester import AbstractPickleTests
from test.pickletester import AbstractPickleModuleTests
from test.pickletester import AbstractPersistentPicklerTests
from test.pickletester import AbstractPicklerUnpicklerObjectTests
+from test.pickletester import AbstractDispatchTableTests
from test.pickletester import BigmemPickleTests
try:
@@ -80,6 +82,18 @@ class PyPicklerUnpicklerObjectTests(AbstractPicklerUnpicklerObjectTests):
unpickler_class = pickle._Unpickler
+class PyDispatchTableTests(AbstractDispatchTableTests):
+ pickler_class = pickle._Pickler
+ def get_dispatch_table(self):
+ return pickle.dispatch_table.copy()
+
+
+class PyChainDispatchTableTests(AbstractDispatchTableTests):
+ pickler_class = pickle._Pickler
+ def get_dispatch_table(self):
+ return collections.ChainMap({}, pickle.dispatch_table)
+
+
if has_c_implementation:
class CPicklerTests(PyPicklerTests):
pickler = _pickle.Pickler
@@ -101,14 +115,26 @@ if has_c_implementation:
pickler_class = _pickle.Pickler
unpickler_class = _pickle.Unpickler
+ class CDispatchTableTests(AbstractDispatchTableTests):
+ pickler_class = pickle.Pickler
+ def get_dispatch_table(self):
+ return pickle.dispatch_table.copy()
+
+ class CChainDispatchTableTests(AbstractDispatchTableTests):
+ pickler_class = pickle.Pickler
+ def get_dispatch_table(self):
+ return collections.ChainMap({}, pickle.dispatch_table)
+
def test_main():
- tests = [PickleTests, PyPicklerTests, PyPersPicklerTests]
+ tests = [PickleTests, PyPicklerTests, PyPersPicklerTests,
+ PyDispatchTableTests, PyChainDispatchTableTests]
if has_c_implementation:
tests.extend([CPicklerTests, CPersPicklerTests,
CDumpPickle_LoadPickle, DumpPickle_CLoadPickle,
PyPicklerUnpicklerObjectTests,
CPicklerUnpicklerObjectTests,
+ CDispatchTableTests, CChainDispatchTableTests,
InMemoryPickleTests])
support.run_unittest(*tests)
support.run_doctest(pickle)
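
The AbstractDispatchTableTests added to pickletester in this change exercise
the new per-pickler dispatch_table attribute: a private type-to-reducer
mapping consulted before the global copyreg table. Roughly:

    import io
    import pickle

    class Point:
        def __init__(self, x, y):
            self.x, self.y = x, y

    def reduce_point(p):
        return Point, (p.x, p.y)    # standard (callable, args) reduce tuple

    buf = io.BytesIO()
    p = pickle.Pickler(buf)
    p.dispatch_table = {Point: reduce_point}   # affects this pickler only
    p.dump(Point(1, 2))
    q = pickle.loads(buf.getvalue())
    assert (q.x, q.y) == (1, 2)

The ChainMap variants check the layered form of the same idea:
collections.ChainMap({}, pickle.dispatch_table) gives instance-specific
entries that fall back to the global table.
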
diff --git a/Lib/test/test_pipes.py b/Lib/test/test_pipes.py
index d5b886f..6a7b45f 100644
--- a/Lib/test/test_pipes.py
+++ b/Lib/test/test_pipes.py
@@ -79,21 +79,6 @@ class SimplePipeTests(unittest.TestCase):
with open(TESTFN) as f:
self.assertEqual(f.read(), d)
- def testQuoting(self):
- safeunquoted = string.ascii_letters + string.digits + '@%_-+=:,./'
- unicode_sample = '\xe9\xe0\xdf' # e + acute accent, a + grave, sharp s
- unsafe = '"`$\\!' + unicode_sample
-
- self.assertEqual(pipes.quote(''), "''")
- self.assertEqual(pipes.quote(safeunquoted), safeunquoted)
- self.assertEqual(pipes.quote('test file name'), "'test file name'")
- for u in unsafe:
- self.assertEqual(pipes.quote('test%sname' % u),
- "'test%sname'" % u)
- for u in unsafe:
- self.assertEqual(pipes.quote("test%s'name'" % u),
- "'test%s'\"'\"'name'\"'\"''" % u)
-
def testRepr(self):
t = pipes.Template()
self.assertEqual(repr(t), "<Template instance, steps=[]>")
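
testQuoting leaves test_pipes here; the behaviour itself lives on as
shlex.quote(), of which pipes.quote was the undocumented precursor, and the
assertions presumably moved along with it. The invariants in short:

    import shlex

    assert shlex.quote('safe_name-1.0') == 'safe_name-1.0'  # safe chars pass
    assert shlex.quote('two words') == "'two words'"        # spaces force quoting
    assert shlex.quote('pa$word') == "'pa$word'"            # '$' is inert in ''
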
diff --git a/Lib/test/test_pkgimport.py b/Lib/test/test_pkgimport.py
index c37e936..a8426b5 100644
--- a/Lib/test/test_pkgimport.py
+++ b/Lib/test/test_pkgimport.py
@@ -7,7 +7,7 @@ import tempfile
import unittest
from imp import cache_from_source
-from test.support import run_unittest
+from test.support import run_unittest, create_empty_file
class TestImport(unittest.TestCase):
@@ -29,7 +29,7 @@ class TestImport(unittest.TestCase):
self.package_dir = os.path.join(self.test_dir,
self.package_name)
os.mkdir(self.package_dir)
- open(os.path.join(self.package_dir, '__init__.py'), 'w').close()
+ create_empty_file(os.path.join(self.package_dir, '__init__.py'))
self.module_path = os.path.join(self.package_dir, 'foo.py')
def tearDown(self):
diff --git a/Lib/test/test_platform.py b/Lib/test/test_platform.py
index 8751aa8..cfe623a 100644
--- a/Lib/test/test_platform.py
+++ b/Lib/test/test_platform.py
@@ -1,8 +1,9 @@
-import sys
import os
-import unittest
import platform
import subprocess
+import sys
+import unittest
+import warnings
from test import support
@@ -56,13 +57,11 @@ class PlatformTest(unittest.TestCase):
def setUp(self):
self.save_version = sys.version
- self.save_subversion = sys.subversion
self.save_mercurial = sys._mercurial
self.save_platform = sys.platform
def tearDown(self):
sys.version = self.save_version
- sys.subversion = self.save_subversion
sys._mercurial = self.save_mercurial
sys.platform = self.save_platform
@@ -77,7 +76,7 @@ class PlatformTest(unittest.TestCase):
('IronPython', '1.0.0', '', '', '', '', '.NET 2.0.50727.42')),
):
# branch and revision are not "parsed", but fetched
- # from sys.subversion. Ignore them
+ # from sys._mercurial. Ignore them
(name, version, branch, revision, buildno, builddate, compiler) \
= platform._sys_version(input)
self.assertEqual(
@@ -113,8 +112,6 @@ class PlatformTest(unittest.TestCase):
if subversion is None:
if hasattr(sys, "_mercurial"):
del sys._mercurial
- if hasattr(sys, "subversion"):
- del sys.subversion
else:
sys._mercurial = subversion
if sys_platform is not None:
@@ -247,6 +244,38 @@ class PlatformTest(unittest.TestCase):
):
self.assertEqual(platform._parse_release_file(input), output)
+ def test_popen(self):
+ mswindows = (sys.platform == "win32")
+
+ if mswindows:
+ command = '"{}" -c "print(\'Hello\')"'.format(sys.executable)
+ else:
+ command = "'{}' -c 'print(\"Hello\")'".format(sys.executable)
+ with warnings.catch_warnings():
+ warnings.simplefilter("ignore", DeprecationWarning)
+ with platform.popen(command) as stdout:
+ hello = stdout.read().strip()
+ stdout.close()
+ self.assertEqual(hello, "Hello")
+
+ data = 'plop'
+ if mswindows:
+ command = '"{}" -c "import sys; data=sys.stdin.read(); exit(len(data))"'
+ else:
+ command = "'{}' -c 'import sys; data=sys.stdin.read(); exit(len(data))'"
+ command = command.format(sys.executable)
+ with warnings.catch_warnings():
+ warnings.simplefilter("ignore", DeprecationWarning)
+ with platform.popen(command, 'w') as stdin:
+                stdin.write(data)
+ ret = stdin.close()
+ self.assertIsNotNone(ret)
+ if os.name == 'nt':
+ returncode = ret
+ else:
+ returncode = ret >> 8
+ self.assertEqual(returncode, len(data))
+
def test_main():
support.run_unittest(
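
test_popen decodes close() the way os.popen() has always reported exit
status: on Windows the return code comes back as-is, while on POSIX it is a
wait()-style status with the exit code in the high byte, hence `ret >> 8`.
The same decoding outside the test:

    import os
    import sys

    pipe = os.popen('"%s" -c "import sys; sys.exit(7)"' % sys.executable)
    pipe.read()
    status = pipe.close()        # None means the command exited with 0
    if status is not None:
        code = status if os.name == 'nt' else status >> 8
        assert code == 7
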
diff --git a/Lib/test/test_poplib.py b/Lib/test/test_poplib.py
index e3901b8..c0929a0 100644
--- a/Lib/test/test_poplib.py
+++ b/Lib/test/test_poplib.py
@@ -108,6 +108,10 @@ class DummyPOP3Handler(asynchat.async_chat):
def cmd_apop(self, arg):
self.push('+OK done nothing.')
+ def cmd_quit(self, arg):
+ self.push('+OK closing.')
+ self.close_when_done()
+
class DummyPOP3Server(asyncore.dispatcher, threading.Thread):
@@ -165,10 +169,10 @@ class TestPOP3Class(TestCase):
def setUp(self):
self.server = DummyPOP3Server((HOST, PORT))
self.server.start()
- self.client = poplib.POP3(self.server.host, self.server.port)
+ self.client = poplib.POP3(self.server.host, self.server.port, timeout=3)
def tearDown(self):
- self.client.quit()
+ self.client.close()
self.server.stop()
def test_getwelcome(self):
@@ -228,6 +232,12 @@ class TestPOP3Class(TestCase):
self.client.uidl()
self.client.uidl('foo')
+ def test_quit(self):
+ resp = self.client.quit()
+ self.assertTrue(resp)
+ self.assertIsNone(self.client.sock)
+ self.assertIsNone(self.client.file)
+
SUPPORTS_SSL = False
if hasattr(poplib, 'POP3_SSL'):
@@ -274,6 +284,7 @@ if hasattr(poplib, 'POP3_SSL'):
else:
DummyPOP3Handler.handle_read(self)
+
class TestPOP3_SSLClass(TestPOP3Class):
# repeat previous tests by using poplib.POP3_SSL
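The new cmd_quit/test_quit pair pins down the difference between close() (just tear down the connection) and quit() (send QUIT, then tear down). A sketch against a hypothetical server, host name assumed:

    import poplib

    client = poplib.POP3('pop.example.com', 110, timeout=3)
    try:
        resp = client.quit()          # sends QUIT, reads '+OK', closes
        assert client.sock is None and client.file is None
    finally:
        client.close()                # no-op if quit() already ran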
diff --git a/Lib/test/test_posix.py b/Lib/test/test_posix.py
index 09f04ec..c3dfffb 100644
--- a/Lib/test/test_posix.py
+++ b/Lib/test/test_posix.py
@@ -9,6 +9,7 @@ import errno
import sys
import time
import os
+import fcntl
import pwd
import shutil
import stat
@@ -42,7 +43,7 @@ class PosixTester(unittest.TestCase):
NO_ARG_FUNCTIONS = [ "ctermid", "getcwd", "getcwdb", "uname",
"times", "getloadavg",
"getegid", "geteuid", "getgid", "getgroups",
- "getpid", "getpgrp", "getppid", "getuid",
+ "getpid", "getpgrp", "getppid", "getuid", "sync",
]
for name in NO_ARG_FUNCTIONS:
@@ -137,6 +138,159 @@ class PosixTester(unittest.TestCase):
finally:
fp.close()
+ @unittest.skipUnless(hasattr(posix, 'truncate'), "test needs posix.truncate()")
+ def test_truncate(self):
+ with open(support.TESTFN, 'w') as fp:
+ fp.write('test')
+ fp.flush()
+ posix.truncate(support.TESTFN, 0)
+
+ @unittest.skipUnless(hasattr(posix, 'fexecve'), "test needs posix.fexecve()")
+ @unittest.skipUnless(hasattr(os, 'fork'), "test needs os.fork()")
+ @unittest.skipUnless(hasattr(os, 'waitpid'), "test needs os.waitpid()")
+ def test_fexecve(self):
+ fp = os.open(sys.executable, os.O_RDONLY)
+ try:
+ pid = os.fork()
+ if pid == 0:
+ os.chdir(os.path.split(sys.executable)[0])
+ posix.fexecve(fp, [sys.executable, '-c', 'pass'], os.environ)
+ else:
+ self.assertEqual(os.waitpid(pid, 0), (pid, 0))
+ finally:
+ os.close(fp)
+
+ @unittest.skipUnless(hasattr(posix, 'waitid'), "test needs posix.waitid()")
+ @unittest.skipUnless(hasattr(os, 'fork'), "test needs os.fork()")
+ def test_waitid(self):
+ pid = os.fork()
+ if pid == 0:
+ os.chdir(os.path.split(sys.executable)[0])
+ posix.execve(sys.executable, [sys.executable, '-c', 'pass'], os.environ)
+ else:
+ res = posix.waitid(posix.P_PID, pid, posix.WEXITED)
+ self.assertEqual(pid, res.si_pid)
+
+ @unittest.skipUnless(hasattr(posix, 'lockf'), "test needs posix.lockf()")
+ def test_lockf(self):
+ fd = os.open(support.TESTFN, os.O_WRONLY | os.O_CREAT)
+ try:
+ os.write(fd, b'test')
+ os.lseek(fd, 0, os.SEEK_SET)
+ posix.lockf(fd, posix.F_LOCK, 4)
+ # section is locked
+ posix.lockf(fd, posix.F_ULOCK, 4)
+ finally:
+ os.close(fd)
+
+ @unittest.skipUnless(hasattr(posix, 'pread'), "test needs posix.pread()")
+ def test_pread(self):
+ fd = os.open(support.TESTFN, os.O_RDWR | os.O_CREAT)
+ try:
+ os.write(fd, b'test')
+ os.lseek(fd, 0, os.SEEK_SET)
+ self.assertEqual(b'es', posix.pread(fd, 2, 1))
+ # the first pread() shouldn't disturb the file offset
+ self.assertEqual(b'te', posix.read(fd, 2))
+ finally:
+ os.close(fd)
+
+ @unittest.skipUnless(hasattr(posix, 'pwrite'), "test needs posix.pwrite()")
+ def test_pwrite(self):
+ fd = os.open(support.TESTFN, os.O_RDWR | os.O_CREAT)
+ try:
+ os.write(fd, b'test')
+ os.lseek(fd, 0, os.SEEK_SET)
+ posix.pwrite(fd, b'xx', 1)
+ self.assertEqual(b'txxt', posix.read(fd, 4))
+ finally:
+ os.close(fd)
+
+ @unittest.skipUnless(hasattr(posix, 'posix_fallocate'),
+ "test needs posix.posix_fallocate()")
+ def test_posix_fallocate(self):
+ fd = os.open(support.TESTFN, os.O_WRONLY | os.O_CREAT)
+ try:
+ posix.posix_fallocate(fd, 0, 10)
+ except OSError as inst:
+ # issue10812, ZFS doesn't appear to support posix_fallocate,
+ # so skip Solaris-based systems, since they are likely to have ZFS.
+ if inst.errno != errno.EINVAL or not sys.platform.startswith("sunos"):
+ raise
+ finally:
+ os.close(fd)
+
+ @unittest.skipUnless(hasattr(posix, 'posix_fadvise'),
+ "test needs posix.posix_fadvise()")
+ def test_posix_fadvise(self):
+ fd = os.open(support.TESTFN, os.O_RDONLY)
+ try:
+ posix.posix_fadvise(fd, 0, 0, posix.POSIX_FADV_WILLNEED)
+ finally:
+ os.close(fd)
+
+ @unittest.skipUnless(hasattr(posix, 'futimes'), "test needs posix.futimes()")
+ def test_futimes(self):
+ now = time.time()
+ fd = os.open(support.TESTFN, os.O_RDONLY)
+ try:
+ posix.futimes(fd, None)
+ posix.futimes(fd)
+ self.assertRaises(TypeError, posix.futimes, fd, (None, None))
+ self.assertRaises(TypeError, posix.futimes, fd, (now, None))
+ self.assertRaises(TypeError, posix.futimes, fd, (None, now))
+ posix.futimes(fd, (int(now), int(now)))
+ posix.futimes(fd, (now, now))
+ finally:
+ os.close(fd)
+
+ @unittest.skipUnless(hasattr(posix, 'lutimes'), "test needs posix.lutimes()")
+ def test_lutimes(self):
+ now = time.time()
+ posix.lutimes(support.TESTFN, None)
+ self.assertRaises(TypeError, posix.lutimes, support.TESTFN, (None, None))
+ self.assertRaises(TypeError, posix.lutimes, support.TESTFN, (now, None))
+ self.assertRaises(TypeError, posix.lutimes, support.TESTFN, (None, now))
+ posix.lutimes(support.TESTFN, (int(now), int(now)))
+ posix.lutimes(support.TESTFN, (now, now))
+ posix.lutimes(support.TESTFN)
+
+ @unittest.skipUnless(hasattr(posix, 'futimens'), "test needs posix.futimens()")
+ def test_futimens(self):
+ now = time.time()
+ fd = os.open(support.TESTFN, os.O_RDONLY)
+ try:
+ self.assertRaises(TypeError, posix.futimens, fd, (None, None), (None, None))
+ self.assertRaises(TypeError, posix.futimens, fd, (now, 0), None)
+ self.assertRaises(TypeError, posix.futimens, fd, None, (now, 0))
+ posix.futimens(fd, (int(now), int((now - int(now)) * 1e9)),
+ (int(now), int((now - int(now)) * 1e9)))
+ posix.futimens(fd)
+ finally:
+ os.close(fd)
+
+ @unittest.skipUnless(hasattr(posix, 'writev'), "test needs posix.writev()")
+ def test_writev(self):
+ fd = os.open(support.TESTFN, os.O_RDWR | os.O_CREAT)
+ try:
+ os.writev(fd, (b'test1', b'tt2', b't3'))
+ os.lseek(fd, 0, os.SEEK_SET)
+ self.assertEqual(b'test1tt2t3', posix.read(fd, 10))
+ finally:
+ os.close(fd)
+
+ @unittest.skipUnless(hasattr(posix, 'readv'), "test needs posix.readv()")
+ def test_readv(self):
+ fd = os.open(support.TESTFN, os.O_RDWR | os.O_CREAT)
+ try:
+ os.write(fd, b'test1tt2t3')
+ os.lseek(fd, 0, os.SEEK_SET)
+ buf = [bytearray(i) for i in [5, 3, 2]]
+ self.assertEqual(posix.readv(fd, buf), 10)
+ self.assertEqual([b'test1', b'tt2', b't3'], [bytes(i) for i in buf])
+ finally:
+ os.close(fd)
+
def test_dup(self):
if hasattr(posix, 'dup'):
fp = open(support.TESTFN)
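The positional and vectored I/O tests above all follow the same pattern; a compact sketch combining them, assuming a POSIX build where os exposes pread/pwrite/readv/writev:

    import os

    fd = os.open('scratch.bin', os.O_RDWR | os.O_CREAT)
    try:
        os.writev(fd, (b'test1', b'tt2', b't3'))  # gathered write, 10 bytes
        print(os.pread(fd, 2, 1))                 # b'es'; offset untouched
        os.pwrite(fd, b'xx', 1)                   # patch bytes 1-2 in place
        os.lseek(fd, 0, os.SEEK_SET)
        bufs = [bytearray(5), bytearray(3), bytearray(2)]
        os.readv(fd, bufs)                        # scattered read into bufs
        print([bytes(b) for b in bufs])           # [b'txxt1', b'tt2', b't3']
    finally:
        os.close(fd)
        os.unlink('scratch.bin')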
@@ -162,6 +316,13 @@ class PosixTester(unittest.TestCase):
fp1.close()
fp2.close()
+ @unittest.skipUnless(hasattr(os, 'O_CLOEXEC'), "needs os.O_CLOEXEC")
+ @support.requires_linux_version(2, 6, 23)
+ def test_oscloexec(self):
+ fd = os.open(support.TESTFN, os.O_RDONLY|os.O_CLOEXEC)
+ self.addCleanup(os.close, fd)
+ self.assertTrue(fcntl.fcntl(fd, fcntl.F_GETFD) & fcntl.FD_CLOEXEC)
+
def test_osexlock(self):
if hasattr(posix, "O_EXLOCK"):
fd = os.open(support.TESTFN,
@@ -254,7 +415,7 @@ class PosixTester(unittest.TestCase):
self.assertRaises(OSError, posix.chown, support.TESTFN, -1, -1)
# re-create the file
- open(support.TESTFN, 'w').close()
+ support.create_empty_file(support.TESTFN)
self._test_all_chown_common(posix.chown, support.TESTFN)
@unittest.skipUnless(hasattr(posix, 'fchown'), "test needs os.fchown()")
@@ -290,6 +451,20 @@ class PosixTester(unittest.TestCase):
if hasattr(posix, 'listdir'):
self.assertTrue(support.TESTFN in posix.listdir())
+ @unittest.skipUnless(hasattr(posix, 'flistdir'), "test needs posix.flistdir()")
+ def test_flistdir(self):
+ f = posix.open(posix.getcwd(), posix.O_RDONLY)
+ self.addCleanup(posix.close, f)
+ self.assertEqual(
+ sorted(posix.listdir('.')),
+ sorted(posix.flistdir(f))
+ )
+ # Check that the fd offset was reset (issue #13739)
+ self.assertEqual(
+ sorted(posix.listdir('.')),
+ sorted(posix.flistdir(f))
+ )
+
def test_access(self):
if hasattr(posix, 'access'):
self.assertTrue(posix.access(support.TESTFN, os.R_OK))
@@ -310,6 +485,32 @@ class PosixTester(unittest.TestCase):
os.close(reader)
os.close(writer)
+ @unittest.skipUnless(hasattr(os, 'pipe2'), "test needs os.pipe2()")
+ @support.requires_linux_version(2, 6, 27)
+ def test_pipe2(self):
+ self.assertRaises(TypeError, os.pipe2, 'DEADBEEF')
+ self.assertRaises(TypeError, os.pipe2, 0, 0)
+
+ # try calling with flags = 0, like os.pipe()
+ r, w = os.pipe2(0)
+ os.close(r)
+ os.close(w)
+
+ # test flags
+ r, w = os.pipe2(os.O_CLOEXEC|os.O_NONBLOCK)
+ self.addCleanup(os.close, r)
+ self.addCleanup(os.close, w)
+ self.assertTrue(fcntl.fcntl(r, fcntl.F_GETFD) & fcntl.FD_CLOEXEC)
+ self.assertTrue(fcntl.fcntl(w, fcntl.F_GETFD) & fcntl.FD_CLOEXEC)
+ # try reading from an empty pipe: this should fail, not block
+ self.assertRaises(OSError, os.read, r, 1)
+ # try a write big enough to fill up the pipe: this should either
+ # fail or perform a partial write, not block
+ try:
+ os.write(w, b'x' * support.PIPE_MAX_SIZE)
+ except OSError:
+ pass
+
def test_utime(self):
if hasattr(posix, 'utime'):
now = time.time()
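os.pipe2() exists so the flags can be applied atomically at creation time, closing the race where another thread forks between pipe() and fcntl(). A sketch, assuming Linux 2.6.27 or later:

    import fcntl
    import os

    r, w = os.pipe2(os.O_CLOEXEC | os.O_NONBLOCK)
    try:
        # Both descriptors are already close-on-exec...
        assert fcntl.fcntl(r, fcntl.F_GETFD) & fcntl.FD_CLOEXEC
        try:
            os.read(r, 1)        # ...and non-blocking: an empty pipe
        except OSError:          # fails (EAGAIN) instead of hanging
            pass
    finally:
        os.close(r)
        os.close(w)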
@@ -410,6 +611,21 @@ class PosixTester(unittest.TestCase):
os.chdir(curdir)
support.rmtree(base_path)
+ @unittest.skipUnless(hasattr(posix, 'getgrouplist'), "test needs posix.getgrouplist()")
+ @unittest.skipUnless(hasattr(pwd, 'getpwuid'), "test needs pwd.getpwuid()")
+ @unittest.skipUnless(hasattr(os, 'getuid'), "test needs os.getuid()")
+ def test_getgrouplist(self):
+ with os.popen('id -G') as idg:
+ groups = idg.read().strip()
+
+ if not groups:
+ raise unittest.SkipTest("need working 'id -G'")
+
+ self.assertEqual(
+ set([int(x) for x in groups.split()]),
+ set(posix.getgrouplist(pwd.getpwuid(os.getuid())[0],
+ pwd.getpwuid(os.getuid())[3])))
+
@unittest.skipUnless(hasattr(os, 'getegid'), "test needs os.getegid()")
def test_getgroups(self):
with os.popen('id -G') as idg:
@@ -426,6 +642,366 @@ class PosixTester(unittest.TestCase):
set([int(x) for x in groups.split()]),
set(posix.getgroups() + [posix.getegid()]))
+ # tests for the posix *at functions follow
+
+ @unittest.skipUnless(hasattr(posix, 'faccessat'), "test needs posix.faccessat()")
+ def test_faccessat(self):
+ f = posix.open(posix.getcwd(), posix.O_RDONLY)
+ try:
+ self.assertTrue(posix.faccessat(f, support.TESTFN, os.R_OK))
+ finally:
+ posix.close(f)
+
+ @unittest.skipUnless(hasattr(posix, 'fchmodat'), "test needs posix.fchmodat()")
+ def test_fchmodat(self):
+ os.chmod(support.TESTFN, stat.S_IRUSR)
+
+ f = posix.open(posix.getcwd(), posix.O_RDONLY)
+ try:
+ posix.fchmodat(f, support.TESTFN, stat.S_IRUSR | stat.S_IWUSR)
+
+ s = posix.stat(support.TESTFN)
+ self.assertEqual(s[0] & stat.S_IRWXU, stat.S_IRUSR | stat.S_IWUSR)
+ finally:
+ posix.close(f)
+
+ @unittest.skipUnless(hasattr(posix, 'fchownat'), "test needs posix.fchownat()")
+ def test_fchownat(self):
+ support.unlink(support.TESTFN)
+ support.create_empty_file(support.TESTFN)
+
+ f = posix.open(posix.getcwd(), posix.O_RDONLY)
+ try:
+ posix.fchownat(f, support.TESTFN, os.getuid(), os.getgid())
+ finally:
+ posix.close(f)
+
+ @unittest.skipUnless(hasattr(posix, 'fstatat'), "test needs posix.fstatat()")
+ def test_fstatat(self):
+ support.unlink(support.TESTFN)
+ with open(support.TESTFN, 'w') as outfile:
+ outfile.write("testline\n")
+
+ f = posix.open(posix.getcwd(), posix.O_RDONLY)
+ try:
+ s1 = posix.stat(support.TESTFN)
+ s2 = posix.fstatat(f, support.TESTFN)
+ self.assertEqual(s1, s2)
+ finally:
+ posix.close(f)
+
+ @unittest.skipUnless(hasattr(posix, 'futimesat'), "test needs posix.futimesat()")
+ def test_futimesat(self):
+ f = posix.open(posix.getcwd(), posix.O_RDONLY)
+ try:
+ now = time.time()
+ posix.futimesat(f, support.TESTFN, None)
+ posix.futimesat(f, support.TESTFN)
+ self.assertRaises(TypeError, posix.futimesat, f, support.TESTFN, (None, None))
+ self.assertRaises(TypeError, posix.futimesat, f, support.TESTFN, (now, None))
+ self.assertRaises(TypeError, posix.futimesat, f, support.TESTFN, (None, now))
+ posix.futimesat(f, support.TESTFN, (int(now), int(now)))
+ posix.futimesat(f, support.TESTFN, (now, now))
+ finally:
+ posix.close(f)
+
+ @unittest.skipUnless(hasattr(posix, 'linkat'), "test needs posix.linkat()")
+ def test_linkat(self):
+ f = posix.open(posix.getcwd(), posix.O_RDONLY)
+ try:
+ posix.linkat(f, support.TESTFN, f, support.TESTFN + 'link')
+ # both paths should refer to the same inode
+ self.assertEqual(posix.stat(support.TESTFN)[1],
+ posix.stat(support.TESTFN + 'link')[1])
+ finally:
+ posix.close(f)
+ support.unlink(support.TESTFN + 'link')
+
+ @unittest.skipUnless(hasattr(posix, 'mkdirat'), "test needs posix.mkdirat()")
+ def test_mkdirat(self):
+ f = posix.open(posix.getcwd(), posix.O_RDONLY)
+ try:
+ posix.mkdirat(f, support.TESTFN + 'dir')
+ posix.stat(support.TESTFN + 'dir') # should not raise exception
+ finally:
+ posix.close(f)
+ support.rmtree(support.TESTFN + 'dir')
+
+ @unittest.skipUnless(hasattr(posix, 'mknodat') and hasattr(stat, 'S_IFIFO'),
+ "don't have mknodat()/S_IFIFO")
+ def test_mknodat(self):
+ # Test using mknodat() to create a FIFO (the only use specified
+ # by POSIX).
+ support.unlink(support.TESTFN)
+ mode = stat.S_IFIFO | stat.S_IRUSR | stat.S_IWUSR
+ f = posix.open(posix.getcwd(), posix.O_RDONLY)
+ try:
+ posix.mknodat(f, support.TESTFN, mode, 0)
+ except OSError as e:
+ # Some old systems don't allow unprivileged users to use
+ # mknod(), or only support creating device nodes.
+ self.assertIn(e.errno, (errno.EPERM, errno.EINVAL))
+ else:
+ self.assertTrue(stat.S_ISFIFO(posix.stat(support.TESTFN).st_mode))
+ finally:
+ posix.close(f)
+
+ @unittest.skipUnless(hasattr(posix, 'openat'), "test needs posix.openat()")
+ def test_openat(self):
+ support.unlink(support.TESTFN)
+ with open(support.TESTFN, 'w') as outfile:
+ outfile.write("testline\n")
+ a = posix.open(posix.getcwd(), posix.O_RDONLY)
+ b = posix.openat(a, support.TESTFN, posix.O_RDONLY)
+ try:
+ res = posix.read(b, 9).decode(encoding="utf-8")
+ self.assertEqual("testline\n", res)
+ finally:
+ posix.close(a)
+ posix.close(b)
+
+ @unittest.skipUnless(hasattr(posix, 'readlinkat'), "test needs posix.readlinkat()")
+ def test_readlinkat(self):
+ os.symlink(support.TESTFN, support.TESTFN + 'link')
+ f = posix.open(posix.getcwd(), posix.O_RDONLY)
+ try:
+ self.assertEqual(posix.readlink(support.TESTFN + 'link'),
+ posix.readlinkat(f, support.TESTFN + 'link'))
+ finally:
+ support.unlink(support.TESTFN + 'link')
+ posix.close(f)
+
+ @unittest.skipUnless(hasattr(posix, 'renameat'), "test needs posix.renameat()")
+ def test_renameat(self):
+ support.unlink(support.TESTFN)
+ support.create_empty_file(support.TESTFN + 'ren')
+ f = posix.open(posix.getcwd(), posix.O_RDONLY)
+ try:
+ posix.renameat(f, support.TESTFN + 'ren', f, support.TESTFN)
+ except:
+ posix.rename(support.TESTFN + 'ren', support.TESTFN)
+ raise
+ else:
+ posix.stat(support.TESTFN) # should not raise an exception
+ finally:
+ posix.close(f)
+
+ @unittest.skipUnless(hasattr(posix, 'symlinkat'), "test needs posix.symlinkat()")
+ def test_symlinkat(self):
+ f = posix.open(posix.getcwd(), posix.O_RDONLY)
+ try:
+ posix.symlinkat(support.TESTFN, f, support.TESTFN + 'link')
+ self.assertEqual(posix.readlink(support.TESTFN + 'link'), support.TESTFN)
+ finally:
+ posix.close(f)
+ support.unlink(support.TESTFN + 'link')
+
+ @unittest.skipUnless(hasattr(posix, 'unlinkat'), "test needs posix.unlinkat()")
+ def test_unlinkat(self):
+ f = posix.open(posix.getcwd(), posix.O_RDONLY)
+ support.create_empty_file(support.TESTFN + 'del')
+ posix.stat(support.TESTFN + 'del') # should not raise an exception
+ try:
+ posix.unlinkat(f, support.TESTFN + 'del')
+ except:
+ support.unlink(support.TESTFN + 'del')
+ raise
+ else:
+ self.assertRaises(OSError, posix.stat, support.TESTFN + 'del')
+ finally:
+ posix.close(f)
+
+ @unittest.skipUnless(hasattr(posix, 'utimensat'), "test needs posix.utimensat()")
+ def test_utimensat(self):
+ f = posix.open(posix.getcwd(), posix.O_RDONLY)
+ try:
+ now = time.time()
+ posix.utimensat(f, support.TESTFN, None, None)
+ posix.utimensat(f, support.TESTFN)
+ posix.utimensat(f, support.TESTFN, flags=os.AT_SYMLINK_NOFOLLOW)
+ self.assertRaises(TypeError, posix.utimensat, f, support.TESTFN, (None, None), (None, None))
+ self.assertRaises(TypeError, posix.utimensat, f, support.TESTFN, (now, 0), None)
+ self.assertRaises(TypeError, posix.utimensat, f, support.TESTFN, None, (now, 0))
+ posix.utimensat(f, support.TESTFN, (int(now), int((now - int(now)) * 1e9)),
+ (int(now), int((now - int(now)) * 1e9)))
+ posix.utimensat(dirfd=f, path=support.TESTFN,
+ atime=(int(now), int((now - int(now)) * 1e9)),
+ mtime=(int(now), int((now - int(now)) * 1e9)))
+ finally:
+ posix.close(f)
+
+ @unittest.skipUnless(hasattr(posix, 'mkfifoat'), "don't have mkfifoat()")
+ def test_mkfifoat(self):
+ support.unlink(support.TESTFN)
+ f = posix.open(posix.getcwd(), posix.O_RDONLY)
+ try:
+ posix.mkfifoat(f, support.TESTFN, stat.S_IRUSR | stat.S_IWUSR)
+ self.assertTrue(stat.S_ISFIFO(posix.stat(support.TESTFN).st_mode))
+ finally:
+ posix.close(f)
+
+ requires_sched_h = unittest.skipUnless(hasattr(posix, 'sched_yield'),
+ "don't have scheduling support")
+ requires_sched_affinity = unittest.skipUnless(hasattr(posix, 'cpu_set'),
+ "don't have sched affinity support")
+
+ @requires_sched_h
+ def test_sched_yield(self):
+ # This has no error conditions (at least on Linux).
+ posix.sched_yield()
+
+ @requires_sched_h
+ @unittest.skipUnless(hasattr(posix, 'sched_get_priority_max'),
+ "requires sched_get_priority_max()")
+ def test_sched_priority(self):
+ # Round-robin usually has interesting priorities.
+ pol = posix.SCHED_RR
+ lo = posix.sched_get_priority_min(pol)
+ hi = posix.sched_get_priority_max(pol)
+ self.assertIsInstance(lo, int)
+ self.assertIsInstance(hi, int)
+ self.assertGreaterEqual(hi, lo)
+ # OSX evidently just returns 15 without checking the argument.
+ if sys.platform != "darwin":
+ self.assertRaises(OSError, posix.sched_get_priority_min, -23)
+ self.assertRaises(OSError, posix.sched_get_priority_max, -23)
+
+ @unittest.skipUnless(hasattr(posix, 'sched_setscheduler'), "can't change scheduler")
+ def test_get_and_set_scheduler_and_param(self):
+ possible_schedulers = [sched for name, sched in posix.__dict__.items()
+ if name.startswith("SCHED_")]
+ mine = posix.sched_getscheduler(0)
+ self.assertIn(mine, possible_schedulers)
+ try:
+ parent = posix.sched_getscheduler(os.getppid())
+ except OSError as e:
+ if e.errno != errno.EPERM:
+ raise
+ else:
+ self.assertIn(parent, possible_schedulers)
+ self.assertRaises(OSError, posix.sched_getscheduler, -1)
+ self.assertRaises(OSError, posix.sched_getparam, -1)
+ param = posix.sched_getparam(0)
+ self.assertIsInstance(param.sched_priority, int)
+ try:
+ posix.sched_setscheduler(0, mine, param)
+ except OSError as e:
+ if e.errno != errno.EPERM:
+ raise
+
+ # POSIX states that calling sched_setparam() on a process with a
+ # scheduling policy other than SCHED_FIFO or SCHED_RR is
+ # implementation-defined: FreeBSD returns EINVAL.
+ if not sys.platform.startswith('freebsd'):
+ posix.sched_setparam(0, param)
+ self.assertRaises(OSError, posix.sched_setparam, -1, param)
+
+ self.assertRaises(OSError, posix.sched_setscheduler, -1, mine, param)
+ self.assertRaises(TypeError, posix.sched_setscheduler, 0, mine, None)
+ self.assertRaises(TypeError, posix.sched_setparam, 0, 43)
+ param = posix.sched_param(None)
+ self.assertRaises(TypeError, posix.sched_setparam, 0, param)
+ large = 214748364700
+ param = posix.sched_param(large)
+ self.assertRaises(OverflowError, posix.sched_setparam, 0, param)
+ param = posix.sched_param(sched_priority=-large)
+ self.assertRaises(OverflowError, posix.sched_setparam, 0, param)
+
+ @unittest.skipUnless(hasattr(posix, "sched_rr_get_interval"), "no function")
+ def test_sched_rr_get_interval(self):
+ try:
+ interval = posix.sched_rr_get_interval(0)
+ except OSError as e:
+ # This likely means that sched_rr_get_interval is only valid for
+ # processes with the SCHED_RR scheduler in effect.
+ if e.errno != errno.EINVAL:
+ raise
+ self.skipTest("only works on SCHED_RR processes")
+ self.assertIsInstance(interval, float)
+ # Reasonable constraints, I think.
+ self.assertGreaterEqual(interval, 0.)
+ self.assertLess(interval, 1.)
+
+ @requires_sched_affinity
+ def test_sched_affinity(self):
+ mask = posix.sched_getaffinity(0, 1024)
+ self.assertGreaterEqual(mask.count(), 1)
+ self.assertIsInstance(mask, posix.cpu_set)
+ self.assertRaises(OSError, posix.sched_getaffinity, -1, 1024)
+ empty = posix.cpu_set(10)
+ posix.sched_setaffinity(0, mask)
+ self.assertRaises(OSError, posix.sched_setaffinity, 0, empty)
+ self.assertRaises(OSError, posix.sched_setaffinity, -1, mask)
+
+ @requires_sched_affinity
+ def test_cpu_set_basic(self):
+ s = posix.cpu_set(10)
+ self.assertEqual(len(s), 10)
+ self.assertEqual(s.count(), 0)
+ s.set(0)
+ s.set(9)
+ self.assertTrue(s.isset(0))
+ self.assertTrue(s.isset(9))
+ self.assertFalse(s.isset(5))
+ self.assertEqual(s.count(), 2)
+ s.clear(0)
+ self.assertFalse(s.isset(0))
+ self.assertEqual(s.count(), 1)
+ s.zero()
+ self.assertFalse(s.isset(0))
+ self.assertFalse(s.isset(9))
+ self.assertEqual(s.count(), 0)
+ self.assertRaises(ValueError, s.set, -1)
+ self.assertRaises(ValueError, s.set, 10)
+ self.assertRaises(ValueError, s.clear, -1)
+ self.assertRaises(ValueError, s.clear, 10)
+ self.assertRaises(ValueError, s.isset, -1)
+ self.assertRaises(ValueError, s.isset, 10)
+
+ @requires_sched_affinity
+ def test_cpu_set_cmp(self):
+ self.assertNotEqual(posix.cpu_set(11), posix.cpu_set(12))
+ l = posix.cpu_set(10)
+ r = posix.cpu_set(10)
+ self.assertEqual(l, r)
+ l.set(1)
+ self.assertNotEqual(l, r)
+ r.set(1)
+ self.assertEqual(l, r)
+
+ @requires_sched_affinity
+ def test_cpu_set_bitwise(self):
+ l = posix.cpu_set(5)
+ l.set(0)
+ l.set(1)
+ r = posix.cpu_set(5)
+ r.set(1)
+ r.set(2)
+ b = l & r
+ self.assertEqual(b.count(), 1)
+ self.assertTrue(b.isset(1))
+ b = l | r
+ self.assertEqual(b.count(), 3)
+ self.assertTrue(b.isset(0))
+ self.assertTrue(b.isset(1))
+ self.assertTrue(b.isset(2))
+ b = l ^ r
+ self.assertEqual(b.count(), 2)
+ self.assertTrue(b.isset(0))
+ self.assertFalse(b.isset(1))
+ self.assertTrue(b.isset(2))
+ b = l
+ b |= r
+ self.assertIs(b, l)
+ self.assertEqual(l.count(), 3)
+
+ def test_rtld_constants(self):
+ # check presence of major RTLD_* constants
+ posix.RTLD_LAZY
+ posix.RTLD_NOW
+ posix.RTLD_GLOBAL
+ posix.RTLD_LOCAL
+
class PosixGroupsTester(unittest.TestCase):
def setUp(self):
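Every *at test follows one shape: open a directory descriptor, operate relative to it, close it. That is the point of the API — the operations stay anchored to the directory even if its path is renamed underneath. A condensed sketch, assuming a build that exposes these functions (each test above guards with hasattr, and the exact spellings were still in flux in this cycle):

    import posix

    dirfd = posix.open('/tmp', posix.O_RDONLY)
    try:
        fd = posix.openat(dirfd, 'notes.txt', posix.O_WRONLY | posix.O_CREAT)
        try:
            posix.write(fd, b'hello\n')
        finally:
            posix.close(fd)
        print(posix.fstatat(dirfd, 'notes.txt').st_size)  # 6
        posix.unlinkat(dirfd, 'notes.txt')
    finally:
        posix.close(dirfd)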
@@ -461,9 +1037,11 @@ class PosixGroupsTester(unittest.TestCase):
posix.setgroups(groups)
self.assertListEqual(groups, posix.getgroups())
-
def test_main():
- support.run_unittest(PosixTester, PosixGroupsTester)
+ try:
+ support.run_unittest(PosixTester, PosixGroupsTester)
+ finally:
+ support.reap_children()
if __name__ == '__main__':
test_main()
diff --git a/Lib/test/test_posixpath.py b/Lib/test/test_posixpath.py
index bb4559c..709ef04 100644
--- a/Lib/test/test_posixpath.py
+++ b/Lib/test/test_posixpath.py
@@ -1,10 +1,10 @@
-import unittest
-from test import support, test_genericpath
-
-import posixpath
import os
+import posixpath
import sys
+import unittest
+import warnings
from posixpath import realpath, abspath, dirname, basename
+from test import support, test_genericpath
try:
import posix
@@ -231,7 +231,9 @@ class PosixPathTest(unittest.TestCase):
def test_ismount(self):
self.assertIs(posixpath.ismount("/"), True)
- self.assertIs(posixpath.ismount(b"/"), True)
+ with warnings.catch_warnings():
+ warnings.simplefilter("ignore", DeprecationWarning)
+ self.assertIs(posixpath.ismount(b"/"), True)
def test_ismount_non_existent(self):
# Non-existent mountpoint.
diff --git a/Lib/test/test_print.py b/Lib/test/test_print.py
index 8d37bbc..9d6dbea 100644
--- a/Lib/test/test_print.py
+++ b/Lib/test/test_print.py
@@ -111,6 +111,32 @@ class TestPrint(unittest.TestCase):
self.assertRaises(TypeError, print, '', end=3)
self.assertRaises(AttributeError, print, '', file='')
+ def test_print_flush(self):
+ # operation of the flush flag
+ class filelike():
+ def __init__(self):
+ self.written = ''
+ self.flushed = 0
+ def write(self, str):
+ self.written += str
+ def flush(self):
+ self.flushed += 1
+
+ f = filelike()
+ print(1, file=f, end='', flush=True)
+ print(2, file=f, end='', flush=True)
+ print(3, file=f, flush=False)
+ self.assertEqual(f.written, '123\n')
+ self.assertEqual(f.flushed, 2)
+
+ # ensure exceptions from flush are passed through
+ class noflush():
+ def write(self, str):
+ pass
+ def flush(self):
+ raise RuntimeError
+ self.assertRaises(RuntimeError, print, 1, file=noflush(), flush=True)
+
def test_main():
support.run_unittest(TestPrint)
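print()'s new flush keyword simply calls file.flush() after the write, so progress output no longer needs the two-step through sys.stdout:

    import time

    # Dots appear immediately even when stdout is block-buffered
    # (e.g. redirected to a file or pipe).
    for _ in range(3):
        print('.', end='', flush=True)
        time.sleep(0.1)
    print()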
diff --git a/Lib/test/test_property.py b/Lib/test/test_property.py
index cc6a872..726d6fe 100644
--- a/Lib/test/test_property.py
+++ b/Lib/test/test_property.py
@@ -128,6 +128,29 @@ class PropertyTests(unittest.TestCase):
self.assertEqual(newgetter.spam, 8)
self.assertEqual(newgetter.__class__.spam.__doc__, "new docstring")
+ def test_property___isabstractmethod__descriptor(self):
+ for val in (True, False, [], [1], '', '1'):
+ class C(object):
+ def foo(self):
+ pass
+ foo.__isabstractmethod__ = val
+ foo = property(foo)
+ self.assertIs(C.foo.__isabstractmethod__, bool(val))
+
+ # check that the property's __isabstractmethod__ descriptor does the
+ # right thing when presented with a value that fails truth testing:
+ class NotBool(object):
+ def __nonzero__(self):
+ raise ValueError()
+ __len__ = __nonzero__
+ with self.assertRaises(ValueError):
+ class C(object):
+ def foo(self):
+ pass
+ foo.__isabstractmethod__ = NotBool()
+ foo = property(foo)
+ C.foo.__isabstractmethod__
+
# Issue 5890: subclasses of property do not preserve method __doc__ strings
class PropertySub(property):
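Forwarding __isabstractmethod__ from the getter is what lets @property stack on top of @abstractmethod: abc only inspects class attributes, and the property now reports the abstractness of the function it wraps. A small sketch:

    from abc import ABCMeta, abstractmethod

    class Base(metaclass=ABCMeta):
        @property
        @abstractmethod
        def size(self):
            ...

    class Concrete(Base):
        @property
        def size(self):
            return 42

    print(Concrete().size)   # 42 -- but Base() raises TypeError, because
                             # Base.size.__isabstractmethod__ is True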
diff --git a/Lib/test/test_pty.py b/Lib/test/test_pty.py
index 4f1251c..ef95268 100644
--- a/Lib/test/test_pty.py
+++ b/Lib/test/test_pty.py
@@ -205,6 +205,7 @@ class SmallPtyTests(unittest.TestCase):
self.orig_stdout_fileno = pty.STDOUT_FILENO
self.orig_pty_select = pty.select
self.fds = [] # A list of file descriptors to close.
+ self.files = []
self.select_rfds_lengths = []
self.select_rfds_results = []
@@ -212,10 +213,15 @@ class SmallPtyTests(unittest.TestCase):
pty.STDIN_FILENO = self.orig_stdin_fileno
pty.STDOUT_FILENO = self.orig_stdout_fileno
pty.select = self.orig_pty_select
+ for file in self.files:
+ try:
+ file.close()
+ except OSError:
+ pass
for fd in self.fds:
try:
os.close(fd)
- except:
+ except OSError:
pass
def _pipe(self):
@@ -223,6 +229,11 @@ class SmallPtyTests(unittest.TestCase):
self.fds.extend(pipe_fds)
return pipe_fds
+ def _socketpair(self):
+ socketpair = socket.socketpair()
+ self.files.extend(socketpair)
+ return socketpair
+
def _mock_select(self, rfds, wfds, xfds):
# This will raise IndexError when no more expected calls exist.
self.assertEqual(self.select_rfds_lengths.pop(0), len(rfds))
@@ -234,9 +245,8 @@ class SmallPtyTests(unittest.TestCase):
pty.STDOUT_FILENO = mock_stdout_fd
mock_stdin_fd, write_to_stdin_fd = self._pipe()
pty.STDIN_FILENO = mock_stdin_fd
- socketpair = socket.socketpair()
+ socketpair = self._socketpair()
masters = [s.fileno() for s in socketpair]
- self.fds.extend(masters)
# Feed data. Smaller than PIPEBUF. These writes will not block.
os.write(masters[1], b'from master')
@@ -263,9 +273,8 @@ class SmallPtyTests(unittest.TestCase):
pty.STDOUT_FILENO = mock_stdout_fd
mock_stdin_fd, write_to_stdin_fd = self._pipe()
pty.STDIN_FILENO = mock_stdin_fd
- socketpair = socket.socketpair()
+ socketpair = self._socketpair()
masters = [s.fileno() for s in socketpair]
- self.fds.extend(masters)
os.close(masters[1])
socketpair[1].close()
diff --git a/Lib/test/test_pulldom.py b/Lib/test/test_pulldom.py
new file mode 100644
index 0000000..b81a595
--- /dev/null
+++ b/Lib/test/test_pulldom.py
@@ -0,0 +1,347 @@
+import io
+import unittest
+import sys
+import xml.sax
+
+from xml.sax.xmlreader import AttributesImpl
+from xml.dom import pulldom
+
+from test.support import run_unittest, findfile
+
+
+tstfile = findfile("test.xml", subdir="xmltestdata")
+
+# A handy XML snippet, containing attributes, a namespace prefix, and a
+# self-closing tag:
+SMALL_SAMPLE = """<?xml version="1.0"?>
+<html xmlns="http://www.w3.org/1999/xhtml" xmlns:xdc="http://www.xml.com/books">
+<!-- A comment -->
+<title>Introduction to XSL</title>
+<hr/>
+<p><xdc:author xdc:attrib="prefixed attribute" attrib="other attrib">A. Namespace</xdc:author></p>
+</html>"""
+
+
+class PullDOMTestCase(unittest.TestCase):
+
+ def test_parse(self):
+ """Minimal test of DOMEventStream.parse()"""
+
+ # This just tests that parsing from a stream works. Actual parser
+ # semantics are tested using parseString with a more focused XML
+ # fragment.
+
+ # Test with a filename:
+ handler = pulldom.parse(tstfile)
+ self.addCleanup(handler.stream.close)
+ list(handler)
+
+ # Test with a file object:
+ with open(tstfile, "rb") as fin:
+ list(pulldom.parse(fin))
+
+ def test_parse_semantics(self):
+ """Test DOMEventStream parsing semantics."""
+
+ items = pulldom.parseString(SMALL_SAMPLE)
+ evt, node = next(items)
+ # Just check the node is a Document:
+ self.assertTrue(hasattr(node, "createElement"))
+ self.assertEqual(pulldom.START_DOCUMENT, evt)
+ evt, node = next(items)
+ self.assertEqual(pulldom.START_ELEMENT, evt)
+ self.assertEqual("html", node.tagName)
+ self.assertEqual(2, len(node.attributes))
+ self.assertEqual(node.attributes.getNamedItem("xmlns:xdc").value,
+ "http://www.xml.com/books")
+ evt, node = next(items)
+ self.assertEqual(pulldom.CHARACTERS, evt) # Line break
+ evt, node = next(items)
+ # XXX - A comment should be reported here!
+ # self.assertEqual(pulldom.COMMENT, evt)
+ # Line break after swallowed comment:
+ self.assertEqual(pulldom.CHARACTERS, evt)
+ evt, node = next(items)
+ self.assertEqual("title", node.tagName)
+ title_node = node
+ evt, node = next(items)
+ self.assertEqual(pulldom.CHARACTERS, evt)
+ self.assertEqual("Introduction to XSL", node.data)
+ evt, node = next(items)
+ self.assertEqual(pulldom.END_ELEMENT, evt)
+ self.assertEqual("title", node.tagName)
+ self.assertTrue(title_node is node)
+ evt, node = next(items)
+ self.assertEqual(pulldom.CHARACTERS, evt)
+ evt, node = next(items)
+ self.assertEqual(pulldom.START_ELEMENT, evt)
+ self.assertEqual("hr", node.tagName)
+ evt, node = next(items)
+ self.assertEqual(pulldom.END_ELEMENT, evt)
+ self.assertEqual("hr", node.tagName)
+ evt, node = next(items)
+ self.assertEqual(pulldom.CHARACTERS, evt)
+ evt, node = next(items)
+ self.assertEqual(pulldom.START_ELEMENT, evt)
+ self.assertEqual("p", node.tagName)
+ evt, node = next(items)
+ self.assertEqual(pulldom.START_ELEMENT, evt)
+ self.assertEqual("xdc:author", node.tagName)
+ evt, node = next(items)
+ self.assertEqual(pulldom.CHARACTERS, evt)
+ evt, node = next(items)
+ self.assertEqual(pulldom.END_ELEMENT, evt)
+ self.assertEqual("xdc:author", node.tagName)
+ evt, node = next(items)
+ self.assertEqual(pulldom.END_ELEMENT, evt)
+ evt, node = next(items)
+ self.assertEqual(pulldom.CHARACTERS, evt)
+ evt, node = next(items)
+ self.assertEqual(pulldom.END_ELEMENT, evt)
+ # XXX No END_DOCUMENT item is ever obtained:
+ #evt, node = next(items)
+ #self.assertEqual(pulldom.END_DOCUMENT, evt)
+
+ def test_expandItem(self):
+ """Ensure expandItem works as expected."""
+ items = pulldom.parseString(SMALL_SAMPLE)
+ # Loop through the nodes until we get to a "title" start tag:
+ for evt, item in items:
+ if evt == pulldom.START_ELEMENT and item.tagName == "title":
+ items.expandNode(item)
+ self.assertEqual(1, len(item.childNodes))
+ break
+ else:
+ self.fail("No \"title\" element detected in SMALL_SAMPLE!")
+ # Loop until we get to the next start-element:
+ for evt, node in items:
+ if evt == pulldom.START_ELEMENT:
+ break
+ self.assertEqual("hr", node.tagName,
+ "expandNode did not leave DOMEventStream in the correct state.")
+ # Attempt to expand a standalone element:
+ items.expandNode(node)
+ self.assertEqual(next(items)[0], pulldom.CHARACTERS)
+ evt, node = next(items)
+ self.assertEqual(node.tagName, "p")
+ items.expandNode(node)
+ next(items) # Skip character data
+ evt, node = next(items)
+ self.assertEqual(node.tagName, "html")
+ with self.assertRaises(StopIteration):
+ next(items)
+ items.clear()
+ self.assertIsNone(items.parser)
+ self.assertIsNone(items.stream)
+
+ @unittest.expectedFailure
+ def test_comment(self):
+ """PullDOM does not receive "comment" events."""
+ items = pulldom.parseString(SMALL_SAMPLE)
+ for evt, _ in items:
+ if evt == pulldom.COMMENT:
+ break
+ else:
+ self.fail("No comment was encountered")
+
+ @unittest.expectedFailure
+ def test_end_document(self):
+ """PullDOM does not receive "end-document" events."""
+ items = pulldom.parseString(SMALL_SAMPLE)
+ # Read all of the nodes up to and including </html>:
+ for evt, node in items:
+ if evt == pulldom.END_ELEMENT and node.tagName == "html":
+ break
+ try:
+ # Assert that the next node is END_DOCUMENT:
+ evt, node = next(items)
+ self.assertEqual(pulldom.END_DOCUMENT, evt)
+ except StopIteration:
+ self.fail(
+ "Ran out of events, but should have received END_DOCUMENT")
+
+
+class ThoroughTestCase(unittest.TestCase):
+ """Test the hard-to-reach parts of pulldom."""
+
+ def test_thorough_parse(self):
+ """Test some of the hard-to-reach parts of PullDOM."""
+ self._test_thorough(pulldom.parse(None, parser=SAXExerciser()))
+
+ @unittest.expectedFailure
+ def test_sax2dom_fail(self):
+ """SAX2DOM can"t handle a PI before the root element."""
+ pd = SAX2DOMTestHelper(None, SAXExerciser(), 12)
+ self._test_thorough(pd)
+
+ def test_thorough_sax2dom(self):
+ """Test some of the hard-to-reach parts of SAX2DOM."""
+ pd = SAX2DOMTestHelper(None, SAX2DOMExerciser(), 12)
+ self._test_thorough(pd, False)
+
+ def _test_thorough(self, pd, before_root=True):
+ """Test some of the hard-to-reach parts of the parser, using a mock
+ parser."""
+
+ evt, node = next(pd)
+ self.assertEqual(pulldom.START_DOCUMENT, evt)
+ # Just check the node is a Document:
+ self.assertTrue(hasattr(node, "createElement"))
+
+ if before_root:
+ evt, node = next(pd)
+ self.assertEqual(pulldom.COMMENT, evt)
+ self.assertEqual("a comment", node.data)
+ evt, node = next(pd)
+ self.assertEqual(pulldom.PROCESSING_INSTRUCTION, evt)
+ self.assertEqual("target", node.target)
+ self.assertEqual("data", node.data)
+
+ evt, node = next(pd)
+ self.assertEqual(pulldom.START_ELEMENT, evt)
+ self.assertEqual("html", node.tagName)
+
+ evt, node = next(pd)
+ self.assertEqual(pulldom.COMMENT, evt)
+ self.assertEqual("a comment", node.data)
+ evt, node = next(pd)
+ self.assertEqual(pulldom.PROCESSING_INSTRUCTION, evt)
+ self.assertEqual("target", node.target)
+ self.assertEqual("data", node.data)
+
+ evt, node = next(pd)
+ self.assertEqual(pulldom.START_ELEMENT, evt)
+ self.assertEqual("p", node.tagName)
+
+ evt, node = next(pd)
+ self.assertEqual(pulldom.CHARACTERS, evt)
+ self.assertEqual("text", node.data)
+ evt, node = next(pd)
+ self.assertEqual(pulldom.END_ELEMENT, evt)
+ self.assertEqual("p", node.tagName)
+ evt, node = next(pd)
+ self.assertEqual(pulldom.END_ELEMENT, evt)
+ self.assertEqual("html", node.tagName)
+ evt, node = next(pd)
+ self.assertEqual(pulldom.END_DOCUMENT, evt)
+
+
+class SAXExerciser(object):
+ """A fake sax parser that calls some of the harder-to-reach sax methods to
+ ensure it emits the correct events"""
+
+ def setContentHandler(self, handler):
+ self._handler = handler
+
+ def parse(self, _):
+ h = self._handler
+ h.startDocument()
+
+ # The next two items ensure that items preceding the first
+ # start_element are properly stored and emitted:
+ h.comment("a comment")
+ h.processingInstruction("target", "data")
+
+ h.startElement("html", AttributesImpl({}))
+
+ h.comment("a comment")
+ h.processingInstruction("target", "data")
+
+ h.startElement("p", AttributesImpl({"class": "paraclass"}))
+ h.characters("text")
+ h.endElement("p")
+ h.endElement("html")
+ h.endDocument()
+
+ def stub(self, *args, **kwargs):
+ """Stub method. Does nothing."""
+ pass
+ setProperty = stub
+ setFeature = stub
+
+
+class SAX2DOMExerciser(SAXExerciser):
+ """The same as SAXExerciser, but without the processing instruction and
+ comment before the root element, because SAX2DOM can't handle them"""
+
+ def parse(self, _):
+ h = self._handler
+ h.startDocument()
+ h.startElement("html", AttributesImpl({}))
+ h.comment("a comment")
+ h.processingInstruction("target", "data")
+ h.startElement("p", AttributesImpl({"class": "paraclass"}))
+ h.characters("text")
+ h.endElement("p")
+ h.endElement("html")
+ h.endDocument()
+
+
+class SAX2DOMTestHelper(pulldom.DOMEventStream):
+ """Allows us to drive SAX2DOM from a DOMEventStream."""
+
+ def reset(self):
+ self.pulldom = pulldom.SAX2DOM()
+ # This content handler relies on namespace support
+ self.parser.setFeature(xml.sax.handler.feature_namespaces, 1)
+ self.parser.setContentHandler(self.pulldom)
+
+
+class SAX2DOMTestCase(unittest.TestCase):
+
+ def confirm(self, test, testname="Test"):
+ self.assertTrue(test, testname)
+
+ def test_basic(self):
+ """Ensure SAX2DOM can parse from a stream."""
+ with io.StringIO(SMALL_SAMPLE) as fin:
+ sd = SAX2DOMTestHelper(fin, xml.sax.make_parser(),
+ len(SMALL_SAMPLE))
+ for evt, node in sd:
+ if evt == pulldom.START_ELEMENT and node.tagName == "html":
+ break
+ # Because the buffer is the same length as the XML, all the
+ # nodes should have been parsed and added:
+ self.assertGreater(len(node.childNodes), 0)
+
+ def testSAX2DOM(self):
+ """Ensure SAX2DOM expands nodes as expected."""
+ sax2dom = pulldom.SAX2DOM()
+ sax2dom.startDocument()
+ sax2dom.startElement("doc", {})
+ sax2dom.characters("text")
+ sax2dom.startElement("subelm", {})
+ sax2dom.characters("text")
+ sax2dom.endElement("subelm")
+ sax2dom.characters("text")
+ sax2dom.endElement("doc")
+ sax2dom.endDocument()
+
+ doc = sax2dom.document
+ root = doc.documentElement
+ (text1, elm1, text2) = root.childNodes
+ text3 = elm1.childNodes[0]
+
+ self.assertIsNone(text1.previousSibling)
+ self.assertIs(text1.nextSibling, elm1)
+ self.assertIs(elm1.previousSibling, text1)
+ self.assertIs(elm1.nextSibling, text2)
+ self.assertIs(text2.previousSibling, elm1)
+ self.assertIsNone(text2.nextSibling)
+ self.assertIsNone(text3.previousSibling)
+ self.assertIsNone(text3.nextSibling)
+
+ self.assertIs(root.parentNode, doc)
+ self.assertIs(text1.parentNode, root)
+ self.assertIs(elm1.parentNode, root)
+ self.assertIs(text2.parentNode, root)
+ self.assertIs(text3.parentNode, elm1)
+ doc.unlink()
+
+
+def test_main():
+ run_unittest(PullDOMTestCase, ThoroughTestCase, SAX2DOMTestCase)
+
+
+if __name__ == "__main__":
+ test_main()
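In ordinary use the pull API's payoff is expanding only the subtrees you care about while the rest streams past; a short sketch:

    from xml.dom import pulldom

    XML = '<root><item id="1">alpha</item><item id="2">beta</item></root>'

    events = pulldom.parseString(XML)
    for event, node in events:
        if event == pulldom.START_ELEMENT and node.tagName == 'item':
            events.expandNode(node)   # materialize just this subtree
            print(node.getAttribute('id'), node.firstChild.data)
    # prints: 1 alpha / 2 beta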
diff --git a/Lib/test/test_pydoc.py b/Lib/test/test_pydoc.py
index 65b0ab5..e805ed8 100644
--- a/Lib/test/test_pydoc.py
+++ b/Lib/test/test_pydoc.py
@@ -198,7 +198,7 @@ war</tt></dd></dl>
missing_pattern = "no Python documentation found for '%s'"
# output pattern for module with bad imports
-badimport_pattern = "problem in %s - ImportError: No module named %s"
+badimport_pattern = "problem in %s - ImportError: No module named %r"
def run_pydoc(module_name, *args, **env):
"""
@@ -236,8 +236,8 @@ def get_pydoc_text(module):
def print_diffs(text1, text2):
"Prints unified diffs for two texts"
# XXX now obsolete, use unittest built-in support
- lines1 = text1.splitlines(True)
- lines2 = text2.splitlines(True)
+ lines1 = text1.splitlines(keepends=True)
+ lines2 = text2.splitlines(keepends=True)
diffs = difflib.unified_diff(lines1, lines2, n=0, fromfile='expected',
tofile='got')
print('\n' + ''.join(diffs))
@@ -254,6 +254,8 @@ class PydocDocTest(unittest.TestCase):
@unittest.skipIf(sys.flags.optimize >= 2,
"Docstrings are omitted with -O2 and above")
+ @unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(),
+ 'trace function introduces __locals__ unexpectedly')
def test_html_doc(self):
result, doc_loc = get_pydoc_html(pydoc_mod)
mod_file = inspect.getabsfile(pydoc_mod)
@@ -269,6 +271,8 @@ class PydocDocTest(unittest.TestCase):
@unittest.skipIf(sys.flags.optimize >= 2,
"Docstrings are omitted with -O2 and above")
+ @unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(),
+ 'trace function introduces __locals__ unexpectedly')
def test_text_doc(self):
result, doc_loc = get_pydoc_text(pydoc_mod)
expected_text = expected_text_pattern % \
@@ -310,6 +314,8 @@ class PydocDocTest(unittest.TestCase):
@unittest.skipIf(sys.flags.optimize >= 2,
'Docstrings are omitted with -O2 and above')
+ @unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(),
+ 'trace function introduces __locals__ unexpectedly')
def test_help_output_redirect(self):
# issue 940286, if output is set in Helper, then all output from
# Helper.help should be redirected
diff --git a/Lib/test/test_raise.py b/Lib/test/test_raise.py
index e02c1af..8ae9210 100644
--- a/Lib/test/test_raise.py
+++ b/Lib/test/test_raise.py
@@ -3,12 +3,27 @@
"""Tests for the raise statement."""
-from test import support
+from test import support, script_helper
+import re
import sys
import types
import unittest
+try:
+ from resource import setrlimit, RLIMIT_CORE, error as resource_error
+except ImportError:
+ prepare_subprocess = None
+else:
+ def prepare_subprocess():
+ # don't create core file
+ try:
+ setrlimit(RLIMIT_CORE, (0, 0))
+ except (ValueError, resource_error):
+ pass
+
+
+
def get_tb():
try:
raise OSError()
@@ -77,6 +92,16 @@ class TestRaise(unittest.TestCase):
nested_reraise()
self.assertRaises(TypeError, reraise)
+ def test_raise_from_None(self):
+ try:
+ try:
+ raise TypeError("foo")
+ except:
+ raise ValueError() from None
+ except ValueError as e:
+ self.assertTrue(isinstance(e.__context__, TypeError))
+ self.assertIsNone(e.__cause__)
+
def test_with_reraise1(self):
def reraise():
try:
@@ -130,8 +155,32 @@ class TestRaise(unittest.TestCase):
with self.assertRaises(TypeError):
raise MyException
+ def test_assert_with_tuple_arg(self):
+ try:
+ assert False, (3,)
+ except AssertionError as e:
+ self.assertEqual(str(e), "(3,)")
+
+
class TestCause(unittest.TestCase):
+
+ def testCauseSyntax(self):
+ try:
+ try:
+ try:
+ raise TypeError
+ except Exception:
+ raise ValueError from None
+ except ValueError as exc:
+ self.assertIsNone(exc.__cause__)
+ raise exc from Ellipsis
+ except ValueError as exc:
+ e = exc
+
+ self.assertIs(e.__cause__, Ellipsis)
+ self.assertIsInstance(e.__context__, TypeError)
+
def test_invalid_cause(self):
try:
raise IndexError from 5
@@ -171,6 +220,44 @@ class TestCause(unittest.TestCase):
class TestTraceback(unittest.TestCase):
+
+ def get_output(self, code, filename=None):
+ """
+ Run the specified code in Python (in a new child process) and read the
+ output from standard error or from a file (if filename is set).
+ Return the output lines as a list.
+ """
+ options = {}
+ if prepare_subprocess:
+ options['preexec_fn'] = prepare_subprocess
+ process = script_helper.spawn_python('-c', code, **options)
+ stdout, stderr = process.communicate()
+ exitcode = process.wait()
+ output = support.strip_python_stderr(stdout)
+ output = output.decode('ascii', 'backslashreplace')
+ if filename:
+ self.assertEqual(output, '')
+ with open(filename, "rb") as fp:
+ output = fp.read()
+ output = output.decode('ascii', 'backslashreplace')
+ output = re.sub('Current thread 0x[0-9a-f]+',
+ 'Current thread XXX',
+ output)
+ return output.splitlines(), exitcode
+
+ def test_traceback_verbiage(self):
+ code = """
+try:
+ raise ValueError
+except:
+ raise NameError from None
+"""
+ text, exitcode = self.get_output(code)
+ self.assertEqual(len(text), 3)
+ self.assertTrue(text[0].startswith('Traceback'))
+ self.assertTrue(text[1].startswith(' File '))
+ self.assertTrue(text[2].startswith('NameError'))
+
def test_sets_traceback(self):
try:
raise IndexError()
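The `raise ... from None` tests above pin down the intended semantics: the traceback verbiage is suppressed, __cause__ is None, but __context__ is still recorded for anyone who wants it. In application code:

    def parse_port(text):
        try:
            return int(text)
        except ValueError:
            # Hide the int() traceback from the user-facing error...
            raise RuntimeError('bad port: %r' % text) from None

    try:
        parse_port('http')
    except RuntimeError as exc:
        print(exc.__cause__)           # None
        print(type(exc.__context__))   # <class 'ValueError'> -- still kept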
diff --git a/Lib/test/test_range.py b/Lib/test/test_range.py
index ede0791..2e335cc 100644
--- a/Lib/test/test_range.py
+++ b/Lib/test/test_range.py
@@ -507,6 +507,87 @@ class RangeTest(unittest.TestCase):
for k in values - {0}:
r[i:j:k]
+ def test_comparison(self):
+ test_ranges = [range(0), range(0, -1), range(1, 1, 3),
+ range(1), range(5, 6), range(5, 6, 2),
+ range(5, 7, 2), range(2), range(0, 4, 2),
+ range(0, 5, 2), range(0, 6, 2)]
+ test_tuples = list(map(tuple, test_ranges))
+
+ # Check that equality of ranges matches equality of the corresponding
+ # tuples for each pair from the test lists above.
+ ranges_eq = [a == b for a in test_ranges for b in test_ranges]
+ tuples_eq = [a == b for a in test_tuples for b in test_tuples]
+ self.assertEqual(ranges_eq, tuples_eq)
+
+ # Check that != correctly gives the logical negation of ==
+ ranges_ne = [a != b for a in test_ranges for b in test_ranges]
+ self.assertEqual(ranges_ne, [not x for x in ranges_eq])
+
+ # Equal ranges should have equal hashes.
+ for a in test_ranges:
+ for b in test_ranges:
+ if a == b:
+ self.assertEqual(hash(a), hash(b))
+
+ # Ranges are unequal to other types (even sequence types)
+ self.assertIs(range(0) == (), False)
+ self.assertIs(() == range(0), False)
+ self.assertIs(range(2) == [0, 1], False)
+
+ # Huge integers aren't a problem.
+ self.assertEqual(range(0, 2**100 - 1, 2),
+ range(0, 2**100, 2))
+ self.assertEqual(hash(range(0, 2**100 - 1, 2)),
+ hash(range(0, 2**100, 2)))
+ self.assertNotEqual(range(0, 2**100, 2),
+ range(0, 2**100 + 1, 2))
+ self.assertEqual(range(2**200, 2**201 - 2**99, 2**100),
+ range(2**200, 2**201, 2**100))
+ self.assertEqual(hash(range(2**200, 2**201 - 2**99, 2**100)),
+ hash(range(2**200, 2**201, 2**100)))
+ self.assertNotEqual(range(2**200, 2**201, 2**100),
+ range(2**200, 2**201 + 1, 2**100))
+
+ # Order comparisons are not implemented for ranges.
+ with self.assertRaises(TypeError):
+ range(0) < range(0)
+ with self.assertRaises(TypeError):
+ range(0) > range(0)
+ with self.assertRaises(TypeError):
+ range(0) <= range(0)
+ with self.assertRaises(TypeError):
+ range(0) >= range(0)
+
+
+ def test_attributes(self):
+ # test the start, stop and step attributes of range objects
+ self.assert_attrs(range(0), 0, 0, 1)
+ self.assert_attrs(range(10), 0, 10, 1)
+ self.assert_attrs(range(-10), 0, -10, 1)
+ self.assert_attrs(range(0, 10, 1), 0, 10, 1)
+ self.assert_attrs(range(0, 10, 3), 0, 10, 3)
+ self.assert_attrs(range(10, 0, -1), 10, 0, -1)
+ self.assert_attrs(range(10, 0, -3), 10, 0, -3)
+
+ def assert_attrs(self, rangeobj, start, stop, step):
+ self.assertEqual(rangeobj.start, start)
+ self.assertEqual(rangeobj.stop, stop)
+ self.assertEqual(rangeobj.step, step)
+
+ with self.assertRaises(AttributeError):
+ rangeobj.start = 0
+ with self.assertRaises(AttributeError):
+ rangeobj.stop = 10
+ with self.assertRaises(AttributeError):
+ rangeobj.step = 1
+
+ with self.assertRaises(AttributeError):
+ del rangeobj.start
+ with self.assertRaises(AttributeError):
+ del rangeobj.stop
+ with self.assertRaises(AttributeError):
+ del rangeobj.step
def test_main():
test.support.run_unittest(RangeTest)
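Two behaviours worth calling out from the new tests: ranges compare (and hash) by the sequence they denote, not by the arguments they were built from, and start/stop/step are now exposed as read-only attributes:

    assert range(0, 4, 2) == range(0, 3, 2)   # both denote (0, 2)
    assert hash(range(0, 4, 2)) == hash(range(0, 3, 2))

    r = range(10, 0, -3)
    print(r.start, r.stop, r.step)            # 10 0 -3

    try:
        range(1) < range(2)                   # order comparisons stay
    except TypeError:                         # unsupported
        print('ranges are unordered')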
diff --git a/Lib/test/test_re.py b/Lib/test/test_re.py
index 940ba39..74a7b71 100644
--- a/Lib/test/test_re.py
+++ b/Lib/test/test_re.py
@@ -466,7 +466,7 @@ class ReTests(unittest.TestCase):
self.assertEqual(m.span(), span)
def test_re_escape(self):
- alnum_chars = string.ascii_letters + string.digits
+ alnum_chars = string.ascii_letters + string.digits + '_'
p = ''.join(chr(i) for i in range(256))
for c in p:
if c in alnum_chars:
@@ -479,7 +479,7 @@ class ReTests(unittest.TestCase):
self.assertMatch(re.escape(p), p)
def test_re_escape_byte(self):
- alnum_chars = (string.ascii_letters + string.digits).encode('ascii')
+ alnum_chars = (string.ascii_letters + string.digits + '_').encode('ascii')
p = bytes(range(256))
for i in p:
b = bytes([i])
@@ -652,6 +652,26 @@ class ReTests(unittest.TestCase):
self.assertEqual([item.group(0) for item in iter],
[":", "::", ":::"])
+ pat = re.compile(r":+")
+ iter = pat.finditer("a:b::c:::d", 1, 10)
+ self.assertEqual([item.group(0) for item in iter],
+ [":", "::", ":::"])
+
+ pat = re.compile(r":+")
+ iter = pat.finditer("a:b::c:::d", pos=1, endpos=10)
+ self.assertEqual([item.group(0) for item in iter],
+ [":", "::", ":::"])
+
+ pat = re.compile(r":+")
+ iter = pat.finditer("a:b::c:::d", endpos=10, pos=1)
+ self.assertEqual([item.group(0) for item in iter],
+ [":", "::", ":::"])
+
+ pat = re.compile(r":+")
+ iter = pat.finditer("a:b::c:::d", pos=3, endpos=8)
+ self.assertEqual([item.group(0) for item in iter],
+ ["::", "::"])
+
def test_bug_926075(self):
self.assertTrue(re.compile('bug_926075') is not
re.compile(b'bug_926075'))
@@ -818,6 +838,13 @@ class ReTests(unittest.TestCase):
self.assertRaises(OverflowError, _sre.compile, "abc", 0, [long_overflow])
self.assertRaises(TypeError, _sre.compile, {}, 0, [])
+ def test_search_dot_unicode(self):
+ self.assertIsNotNone(re.search("123.*-", '123abc-'))
+ self.assertIsNotNone(re.search("123.*-", '123\xe9-'))
+ self.assertIsNotNone(re.search("123.*-", '123\u20ac-'))
+ self.assertIsNotNone(re.search("123.*-", '123\U0010ffff-'))
+ self.assertIsNotNone(re.search("123.*-", '123\xe9\u20ac\U0010ffff-'))
+
def test_compile(self):
# Test return value when given string and pattern as parameter
pattern = re.compile('random pattern')
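finditer()'s pos/endpos work like the slice-free window that match() and search() already accept on compiled patterns: the string is not copied, and reported spans stay relative to the original text:

    import re

    pat = re.compile(r':+')
    text = 'a:b::c:::d'

    matches = list(pat.finditer(text, pos=3, endpos=8))
    print([m.group() for m in matches])   # ['::', '::']
    print([m.span() for m in matches])    # [(3, 5), (6, 8)]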
diff --git a/Lib/test/test_reprlib.py b/Lib/test/test_reprlib.py
index b0dc4d7..e75ba1c 100644
--- a/Lib/test/test_reprlib.py
+++ b/Lib/test/test_reprlib.py
@@ -6,9 +6,10 @@
import sys
import os
import shutil
+import importlib
import unittest
-from test.support import run_unittest
+from test.support import run_unittest, create_empty_file
from reprlib import repr as r # Don't shadow builtin repr
from reprlib import Repr
from reprlib import recursive_repr
@@ -129,8 +130,8 @@ class ReprTests(unittest.TestCase):
self.assertIn(s.find("..."), [12, 13])
def test_lambda(self):
- self.assertTrue(repr(lambda x: x).startswith(
- "<function <lambda"))
+ r = repr(lambda x: x)
+ self.assertTrue(r.startswith("<function ReprTests.test_lambda.<locals>.<lambda"), r)
# XXX anonymous functions? see func_repr
def test_builtin_function(self):
@@ -193,10 +194,9 @@ class ReprTests(unittest.TestCase):
r(y)
r(z)
-def touch(path, text=''):
- fp = open(path, 'w')
- fp.write(text)
- fp.close()
+def write_file(path, text):
+ with open(path, 'w', encoding='ASCII') as fp:
+ fp.write(text)
class LongReprTest(unittest.TestCase):
def setUp(self):
@@ -206,13 +206,14 @@ class LongReprTest(unittest.TestCase):
# Make the package and subpackage
shutil.rmtree(self.pkgname, ignore_errors=True)
os.mkdir(self.pkgname)
- touch(os.path.join(self.pkgname, '__init__.py'))
+ create_empty_file(os.path.join(self.pkgname, '__init__.py'))
shutil.rmtree(self.subpkgname, ignore_errors=True)
os.mkdir(self.subpkgname)
- touch(os.path.join(self.subpkgname, '__init__.py'))
+ create_empty_file(os.path.join(self.subpkgname, '__init__.py'))
# Remember where we are
self.here = os.getcwd()
sys.path.insert(0, self.here)
+ importlib.invalidate_caches()
def tearDown(self):
actions = []
@@ -231,15 +232,15 @@ class LongReprTest(unittest.TestCase):
def test_module(self):
eq = self.assertEqual
- touch(os.path.join(self.subpkgname, self.pkgname + '.py'))
+ create_empty_file(os.path.join(self.subpkgname, self.pkgname + '.py'))
from areallylongpackageandmodulenametotestreprtruncation.areallylongpackageandmodulenametotestreprtruncation import areallylongpackageandmodulenametotestreprtruncation
eq(repr(areallylongpackageandmodulenametotestreprtruncation),
- "<module '%s' from '%s'>" % (areallylongpackageandmodulenametotestreprtruncation.__name__, areallylongpackageandmodulenametotestreprtruncation.__file__))
+ "<module %r from %r>" % (areallylongpackageandmodulenametotestreprtruncation.__name__, areallylongpackageandmodulenametotestreprtruncation.__file__))
eq(repr(sys), "<module 'sys' (built-in)>")
def test_type(self):
eq = self.assertEqual
- touch(os.path.join(self.subpkgname, 'foo.py'), '''\
+ write_file(os.path.join(self.subpkgname, 'foo.py'), '''\
class foo(object):
pass
''')
@@ -253,7 +254,7 @@ class foo(object):
pass
def test_class(self):
- touch(os.path.join(self.subpkgname, 'bar.py'), '''\
+ write_file(os.path.join(self.subpkgname, 'bar.py'), '''\
class bar:
pass
''')
@@ -262,7 +263,7 @@ class bar:
self.assertEqual(repr(bar.bar), "<class '%s.bar'>" % bar.__name__)
def test_instance(self):
- touch(os.path.join(self.subpkgname, 'baz.py'), '''\
+ write_file(os.path.join(self.subpkgname, 'baz.py'), '''\
class baz:
pass
''')
@@ -273,19 +274,20 @@ class baz:
def test_method(self):
eq = self.assertEqual
- touch(os.path.join(self.subpkgname, 'qux.py'), '''\
+ write_file(os.path.join(self.subpkgname, 'qux.py'), '''\
class aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa:
def amethod(self): pass
''')
from areallylongpackageandmodulenametotestreprtruncation.areallylongpackageandmodulenametotestreprtruncation import qux
# Unbound methods first
- self.assertTrue(repr(qux.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.amethod).startswith(
- '<function amethod'))
+ r = repr(qux.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.amethod)
+ self.assertTrue(r.startswith('<function aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.amethod'), r)
# Bound method next
iqux = qux.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa()
- self.assertTrue(repr(iqux.amethod).startswith(
+ r = repr(iqux.amethod)
+ self.assertTrue(r.startswith(
'<bound method aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.amethod of <%s.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa object at 0x' \
- % (qux.__name__,) ))
+ % (qux.__name__,) ), r)
def test_builtin_function(self):
# XXX test built-in functions and methods with really long names
diff --git a/Lib/test/test_richcmp.py b/Lib/test/test_richcmp.py
index f8f3717..0b629dc 100644
--- a/Lib/test/test_richcmp.py
+++ b/Lib/test/test_richcmp.py
@@ -220,6 +220,7 @@ class MiscTest(unittest.TestCase):
for func in (do, operator.not_):
self.assertRaises(Exc, func, Bad())
+ @support.no_tracing
def test_recursion(self):
# Check that comparison for recursive objects fails gracefully
from collections import UserList
diff --git a/Lib/test/test_runpy.py b/Lib/test/test_runpy.py
index 7ffb6af..2cede19 100644
--- a/Lib/test/test_runpy.py
+++ b/Lib/test/test_runpy.py
@@ -6,7 +6,9 @@ import sys
import re
import tempfile
import py_compile
-from test.support import forget, make_legacy_pyc, run_unittest, unload, verbose
+from test.support import (
+ forget, make_legacy_pyc, run_unittest, unload, verbose, no_tracing,
+ create_empty_file)
from test.script_helper import (
make_pkg, make_script, make_zip_pkg, make_zip_script, temp_dir)
@@ -112,8 +114,7 @@ class RunModuleTest(unittest.TestCase):
def _add_pkg_dir(self, pkg_dir):
os.mkdir(pkg_dir)
pkg_fname = os.path.join(pkg_dir, "__init__.py")
- pkg_file = open(pkg_fname, "w")
- pkg_file.close()
+ create_empty_file(pkg_fname)
return pkg_fname
def _make_pkg(self, source, depth, mod_base="runpy_test"):
@@ -218,8 +219,7 @@ class RunModuleTest(unittest.TestCase):
module_dir = os.path.join(module_dir, pkg_name)
# Add sibling module
sibling_fname = os.path.join(module_dir, "sibling.py")
- sibling_file = open(sibling_fname, "w")
- sibling_file.close()
+ create_empty_file(sibling_fname)
if verbose: print(" Added sibling module:", sibling_fname)
# Add nephew module
uncle_dir = os.path.join(parent_dir, "uncle")
@@ -229,8 +229,7 @@ class RunModuleTest(unittest.TestCase):
self._add_pkg_dir(cousin_dir)
if verbose: print(" Added cousin package:", cousin_dir)
nephew_fname = os.path.join(cousin_dir, "nephew.py")
- nephew_file = open(nephew_fname, "w")
- nephew_file.close()
+ create_empty_file(nephew_fname)
if verbose: print(" Added nephew module:", nephew_fname)
def _check_relative_imports(self, depth, run_name=None):
@@ -395,6 +394,7 @@ argv0 = sys.argv[0]
msg = "can't find '__main__' module in %r" % zip_name
self._check_import_error(zip_name, msg)
+ @no_tracing
def test_main_recursion_error(self):
with temp_dir() as script_dir, temp_dir() as dummy_dir:
mod_name = '__main__'
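The create_empty_file() helper now imported from test.support replaces the open()/close() pairs above; a plausible implementation is a two-liner along these lines (a sketch, not the actual source):

    import os

    def create_empty_file(filename):
        """Create an empty file, truncating any existing content."""
        fd = os.open(filename, os.O_WRONLY | os.O_CREAT | os.O_TRUNC)
        os.close(fd)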
diff --git a/Lib/test/test_sax.py b/Lib/test/test_sax.py
index 1225d6e..8e00889 100644
--- a/Lib/test/test_sax.py
+++ b/Lib/test/test_sax.py
@@ -20,8 +20,8 @@ import unittest
TEST_XMLFILE = findfile("test.xml", subdir="xmltestdata")
TEST_XMLFILE_OUT = findfile("test.xml.out", subdir="xmltestdata")
try:
- TEST_XMLFILE.encode("utf8")
- TEST_XMLFILE_OUT.encode("utf8")
+ TEST_XMLFILE.encode("utf-8")
+ TEST_XMLFILE_OUT.encode("utf-8")
except UnicodeEncodeError:
raise unittest.SkipTest("filename is not encodable to utf8")
diff --git a/Lib/test/test_sched.py b/Lib/test/test_sched.py
index 91b8f0c..50ada52 100644
--- a/Lib/test/test_sched.py
+++ b/Lib/test/test_sched.py
@@ -63,15 +63,39 @@ class TestCase(unittest.TestCase):
def test_queue(self):
l = []
- events = []
fun = lambda x: l.append(x)
scheduler = sched.scheduler(time.time, time.sleep)
- self.assertEqual(scheduler._queue, [])
- for x in [0.05, 0.04, 0.03, 0.02, 0.01]:
- events.append(scheduler.enterabs(x, 1, fun, (x,)))
- self.assertEqual(scheduler._queue.sort(), events.sort())
+ now = time.time()
+ e5 = scheduler.enterabs(now + 0.05, 1, fun)
+ e1 = scheduler.enterabs(now + 0.01, 1, fun)
+ e2 = scheduler.enterabs(now + 0.02, 1, fun)
+ e4 = scheduler.enterabs(now + 0.04, 1, fun)
+ e3 = scheduler.enterabs(now + 0.03, 1, fun)
+        # the queue property is supposed to return an ordered list of
+        # upcoming events
+ self.assertEqual(list(scheduler.queue), [e1, e2, e3, e4, e5])
+
+ def test_args_kwargs(self):
+ flag = []
+
+ def fun(*a, **b):
+ flag.append(None)
+            self.assertEqual(a, (1, 2, 3))
+            self.assertEqual(b, {"foo": 1})
+
+ scheduler = sched.scheduler(time.time, time.sleep)
+        scheduler.enterabs(0.01, 1, fun, argument=(1, 2, 3), kwargs={"foo": 1})
scheduler.run()
- self.assertEqual(scheduler._queue, [])
+ self.assertEqual(flag, [None])
+
+ def test_run_non_blocking(self):
+ l = []
+ fun = lambda x: l.append(x)
+ scheduler = sched.scheduler(time.time, time.sleep)
+ for x in [10, 9, 8, 7, 6]:
+ scheduler.enter(x, 1, fun, (x,))
+ scheduler.run(blocking=False)
+ self.assertEqual(l, [])
def test_main():
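The rewritten sched tests above lean on three pieces of the scheduler API: enterabs()/enter() return event handles, the public queue property lists upcoming events in firing order, and run(blocking=False) executes only events that are already due. A compact usage sketch, assuming nothing beyond the standard library:

    import sched
    import time

    fired = []
    s = sched.scheduler(time.time, time.sleep)
    s.enter(0.10, 1, fired.append, argument=('second',))
    s.enter(0.05, 1, fired.append, argument=('first',))
    print([e.time for e in s.queue])   # event handles, sorted by scheduled time
    s.run(blocking=False)              # nothing is due yet; returns at once
    s.run()                            # blocks until both events have fired
    assert fired == ['first', 'second']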
diff --git a/Lib/test/test_scope.py b/Lib/test/test_scope.py
index fbc87aa..129a18a 100644
--- a/Lib/test/test_scope.py
+++ b/Lib/test/test_scope.py
@@ -1,5 +1,5 @@
import unittest
-from test.support import check_syntax_error, run_unittest
+from test.support import check_syntax_error, cpython_only, run_unittest
class ScopeTests(unittest.TestCase):
@@ -496,23 +496,22 @@ class ScopeTests(unittest.TestCase):
self.assertNotIn("x", varnames)
self.assertIn("y", varnames)
+ @cpython_only
def testLocalsClass_WithTrace(self):
# Issue23728: after the trace function returns, the locals()
# dictionary is used to update all variables, this used to
# include free variables. But in class statements, free
# variables are not inserted...
import sys
+ self.addCleanup(sys.settrace, sys.gettrace())
sys.settrace(lambda a,b,c:None)
- try:
- x = 12
+ x = 12
- class C:
- def f(self):
- return x
+ class C:
+ def f(self):
+ return x
- self.assertEqual(x, 12) # Used to raise UnboundLocalError
- finally:
- sys.settrace(None)
+ self.assertEqual(x, 12) # Used to raise UnboundLocalError
def testBoundAndFree(self):
# var is bound and free in class
@@ -527,6 +526,7 @@ class ScopeTests(unittest.TestCase):
inst = f(3)()
self.assertEqual(inst.a, inst.m())
+ @cpython_only
def testInteractionWithTraceFunc(self):
import sys
@@ -543,6 +543,7 @@ class ScopeTests(unittest.TestCase):
class TestClass:
pass
+ self.addCleanup(sys.settrace, sys.gettrace())
sys.settrace(tracer)
adaptgetter("foo", TestClass, (1, ""))
sys.settrace(None)
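Both trace-function tests now use the same save-and-restore idiom: self.addCleanup(sys.settrace, sys.gettrace()) snapshots the current trace function at call time and reinstalls it even if the test body raises, which is what the removed try/finally used to guarantee by hand. An isolated example:

    import sys
    import unittest

    class TraceRestoreExample(unittest.TestCase):
        def test_under_trace(self):
            # gettrace() is evaluated now; settrace() runs during cleanup.
            self.addCleanup(sys.settrace, sys.gettrace())
            sys.settrace(lambda frame, event, arg: None)
            # ... the old trace function is restored no matter what happens.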
diff --git a/Lib/test/test_select.py b/Lib/test/test_select.py
index fe92f45..3144c54 100644
--- a/Lib/test/test_select.py
+++ b/Lib/test/test_select.py
@@ -1,8 +1,9 @@
-from test import support
-import unittest
-import select
+import errno
import os
+import select
import sys
+import unittest
+from test import support
@unittest.skipIf(sys.platform[:3] in ('win', 'os2', 'riscos'),
"can't easily test on this system")
@@ -20,6 +21,21 @@ class SelectTestCase(unittest.TestCase):
self.assertRaises(TypeError, select.select, [self.Nope()], [], [])
self.assertRaises(TypeError, select.select, [self.Almost()], [], [])
self.assertRaises(TypeError, select.select, [], [], [], "not a number")
+ self.assertRaises(ValueError, select.select, [], [], [], -1)
+
+ # Issue #12367: http://www.freebsd.org/cgi/query-pr.cgi?pr=kern/155606
+ @unittest.skipIf(sys.platform.startswith('freebsd'),
+ 'skip because of a FreeBSD bug: kern/155606')
+ def test_errno(self):
+ with open(__file__, 'rb') as fp:
+ fd = fp.fileno()
+ fp.close()
+ try:
+ select.select([fd], [], [], 0)
+ except select.error as err:
+ self.assertEqual(err.errno, errno.EBADF)
+ else:
+ self.fail("exception not raised")
def test_returned_list_identity(self):
# See issue #8329
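The new test_errno case pins down which errno select() reports for a stale descriptor on POSIX. The same behaviour in isolation (from Python 3.3 on, select.error is an alias of OSError):

    import errno
    import select

    with open(__file__, 'rb') as fp:
        fd = fp.fileno()
    # fp is closed here, so fd no longer refers to an open file.
    try:
        select.select([fd], [], [], 0)
    except OSError as err:
        assert err.errno == errno.EBADF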
diff --git a/Lib/test/test_shelve.py b/Lib/test/test_shelve.py
index 3e73f52..13c1265 100644
--- a/Lib/test/test_shelve.py
+++ b/Lib/test/test_shelve.py
@@ -2,7 +2,7 @@ import unittest
import shelve
import glob
from test import support
-from collections import MutableMapping
+from collections.abc import MutableMapping
from test.test_dbm import dbm_iterator
def L1(s):
@@ -129,8 +129,8 @@ class TestCase(unittest.TestCase):
shelve.Shelf(d)[key] = [1]
self.assertIn(key.encode('utf-8'), d)
# but a different one can be given
- shelve.Shelf(d, keyencoding='latin1')[key] = [1]
- self.assertIn(key.encode('latin1'), d)
+ shelve.Shelf(d, keyencoding='latin-1')[key] = [1]
+ self.assertIn(key.encode('latin-1'), d)
# with all consequences
s = shelve.Shelf(d, keyencoding='ascii')
self.assertRaises(UnicodeEncodeError, s.__setitem__, key, [1])
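The keyencoding argument exercised above determines how shelve.Shelf encodes string keys before passing them to the byte-oriented backing mapping. A small sketch against a plain dict standing in for a dbm database:

    import shelve

    backing = {}
    s = shelve.Shelf(backing, keyencoding='latin-1')
    s['cl\xe9'] = [1]                  # 'clé'; the key is encoded on the way in
    assert 'cl\xe9'.encode('latin-1') in backing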
diff --git a/Lib/test/test_shlex.py b/Lib/test/test_shlex.py
index 25e4b6d..d4463f30 100644
--- a/Lib/test/test_shlex.py
+++ b/Lib/test/test_shlex.py
@@ -1,6 +1,7 @@
-import unittest
-import os, sys, io
+import io
import shlex
+import string
+import unittest
from test import support
@@ -173,6 +174,22 @@ class ShlexTest(unittest.TestCase):
"%s: %s != %s" %
(self.data[i][0], l, self.data[i][1:]))
+ def testQuote(self):
+ safeunquoted = string.ascii_letters + string.digits + '@%_-+=:,./'
+ unicode_sample = '\xe9\xe0\xdf' # e + acute accent, a + grave, sharp s
+ unsafe = '"`$\\!' + unicode_sample
+
+ self.assertEqual(shlex.quote(''), "''")
+ self.assertEqual(shlex.quote(safeunquoted), safeunquoted)
+ self.assertEqual(shlex.quote('test file name'), "'test file name'")
+ for u in unsafe:
+ self.assertEqual(shlex.quote('test%sname' % u),
+ "'test%sname'" % u)
+ for u in unsafe:
+ self.assertEqual(shlex.quote("test%s'name'" % u),
+ "'test%s'\"'\"'name'\"'\"''" % u)
+
+
# Allow this test to be used with old shlex.py
if not getattr(shlex, "split", None):
for methname in dir(ShlexTest):
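shlex.quote(), which testQuote above covers, returns its argument unchanged when it consists only of known-safe characters and otherwise wraps it in single quotes (escaping any embedded single quotes); that makes it the right tool for splicing untrusted filenames into a shell command line. For instance:

    import shlex

    filename = "my file; rm -rf ~"
    cmd = "ls -l %s" % shlex.quote(filename)
    print(cmd)   # ls -l 'my file; rm -rf ~'  -- the metacharacters are inert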
diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py
index e2310e2..6c9515e 100644
--- a/Lib/test/test_shutil.py
+++ b/Lib/test/test_shutil.py
@@ -7,7 +7,9 @@ import sys
import stat
import os
import os.path
+import errno
import functools
+import subprocess
from test import support
from test.support import TESTFN
from os.path import splitdrive
@@ -21,7 +23,7 @@ import tarfile
import warnings
from test import support
-from test.support import TESTFN, check_warnings, captured_stdout
+from test.support import TESTFN, check_warnings, captured_stdout, requires_zlib
try:
import bz2
@@ -39,11 +41,6 @@ except ImportError:
UID_GID_SUPPORT = False
try:
- import zlib
-except ImportError:
- zlib = None
-
-try:
import zipfile
ZIP_SUPPORT = True
except ImportError:
@@ -51,7 +48,7 @@ except ImportError:
def _fake_rename(*args, **kwargs):
# Pretend the destination path is on a different filesystem.
- raise OSError()
+ raise OSError(getattr(errno, 'EXDEV', 18), "Invalid cross-device link")
def mock_rename(func):
@functools.wraps(func)
@@ -64,6 +61,31 @@ def mock_rename(func):
os.rename = builtin_rename
return wrap
+def write_file(path, content, binary=False):
+ """Write *content* to a file located at *path*.
+
+ If *path* is a tuple instead of a string, os.path.join will be used to
+ make a path. If *binary* is true, the file will be opened in binary
+ mode.
+ """
+ if isinstance(path, tuple):
+ path = os.path.join(*path)
+ with open(path, 'wb' if binary else 'w') as fp:
+ fp.write(content)
+
+def read_file(path, binary=False):
+ """Return contents from a file located at *path*.
+
+ If *path* is a tuple instead of a string, os.path.join will be used to
+ make a path. If *binary* is true, the file will be opened in binary
+ mode.
+ """
+ if isinstance(path, tuple):
+ path = os.path.join(*path)
+ with open(path, 'rb' if binary else 'r') as fp:
+ return fp.read()
+
+
class TestShutil(unittest.TestCase):
def setUp(self):
@@ -76,19 +98,6 @@ class TestShutil(unittest.TestCase):
d = self.tempdirs.pop()
shutil.rmtree(d, os.name in ('nt', 'cygwin'))
- def write_file(self, path, content='xxx'):
- """Writes a file in the given path.
-
-
- path can be a string or a sequence.
- """
- if isinstance(path, (list, tuple)):
- path = os.path.join(*path)
- f = open(path, 'w')
- try:
- f.write(content)
- finally:
- f.close()
def mkdtemp(self):
"""Create a temporary directory that will be cleaned up.
@@ -112,8 +121,7 @@ class TestShutil(unittest.TestCase):
self.errorState = 0
os.mkdir(TESTFN)
self.childpath = os.path.join(TESTFN, 'a')
- f = open(self.childpath, 'w')
- f.close()
+ support.create_empty_file(self.childpath)
old_dir_mode = os.stat(TESTFN).st_mode
old_child_mode = os.stat(self.childpath).st_mode
# Make unwritable.
@@ -158,84 +166,268 @@ class TestShutil(unittest.TestCase):
self.assertTrue(issubclass(exc[0], OSError))
self.errorState = 2
+ @unittest.skipUnless(hasattr(os, 'chmod'), 'requires os.chmod')
+ @support.skip_unless_symlink
+ def test_copymode_follow_symlinks(self):
+ tmp_dir = self.mkdtemp()
+ src = os.path.join(tmp_dir, 'foo')
+ dst = os.path.join(tmp_dir, 'bar')
+ src_link = os.path.join(tmp_dir, 'baz')
+ dst_link = os.path.join(tmp_dir, 'quux')
+ write_file(src, 'foo')
+ write_file(dst, 'foo')
+ os.symlink(src, src_link)
+ os.symlink(dst, dst_link)
+ os.chmod(src, stat.S_IRWXU|stat.S_IRWXG)
+ # file to file
+ os.chmod(dst, stat.S_IRWXO)
+ self.assertNotEqual(os.stat(src).st_mode, os.stat(dst).st_mode)
+ shutil.copymode(src, dst)
+ self.assertEqual(os.stat(src).st_mode, os.stat(dst).st_mode)
+ # follow src link
+ os.chmod(dst, stat.S_IRWXO)
+ shutil.copymode(src_link, dst)
+ self.assertEqual(os.stat(src).st_mode, os.stat(dst).st_mode)
+ # follow dst link
+ os.chmod(dst, stat.S_IRWXO)
+ shutil.copymode(src, dst_link)
+ self.assertEqual(os.stat(src).st_mode, os.stat(dst).st_mode)
+ # follow both links
+ os.chmod(dst, stat.S_IRWXO)
+        shutil.copymode(src_link, dst_link)
+ self.assertEqual(os.stat(src).st_mode, os.stat(dst).st_mode)
+
+ @unittest.skipUnless(hasattr(os, 'lchmod'), 'requires os.lchmod')
+ @support.skip_unless_symlink
+ def test_copymode_symlink_to_symlink(self):
+ tmp_dir = self.mkdtemp()
+ src = os.path.join(tmp_dir, 'foo')
+ dst = os.path.join(tmp_dir, 'bar')
+ src_link = os.path.join(tmp_dir, 'baz')
+ dst_link = os.path.join(tmp_dir, 'quux')
+ write_file(src, 'foo')
+ write_file(dst, 'foo')
+ os.symlink(src, src_link)
+ os.symlink(dst, dst_link)
+ os.chmod(src, stat.S_IRWXU|stat.S_IRWXG)
+ os.chmod(dst, stat.S_IRWXU)
+ os.lchmod(src_link, stat.S_IRWXO|stat.S_IRWXG)
+ # link to link
+ os.lchmod(dst_link, stat.S_IRWXO)
+ shutil.copymode(src_link, dst_link, symlinks=True)
+ self.assertEqual(os.lstat(src_link).st_mode,
+ os.lstat(dst_link).st_mode)
+ self.assertNotEqual(os.stat(src).st_mode, os.stat(dst).st_mode)
+ # src link - use chmod
+ os.lchmod(dst_link, stat.S_IRWXO)
+ shutil.copymode(src_link, dst, symlinks=True)
+ self.assertEqual(os.stat(src).st_mode, os.stat(dst).st_mode)
+ # dst link - use chmod
+ os.lchmod(dst_link, stat.S_IRWXO)
+ shutil.copymode(src, dst_link, symlinks=True)
+ self.assertEqual(os.stat(src).st_mode, os.stat(dst).st_mode)
+
+ @unittest.skipIf(hasattr(os, 'lchmod'), 'requires os.lchmod to be missing')
+ @support.skip_unless_symlink
+ def test_copymode_symlink_to_symlink_wo_lchmod(self):
+ tmp_dir = self.mkdtemp()
+ src = os.path.join(tmp_dir, 'foo')
+ dst = os.path.join(tmp_dir, 'bar')
+ src_link = os.path.join(tmp_dir, 'baz')
+ dst_link = os.path.join(tmp_dir, 'quux')
+ write_file(src, 'foo')
+ write_file(dst, 'foo')
+ os.symlink(src, src_link)
+ os.symlink(dst, dst_link)
+ shutil.copymode(src_link, dst_link, symlinks=True) # silent fail
+
+ @support.skip_unless_symlink
+ def test_copystat_symlinks(self):
+ tmp_dir = self.mkdtemp()
+ src = os.path.join(tmp_dir, 'foo')
+ dst = os.path.join(tmp_dir, 'bar')
+ src_link = os.path.join(tmp_dir, 'baz')
+ dst_link = os.path.join(tmp_dir, 'qux')
+ write_file(src, 'foo')
+ src_stat = os.stat(src)
+ os.utime(src, (src_stat.st_atime,
+ src_stat.st_mtime - 42.0)) # ensure different mtimes
+ write_file(dst, 'bar')
+ self.assertNotEqual(os.stat(src).st_mtime, os.stat(dst).st_mtime)
+ os.symlink(src, src_link)
+ os.symlink(dst, dst_link)
+ if hasattr(os, 'lchmod'):
+ os.lchmod(src_link, stat.S_IRWXO)
+ if hasattr(os, 'lchflags') and hasattr(stat, 'UF_NODUMP'):
+ os.lchflags(src_link, stat.UF_NODUMP)
+ src_link_stat = os.lstat(src_link)
+ # follow
+ if hasattr(os, 'lchmod'):
+ shutil.copystat(src_link, dst_link, symlinks=False)
+ self.assertNotEqual(src_link_stat.st_mode, os.stat(dst).st_mode)
+ # don't follow
+ shutil.copystat(src_link, dst_link, symlinks=True)
+ dst_link_stat = os.lstat(dst_link)
+ if hasattr(os, 'lutimes'):
+ for attr in 'st_atime', 'st_mtime':
+ # The modification times may be truncated in the new file.
+ self.assertLessEqual(getattr(src_link_stat, attr),
+ getattr(dst_link_stat, attr) + 1)
+ if hasattr(os, 'lchmod'):
+ self.assertEqual(src_link_stat.st_mode, dst_link_stat.st_mode)
+ if hasattr(os, 'lchflags') and hasattr(src_link_stat, 'st_flags'):
+ self.assertEqual(src_link_stat.st_flags, dst_link_stat.st_flags)
+        # symlinks=True asks to operate on the links, but dst is not a link
+ shutil.copystat(src_link, dst, symlinks=True)
+ self.assertTrue(abs(os.stat(src).st_mtime - os.stat(dst).st_mtime) <
+                        0.1)
+
+ @support.skip_unless_symlink
+ def test_copy_symlinks(self):
+ tmp_dir = self.mkdtemp()
+ src = os.path.join(tmp_dir, 'foo')
+ dst = os.path.join(tmp_dir, 'bar')
+ src_link = os.path.join(tmp_dir, 'baz')
+ write_file(src, 'foo')
+ os.symlink(src, src_link)
+ if hasattr(os, 'lchmod'):
+ os.lchmod(src_link, stat.S_IRWXU | stat.S_IRWXO)
+ # don't follow
+ shutil.copy(src_link, dst, symlinks=False)
+ self.assertFalse(os.path.islink(dst))
+ self.assertEqual(read_file(src), read_file(dst))
+ os.remove(dst)
+ # follow
+ shutil.copy(src_link, dst, symlinks=True)
+ self.assertTrue(os.path.islink(dst))
+ self.assertEqual(os.readlink(dst), os.readlink(src_link))
+ if hasattr(os, 'lchmod'):
+ self.assertEqual(os.lstat(src_link).st_mode,
+ os.lstat(dst).st_mode)
+
+ @support.skip_unless_symlink
+ def test_copy2_symlinks(self):
+ tmp_dir = self.mkdtemp()
+ src = os.path.join(tmp_dir, 'foo')
+ dst = os.path.join(tmp_dir, 'bar')
+ src_link = os.path.join(tmp_dir, 'baz')
+ write_file(src, 'foo')
+ os.symlink(src, src_link)
+ if hasattr(os, 'lchmod'):
+ os.lchmod(src_link, stat.S_IRWXU | stat.S_IRWXO)
+ if hasattr(os, 'lchflags') and hasattr(stat, 'UF_NODUMP'):
+ os.lchflags(src_link, stat.UF_NODUMP)
+ src_stat = os.stat(src)
+ src_link_stat = os.lstat(src_link)
+ # follow
+ shutil.copy2(src_link, dst, symlinks=False)
+ self.assertFalse(os.path.islink(dst))
+ self.assertEqual(read_file(src), read_file(dst))
+ os.remove(dst)
+ # don't follow
+ shutil.copy2(src_link, dst, symlinks=True)
+ self.assertTrue(os.path.islink(dst))
+ self.assertEqual(os.readlink(dst), os.readlink(src_link))
+ dst_stat = os.lstat(dst)
+ if hasattr(os, 'lutimes'):
+ for attr in 'st_atime', 'st_mtime':
+ # The modification times may be truncated in the new file.
+ self.assertLessEqual(getattr(src_link_stat, attr),
+ getattr(dst_stat, attr) + 1)
+ if hasattr(os, 'lchmod'):
+ self.assertEqual(src_link_stat.st_mode, dst_stat.st_mode)
+ self.assertNotEqual(src_stat.st_mode, dst_stat.st_mode)
+ if hasattr(os, 'lchflags') and hasattr(src_link_stat, 'st_flags'):
+ self.assertEqual(src_link_stat.st_flags, dst_stat.st_flags)
+
+ @support.skip_unless_symlink
+ def test_copyfile_symlinks(self):
+ tmp_dir = self.mkdtemp()
+ src = os.path.join(tmp_dir, 'src')
+ dst = os.path.join(tmp_dir, 'dst')
+ dst_link = os.path.join(tmp_dir, 'dst_link')
+ link = os.path.join(tmp_dir, 'link')
+ write_file(src, 'foo')
+ os.symlink(src, link)
+ # don't follow
+ shutil.copyfile(link, dst_link, symlinks=True)
+ self.assertTrue(os.path.islink(dst_link))
+ self.assertEqual(os.readlink(link), os.readlink(dst_link))
+ # follow
+ shutil.copyfile(link, dst)
+ self.assertFalse(os.path.islink(dst))
+
def test_rmtree_dont_delete_file(self):
# When called on a file instead of a directory, don't delete it.
handle, path = tempfile.mkstemp()
- os.fdopen(handle).close()
+ os.close(handle)
self.assertRaises(OSError, shutil.rmtree, path)
os.remove(path)
- def _write_data(self, path, data):
- f = open(path, "w")
- f.write(data)
- f.close()
-
def test_copytree_simple(self):
-
- def read_data(path):
- f = open(path)
- data = f.read()
- f.close()
- return data
-
src_dir = tempfile.mkdtemp()
dst_dir = os.path.join(tempfile.mkdtemp(), 'destination')
- self._write_data(os.path.join(src_dir, 'test.txt'), '123')
+ self.addCleanup(shutil.rmtree, src_dir)
+ self.addCleanup(shutil.rmtree, os.path.dirname(dst_dir))
+ write_file((src_dir, 'test.txt'), '123')
os.mkdir(os.path.join(src_dir, 'test_dir'))
- self._write_data(os.path.join(src_dir, 'test_dir', 'test.txt'), '456')
+ write_file((src_dir, 'test_dir', 'test.txt'), '456')
+
+ shutil.copytree(src_dir, dst_dir)
+ self.assertTrue(os.path.isfile(os.path.join(dst_dir, 'test.txt')))
+ self.assertTrue(os.path.isdir(os.path.join(dst_dir, 'test_dir')))
+ self.assertTrue(os.path.isfile(os.path.join(dst_dir, 'test_dir',
+ 'test.txt')))
+ actual = read_file((dst_dir, 'test.txt'))
+ self.assertEqual(actual, '123')
+ actual = read_file((dst_dir, 'test_dir', 'test.txt'))
+ self.assertEqual(actual, '456')
- try:
- shutil.copytree(src_dir, dst_dir)
- self.assertTrue(os.path.isfile(os.path.join(dst_dir, 'test.txt')))
- self.assertTrue(os.path.isdir(os.path.join(dst_dir, 'test_dir')))
- self.assertTrue(os.path.isfile(os.path.join(dst_dir, 'test_dir',
- 'test.txt')))
- actual = read_data(os.path.join(dst_dir, 'test.txt'))
- self.assertEqual(actual, '123')
- actual = read_data(os.path.join(dst_dir, 'test_dir', 'test.txt'))
- self.assertEqual(actual, '456')
- finally:
- for path in (
- os.path.join(src_dir, 'test.txt'),
- os.path.join(dst_dir, 'test.txt'),
- os.path.join(src_dir, 'test_dir', 'test.txt'),
- os.path.join(dst_dir, 'test_dir', 'test.txt'),
- ):
- if os.path.exists(path):
- os.remove(path)
- for path in (src_dir,
- os.path.dirname(dst_dir)
- ):
- if os.path.exists(path):
- shutil.rmtree(path)
+ @support.skip_unless_symlink
+ def test_copytree_symlinks(self):
+ tmp_dir = self.mkdtemp()
+ src_dir = os.path.join(tmp_dir, 'src')
+ dst_dir = os.path.join(tmp_dir, 'dst')
+ sub_dir = os.path.join(src_dir, 'sub')
+ os.mkdir(src_dir)
+ os.mkdir(sub_dir)
+ write_file((src_dir, 'file.txt'), 'foo')
+ src_link = os.path.join(sub_dir, 'link')
+ dst_link = os.path.join(dst_dir, 'sub/link')
+ os.symlink(os.path.join(src_dir, 'file.txt'),
+ src_link)
+ if hasattr(os, 'lchmod'):
+ os.lchmod(src_link, stat.S_IRWXU | stat.S_IRWXO)
+ if hasattr(os, 'lchflags') and hasattr(stat, 'UF_NODUMP'):
+ os.lchflags(src_link, stat.UF_NODUMP)
+ src_stat = os.lstat(src_link)
+ shutil.copytree(src_dir, dst_dir, symlinks=True)
+ self.assertTrue(os.path.islink(os.path.join(dst_dir, 'sub', 'link')))
+ self.assertEqual(os.readlink(os.path.join(dst_dir, 'sub', 'link')),
+ os.path.join(src_dir, 'file.txt'))
+ dst_stat = os.lstat(dst_link)
+ if hasattr(os, 'lchmod'):
+ self.assertEqual(dst_stat.st_mode, src_stat.st_mode)
+ if hasattr(os, 'lchflags'):
+ self.assertEqual(dst_stat.st_flags, src_stat.st_flags)
def test_copytree_with_exclude(self):
-
- def read_data(path):
- f = open(path)
- data = f.read()
- f.close()
- return data
-
# creating data
join = os.path.join
exists = os.path.exists
src_dir = tempfile.mkdtemp()
try:
dst_dir = join(tempfile.mkdtemp(), 'destination')
- self._write_data(join(src_dir, 'test.txt'), '123')
- self._write_data(join(src_dir, 'test.tmp'), '123')
+ write_file((src_dir, 'test.txt'), '123')
+ write_file((src_dir, 'test.tmp'), '123')
os.mkdir(join(src_dir, 'test_dir'))
- self._write_data(join(src_dir, 'test_dir', 'test.txt'), '456')
+ write_file((src_dir, 'test_dir', 'test.txt'), '456')
os.mkdir(join(src_dir, 'test_dir2'))
- self._write_data(join(src_dir, 'test_dir2', 'test.txt'), '456')
+ write_file((src_dir, 'test_dir2', 'test.txt'), '456')
os.mkdir(join(src_dir, 'test_dir2', 'subdir'))
os.mkdir(join(src_dir, 'test_dir2', 'subdir2'))
- self._write_data(join(src_dir, 'test_dir2', 'subdir', 'test.txt'),
- '456')
- self._write_data(join(src_dir, 'test_dir2', 'subdir2', 'test.py'),
- '456')
-
+ write_file((src_dir, 'test_dir2', 'subdir', 'test.txt'), '456')
+ write_file((src_dir, 'test_dir2', 'subdir2', 'test.py'), '456')
# testing glob-like patterns
try:
@@ -243,21 +435,19 @@ class TestShutil(unittest.TestCase):
shutil.copytree(src_dir, dst_dir, ignore=patterns)
# checking the result: some elements should not be copied
self.assertTrue(exists(join(dst_dir, 'test.txt')))
- self.assertTrue(not exists(join(dst_dir, 'test.tmp')))
- self.assertTrue(not exists(join(dst_dir, 'test_dir2')))
+ self.assertFalse(exists(join(dst_dir, 'test.tmp')))
+ self.assertFalse(exists(join(dst_dir, 'test_dir2')))
finally:
- if os.path.exists(dst_dir):
- shutil.rmtree(dst_dir)
+ shutil.rmtree(dst_dir)
try:
patterns = shutil.ignore_patterns('*.tmp', 'subdir*')
shutil.copytree(src_dir, dst_dir, ignore=patterns)
# checking the result: some elements should not be copied
- self.assertTrue(not exists(join(dst_dir, 'test.tmp')))
- self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir2')))
- self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir')))
+ self.assertFalse(exists(join(dst_dir, 'test.tmp')))
+ self.assertFalse(exists(join(dst_dir, 'test_dir2', 'subdir2')))
+ self.assertFalse(exists(join(dst_dir, 'test_dir2', 'subdir')))
finally:
- if os.path.exists(dst_dir):
- shutil.rmtree(dst_dir)
+ shutil.rmtree(dst_dir)
# testing callable-style
try:
@@ -276,13 +466,12 @@ class TestShutil(unittest.TestCase):
shutil.copytree(src_dir, dst_dir, ignore=_filter)
# checking the result: some elements should not be copied
- self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir2',
- 'test.py')))
- self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir')))
+ self.assertFalse(exists(join(dst_dir, 'test_dir2', 'subdir2',
+ 'test.py')))
+ self.assertFalse(exists(join(dst_dir, 'test_dir2', 'subdir')))
finally:
- if os.path.exists(dst_dir):
- shutil.rmtree(dst_dir)
+ shutil.rmtree(dst_dir)
finally:
shutil.rmtree(src_dir)
shutil.rmtree(os.path.dirname(dst_dir))
@@ -377,9 +566,9 @@ class TestShutil(unittest.TestCase):
src_dir = self.mkdtemp()
dst_dir = os.path.join(self.mkdtemp(), 'destination')
- self._write_data(os.path.join(src_dir, 'test.txt'), '123')
+ write_file((src_dir, 'test.txt'), '123')
os.mkdir(os.path.join(src_dir, 'test_dir'))
- self._write_data(os.path.join(src_dir, 'test_dir', 'test.txt'), '456')
+ write_file((src_dir, 'test_dir', 'test.txt'), '456')
copied = []
def _copy(src, dst):
@@ -396,7 +585,7 @@ class TestShutil(unittest.TestCase):
dst_dir = os.path.join(self.mkdtemp(), 'destination')
os.symlink('IDONTEXIST', os.path.join(src_dir, 'test.txt'))
os.mkdir(os.path.join(src_dir, 'test_dir'))
- self._write_data(os.path.join(src_dir, 'test_dir', 'test.txt'), '456')
+ write_file((src_dir, 'test_dir', 'test.txt'), '456')
self.assertRaises(Error, shutil.copytree, src_dir, dst_dir)
# a dangling symlink is ignored with the proper flag
@@ -412,7 +601,7 @@ class TestShutil(unittest.TestCase):
def _copy_file(self, method):
fname = 'test.txt'
tmpdir = self.mkdtemp()
- self.write_file([tmpdir, fname])
+ write_file((tmpdir, fname), 'xxx')
file1 = os.path.join(tmpdir, fname)
tmpdir2 = self.mkdtemp()
method(file1, tmpdir2)
@@ -444,14 +633,14 @@ class TestShutil(unittest.TestCase):
self.assertEqual(getattr(file1_stat, 'st_flags'),
getattr(file2_stat, 'st_flags'))
- @unittest.skipUnless(zlib, "requires zlib")
+ @requires_zlib
def test_make_tarball(self):
# creating something to tar
tmpdir = self.mkdtemp()
- self.write_file([tmpdir, 'file1'], 'xxx')
- self.write_file([tmpdir, 'file2'], 'xxx')
+ write_file((tmpdir, 'file1'), 'xxx')
+ write_file((tmpdir, 'file2'), 'xxx')
os.mkdir(os.path.join(tmpdir, 'sub'))
- self.write_file([tmpdir, 'sub', 'file3'], 'xxx')
+ write_file((tmpdir, 'sub', 'file3'), 'xxx')
tmpdir2 = self.mkdtemp()
# force shutil to create the directory
@@ -498,16 +687,16 @@ class TestShutil(unittest.TestCase):
tmpdir = self.mkdtemp()
dist = os.path.join(tmpdir, 'dist')
os.mkdir(dist)
- self.write_file([dist, 'file1'], 'xxx')
- self.write_file([dist, 'file2'], 'xxx')
+ write_file((dist, 'file1'), 'xxx')
+ write_file((dist, 'file2'), 'xxx')
os.mkdir(os.path.join(dist, 'sub'))
- self.write_file([dist, 'sub', 'file3'], 'xxx')
+ write_file((dist, 'sub', 'file3'), 'xxx')
os.mkdir(os.path.join(dist, 'sub2'))
tmpdir2 = self.mkdtemp()
base_name = os.path.join(tmpdir2, 'archive')
return tmpdir, tmpdir2, base_name
- @unittest.skipUnless(zlib, "Requires zlib")
+ @requires_zlib
@unittest.skipUnless(find_executable('tar') and find_executable('gzip'),
'Need the tar command to run')
def test_tarfile_vs_tar(self):
@@ -562,13 +751,13 @@ class TestShutil(unittest.TestCase):
tarball = base_name + '.tar'
self.assertTrue(os.path.exists(tarball))
- @unittest.skipUnless(zlib, "Requires zlib")
+ @requires_zlib
@unittest.skipUnless(ZIP_SUPPORT, 'Need zip support to run')
def test_make_zipfile(self):
# creating something to tar
tmpdir = self.mkdtemp()
- self.write_file([tmpdir, 'file1'], 'xxx')
- self.write_file([tmpdir, 'file2'], 'xxx')
+ write_file((tmpdir, 'file1'), 'xxx')
+ write_file((tmpdir, 'file2'), 'xxx')
tmpdir2 = self.mkdtemp()
# force shutil to create the directory
@@ -586,7 +775,7 @@ class TestShutil(unittest.TestCase):
base_name = os.path.join(tmpdir, 'archive')
self.assertRaises(ValueError, make_archive, base_name, 'xxx')
- @unittest.skipUnless(zlib, "Requires zlib")
+ @requires_zlib
def test_make_archive_owner_group(self):
# testing make_archive with owner and group, with various combinations
# this works even if there's not gid/uid support
@@ -614,7 +803,7 @@ class TestShutil(unittest.TestCase):
self.assertTrue(os.path.exists(res))
- @unittest.skipUnless(zlib, "Requires zlib")
+ @requires_zlib
@unittest.skipUnless(UID_GID_SUPPORT, "Requires grp and pwd support")
def test_tarfile_root_owner(self):
tmpdir, tmpdir2, base_name = self._create_files()
@@ -683,7 +872,7 @@ class TestShutil(unittest.TestCase):
diff.append(file_)
return diff
- @unittest.skipUnless(zlib, "Requires zlib")
+ @requires_zlib
def test_unpack_archive(self):
formats = ['tar', 'gztar', 'zip']
if BZ2_SUPPORTED:
@@ -734,6 +923,75 @@ class TestShutil(unittest.TestCase):
unregister_unpack_format('Boo2')
self.assertEqual(get_unpack_formats(), formats)
+ @unittest.skipUnless(hasattr(shutil, 'disk_usage'),
+ "disk_usage not available on this platform")
+ def test_disk_usage(self):
+ usage = shutil.disk_usage(os.getcwd())
+ self.assertGreater(usage.total, 0)
+ self.assertGreater(usage.used, 0)
+ self.assertGreaterEqual(usage.free, 0)
+ self.assertGreaterEqual(usage.total, usage.used)
+ self.assertGreater(usage.total, usage.free)
+
+ @unittest.skipUnless(UID_GID_SUPPORT, "Requires grp and pwd support")
+ @unittest.skipUnless(hasattr(os, 'chown'), 'requires os.chown')
+ def test_chown(self):
+
+ # cleaned-up automatically by TestShutil.tearDown method
+ dirname = self.mkdtemp()
+ filename = tempfile.mktemp(dir=dirname)
+ write_file(filename, 'testing chown function')
+
+ with self.assertRaises(ValueError):
+ shutil.chown(filename)
+
+ with self.assertRaises(LookupError):
+            shutil.chown(filename, user='non-existing username')
+
+ with self.assertRaises(LookupError):
+            shutil.chown(filename, group='non-existing groupname')
+
+ with self.assertRaises(TypeError):
+ shutil.chown(filename, b'spam')
+
+ with self.assertRaises(TypeError):
+ shutil.chown(filename, 3.14)
+
+ uid = os.getuid()
+ gid = os.getgid()
+
+ def check_chown(path, uid=None, gid=None):
+            s = os.stat(path)
+ if uid is not None:
+ self.assertEqual(uid, s.st_uid)
+ if gid is not None:
+ self.assertEqual(gid, s.st_gid)
+
+ shutil.chown(filename, uid, gid)
+ check_chown(filename, uid, gid)
+ shutil.chown(filename, uid)
+ check_chown(filename, uid)
+ shutil.chown(filename, user=uid)
+ check_chown(filename, uid)
+ shutil.chown(filename, group=gid)
+ check_chown(filename, gid=gid)
+
+ shutil.chown(dirname, uid, gid)
+ check_chown(dirname, uid, gid)
+ shutil.chown(dirname, uid)
+ check_chown(dirname, uid)
+ shutil.chown(dirname, user=uid)
+ check_chown(dirname, uid)
+ shutil.chown(dirname, group=gid)
+ check_chown(dirname, gid=gid)
+
+ user = pwd.getpwuid(uid)[0]
+ group = grp.getgrgid(gid)[0]
+ shutil.chown(filename, user, group)
+ check_chown(filename, uid, gid)
+ shutil.chown(dirname, user, group)
+ check_chown(dirname, uid, gid)
+
class TestMove(unittest.TestCase):
@@ -847,6 +1105,49 @@ class TestMove(unittest.TestCase):
finally:
shutil.rmtree(TESTFN, ignore_errors=True)
+ @support.skip_unless_symlink
+ @mock_rename
+ def test_move_file_symlink(self):
+ dst = os.path.join(self.src_dir, 'bar')
+ os.symlink(self.src_file, dst)
+ shutil.move(dst, self.dst_file)
+ self.assertTrue(os.path.islink(self.dst_file))
+ self.assertTrue(os.path.samefile(self.src_file, self.dst_file))
+
+ @support.skip_unless_symlink
+ @mock_rename
+ def test_move_file_symlink_to_dir(self):
+ filename = "bar"
+ dst = os.path.join(self.src_dir, filename)
+ os.symlink(self.src_file, dst)
+ shutil.move(dst, self.dst_dir)
+ final_link = os.path.join(self.dst_dir, filename)
+ self.assertTrue(os.path.islink(final_link))
+ self.assertTrue(os.path.samefile(self.src_file, final_link))
+
+ @support.skip_unless_symlink
+ @mock_rename
+ def test_move_dangling_symlink(self):
+ src = os.path.join(self.src_dir, 'baz')
+ dst = os.path.join(self.src_dir, 'bar')
+ os.symlink(src, dst)
+ dst_link = os.path.join(self.dst_dir, 'quux')
+ shutil.move(dst, dst_link)
+ self.assertTrue(os.path.islink(dst_link))
+ self.assertEqual(os.path.realpath(src), os.path.realpath(dst_link))
+
+ @support.skip_unless_symlink
+ @mock_rename
+ def test_move_dir_symlink(self):
+ src = os.path.join(self.src_dir, 'baz')
+ dst = os.path.join(self.src_dir, 'bar')
+ os.mkdir(src)
+ os.symlink(src, dst)
+ dst_link = os.path.join(self.dst_dir, 'quux')
+ shutil.move(dst, dst_link)
+ self.assertTrue(os.path.islink(dst_link))
+ self.assertTrue(os.path.samefile(src, dst_link))
+
class TestCopyFile(unittest.TestCase):
@@ -965,13 +1266,57 @@ class TestCopyFile(unittest.TestCase):
shutil.move(self.src_dir, dst_dir)
self.assertTrue(os.path.isdir(dst_dir))
finally:
- if os.path.exists(dst_dir):
- os.rmdir(dst_dir)
+ os.rmdir(dst_dir)
+
+class TermsizeTests(unittest.TestCase):
+ def test_does_not_crash(self):
+ """Check if get_terminal_size() returns a meaningful value.
+
+ There's no easy portable way to actually check the size of the
+ terminal, so let's check if it returns something sensible instead.
+ """
+ size = shutil.get_terminal_size()
+ self.assertGreaterEqual(size.columns, 0)
+ self.assertGreaterEqual(size.lines, 0)
+
+ def test_os_environ_first(self):
+ "Check if environment variables have precedence"
+
+ with support.EnvironmentVarGuard() as env:
+ env['COLUMNS'] = '777'
+ size = shutil.get_terminal_size()
+ self.assertEqual(size.columns, 777)
+
+ with support.EnvironmentVarGuard() as env:
+ env['LINES'] = '888'
+ size = shutil.get_terminal_size()
+ self.assertEqual(size.lines, 888)
+
+ @unittest.skipUnless(os.isatty(sys.__stdout__.fileno()), "not on tty")
+ def test_stty_match(self):
+ """Check if stty returns the same results ignoring env
+
+ This test will fail if stdin and stdout are connected to
+ different terminals with different sizes. Nevertheless, such
+ situations should be pretty rare.
+ """
+ try:
+ size = subprocess.check_output(['stty', 'size']).decode().split()
+ except (FileNotFoundError, subprocess.CalledProcessError):
+ self.skipTest("stty invocation failed")
+ expected = (int(size[1]), int(size[0])) # reversed order
+
+ with support.EnvironmentVarGuard() as env:
+ del env['LINES']
+ del env['COLUMNS']
+ actual = shutil.get_terminal_size()
+ self.assertEqual(expected, actual)
def test_main():
- support.run_unittest(TestShutil, TestMove, TestCopyFile)
+ support.run_unittest(TestShutil, TestMove, TestCopyFile,
+ TermsizeTests)
if __name__ == '__main__':
test_main()
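Among the additions above, shutil.get_terminal_size() is specified to prefer the COLUMNS/LINES environment variables over querying the terminal, falling back to a caller-supplied default when neither source works; TermsizeTests checks exactly that ordering. A sketch:

    import os
    import shutil

    os.environ['COLUMNS'] = '120'
    os.environ['LINES'] = '40'
    size = shutil.get_terminal_size(fallback=(80, 24))
    assert (size.columns, size.lines) == (120, 40)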
diff --git a/Lib/test/test_signal.py b/Lib/test/test_signal.py
index 8df1bf0..6be259b 100644
--- a/Lib/test/test_signal.py
+++ b/Lib/test/test_signal.py
@@ -1,17 +1,19 @@
-import errno
+import unittest
+from test import support
+from contextlib import closing
import gc
-import os
import pickle
import select
import signal
+import struct
import subprocess
-import sys
-import time
import traceback
-import unittest
-from test import support
-from contextlib import closing
+import sys, os, time, errno
from test.script_helper import assert_python_ok, spawn_python
+try:
+ import threading
+except ImportError:
+ threading = None
if sys.platform in ('os2', 'riscos'):
raise unittest.SkipTest("Can't test signal on %s" % sys.platform)
@@ -57,15 +59,9 @@ class InterProcessSignalTests(unittest.TestCase):
def handlerA(self, signum, frame):
self.a_called = True
- if support.verbose:
- print("handlerA invoked from signal %s at:\n%s" % (
- signum, self.format_frame(frame, limit=1)))
def handlerB(self, signum, frame):
self.b_called = True
- if support.verbose:
- print ("handlerB invoked from signal %s at:\n%s" % (
- signum, self.format_frame(frame, limit=1)))
raise HandlerBCalled(signum, self.format_frame(frame))
def wait(self, child):
@@ -92,8 +88,6 @@ class InterProcessSignalTests(unittest.TestCase):
# Let the sub-processes know who to send signals to.
pid = os.getpid()
- if support.verbose:
- print("test runner's pid is", pid)
child = ignoring_eintr(subprocess.Popen, ['kill', '-HUP', str(pid)])
if child:
@@ -117,8 +111,6 @@ class InterProcessSignalTests(unittest.TestCase):
except HandlerBCalled:
self.assertTrue(self.b_called)
self.assertFalse(self.a_called)
- if support.verbose:
- print("HandlerBCalled exception caught")
child = ignoring_eintr(subprocess.Popen, ['kill', '-USR2', str(pid)])
if child:
@@ -134,8 +126,7 @@ class InterProcessSignalTests(unittest.TestCase):
# may return early.
time.sleep(1)
except KeyboardInterrupt:
- if support.verbose:
- print("KeyboardInterrupt (the alarm() went off)")
+ pass
except:
self.fail("Some other exception woke us from pause: %s" %
traceback.format_exc())
@@ -191,7 +182,7 @@ class InterProcessSignalTests(unittest.TestCase):
@unittest.skipIf(sys.platform == "win32", "Not valid on Windows")
-class BasicSignalTests(unittest.TestCase):
+class PosixTests(unittest.TestCase):
def trivial_signal_handler(self, *args):
pass
@@ -233,31 +224,44 @@ class WindowsSignalTests(unittest.TestCase):
@unittest.skipIf(sys.platform == "win32", "Not valid on Windows")
class WakeupSignalTests(unittest.TestCase):
- def check_wakeup(self, test_body):
- # use a subprocess to have only one thread and to not change signal
- # handling of the parent process
+ def check_wakeup(self, test_body, *signals, ordered=True):
+ # use a subprocess to have only one thread
code = """if 1:
import fcntl
import os
import signal
+ import struct
+
+ signals = {!r}
def handler(signum, frame):
pass
+ def check_signum(signals):
+ data = os.read(read, len(signals)+1)
+ raised = struct.unpack('%uB' % len(data), data)
+ if not {!r}:
+ raised = set(raised)
+ signals = set(signals)
+ if raised != signals:
+ raise Exception("%r != %r" % (raised, signals))
+
{}
signal.signal(signal.SIGALRM, handler)
read, write = os.pipe()
- flags = fcntl.fcntl(write, fcntl.F_GETFL, 0)
- flags = flags | os.O_NONBLOCK
- fcntl.fcntl(write, fcntl.F_SETFL, flags)
+ for fd in (read, write):
+ flags = fcntl.fcntl(fd, fcntl.F_GETFL, 0)
+ flags = flags | os.O_NONBLOCK
+ fcntl.fcntl(fd, fcntl.F_SETFL, flags)
signal.set_wakeup_fd(write)
test()
+ check_signum(signals)
os.close(read)
os.close(write)
- """.format(test_body)
+ """.format(signals, ordered, test_body)
assert_python_ok('-c', code)
@@ -283,7 +287,7 @@ class WakeupSignalTests(unittest.TestCase):
dt = after_time - mid_time
if dt >= TIMEOUT_HALF:
raise Exception("%s >= %s" % (dt, TIMEOUT_HALF))
- """)
+ """, signal.SIGALRM)
def test_wakeup_fd_during(self):
self.check_wakeup("""def test():
@@ -306,7 +310,32 @@ class WakeupSignalTests(unittest.TestCase):
dt = after_time - before_time
if dt >= TIMEOUT_HALF:
raise Exception("%s >= %s" % (dt, TIMEOUT_HALF))
- """)
+ """, signal.SIGALRM)
+
+ def test_signum(self):
+ self.check_wakeup("""def test():
+ signal.signal(signal.SIGUSR1, handler)
+ os.kill(os.getpid(), signal.SIGUSR1)
+ os.kill(os.getpid(), signal.SIGALRM)
+ """, signal.SIGUSR1, signal.SIGALRM)
+
+ @unittest.skipUnless(hasattr(signal, 'pthread_sigmask'),
+ 'need signal.pthread_sigmask()')
+ def test_pending(self):
+ self.check_wakeup("""def test():
+ signum1 = signal.SIGUSR1
+ signum2 = signal.SIGUSR2
+
+ signal.signal(signum1, handler)
+ signal.signal(signum2, handler)
+
+ signal.pthread_sigmask(signal.SIG_BLOCK, (signum1, signum2))
+ os.kill(os.getpid(), signum1)
+ os.kill(os.getpid(), signum2)
+ # Unblocking the 2 signals calls the C signal handler twice
+ signal.pthread_sigmask(signal.SIG_UNBLOCK, (signum1, signum2))
+ """, signal.SIGUSR1, signal.SIGUSR2, ordered=False)
+
@unittest.skipIf(sys.platform == "win32", "Not valid on Windows")
class SiginterruptTest(unittest.TestCase):
@@ -316,9 +345,6 @@ class SiginterruptTest(unittest.TestCase):
read is interrupted by the signal and raises an exception. Return False
if it returns normally.
"""
- class Timeout(Exception):
- pass
-
# use a subprocess to have only one thread, to have a timeout on the
# blocking read and to not touch signal handling in this process
code = """if 1:
@@ -359,18 +385,8 @@ class SiginterruptTest(unittest.TestCase):
# wait until the child process is loaded and has started
first_line = process.stdout.readline()
- # Wait the process with a timeout of 5 seconds
- timeout = time.time() + 5.0
- while True:
- if timeout < time.time():
- raise Timeout()
- status = process.poll()
- if status is not None:
- break
- time.sleep(0.1)
-
- stdout, stderr = process.communicate()
- except Timeout:
+ stdout, stderr = process.communicate(timeout=5.0)
+ except subprocess.TimeoutExpired:
process.kill()
return False
else:
@@ -419,8 +435,6 @@ class ItimerTest(unittest.TestCase):
def sig_alrm(self, *args):
self.hndl_called = True
- if support.verbose:
- print("SIGALRM handler invoked", args)
def sig_vtalrm(self, *args):
self.hndl_called = True
@@ -432,21 +446,13 @@ class ItimerTest(unittest.TestCase):
elif self.hndl_count == 3:
# disable ITIMER_VIRTUAL, this function shouldn't be called anymore
signal.setitimer(signal.ITIMER_VIRTUAL, 0)
- if support.verbose:
- print("last SIGVTALRM handler call")
self.hndl_count += 1
- if support.verbose:
- print("SIGVTALRM handler invoked", args)
-
def sig_prof(self, *args):
self.hndl_called = True
signal.setitimer(signal.ITIMER_PROF, 0)
- if support.verbose:
- print("SIGPROF handler invoked", args)
-
def test_itimer_exc(self):
# XXX I'm assuming -1 is an invalid itimer, but maybe some platform
# defines it ?
@@ -459,10 +465,7 @@ class ItimerTest(unittest.TestCase):
def test_itimer_real(self):
self.itimer = signal.ITIMER_REAL
signal.setitimer(self.itimer, 1.0)
- if support.verbose:
- print("\ncall pause()...")
signal.pause()
-
self.assertEqual(self.hndl_called, True)
# Issue 3864, unknown if this affects earlier versions of freebsd also
@@ -511,11 +514,359 @@ class ItimerTest(unittest.TestCase):
# and the handler should have been called
self.assertEqual(self.hndl_called, True)
+
+class PendingSignalsTests(unittest.TestCase):
+ """
+ Test pthread_sigmask(), pthread_kill(), sigpending() and sigwait()
+ functions.
+ """
+ @unittest.skipUnless(hasattr(signal, 'sigpending'),
+ 'need signal.sigpending()')
+ def test_sigpending_empty(self):
+ self.assertEqual(signal.sigpending(), set())
+
+ @unittest.skipUnless(hasattr(signal, 'pthread_sigmask'),
+ 'need signal.pthread_sigmask()')
+ @unittest.skipUnless(hasattr(signal, 'sigpending'),
+ 'need signal.sigpending()')
+ def test_sigpending(self):
+ code = """if 1:
+ import os
+ import signal
+
+ def handler(signum, frame):
+ 1/0
+
+ signum = signal.SIGUSR1
+ signal.signal(signum, handler)
+
+ signal.pthread_sigmask(signal.SIG_BLOCK, [signum])
+ os.kill(os.getpid(), signum)
+ pending = signal.sigpending()
+ if pending != {signum}:
+ raise Exception('%s != {%s}' % (pending, signum))
+ try:
+ signal.pthread_sigmask(signal.SIG_UNBLOCK, [signum])
+ except ZeroDivisionError:
+ pass
+ else:
+ raise Exception("ZeroDivisionError not raised")
+ """
+ assert_python_ok('-c', code)
+
+ @unittest.skipUnless(hasattr(signal, 'pthread_kill'),
+ 'need signal.pthread_kill()')
+ def test_pthread_kill(self):
+ code = """if 1:
+ import signal
+ import threading
+ import sys
+
+ signum = signal.SIGUSR1
+
+ def handler(signum, frame):
+ 1/0
+
+ signal.signal(signum, handler)
+
+ if sys.platform == 'freebsd6':
+            # Issue #12392 and #12469: sending a signal to the main thread
+ # doesn't work before the creation of the first thread on
+ # FreeBSD 6
+ def noop():
+ pass
+ thread = threading.Thread(target=noop)
+ thread.start()
+ thread.join()
+
+ tid = threading.get_ident()
+ try:
+ signal.pthread_kill(tid, signum)
+ except ZeroDivisionError:
+ pass
+ else:
+ raise Exception("ZeroDivisionError not raised")
+ """
+ assert_python_ok('-c', code)
+
+ @unittest.skipUnless(hasattr(signal, 'pthread_sigmask'),
+ 'need signal.pthread_sigmask()')
+ def wait_helper(self, blocked, test):
+ """
+ test: body of the "def test(signum):" function.
+ blocked: number of the blocked signal
+ """
+ code = '''if 1:
+ import signal
+ import sys
+
+ def handler(signum, frame):
+ 1/0
+
+ %s
+
+ blocked = %s
+ signum = signal.SIGALRM
+
+ # child: block and wait the signal
+ try:
+ signal.signal(signum, handler)
+ signal.pthread_sigmask(signal.SIG_BLOCK, [blocked])
+
+ # Do the tests
+ test(signum)
+
+ # The handler must not be called on unblock
+ try:
+ signal.pthread_sigmask(signal.SIG_UNBLOCK, [blocked])
+ except ZeroDivisionError:
+ print("the signal handler has been called",
+ file=sys.stderr)
+ sys.exit(1)
+ except BaseException as err:
+ print("error: {}".format(err), file=sys.stderr)
+ sys.stderr.flush()
+ sys.exit(1)
+ ''' % (test.strip(), blocked)
+
+ # sig*wait* must be called with the signal blocked: since the current
+ # process might have several threads running, use a subprocess to have
+ # a single thread.
+ assert_python_ok('-c', code)
+
+ @unittest.skipUnless(hasattr(signal, 'sigwait'),
+ 'need signal.sigwait()')
+ def test_sigwait(self):
+ self.wait_helper(signal.SIGALRM, '''
+ def test(signum):
+ signal.alarm(1)
+ received = signal.sigwait([signum])
+ if received != signum:
+ raise Exception('received %s, not %s' % (received, signum))
+ ''')
+
+ @unittest.skipUnless(hasattr(signal, 'sigwaitinfo'),
+ 'need signal.sigwaitinfo()')
+ def test_sigwaitinfo(self):
+ self.wait_helper(signal.SIGALRM, '''
+ def test(signum):
+ signal.alarm(1)
+ info = signal.sigwaitinfo([signum])
+ if info.si_signo != signum:
+ raise Exception("info.si_signo != %s" % signum)
+ ''')
+
+ @unittest.skipUnless(hasattr(signal, 'sigtimedwait'),
+ 'need signal.sigtimedwait()')
+ def test_sigtimedwait(self):
+ self.wait_helper(signal.SIGALRM, '''
+ def test(signum):
+ signal.alarm(1)
+ info = signal.sigtimedwait([signum], 10.1000)
+ if info.si_signo != signum:
+ raise Exception('info.si_signo != %s' % signum)
+ ''')
+
+ @unittest.skipUnless(hasattr(signal, 'sigtimedwait'),
+ 'need signal.sigtimedwait()')
+ def test_sigtimedwait_poll(self):
+ # check that polling with sigtimedwait works
+ self.wait_helper(signal.SIGALRM, '''
+ def test(signum):
+ import os
+ os.kill(os.getpid(), signum)
+ info = signal.sigtimedwait([signum], 0)
+ if info.si_signo != signum:
+ raise Exception('info.si_signo != %s' % signum)
+ ''')
+
+ @unittest.skipUnless(hasattr(signal, 'sigtimedwait'),
+ 'need signal.sigtimedwait()')
+ def test_sigtimedwait_timeout(self):
+ self.wait_helper(signal.SIGALRM, '''
+ def test(signum):
+ received = signal.sigtimedwait([signum], 1.0)
+ if received is not None:
+ raise Exception("received=%r" % (received,))
+ ''')
+
+ @unittest.skipUnless(hasattr(signal, 'sigtimedwait'),
+ 'need signal.sigtimedwait()')
+ def test_sigtimedwait_negative_timeout(self):
+ signum = signal.SIGALRM
+ self.assertRaises(ValueError, signal.sigtimedwait, [signum], -1.0)
+
+ @unittest.skipUnless(hasattr(signal, 'sigwaitinfo'),
+ 'need signal.sigwaitinfo()')
+ def test_sigwaitinfo_interrupted(self):
+ self.wait_helper(signal.SIGUSR1, '''
+ def test(signum):
+ import errno
+
+            hndl_called = False
+            def alarm_handler(signum, frame):
+                nonlocal hndl_called
+                hndl_called = True
+
+ signal.signal(signal.SIGALRM, alarm_handler)
+ signal.alarm(1)
+ try:
+ signal.sigwaitinfo([signal.SIGUSR1])
+ except OSError as e:
+ if e.errno == errno.EINTR:
+ if not hndl_called:
+ raise Exception("SIGALRM handler not called")
+ else:
+ raise Exception("Expected EINTR to be raised by sigwaitinfo")
+ else:
+ raise Exception("Expected EINTR to be raised by sigwaitinfo")
+ ''')
+
+ @unittest.skipUnless(hasattr(signal, 'sigwait'),
+ 'need signal.sigwait()')
+ @unittest.skipUnless(hasattr(signal, 'pthread_sigmask'),
+ 'need signal.pthread_sigmask()')
+ @unittest.skipIf(threading is None, "test needs threading module")
+ def test_sigwait_thread(self):
+ # Check that calling sigwait() from a thread doesn't suspend the whole
+ # process. A new interpreter is spawned to avoid problems when mixing
+ # threads and fork(): only async-safe functions are allowed between
+ # fork() and exec().
+ assert_python_ok("-c", """if True:
+ import os, threading, sys, time, signal
+
+ # the default handler terminates the process
+ signum = signal.SIGUSR1
+
+ def kill_later():
+ # wait until the main thread is waiting in sigwait()
+ time.sleep(1)
+ os.kill(os.getpid(), signum)
+
+ # the signal must be blocked by all the threads
+ signal.pthread_sigmask(signal.SIG_BLOCK, [signum])
+ killer = threading.Thread(target=kill_later)
+ killer.start()
+ received = signal.sigwait([signum])
+ if received != signum:
+ print("sigwait() received %s, not %s" % (received, signum),
+ file=sys.stderr)
+ sys.exit(1)
+ killer.join()
+ # unblock the signal, which should have been cleared by sigwait()
+ signal.pthread_sigmask(signal.SIG_UNBLOCK, [signum])
+ """)
+
+ @unittest.skipUnless(hasattr(signal, 'pthread_sigmask'),
+ 'need signal.pthread_sigmask()')
+ def test_pthread_sigmask_arguments(self):
+ self.assertRaises(TypeError, signal.pthread_sigmask)
+ self.assertRaises(TypeError, signal.pthread_sigmask, 1)
+ self.assertRaises(TypeError, signal.pthread_sigmask, 1, 2, 3)
+ self.assertRaises(OSError, signal.pthread_sigmask, 1700, [])
+
+ @unittest.skipUnless(hasattr(signal, 'pthread_sigmask'),
+ 'need signal.pthread_sigmask()')
+ def test_pthread_sigmask(self):
+ code = """if 1:
+ import signal
+ import os; import threading
+
+ def handler(signum, frame):
+ 1/0
+
+ def kill(signum):
+ os.kill(os.getpid(), signum)
+
+ def read_sigmask():
+ return signal.pthread_sigmask(signal.SIG_BLOCK, [])
+
+ signum = signal.SIGUSR1
+
+ # Install our signal handler
+ old_handler = signal.signal(signum, handler)
+
+ # Unblock SIGUSR1 (and copy the old mask) to test our signal handler
+ old_mask = signal.pthread_sigmask(signal.SIG_UNBLOCK, [signum])
+ try:
+ kill(signum)
+ except ZeroDivisionError:
+ pass
+ else:
+ raise Exception("ZeroDivisionError not raised")
+
+ # Block and then raise SIGUSR1. The signal is blocked: the signal
+ # handler is not called, and the signal is now pending
+ signal.pthread_sigmask(signal.SIG_BLOCK, [signum])
+ kill(signum)
+
+ # Check the new mask
+ blocked = read_sigmask()
+ if signum not in blocked:
+ raise Exception("%s not in %s" % (signum, blocked))
+ if old_mask ^ blocked != {signum}:
+ raise Exception("%s ^ %s != {%s}" % (old_mask, blocked, signum))
+
+ # Unblock SIGUSR1
+ try:
+            # unblocking the pending signal immediately invokes the signal handler
+ signal.pthread_sigmask(signal.SIG_UNBLOCK, [signum])
+ except ZeroDivisionError:
+ pass
+ else:
+ raise Exception("ZeroDivisionError not raised")
+ try:
+ kill(signum)
+ except ZeroDivisionError:
+ pass
+ else:
+ raise Exception("ZeroDivisionError not raised")
+
+ # Check the new mask
+ unblocked = read_sigmask()
+ if signum in unblocked:
+ raise Exception("%s in %s" % (signum, unblocked))
+ if blocked ^ unblocked != {signum}:
+ raise Exception("%s ^ %s != {%s}" % (blocked, unblocked, signum))
+ if old_mask != unblocked:
+ raise Exception("%s != %s" % (old_mask, unblocked))
+ """
+ assert_python_ok('-c', code)
+
+ @unittest.skipIf(sys.platform == 'freebsd6',
+ "issue #12392: send a signal to the main thread doesn't work "
+ "before the creation of the first thread on FreeBSD 6")
+ @unittest.skipUnless(hasattr(signal, 'pthread_kill'),
+ 'need signal.pthread_kill()')
+ def test_pthread_kill_main_thread(self):
+ # Test that a signal can be sent to the main thread with pthread_kill()
+ # before any other thread has been created (see issue #12392).
+ code = """if True:
+ import threading
+ import signal
+ import sys
+
+ def handler(signum, frame):
+ sys.exit(3)
+
+ signal.signal(signal.SIGUSR1, handler)
+ signal.pthread_kill(threading.get_ident(), signal.SIGUSR1)
+ sys.exit(2)
+ """
+
+ with spawn_python('-c', code) as process:
+ stdout, stderr = process.communicate()
+ exitcode = process.wait()
+ if exitcode != 3:
+ raise Exception("Child error (exit code %s): %s" %
+ (exitcode, stdout))
+
+
def test_main():
try:
- support.run_unittest(BasicSignalTests, InterProcessSignalTests,
+ support.run_unittest(PosixTests, InterProcessSignalTests,
WakeupSignalTests, SiginterruptTest,
- ItimerTest, WindowsSignalTests)
+ ItimerTest, WindowsSignalTests,
+ PendingSignalsTests)
finally:
support.reap_children()
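The PendingSignalsTests added above keep replaying one POSIX pattern: block a signal so delivery is deferred, raise it so it becomes pending, then observe it via sigpending()/sigwait() or by unblocking. A condensed sketch (these functions do not exist on Windows):

    import os
    import signal

    signum = signal.SIGUSR1
    signal.signal(signum, lambda s, f: None)    # avoid the fatal default action
    signal.pthread_sigmask(signal.SIG_BLOCK, [signum])
    os.kill(os.getpid(), signum)                # queued, not delivered
    assert signum in signal.sigpending()
    assert signal.sigwait([signum]) == signum   # consumes the pending signal
    signal.pthread_sigmask(signal.SIG_UNBLOCK, [signum])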
diff --git a/Lib/test/test_smtpd.py b/Lib/test/test_smtpd.py
index 68ccc29..dd23565 100644
--- a/Lib/test/test_smtpd.py
+++ b/Lib/test/test_smtpd.py
@@ -239,49 +239,49 @@ class SMTPDChannelTest(TestCase):
self.assertEqual(self.channel.socket.last, b'501 Syntax: RSET\r\n')
def test_attribute_deprecations(self):
- with support.check_warnings(('', PendingDeprecationWarning)):
+ with support.check_warnings(('', DeprecationWarning)):
spam = self.channel._SMTPChannel__server
- with support.check_warnings(('', PendingDeprecationWarning)):
+ with support.check_warnings(('', DeprecationWarning)):
self.channel._SMTPChannel__server = 'spam'
- with support.check_warnings(('', PendingDeprecationWarning)):
+ with support.check_warnings(('', DeprecationWarning)):
spam = self.channel._SMTPChannel__line
- with support.check_warnings(('', PendingDeprecationWarning)):
+ with support.check_warnings(('', DeprecationWarning)):
self.channel._SMTPChannel__line = 'spam'
- with support.check_warnings(('', PendingDeprecationWarning)):
+ with support.check_warnings(('', DeprecationWarning)):
spam = self.channel._SMTPChannel__state
- with support.check_warnings(('', PendingDeprecationWarning)):
+ with support.check_warnings(('', DeprecationWarning)):
self.channel._SMTPChannel__state = 'spam'
- with support.check_warnings(('', PendingDeprecationWarning)):
+ with support.check_warnings(('', DeprecationWarning)):
spam = self.channel._SMTPChannel__greeting
- with support.check_warnings(('', PendingDeprecationWarning)):
+ with support.check_warnings(('', DeprecationWarning)):
self.channel._SMTPChannel__greeting = 'spam'
- with support.check_warnings(('', PendingDeprecationWarning)):
+ with support.check_warnings(('', DeprecationWarning)):
spam = self.channel._SMTPChannel__mailfrom
- with support.check_warnings(('', PendingDeprecationWarning)):
+ with support.check_warnings(('', DeprecationWarning)):
self.channel._SMTPChannel__mailfrom = 'spam'
- with support.check_warnings(('', PendingDeprecationWarning)):
+ with support.check_warnings(('', DeprecationWarning)):
spam = self.channel._SMTPChannel__rcpttos
- with support.check_warnings(('', PendingDeprecationWarning)):
+ with support.check_warnings(('', DeprecationWarning)):
self.channel._SMTPChannel__rcpttos = 'spam'
- with support.check_warnings(('', PendingDeprecationWarning)):
+ with support.check_warnings(('', DeprecationWarning)):
spam = self.channel._SMTPChannel__data
- with support.check_warnings(('', PendingDeprecationWarning)):
+ with support.check_warnings(('', DeprecationWarning)):
self.channel._SMTPChannel__data = 'spam'
- with support.check_warnings(('', PendingDeprecationWarning)):
+ with support.check_warnings(('', DeprecationWarning)):
spam = self.channel._SMTPChannel__fqdn
- with support.check_warnings(('', PendingDeprecationWarning)):
+ with support.check_warnings(('', DeprecationWarning)):
self.channel._SMTPChannel__fqdn = 'spam'
- with support.check_warnings(('', PendingDeprecationWarning)):
+ with support.check_warnings(('', DeprecationWarning)):
spam = self.channel._SMTPChannel__peer
- with support.check_warnings(('', PendingDeprecationWarning)):
+ with support.check_warnings(('', DeprecationWarning)):
self.channel._SMTPChannel__peer = 'spam'
- with support.check_warnings(('', PendingDeprecationWarning)):
+ with support.check_warnings(('', DeprecationWarning)):
spam = self.channel._SMTPChannel__conn
- with support.check_warnings(('', PendingDeprecationWarning)):
+ with support.check_warnings(('', DeprecationWarning)):
self.channel._SMTPChannel__conn = 'spam'
- with support.check_warnings(('', PendingDeprecationWarning)):
+ with support.check_warnings(('', DeprecationWarning)):
spam = self.channel._SMTPChannel__addr
- with support.check_warnings(('', PendingDeprecationWarning)):
+ with support.check_warnings(('', DeprecationWarning)):
self.channel._SMTPChannel__addr = 'spam'
def test_main():
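The warning-category bump above follows the usual deprecation ladder: PendingDeprecationWarning first, DeprecationWarning one release later. The smtpd attributes being tested are served by properties that warn on access; a generic sketch of that shim, with hypothetical names:

    import warnings

    class Channel:
        def __init__(self):
            self.server = 'real value'

        @property
        def __server(self):   # reachable from outside as _Channel__server
            warnings.warn("__server is deprecated, use 'server' instead",
                          DeprecationWarning, stacklevel=2)
            return self.server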
diff --git a/Lib/test/test_smtplib.py b/Lib/test/test_smtplib.py
index 2cb0d1a..05d97ef 100644
--- a/Lib/test/test_smtplib.py
+++ b/Lib/test/test_smtplib.py
@@ -72,6 +72,14 @@ class GeneralTests(unittest.TestCase):
smtp = smtplib.SMTP(HOST, self.port)
smtp.close()
+ def testSourceAddress(self):
+ mock_socket.reply_with(b"220 Hola mundo")
+ # connects
+ smtp = smtplib.SMTP(HOST, self.port,
+                            source_address=('127.0.0.1', 19876))
+ self.assertEqual(smtp.source_address, ('127.0.0.1', 19876))
+ smtp.close()
+
def testBasic2(self):
mock_socket.reply_with(b"220 Hola mundo")
# connects, include port in host name
@@ -204,6 +212,20 @@ class DebuggingServerTests(unittest.TestCase):
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
smtp.quit()
+ def testSourceAddress(self):
+ # connect
+ port = support.find_unused_port()
+ try:
+ smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost',
+ timeout=3, source_address=('127.0.0.1', port))
+ self.assertEqual(smtp.source_address, ('127.0.0.1', port))
+ self.assertEqual(smtp.local_hostname, 'localhost')
+ smtp.quit()
+ except IOError as e:
+ if e.errno == errno.EADDRINUSE:
+ self.skipTest("couldn't bind to port %d" % port)
+ raise
+
def testNOOP(self):
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
expected = (250, b'Ok')
@@ -560,6 +582,9 @@ sim_lists = {'list-1':['Mr.A@somewhere.com','Mrs.C@somewhereesle.com'],
# Simulated SMTP channel & server
class SimSMTPChannel(smtpd.SMTPChannel):
+ # For testing failures in QUIT when using the context manager API.
+ quit_response = None
+
def __init__(self, extra_features, *args, **kw):
self._extrafeatures = ''.join(
[ "250-{0}\r\n".format(x) for x in extra_features ])
@@ -610,19 +635,31 @@ class SimSMTPChannel(smtpd.SMTPChannel):
else:
self.push('550 No access for you!')
+ def smtp_QUIT(self, arg):
+        # arg is ignored
+ if self.quit_response is None:
+ super(SimSMTPChannel, self).smtp_QUIT(arg)
+ else:
+ self.push(self.quit_response)
+ self.close_when_done()
+
def handle_error(self):
raise
class SimSMTPServer(smtpd.SMTPServer):
+ # For testing failures in QUIT when using the context manager API.
+ quit_response = None
+
def __init__(self, *args, **kw):
self._extra_features = []
smtpd.SMTPServer.__init__(self, *args, **kw)
def handle_accepted(self, conn, addr):
- self._SMTPchannel = SimSMTPChannel(self._extra_features,
- self, conn, addr)
+ self._SMTPchannel = SimSMTPChannel(
+ self._extra_features, self, conn, addr)
+ self._SMTPchannel.quit_response = self.quit_response
def process_message(self, peer, mailfrom, rcpttos, data):
pass
@@ -752,6 +789,25 @@ class SMTPSimTests(unittest.TestCase):
self.assertIn(sim_auth_credentials['cram-md5'], str(err))
smtp.close()
+ def test_with_statement(self):
+ with smtplib.SMTP(HOST, self.port) as smtp:
+ code, message = smtp.noop()
+ self.assertEqual(code, 250)
+ self.assertRaises(smtplib.SMTPServerDisconnected, smtp.send, b'foo')
+ with smtplib.SMTP(HOST, self.port) as smtp:
+ smtp.close()
+ self.assertRaises(smtplib.SMTPServerDisconnected, smtp.send, b'foo')
+
+ def test_with_statement_QUIT_failure(self):
+ self.serv.quit_response = '421 QUIT FAILED'
+ with self.assertRaises(smtplib.SMTPResponseException) as error:
+ with smtplib.SMTP(HOST, self.port) as smtp:
+ smtp.noop()
+ self.assertEqual(error.exception.smtp_code, 421)
+ self.assertEqual(error.exception.smtp_error, b'QUIT FAILED')
+        # We don't need to clean up self.serv.quit_response because a new
+        # server is instantiated for each test in setUp().
+
#TODO: add tests for correct AUTH method fallback now that the
#test infrastructure can support it.
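
Both with-statement tests lean on the new SMTP.__exit__ behaviour: on a clean exit it sends QUIT, raises SMTPResponseException if the reply code is not 221, and closes the connection even when QUIT fails. A minimal usage sketch, assuming a server on localhost:

    import smtplib

    # QUIT and close happen automatically when the block exits
    # (host and port are illustrative assumptions).
    with smtplib.SMTP('localhost', 8025) as smtp:
        code, message = smtp.noop()
        print(code, message)
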
diff --git a/Lib/test/test_smtpnet.py b/Lib/test/test_smtpnet.py
index 0198ab6..86224ef 100644
--- a/Lib/test/test_smtpnet.py
+++ b/Lib/test/test_smtpnet.py
@@ -4,28 +4,60 @@ import unittest
from test import support
import smtplib
+ssl = support.import_module("ssl")
+
support.requires("network")
+
+class SmtpTest(unittest.TestCase):
+ testServer = 'smtp.gmail.com'
+ remotePort = 25
+ context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
+
+ def test_connect_starttls(self):
+ support.get_attribute(smtplib, 'SMTP_SSL')
+ with support.transient_internet(self.testServer):
+ server = smtplib.SMTP(self.testServer, self.remotePort)
+ try:
+ server.starttls(context=self.context)
+ except smtplib.SMTPException as e:
+ if e.args[0] == 'STARTTLS extension not supported by server.':
+ unittest.skip(e.args[0])
+ else:
+ raise
+ server.ehlo()
+ server.quit()
+
+
class SmtpSSLTest(unittest.TestCase):
testServer = 'smtp.gmail.com'
remotePort = 465
+ context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
def test_connect(self):
support.get_attribute(smtplib, 'SMTP_SSL')
with support.transient_internet(self.testServer):
server = smtplib.SMTP_SSL(self.testServer, self.remotePort)
- server.ehlo()
- server.quit()
+ server.ehlo()
+ server.quit()
def test_connect_default_port(self):
support.get_attribute(smtplib, 'SMTP_SSL')
with support.transient_internet(self.testServer):
server = smtplib.SMTP_SSL(self.testServer)
- server.ehlo()
- server.quit()
+ server.ehlo()
+ server.quit()
+
+ def test_connect_using_sslcontext(self):
+ support.get_attribute(smtplib, 'SMTP_SSL')
+ with support.transient_internet(self.testServer):
+ server = smtplib.SMTP_SSL(self.testServer, self.remotePort, context=self.context)
+ server.ehlo()
+ server.quit()
+
def test_main():
- support.run_unittest(SmtpSSLTest)
+ support.run_unittest(SmtpTest, SmtpSSLTest)
if __name__ == "__main__":
test_main()
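
The new SmtpTest and the added SmtpSSLTest case both center on handing smtplib a caller-constructed ssl.SSLContext, either via starttls() for an in-place upgrade or via the SMTP_SSL context= parameter. A hedged sketch of the starttls() path, reusing the server named in the tests:

    import smtplib
    import ssl

    context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
    server = smtplib.SMTP('smtp.gmail.com', 25)
    server.starttls(context=context)  # upgrade the existing connection
    server.ehlo()
    server.quit()
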
diff --git a/Lib/test/test_socket.py b/Lib/test/test_socket.py
index d77b7dc..ede1038 100644
--- a/Lib/test/test_socket.py
+++ b/Lib/test/test_socket.py
@@ -7,6 +7,8 @@ import errno
import io
import socket
import select
+import tempfile
+import _testcapi
import time
import traceback
import queue
@@ -18,34 +20,15 @@ import contextlib
from weakref import proxy
import signal
import math
+import pickle
+import struct
try:
import fcntl
except ImportError:
fcntl = False
-def try_address(host, port=0, family=socket.AF_INET):
- """Try to bind a socket on the given host:port and return True
- if that has been possible."""
- try:
- sock = socket.socket(family, socket.SOCK_STREAM)
- sock.bind((host, port))
- except (socket.error, socket.gaierror):
- return False
- else:
- sock.close()
- return True
-
-def linux_version():
- try:
- # platform.release() is something like '2.6.33.7-desktop-2mnb'
- version_string = platform.release().split('-')[0]
- return tuple(map(int, version_string.split('.')))
- except ValueError:
- return 0, 0, 0
-
HOST = support.HOST
-MSG = 'Michael Gilfix was here\u1234\r\n'.encode('utf8') ## test unicode string and carriage return
-SUPPORTS_IPV6 = socket.has_ipv6 and try_address('::1', family=socket.AF_INET6)
+MSG = 'Michael Gilfix was here\u1234\r\n'.encode('utf-8') ## test unicode string and carriage return
try:
import _thread as thread
@@ -54,6 +37,33 @@ except ImportError:
thread = None
threading = None
+def _have_socket_can():
+ """Check whether CAN sockets are supported on this host."""
+ try:
+ s = socket.socket(socket.PF_CAN, socket.SOCK_RAW, socket.CAN_RAW)
+ except (AttributeError, socket.error, OSError):
+ return False
+ else:
+ s.close()
+ return True
+
+def _have_socket_rds():
+ """Check whether RDS sockets are supported on this host."""
+ try:
+ s = socket.socket(socket.PF_RDS, socket.SOCK_SEQPACKET, 0)
+ except (AttributeError, OSError):
+ return False
+ else:
+ s.close()
+ return True
+
+HAVE_SOCKET_CAN = _have_socket_can()
+
+HAVE_SOCKET_RDS = _have_socket_rds()
+
+# Size in bytes of the int type
+SIZEOF_INT = array.array("i").itemsize
+
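
Each probe opens and immediately closes a throwaway socket at import time, so whole test classes can be skipped cleanly when the address family is unavailable. The same idiom, sketched for AF_UNIX as an illustrative assumption:

    import socket

    def _have_socket_unix():
        """Check whether Unix-domain sockets are supported on this host."""
        try:
            s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        except (AttributeError, OSError):
            return False  # missing constant or unsupported family
        else:
            s.close()
            return True

    HAVE_SOCKET_UNIX = _have_socket_unix()
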
class SocketTCPTest(unittest.TestCase):
def setUp(self):
@@ -75,6 +85,63 @@ class SocketUDPTest(unittest.TestCase):
self.serv.close()
self.serv = None
+class ThreadSafeCleanupTestCase(unittest.TestCase):
+ """Subclass of unittest.TestCase with thread-safe cleanup methods.
+
+ This subclass protects the addCleanup() and doCleanups() methods
+ with a recursive lock.
+ """
+
+ if threading:
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self._cleanup_lock = threading.RLock()
+
+ def addCleanup(self, *args, **kwargs):
+ with self._cleanup_lock:
+ return super().addCleanup(*args, **kwargs)
+
+ def doCleanups(self, *args, **kwargs):
+ with self._cleanup_lock:
+ return super().doCleanups(*args, **kwargs)
+
+class SocketCANTest(unittest.TestCase):
+
+    """This test requires a `vcan0` CAN interface; it can be created
+    with the following commands:
+ # modprobe vcan
+ # ip link add dev vcan0 type vcan
+ # ifconfig vcan0 up
+ """
+ interface = 'vcan0'
+ bufsize = 128
+
+ def setUp(self):
+ self.s = socket.socket(socket.PF_CAN, socket.SOCK_RAW, socket.CAN_RAW)
+ self.addCleanup(self.s.close)
+ try:
+ self.s.bind((self.interface,))
+ except socket.error:
+ self.skipTest('network interface `%s` does not exist' %
+ self.interface)
+
+
+class SocketRDSTest(unittest.TestCase):
+
+ """To be able to run this test, the `rds` kernel module must be loaded:
+ # modprobe rds
+ """
+ bufsize = 8192
+
+ def setUp(self):
+ self.serv = socket.socket(socket.PF_RDS, socket.SOCK_SEQPACKET, 0)
+ self.addCleanup(self.serv.close)
+ try:
+ self.port = support.bind_port(self.serv)
+ except OSError:
+ self.skipTest('unable to bind RDS socket')
+
+
class ThreadableTest:
"""Threadable Test class
@@ -132,6 +199,7 @@ class ThreadableTest:
self.client_ready = threading.Event()
self.done = threading.Event()
self.queue = queue.Queue(1)
+ self.server_crashed = False
# Do some munging to start the client test.
methodname = self.id()
@@ -141,8 +209,12 @@ class ThreadableTest:
self.client_thread = thread.start_new_thread(
self.clientRun, (test_method,))
- self.__setUp()
- if not self.server_ready.is_set():
+ try:
+ self.__setUp()
+ except:
+ self.server_crashed = True
+ raise
+ finally:
self.server_ready.set()
self.client_ready.wait()
@@ -158,10 +230,16 @@ class ThreadableTest:
self.server_ready.wait()
self.clientSetUp()
self.client_ready.set()
+ if self.server_crashed:
+ self.clientTearDown()
+ return
if not hasattr(test_func, '__call__'):
raise TypeError("test_func must be a callable function")
try:
test_func()
+ except unittest._ExpectedFailure:
+ # We deliberately ignore expected failures
+ pass
except BaseException as e:
self.queue.put(e)
finally:
@@ -202,6 +280,48 @@ class ThreadedUDPSocketTest(SocketUDPTest, ThreadableTest):
self.cli = None
ThreadableTest.clientTearDown(self)
+class ThreadedCANSocketTest(SocketCANTest, ThreadableTest):
+
+ def __init__(self, methodName='runTest'):
+ SocketCANTest.__init__(self, methodName=methodName)
+ ThreadableTest.__init__(self)
+
+ def clientSetUp(self):
+ self.cli = socket.socket(socket.PF_CAN, socket.SOCK_RAW, socket.CAN_RAW)
+ try:
+ self.cli.bind((self.interface,))
+ except socket.error:
+            # Don't call skipTest() here; the server side of the test
+            # will issue the skip instead.
+ pass
+
+ def clientTearDown(self):
+ self.cli.close()
+ self.cli = None
+ ThreadableTest.clientTearDown(self)
+
+class ThreadedRDSSocketTest(SocketRDSTest, ThreadableTest):
+
+ def __init__(self, methodName='runTest'):
+ SocketRDSTest.__init__(self, methodName=methodName)
+ ThreadableTest.__init__(self)
+
+ def clientSetUp(self):
+ self.cli = socket.socket(socket.PF_RDS, socket.SOCK_SEQPACKET, 0)
+ try:
+ # RDS sockets must be bound explicitly to send or receive data
+ self.cli.bind((HOST, 0))
+ self.cli_addr = self.cli.getsockname()
+ except OSError:
+            # Don't call skipTest() here; the server side of the test
+            # will issue the skip instead.
+ pass
+
+ def clientTearDown(self):
+ self.cli.close()
+ self.cli = None
+ ThreadableTest.clientTearDown(self)
+
class SocketConnectedTest(ThreadedTCPSocketTest):
"""Socket tests for client-server connection.
@@ -257,6 +377,243 @@ class SocketPairTest(unittest.TestCase, ThreadableTest):
ThreadableTest.clientTearDown(self)
+# The following classes are used by the sendmsg()/recvmsg() tests.
+# Combining, for instance, ConnectedStreamTestMixin and TCPTestBase
+# gives a drop-in replacement for SocketConnectedTest, but different
+# address families can be used, and the attributes serv_addr and
+# cli_addr will be set to the addresses of the endpoints.
+
+class SocketTestBase(unittest.TestCase):
+ """A base class for socket tests.
+
+ Subclasses must provide methods newSocket() to return a new socket
+ and bindSock(sock) to bind it to an unused address.
+
+ Creates a socket self.serv and sets self.serv_addr to its address.
+ """
+
+ def setUp(self):
+ self.serv = self.newSocket()
+ self.bindServer()
+
+ def bindServer(self):
+ """Bind server socket and set self.serv_addr to its address."""
+ self.bindSock(self.serv)
+ self.serv_addr = self.serv.getsockname()
+
+ def tearDown(self):
+ self.serv.close()
+ self.serv = None
+
+
+class SocketListeningTestMixin(SocketTestBase):
+ """Mixin to listen on the server socket."""
+
+ def setUp(self):
+ super().setUp()
+ self.serv.listen(1)
+
+
+class ThreadedSocketTestMixin(ThreadSafeCleanupTestCase, SocketTestBase,
+ ThreadableTest):
+ """Mixin to add client socket and allow client/server tests.
+
+ Client socket is self.cli and its address is self.cli_addr. See
+ ThreadableTest for usage information.
+ """
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ ThreadableTest.__init__(self)
+
+ def clientSetUp(self):
+ self.cli = self.newClientSocket()
+ self.bindClient()
+
+ def newClientSocket(self):
+ """Return a new socket for use as client."""
+ return self.newSocket()
+
+ def bindClient(self):
+ """Bind client socket and set self.cli_addr to its address."""
+ self.bindSock(self.cli)
+ self.cli_addr = self.cli.getsockname()
+
+ def clientTearDown(self):
+ self.cli.close()
+ self.cli = None
+ ThreadableTest.clientTearDown(self)
+
+
+class ConnectedStreamTestMixin(SocketListeningTestMixin,
+ ThreadedSocketTestMixin):
+ """Mixin to allow client/server stream tests with connected client.
+
+ Server's socket representing connection to client is self.cli_conn
+ and client's connection to server is self.serv_conn. (Based on
+ SocketConnectedTest.)
+ """
+
+ def setUp(self):
+ super().setUp()
+ # Indicate explicitly we're ready for the client thread to
+ # proceed and then perform the blocking call to accept
+ self.serverExplicitReady()
+ conn, addr = self.serv.accept()
+ self.cli_conn = conn
+
+ def tearDown(self):
+ self.cli_conn.close()
+ self.cli_conn = None
+ super().tearDown()
+
+ def clientSetUp(self):
+ super().clientSetUp()
+ self.cli.connect(self.serv_addr)
+ self.serv_conn = self.cli
+
+ def clientTearDown(self):
+ self.serv_conn.close()
+ self.serv_conn = None
+ super().clientTearDown()
+
+
+class UnixSocketTestBase(SocketTestBase):
+ """Base class for Unix-domain socket tests."""
+
+ # This class is used for file descriptor passing tests, so we
+ # create the sockets in a private directory so that other users
+ # can't send anything that might be problematic for a privileged
+ # user running the tests.
+
+ def setUp(self):
+ self.dir_path = tempfile.mkdtemp()
+ self.addCleanup(os.rmdir, self.dir_path)
+ super().setUp()
+
+ def bindSock(self, sock):
+ path = tempfile.mktemp(dir=self.dir_path)
+ sock.bind(path)
+ self.addCleanup(support.unlink, path)
+
+class UnixStreamBase(UnixSocketTestBase):
+ """Base class for Unix-domain SOCK_STREAM tests."""
+
+ def newSocket(self):
+ return socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
+
+
+class InetTestBase(SocketTestBase):
+ """Base class for IPv4 socket tests."""
+
+ host = HOST
+
+ def setUp(self):
+ super().setUp()
+ self.port = self.serv_addr[1]
+
+ def bindSock(self, sock):
+ support.bind_port(sock, host=self.host)
+
+class TCPTestBase(InetTestBase):
+ """Base class for TCP-over-IPv4 tests."""
+
+ def newSocket(self):
+ return socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+
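
As the introductory comment promises, combining ConnectedStreamTestMixin with TCPTestBase gives a drop-in replacement for SocketConnectedTest. A hedged sketch assuming the mixins above are in scope; per ThreadableTest's convention, testEcho runs in the server thread and _testEcho in the client thread:

    class DemoConnectedTCPTest(ConnectedStreamTestMixin, TCPTestBase):
        def testEcho(self):
            # server side: read what the client thread sent
            self.assertEqual(self.cli_conn.recv(4), b"ping")

        def _testEcho(self):
            # client side: send through the connected socket
            self.serv_conn.sendall(b"ping")
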
+class UDPTestBase(InetTestBase):
+ """Base class for UDP-over-IPv4 tests."""
+
+ def newSocket(self):
+ return socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+
+class SCTPStreamBase(InetTestBase):
+ """Base class for SCTP tests in one-to-one (SOCK_STREAM) mode."""
+
+ def newSocket(self):
+ return socket.socket(socket.AF_INET, socket.SOCK_STREAM,
+ socket.IPPROTO_SCTP)
+
+
+class Inet6TestBase(InetTestBase):
+ """Base class for IPv6 socket tests."""
+
+ # Don't use "localhost" here - it may not have an IPv6 address
+ # assigned to it by default (e.g. in /etc/hosts), and if someone
+ # has assigned it an IPv4-mapped address, then it's unlikely to
+ # work with the full IPv6 API.
+ host = "::1"
+
+class UDP6TestBase(Inet6TestBase):
+ """Base class for UDP-over-IPv6 tests."""
+
+ def newSocket(self):
+ return socket.socket(socket.AF_INET6, socket.SOCK_DGRAM)
+
+
+# Test-skipping decorators for use with ThreadableTest.
+
+def skipWithClientIf(condition, reason):
+ """Skip decorated test if condition is true, add client_skip decorator.
+
+ If the decorated object is not a class, sets its attribute
+ "client_skip" to a decorator which will return an empty function
+ if the test is to be skipped, or the original function if it is
+ not. This can be used to avoid running the client part of a
+ skipped test when using ThreadableTest.
+ """
+ def client_pass(*args, **kwargs):
+ pass
+ def skipdec(obj):
+ retval = unittest.skip(reason)(obj)
+ if not isinstance(obj, type):
+ retval.client_skip = lambda f: client_pass
+ return retval
+ def noskipdec(obj):
+ if not (isinstance(obj, type) or hasattr(obj, "client_skip")):
+ obj.client_skip = lambda f: f
+ return obj
+ return skipdec if condition else noskipdec
+
+
+def requireAttrs(obj, *attributes):
+ """Skip decorated test if obj is missing any of the given attributes.
+
+ Sets client_skip attribute as skipWithClientIf() does.
+ """
+ missing = [name for name in attributes if not hasattr(obj, name)]
+ return skipWithClientIf(
+ missing, "don't have " + ", ".join(name for name in missing))
+
+
+def requireSocket(*args):
+ """Skip decorated test if a socket cannot be created with given arguments.
+
+ When an argument is given as a string, will use the value of that
+ attribute of the socket module, or skip the test if it doesn't
+ exist. Sets client_skip attribute as skipWithClientIf() does.
+ """
+ err = None
+ missing = [obj for obj in args if
+ isinstance(obj, str) and not hasattr(socket, obj)]
+ if missing:
+ err = "don't have " + ", ".join(name for name in missing)
+ else:
+ callargs = [getattr(socket, obj) if isinstance(obj, str) else obj
+ for obj in args]
+ try:
+ s = socket.socket(*callargs)
+ except socket.error as e:
+ # XXX: check errno?
+ err = str(e)
+ else:
+ s.close()
+ return skipWithClientIf(
+ err is not None,
+ "can't create socket({0}): {1}".format(
+ ", ".join(str(o) for o in args), err))
+
+
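
These decorators mesh with ThreadableTest's paired methods: unittest skips the server half, while the client_skip attribute wraps the client half in a no-op so the client thread doesn't run setup for a skipped test. A usage sketch, assuming requireAttrs from above is in scope:

    import socket
    import unittest

    class Demo(unittest.TestCase):
        @requireAttrs(socket.socket, "sendmsg")
        def testSendmsgFeature(self):
            pass  # server half; skipped if socket.socket lacks sendmsg

        @testSendmsgFeature.client_skip
        def _testSendmsgFeature(self):
            pass  # client half; replaced by a no-op on skip
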
#######################################################################
## Begin Tests
@@ -282,18 +639,13 @@ class GeneralModuleTests(unittest.TestCase):
def testSocketError(self):
# Testing socket module exceptions
- def raise_error(*args, **kwargs):
+ msg = "Error raising socket exception (%s)."
+ with self.assertRaises(socket.error, msg=msg % 'socket.error'):
raise socket.error
- def raise_herror(*args, **kwargs):
+ with self.assertRaises(socket.error, msg=msg % 'socket.herror'):
raise socket.herror
- def raise_gaierror(*args, **kwargs):
+ with self.assertRaises(socket.error, msg=msg % 'socket.gaierror'):
raise socket.gaierror
- self.assertRaises(socket.error, raise_error,
- "Error raising socket exception.")
- self.assertRaises(socket.error, raise_herror,
- "Error raising socket exception.")
- self.assertRaises(socket.error, raise_gaierror,
- "Error raising socket exception.")
def testSendtoErrors(self):
        # Testing that sendto doesn't mask failures. See #10169.
@@ -369,6 +721,52 @@ class GeneralModuleTests(unittest.TestCase):
if not fqhn in all_host_names:
self.fail("Error testing host resolution mechanisms. (fqdn: %s, all: %s)" % (fqhn, repr(all_host_names)))
+ @unittest.skipUnless(hasattr(socket, 'sethostname'), "test needs socket.sethostname()")
+ @unittest.skipUnless(hasattr(socket, 'gethostname'), "test needs socket.gethostname()")
+ def test_sethostname(self):
+ oldhn = socket.gethostname()
+ try:
+ socket.sethostname('new')
+ except socket.error as e:
+ if e.errno == errno.EPERM:
+ self.skipTest("test should be run as root")
+ else:
+ raise
+ try:
+ # running test as root!
+ self.assertEqual(socket.gethostname(), 'new')
+ # Should work with bytes objects too
+ socket.sethostname(b'bar')
+ self.assertEqual(socket.gethostname(), 'bar')
+ finally:
+ socket.sethostname(oldhn)
+
+ @unittest.skipUnless(hasattr(socket, 'if_nameindex'),
+ 'socket.if_nameindex() not available.')
+ def testInterfaceNameIndex(self):
+ interfaces = socket.if_nameindex()
+ for index, name in interfaces:
+ self.assertIsInstance(index, int)
+ self.assertIsInstance(name, str)
+ # interface indices are non-zero integers
+ self.assertGreater(index, 0)
+ _index = socket.if_nametoindex(name)
+ self.assertIsInstance(_index, int)
+ self.assertEqual(index, _index)
+ _name = socket.if_indextoname(index)
+ self.assertIsInstance(_name, str)
+ self.assertEqual(name, _name)
+
+ @unittest.skipUnless(hasattr(socket, 'if_nameindex'),
+ 'socket.if_nameindex() not available.')
+ def testInvalidInterfaceNameIndex(self):
+ # test nonexistent interface index/name
+ self.assertRaises(socket.error, socket.if_indextoname, 0)
+ self.assertRaises(socket.error, socket.if_nametoindex, '_DEADBEEF')
+ # test with invalid values
+ self.assertRaises(TypeError, socket.if_nametoindex, 0)
+ self.assertRaises(TypeError, socket.if_indextoname, '_DEADBEEF')
+
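
The positive test just demands that if_nameindex(), if_nametoindex() and if_indextoname() agree with each other. The same round trip, sketched outside the harness (these functions may be absent on non-Linux platforms):

    import socket

    if hasattr(socket, 'if_nameindex'):
        for index, name in socket.if_nameindex():
            assert socket.if_nametoindex(name) == index
            assert socket.if_indextoname(index) == name
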
def testRefCountGetNameInfo(self):
# Testing reference count for getnameinfo
if hasattr(sys, "getrefcount"):
@@ -421,10 +819,8 @@ class GeneralModuleTests(unittest.TestCase):
# Find one service that exists, then check all the related interfaces.
# I've ordered this by protocols that have both a tcp and udp
# protocol, at least for modern Linuxes.
- if (sys.platform.startswith('linux') or
- sys.platform.startswith('freebsd') or
- sys.platform.startswith('netbsd') or
- sys.platform == 'darwin'):
+ if (sys.platform.startswith(('freebsd', 'netbsd'))
+ or sys.platform in ('linux', 'darwin')):
# avoid the 'echo' service on this platform, as there is an
# assumption breaking non-standard port/protocol entry
services = ('daytime', 'qotd', 'domain')
@@ -719,7 +1115,7 @@ class GeneralModuleTests(unittest.TestCase):
socket.getaddrinfo('localhost', 80)
socket.getaddrinfo('127.0.0.1', 80)
socket.getaddrinfo(None, 80)
- if SUPPORTS_IPV6:
+ if support.IPV6_ENABLED:
socket.getaddrinfo('::1', 80)
# port can be a string service name such as "http", a numeric
# port number or None
@@ -772,7 +1168,12 @@ class GeneralModuleTests(unittest.TestCase):
@unittest.skipUnless(support.is_resource_enabled('network'),
'network is not enabled')
def test_idna(self):
- support.requires('network')
+ # Check for internet access before running test (issue #12804).
+ try:
+ socket.gethostbyname('python.org')
+ except socket.gaierror as e:
+ if e.errno == socket.EAI_NODATA:
+ self.skipTest('internet access required for this test')
# these should all be successful
socket.gethostbyname('испытание.python.org')
socket.gethostbyname_ex('испытание.python.org')
@@ -839,14 +1240,20 @@ class GeneralModuleTests(unittest.TestCase):
fp.close()
self.assertEqual(repr(fp), "<_io.BufferedReader name=-1>")
- def testListenBacklog0(self):
+ def test_pickle(self):
+ sock = socket.socket()
+ with sock:
+ for protocol in range(pickle.HIGHEST_PROTOCOL + 1):
+ self.assertRaises(TypeError, pickle.dumps, sock, protocol)
+
+ def test_listen_backlog0(self):
srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
srv.bind((HOST, 0))
# backlog = 0
srv.listen(0)
srv.close()
- @unittest.skipUnless(SUPPORTS_IPV6, 'IPv6 required for this test.')
+ @unittest.skipUnless(support.IPV6_ENABLED, 'IPv6 required for this test.')
def test_flowinfo(self):
self.assertRaises(OverflowError, socket.getnameinfo,
('::1',0, 0xffffffff), 0)
@@ -854,6 +1261,222 @@ class GeneralModuleTests(unittest.TestCase):
self.assertRaises(OverflowError, s.bind, ('::1', 0, -10))
+@unittest.skipUnless(HAVE_SOCKET_CAN, 'SocketCan required for this test.')
+class BasicCANTest(unittest.TestCase):
+
+ def testCrucialConstants(self):
+ socket.AF_CAN
+ socket.PF_CAN
+ socket.CAN_RAW
+
+ def testCreateSocket(self):
+ with socket.socket(socket.PF_CAN, socket.SOCK_RAW, socket.CAN_RAW) as s:
+ pass
+
+ def testBindAny(self):
+ with socket.socket(socket.PF_CAN, socket.SOCK_RAW, socket.CAN_RAW) as s:
+ s.bind(('', ))
+
+ def testTooLongInterfaceName(self):
+ # most systems limit IFNAMSIZ to 16, take 1024 to be sure
+ with socket.socket(socket.PF_CAN, socket.SOCK_RAW, socket.CAN_RAW) as s:
+ self.assertRaisesRegex(socket.error, 'interface name too long',
+ s.bind, ('x' * 1024,))
+
+ @unittest.skipUnless(hasattr(socket, "CAN_RAW_LOOPBACK"),
+ 'socket.CAN_RAW_LOOPBACK required for this test.')
+ def testLoopback(self):
+ with socket.socket(socket.PF_CAN, socket.SOCK_RAW, socket.CAN_RAW) as s:
+ for loopback in (0, 1):
+ s.setsockopt(socket.SOL_CAN_RAW, socket.CAN_RAW_LOOPBACK,
+ loopback)
+ self.assertEqual(loopback,
+ s.getsockopt(socket.SOL_CAN_RAW, socket.CAN_RAW_LOOPBACK))
+
+ @unittest.skipUnless(hasattr(socket, "CAN_RAW_FILTER"),
+ 'socket.CAN_RAW_FILTER required for this test.')
+ def testFilter(self):
+ can_id, can_mask = 0x200, 0x700
+ can_filter = struct.pack("=II", can_id, can_mask)
+ with socket.socket(socket.PF_CAN, socket.SOCK_RAW, socket.CAN_RAW) as s:
+ s.setsockopt(socket.SOL_CAN_RAW, socket.CAN_RAW_FILTER, can_filter)
+ self.assertEqual(can_filter,
+ s.getsockopt(socket.SOL_CAN_RAW, socket.CAN_RAW_FILTER, 8))
+
+
+@unittest.skipUnless(HAVE_SOCKET_CAN, 'SocketCan required for this test.')
+@unittest.skipUnless(thread, 'Threading required for this test.')
+class CANTest(ThreadedCANSocketTest):
+
+ """The CAN frame structure is defined in <linux/can.h>:
+
+ struct can_frame {
+ canid_t can_id; /* 32 bit CAN_ID + EFF/RTR/ERR flags */
+ __u8 can_dlc; /* data length code: 0 .. 8 */
+ __u8 data[8] __attribute__((aligned(8)));
+ };
+ """
+ can_frame_fmt = "=IB3x8s"
+
+ def __init__(self, methodName='runTest'):
+ ThreadedCANSocketTest.__init__(self, methodName=methodName)
+
+ @classmethod
+ def build_can_frame(cls, can_id, data):
+ """Build a CAN frame."""
+ can_dlc = len(data)
+ data = data.ljust(8, b'\x00')
+ return struct.pack(cls.can_frame_fmt, can_id, can_dlc, data)
+
+ @classmethod
+ def dissect_can_frame(cls, frame):
+ """Dissect a CAN frame."""
+ can_id, can_dlc, data = struct.unpack(cls.can_frame_fmt, frame)
+ return (can_id, can_dlc, data[:can_dlc])
+
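
The "=IB3x8s" format mirrors struct can_frame: a 4-byte CAN ID, a 1-byte data length code, 3 padding bytes for the 8-byte alignment of the payload, then the payload itself (16 bytes in total). A quick round trip through the same packing logic:

    import struct

    can_frame_fmt = "=IB3x8s"
    data = b'\x01\x02\x03'
    frame = struct.pack(can_frame_fmt, 0x123, len(data), data.ljust(8, b'\x00'))
    can_id, can_dlc, payload = struct.unpack(can_frame_fmt, frame)
    assert (can_id, can_dlc, payload[:can_dlc]) == (0x123, 3, data)
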
+ def testSendFrame(self):
+ cf, addr = self.s.recvfrom(self.bufsize)
+ self.assertEqual(self.cf, cf)
+ self.assertEqual(addr[0], self.interface)
+ self.assertEqual(addr[1], socket.AF_CAN)
+
+ def _testSendFrame(self):
+ self.cf = self.build_can_frame(0x00, b'\x01\x02\x03\x04\x05')
+ self.cli.send(self.cf)
+
+ def testSendMaxFrame(self):
+ cf, addr = self.s.recvfrom(self.bufsize)
+ self.assertEqual(self.cf, cf)
+
+ def _testSendMaxFrame(self):
+ self.cf = self.build_can_frame(0x00, b'\x07' * 8)
+ self.cli.send(self.cf)
+
+ def testSendMultiFrames(self):
+ cf, addr = self.s.recvfrom(self.bufsize)
+ self.assertEqual(self.cf1, cf)
+
+ cf, addr = self.s.recvfrom(self.bufsize)
+ self.assertEqual(self.cf2, cf)
+
+ def _testSendMultiFrames(self):
+ self.cf1 = self.build_can_frame(0x07, b'\x44\x33\x22\x11')
+ self.cli.send(self.cf1)
+
+ self.cf2 = self.build_can_frame(0x12, b'\x99\x22\x33')
+ self.cli.send(self.cf2)
+
+
+@unittest.skipUnless(HAVE_SOCKET_RDS, 'RDS sockets required for this test.')
+class BasicRDSTest(unittest.TestCase):
+
+ def testCrucialConstants(self):
+ socket.AF_RDS
+ socket.PF_RDS
+
+ def testCreateSocket(self):
+ with socket.socket(socket.PF_RDS, socket.SOCK_SEQPACKET, 0) as s:
+ pass
+
+ def testSocketBufferSize(self):
+ bufsize = 16384
+ with socket.socket(socket.PF_RDS, socket.SOCK_SEQPACKET, 0) as s:
+ s.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, bufsize)
+ s.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, bufsize)
+
+
+@unittest.skipUnless(HAVE_SOCKET_RDS, 'RDS sockets required for this test.')
+@unittest.skipUnless(thread, 'Threading required for this test.')
+class RDSTest(ThreadedRDSSocketTest):
+
+ def __init__(self, methodName='runTest'):
+ ThreadedRDSSocketTest.__init__(self, methodName=methodName)
+
+ def setUp(self):
+ super().setUp()
+ self.evt = threading.Event()
+
+ def testSendAndRecv(self):
+ data, addr = self.serv.recvfrom(self.bufsize)
+ self.assertEqual(self.data, data)
+ self.assertEqual(self.cli_addr, addr)
+
+ def _testSendAndRecv(self):
+ self.data = b'spam'
+ self.cli.sendto(self.data, 0, (HOST, self.port))
+
+ def testPeek(self):
+ data, addr = self.serv.recvfrom(self.bufsize, socket.MSG_PEEK)
+ self.assertEqual(self.data, data)
+ data, addr = self.serv.recvfrom(self.bufsize)
+ self.assertEqual(self.data, data)
+
+ def _testPeek(self):
+ self.data = b'spam'
+ self.cli.sendto(self.data, 0, (HOST, self.port))
+
+ @requireAttrs(socket.socket, 'recvmsg')
+ def testSendAndRecvMsg(self):
+ data, ancdata, msg_flags, addr = self.serv.recvmsg(self.bufsize)
+ self.assertEqual(self.data, data)
+
+ @requireAttrs(socket.socket, 'sendmsg')
+ def _testSendAndRecvMsg(self):
+ self.data = b'hello ' * 10
+ self.cli.sendmsg([self.data], (), 0, (HOST, self.port))
+
+ def testSendAndRecvMulti(self):
+ data, addr = self.serv.recvfrom(self.bufsize)
+ self.assertEqual(self.data1, data)
+
+ data, addr = self.serv.recvfrom(self.bufsize)
+ self.assertEqual(self.data2, data)
+
+ def _testSendAndRecvMulti(self):
+ self.data1 = b'bacon'
+ self.cli.sendto(self.data1, 0, (HOST, self.port))
+
+ self.data2 = b'egg'
+ self.cli.sendto(self.data2, 0, (HOST, self.port))
+
+ def testSelect(self):
+ r, w, x = select.select([self.serv], [], [], 3.0)
+ self.assertIn(self.serv, r)
+ data, addr = self.serv.recvfrom(self.bufsize)
+ self.assertEqual(self.data, data)
+
+ def _testSelect(self):
+ self.data = b'select'
+ self.cli.sendto(self.data, 0, (HOST, self.port))
+
+ def testCongestion(self):
+ # wait until the sender is done
+ self.evt.wait()
+
+ def _testCongestion(self):
+ # test the behavior in case of congestion
+ self.data = b'fill'
+ self.cli.setblocking(False)
+ try:
+ # try to lower the receiver's socket buffer size
+ self.cli.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 16384)
+ except OSError:
+ pass
+ with self.assertRaises(OSError) as cm:
+ try:
+ # fill the receiver's socket buffer
+ while True:
+ self.cli.sendto(self.data, 0, (HOST, self.port))
+ finally:
+ # signal the receiver we're done
+ self.evt.set()
+ # sendto() should have failed with ENOBUFS
+ self.assertEqual(cm.exception.errno, errno.ENOBUFS)
+        # and we should have received a congestion notification via select()
+ r, w, x = select.select([self.serv], [], [], 3.0)
+ self.assertIn(self.serv, r)
+
+
@unittest.skipUnless(thread, 'Threading required for this test.')
class BasicTCPTest(SocketConnectedTest):
@@ -992,6 +1615,1852 @@ class BasicUDPTest(ThreadedUDPSocketTest):
def _testRecvFromNegative(self):
self.cli.sendto(MSG, 0, (HOST, self.port))
+# Tests for the sendmsg()/recvmsg() interface. Where possible, the
+# same test code is used with different families and types of socket
+# (e.g. stream, datagram), and tests using recvmsg() are repeated
+# using recvmsg_into().
+#
+# The generic test classes such as SendmsgTests and
+# RecvmsgGenericTests inherit from SendrecvmsgBase and expect to be
+# supplied with sockets cli_sock and serv_sock representing the
+# client's and the server's end of the connection respectively, and
+# attributes cli_addr and serv_addr holding their (numeric where
+# appropriate) addresses.
+#
+# The final concrete test classes combine these with subclasses of
+# SocketTestBase which set up client and server sockets of a specific
+# type, and with subclasses of SendrecvmsgBase such as
+# SendrecvmsgDgramBase and SendrecvmsgConnectedBase which map these
+# sockets to cli_sock and serv_sock and override the methods and
+# attributes of SendrecvmsgBase to fill in destination addresses if
+# needed when sending, check for specific flags in msg_flags, etc.
+#
+# RecvmsgIntoMixin provides a version of doRecvmsg() implemented using
+# recvmsg_into().
+
+# XXX: like the other datagram (UDP) tests in this module, the code
+# here assumes that datagram delivery on the local machine will be
+# reliable.
+
+class SendrecvmsgBase(ThreadSafeCleanupTestCase):
+ # Base class for sendmsg()/recvmsg() tests.
+
+ # Time in seconds to wait before considering a test failed, or
+ # None for no timeout. Not all tests actually set a timeout.
+ fail_timeout = 3.0
+
+ def setUp(self):
+ self.misc_event = threading.Event()
+ super().setUp()
+
+ def sendToServer(self, msg):
+ # Send msg to the server.
+ return self.cli_sock.send(msg)
+
+ # Tuple of alternative default arguments for sendmsg() when called
+ # via sendmsgToServer() (e.g. to include a destination address).
+ sendmsg_to_server_defaults = ()
+
+ def sendmsgToServer(self, *args):
+ # Call sendmsg() on self.cli_sock with the given arguments,
+ # filling in any arguments which are not supplied with the
+ # corresponding items of self.sendmsg_to_server_defaults, if
+ # any.
+ return self.cli_sock.sendmsg(
+ *(args + self.sendmsg_to_server_defaults[len(args):]))
+
+ def doRecvmsg(self, sock, bufsize, *args):
+ # Call recvmsg() on sock with given arguments and return its
+ # result. Should be used for tests which can use either
+ # recvmsg() or recvmsg_into() - RecvmsgIntoMixin overrides
+ # this method with one which emulates it using recvmsg_into(),
+ # thus allowing the same test to be used for both methods.
+ result = sock.recvmsg(bufsize, *args)
+ self.registerRecvmsgResult(result)
+ return result
+
+ def registerRecvmsgResult(self, result):
+ # Called by doRecvmsg() with the return value of recvmsg() or
+ # recvmsg_into(). Can be overridden to arrange cleanup based
+ # on the returned ancillary data, for instance.
+ pass
+
+ def checkRecvmsgAddress(self, addr1, addr2):
+ # Called to compare the received address with the address of
+ # the peer.
+ self.assertEqual(addr1, addr2)
+
+ # Flags that are normally unset in msg_flags
+ msg_flags_common_unset = 0
+ for name in ("MSG_CTRUNC", "MSG_OOB"):
+ msg_flags_common_unset |= getattr(socket, name, 0)
+
+ # Flags that are normally set
+ msg_flags_common_set = 0
+
+ # Flags set when a complete record has been received (e.g. MSG_EOR
+ # for SCTP)
+ msg_flags_eor_indicator = 0
+
+ # Flags set when a complete record has not been received
+ # (e.g. MSG_TRUNC for datagram sockets)
+ msg_flags_non_eor_indicator = 0
+
+ def checkFlags(self, flags, eor=None, checkset=0, checkunset=0, ignore=0):
+ # Method to check the value of msg_flags returned by recvmsg[_into]().
+ #
+ # Checks that all bits in msg_flags_common_set attribute are
+ # set in "flags" and all bits in msg_flags_common_unset are
+ # unset.
+ #
+ # The "eor" argument specifies whether the flags should
+ # indicate that a full record (or datagram) has been received.
+ # If "eor" is None, no checks are done; otherwise, checks
+ # that:
+ #
+ # * if "eor" is true, all bits in msg_flags_eor_indicator are
+ # set and all bits in msg_flags_non_eor_indicator are unset
+ #
+ # * if "eor" is false, all bits in msg_flags_non_eor_indicator
+ # are set and all bits in msg_flags_eor_indicator are unset
+ #
+ # If "checkset" and/or "checkunset" are supplied, they require
+ # the given bits to be set or unset respectively, overriding
+ # what the attributes require for those bits.
+ #
+ # If any bits are set in "ignore", they will not be checked,
+ # regardless of the other inputs.
+ #
+ # Will raise Exception if the inputs require a bit to be both
+ # set and unset, and it is not ignored.
+
+ defaultset = self.msg_flags_common_set
+ defaultunset = self.msg_flags_common_unset
+
+ if eor:
+ defaultset |= self.msg_flags_eor_indicator
+ defaultunset |= self.msg_flags_non_eor_indicator
+ elif eor is not None:
+ defaultset |= self.msg_flags_non_eor_indicator
+ defaultunset |= self.msg_flags_eor_indicator
+
+ # Function arguments override defaults
+ defaultset &= ~checkunset
+ defaultunset &= ~checkset
+
+ # Merge arguments with remaining defaults, and check for conflicts
+ checkset |= defaultset
+ checkunset |= defaultunset
+ inboth = checkset & checkunset & ~ignore
+ if inboth:
+ raise Exception("contradictory set, unset requirements for flags "
+                    self.skipTest(e.args[0])
+
+ # Compare with given msg_flags value
+ mask = (checkset | checkunset) & ~ignore
+ self.assertEqual(flags & mask, checkset & mask)
+
+
+class RecvmsgIntoMixin(SendrecvmsgBase):
+ # Mixin to implement doRecvmsg() using recvmsg_into().
+
+ def doRecvmsg(self, sock, bufsize, *args):
+ buf = bytearray(bufsize)
+ result = sock.recvmsg_into([buf], *args)
+ self.registerRecvmsgResult(result)
+ self.assertGreaterEqual(result[0], 0)
+ self.assertLessEqual(result[0], bufsize)
+ return (bytes(buf[:result[0]]),) + result[1:]
+
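
The mixin lets every generic recvmsg() test run a second time against recvmsg_into(): it scatter-reads into a single bytearray, then rebuilds a recvmsg()-style 4-tuple with the filled prefix as the message. The emulation, sketched standalone over a socketpair (Unix-only):

    import socket

    a, b = socket.socketpair()
    a.send(b"hello")
    buf = bytearray(16)
    nbytes, ancdata, flags, addr = b.recvmsg_into([buf])
    assert bytes(buf[:nbytes]) == b"hello"
    a.close(); b.close()
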
+
+class SendrecvmsgDgramFlagsBase(SendrecvmsgBase):
+ # Defines flags to be checked in msg_flags for datagram sockets.
+
+ @property
+ def msg_flags_non_eor_indicator(self):
+ return super().msg_flags_non_eor_indicator | socket.MSG_TRUNC
+
+
+class SendrecvmsgSCTPFlagsBase(SendrecvmsgBase):
+ # Defines flags to be checked in msg_flags for SCTP sockets.
+
+ @property
+ def msg_flags_eor_indicator(self):
+ return super().msg_flags_eor_indicator | socket.MSG_EOR
+
+
+class SendrecvmsgConnectionlessBase(SendrecvmsgBase):
+ # Base class for tests on connectionless-mode sockets. Users must
+ # supply sockets on attributes cli and serv to be mapped to
+ # cli_sock and serv_sock respectively.
+
+ @property
+ def serv_sock(self):
+ return self.serv
+
+ @property
+ def cli_sock(self):
+ return self.cli
+
+ @property
+ def sendmsg_to_server_defaults(self):
+ return ([], [], 0, self.serv_addr)
+
+ def sendToServer(self, msg):
+ return self.cli_sock.sendto(msg, self.serv_addr)
+
+
+class SendrecvmsgConnectedBase(SendrecvmsgBase):
+ # Base class for tests on connected sockets. Users must supply
+ # sockets on attributes serv_conn and cli_conn (representing the
+ # connections *to* the server and the client), to be mapped to
+ # cli_sock and serv_sock respectively.
+
+ @property
+ def serv_sock(self):
+ return self.cli_conn
+
+ @property
+ def cli_sock(self):
+ return self.serv_conn
+
+ def checkRecvmsgAddress(self, addr1, addr2):
+ # Address is currently "unspecified" for a connected socket,
+ # so we don't examine it
+ pass
+
+
+class SendrecvmsgServerTimeoutBase(SendrecvmsgBase):
+ # Base class to set a timeout on server's socket.
+
+ def setUp(self):
+ super().setUp()
+ self.serv_sock.settimeout(self.fail_timeout)
+
+
+class SendmsgTests(SendrecvmsgServerTimeoutBase):
+ # Tests for sendmsg() which can use any socket type and do not
+ # involve recvmsg() or recvmsg_into().
+
+ def testSendmsg(self):
+ # Send a simple message with sendmsg().
+ self.assertEqual(self.serv_sock.recv(len(MSG)), MSG)
+
+ def _testSendmsg(self):
+ self.assertEqual(self.sendmsgToServer([MSG]), len(MSG))
+
+ def testSendmsgDataGenerator(self):
+ # Send from buffer obtained from a generator (not a sequence).
+ self.assertEqual(self.serv_sock.recv(len(MSG)), MSG)
+
+ def _testSendmsgDataGenerator(self):
+ self.assertEqual(self.sendmsgToServer((o for o in [MSG])),
+ len(MSG))
+
+ def testSendmsgAncillaryGenerator(self):
+ # Gather (empty) ancillary data from a generator.
+ self.assertEqual(self.serv_sock.recv(len(MSG)), MSG)
+
+ def _testSendmsgAncillaryGenerator(self):
+ self.assertEqual(self.sendmsgToServer([MSG], (o for o in [])),
+ len(MSG))
+
+ def testSendmsgArray(self):
+ # Send data from an array instead of the usual bytes object.
+ self.assertEqual(self.serv_sock.recv(len(MSG)), MSG)
+
+ def _testSendmsgArray(self):
+ self.assertEqual(self.sendmsgToServer([array.array("B", MSG)]),
+ len(MSG))
+
+ def testSendmsgGather(self):
+ # Send message data from more than one buffer (gather write).
+ self.assertEqual(self.serv_sock.recv(len(MSG)), MSG)
+
+ def _testSendmsgGather(self):
+ self.assertEqual(self.sendmsgToServer([MSG[:3], MSG[3:]]), len(MSG))
+
+ def testSendmsgBadArgs(self):
+ # Check that sendmsg() rejects invalid arguments.
+ self.assertEqual(self.serv_sock.recv(1000), b"done")
+
+ def _testSendmsgBadArgs(self):
+ self.assertRaises(TypeError, self.cli_sock.sendmsg)
+ self.assertRaises(TypeError, self.sendmsgToServer,
+ b"not in an iterable")
+ self.assertRaises(TypeError, self.sendmsgToServer,
+ object())
+ self.assertRaises(TypeError, self.sendmsgToServer,
+ [object()])
+ self.assertRaises(TypeError, self.sendmsgToServer,
+ [MSG, object()])
+ self.assertRaises(TypeError, self.sendmsgToServer,
+ [MSG], object())
+ self.assertRaises(TypeError, self.sendmsgToServer,
+ [MSG], [], object())
+ self.assertRaises(TypeError, self.sendmsgToServer,
+ [MSG], [], 0, object())
+ self.sendToServer(b"done")
+
+ def testSendmsgBadCmsg(self):
+ # Check that invalid ancillary data items are rejected.
+ self.assertEqual(self.serv_sock.recv(1000), b"done")
+
+ def _testSendmsgBadCmsg(self):
+ self.assertRaises(TypeError, self.sendmsgToServer,
+ [MSG], [object()])
+ self.assertRaises(TypeError, self.sendmsgToServer,
+ [MSG], [(object(), 0, b"data")])
+ self.assertRaises(TypeError, self.sendmsgToServer,
+ [MSG], [(0, object(), b"data")])
+ self.assertRaises(TypeError, self.sendmsgToServer,
+ [MSG], [(0, 0, object())])
+ self.assertRaises(TypeError, self.sendmsgToServer,
+ [MSG], [(0, 0)])
+ self.assertRaises(TypeError, self.sendmsgToServer,
+ [MSG], [(0, 0, b"data", 42)])
+ self.sendToServer(b"done")
+
+ @requireAttrs(socket, "CMSG_SPACE")
+ def testSendmsgBadMultiCmsg(self):
+ # Check that invalid ancillary data items are rejected when
+ # more than one item is present.
+ self.assertEqual(self.serv_sock.recv(1000), b"done")
+
+ @testSendmsgBadMultiCmsg.client_skip
+ def _testSendmsgBadMultiCmsg(self):
+ self.assertRaises(TypeError, self.sendmsgToServer,
+ [MSG], [0, 0, b""])
+ self.assertRaises(TypeError, self.sendmsgToServer,
+ [MSG], [(0, 0, b""), object()])
+ self.sendToServer(b"done")
+
+ def testSendmsgExcessCmsgReject(self):
+ # Check that sendmsg() rejects excess ancillary data items
+ # when the number that can be sent is limited.
+ self.assertEqual(self.serv_sock.recv(1000), b"done")
+
+ def _testSendmsgExcessCmsgReject(self):
+ if not hasattr(socket, "CMSG_SPACE"):
+ # Can only send one item
+ with self.assertRaises(socket.error) as cm:
+ self.sendmsgToServer([MSG], [(0, 0, b""), (0, 0, b"")])
+ self.assertIsNone(cm.exception.errno)
+ self.sendToServer(b"done")
+
+ def testSendmsgAfterClose(self):
+ # Check that sendmsg() fails on a closed socket.
+ pass
+
+ def _testSendmsgAfterClose(self):
+ self.cli_sock.close()
+ self.assertRaises(socket.error, self.sendmsgToServer, [MSG])
+
+
+class SendmsgStreamTests(SendmsgTests):
+ # Tests for sendmsg() which require a stream socket and do not
+ # involve recvmsg() or recvmsg_into().
+
+ def testSendmsgExplicitNoneAddr(self):
+ # Check that peer address can be specified as None.
+ self.assertEqual(self.serv_sock.recv(len(MSG)), MSG)
+
+ def _testSendmsgExplicitNoneAddr(self):
+ self.assertEqual(self.sendmsgToServer([MSG], [], 0, None), len(MSG))
+
+ def testSendmsgTimeout(self):
+ # Check that timeout works with sendmsg().
+ self.assertEqual(self.serv_sock.recv(512), b"a"*512)
+ self.assertTrue(self.misc_event.wait(timeout=self.fail_timeout))
+
+ def _testSendmsgTimeout(self):
+ try:
+ self.cli_sock.settimeout(0.03)
+ with self.assertRaises(socket.timeout):
+ while True:
+ self.sendmsgToServer([b"a"*512])
+ finally:
+ self.misc_event.set()
+
+ # XXX: would be nice to have more tests for sendmsg flags argument.
+
+ # Linux supports MSG_DONTWAIT when sending, but in general, it
+ # only works when receiving. Could add other platforms if they
+ # support it too.
+ @skipWithClientIf(sys.platform not in {"linux2"},
+ "MSG_DONTWAIT not known to work on this platform when "
+ "sending")
+ def testSendmsgDontWait(self):
+ # Check that MSG_DONTWAIT in flags causes non-blocking behaviour.
+ self.assertEqual(self.serv_sock.recv(512), b"a"*512)
+ self.assertTrue(self.misc_event.wait(timeout=self.fail_timeout))
+
+ @testSendmsgDontWait.client_skip
+ def _testSendmsgDontWait(self):
+ try:
+ with self.assertRaises(socket.error) as cm:
+ while True:
+ self.sendmsgToServer([b"a"*512], [], socket.MSG_DONTWAIT)
+ self.assertIn(cm.exception.errno,
+ (errno.EAGAIN, errno.EWOULDBLOCK))
+ finally:
+ self.misc_event.set()
+
+
+class SendmsgConnectionlessTests(SendmsgTests):
+ # Tests for sendmsg() which require a connectionless-mode
+ # (e.g. datagram) socket, and do not involve recvmsg() or
+ # recvmsg_into().
+
+ def testSendmsgNoDestAddr(self):
+ # Check that sendmsg() fails when no destination address is
+ # given for unconnected socket.
+ pass
+
+ def _testSendmsgNoDestAddr(self):
+ self.assertRaises(socket.error, self.cli_sock.sendmsg,
+ [MSG])
+ self.assertRaises(socket.error, self.cli_sock.sendmsg,
+ [MSG], [], 0, None)
+
+
+class RecvmsgGenericTests(SendrecvmsgBase):
+ # Tests for recvmsg() which can also be emulated using
+ # recvmsg_into(), and can use any socket type.
+
+ def testRecvmsg(self):
+ # Receive a simple message with recvmsg[_into]().
+ msg, ancdata, flags, addr = self.doRecvmsg(self.serv_sock, len(MSG))
+ self.assertEqual(msg, MSG)
+ self.checkRecvmsgAddress(addr, self.cli_addr)
+ self.assertEqual(ancdata, [])
+ self.checkFlags(flags, eor=True)
+
+ def _testRecvmsg(self):
+ self.sendToServer(MSG)
+
+ def testRecvmsgExplicitDefaults(self):
+ # Test recvmsg[_into]() with default arguments provided explicitly.
+ msg, ancdata, flags, addr = self.doRecvmsg(self.serv_sock,
+ len(MSG), 0, 0)
+ self.assertEqual(msg, MSG)
+ self.checkRecvmsgAddress(addr, self.cli_addr)
+ self.assertEqual(ancdata, [])
+ self.checkFlags(flags, eor=True)
+
+ def _testRecvmsgExplicitDefaults(self):
+ self.sendToServer(MSG)
+
+ def testRecvmsgShorter(self):
+ # Receive a message smaller than buffer.
+ msg, ancdata, flags, addr = self.doRecvmsg(self.serv_sock,
+ len(MSG) + 42)
+ self.assertEqual(msg, MSG)
+ self.checkRecvmsgAddress(addr, self.cli_addr)
+ self.assertEqual(ancdata, [])
+ self.checkFlags(flags, eor=True)
+
+ def _testRecvmsgShorter(self):
+ self.sendToServer(MSG)
+
+ # FreeBSD < 8 doesn't always set the MSG_TRUNC flag when a truncated
+ # datagram is received (issue #13001).
+ @support.requires_freebsd_version(8)
+ def testRecvmsgTrunc(self):
+ # Receive part of message, check for truncation indicators.
+ msg, ancdata, flags, addr = self.doRecvmsg(self.serv_sock,
+ len(MSG) - 3)
+ self.assertEqual(msg, MSG[:-3])
+ self.checkRecvmsgAddress(addr, self.cli_addr)
+ self.assertEqual(ancdata, [])
+ self.checkFlags(flags, eor=False)
+
+ @support.requires_freebsd_version(8)
+ def _testRecvmsgTrunc(self):
+ self.sendToServer(MSG)
+
+ def testRecvmsgShortAncillaryBuf(self):
+ # Test ancillary data buffer too small to hold any ancillary data.
+ msg, ancdata, flags, addr = self.doRecvmsg(self.serv_sock,
+ len(MSG), 1)
+ self.assertEqual(msg, MSG)
+ self.checkRecvmsgAddress(addr, self.cli_addr)
+ self.assertEqual(ancdata, [])
+ self.checkFlags(flags, eor=True)
+
+ def _testRecvmsgShortAncillaryBuf(self):
+ self.sendToServer(MSG)
+
+ def testRecvmsgLongAncillaryBuf(self):
+ # Test large ancillary data buffer.
+ msg, ancdata, flags, addr = self.doRecvmsg(self.serv_sock,
+ len(MSG), 10240)
+ self.assertEqual(msg, MSG)
+ self.checkRecvmsgAddress(addr, self.cli_addr)
+ self.assertEqual(ancdata, [])
+ self.checkFlags(flags, eor=True)
+
+ def _testRecvmsgLongAncillaryBuf(self):
+ self.sendToServer(MSG)
+
+ def testRecvmsgAfterClose(self):
+ # Check that recvmsg[_into]() fails on a closed socket.
+ self.serv_sock.close()
+ self.assertRaises(socket.error, self.doRecvmsg, self.serv_sock, 1024)
+
+ def _testRecvmsgAfterClose(self):
+ pass
+
+ def testRecvmsgTimeout(self):
+ # Check that timeout works.
+ try:
+ self.serv_sock.settimeout(0.03)
+ self.assertRaises(socket.timeout,
+ self.doRecvmsg, self.serv_sock, len(MSG))
+ finally:
+ self.misc_event.set()
+
+ def _testRecvmsgTimeout(self):
+ self.assertTrue(self.misc_event.wait(timeout=self.fail_timeout))
+
+ @requireAttrs(socket, "MSG_PEEK")
+ def testRecvmsgPeek(self):
+ # Check that MSG_PEEK in flags enables examination of pending
+ # data without consuming it.
+
+ # Receive part of data with MSG_PEEK.
+ msg, ancdata, flags, addr = self.doRecvmsg(self.serv_sock,
+ len(MSG) - 3, 0,
+ socket.MSG_PEEK)
+ self.assertEqual(msg, MSG[:-3])
+ self.checkRecvmsgAddress(addr, self.cli_addr)
+ self.assertEqual(ancdata, [])
+ # Ignoring MSG_TRUNC here (so this test is the same for stream
+ # and datagram sockets). Some wording in POSIX seems to
+ # suggest that it needn't be set when peeking, but that may
+ # just be a slip.
+ self.checkFlags(flags, eor=False,
+ ignore=getattr(socket, "MSG_TRUNC", 0))
+
+ # Receive all data with MSG_PEEK.
+ msg, ancdata, flags, addr = self.doRecvmsg(self.serv_sock,
+ len(MSG), 0,
+ socket.MSG_PEEK)
+ self.assertEqual(msg, MSG)
+ self.checkRecvmsgAddress(addr, self.cli_addr)
+ self.assertEqual(ancdata, [])
+ self.checkFlags(flags, eor=True)
+
+ # Check that the same data can still be received normally.
+ msg, ancdata, flags, addr = self.doRecvmsg(self.serv_sock, len(MSG))
+ self.assertEqual(msg, MSG)
+ self.checkRecvmsgAddress(addr, self.cli_addr)
+ self.assertEqual(ancdata, [])
+ self.checkFlags(flags, eor=True)
+
+ @testRecvmsgPeek.client_skip
+ def _testRecvmsgPeek(self):
+ self.sendToServer(MSG)
+
+ @requireAttrs(socket.socket, "sendmsg")
+ def testRecvmsgFromSendmsg(self):
+ # Test receiving with recvmsg[_into]() when message is sent
+ # using sendmsg().
+ self.serv_sock.settimeout(self.fail_timeout)
+ msg, ancdata, flags, addr = self.doRecvmsg(self.serv_sock, len(MSG))
+ self.assertEqual(msg, MSG)
+ self.checkRecvmsgAddress(addr, self.cli_addr)
+ self.assertEqual(ancdata, [])
+ self.checkFlags(flags, eor=True)
+
+ @testRecvmsgFromSendmsg.client_skip
+ def _testRecvmsgFromSendmsg(self):
+ self.assertEqual(self.sendmsgToServer([MSG[:3], MSG[3:]]), len(MSG))
+
+
+class RecvmsgGenericStreamTests(RecvmsgGenericTests):
+ # Tests which require a stream socket and can use either recvmsg()
+ # or recvmsg_into().
+
+ def testRecvmsgEOF(self):
+ # Receive end-of-stream indicator (b"", peer socket closed).
+ msg, ancdata, flags, addr = self.doRecvmsg(self.serv_sock, 1024)
+ self.assertEqual(msg, b"")
+ self.checkRecvmsgAddress(addr, self.cli_addr)
+ self.assertEqual(ancdata, [])
+ self.checkFlags(flags, eor=None) # Might not have end-of-record marker
+
+ def _testRecvmsgEOF(self):
+ self.cli_sock.close()
+
+ def testRecvmsgOverflow(self):
+ # Receive a message in more than one chunk.
+ seg1, ancdata, flags, addr = self.doRecvmsg(self.serv_sock,
+ len(MSG) - 3)
+ self.checkRecvmsgAddress(addr, self.cli_addr)
+ self.assertEqual(ancdata, [])
+ self.checkFlags(flags, eor=False)
+
+ seg2, ancdata, flags, addr = self.doRecvmsg(self.serv_sock, 1024)
+ self.checkRecvmsgAddress(addr, self.cli_addr)
+ self.assertEqual(ancdata, [])
+ self.checkFlags(flags, eor=True)
+
+ msg = seg1 + seg2
+ self.assertEqual(msg, MSG)
+
+ def _testRecvmsgOverflow(self):
+ self.sendToServer(MSG)
+
+
+class RecvmsgTests(RecvmsgGenericTests):
+ # Tests for recvmsg() which can use any socket type.
+
+ def testRecvmsgBadArgs(self):
+ # Check that recvmsg() rejects invalid arguments.
+ self.assertRaises(TypeError, self.serv_sock.recvmsg)
+ self.assertRaises(ValueError, self.serv_sock.recvmsg,
+ -1, 0, 0)
+ self.assertRaises(ValueError, self.serv_sock.recvmsg,
+ len(MSG), -1, 0)
+ self.assertRaises(TypeError, self.serv_sock.recvmsg,
+ [bytearray(10)], 0, 0)
+ self.assertRaises(TypeError, self.serv_sock.recvmsg,
+ object(), 0, 0)
+ self.assertRaises(TypeError, self.serv_sock.recvmsg,
+ len(MSG), object(), 0)
+ self.assertRaises(TypeError, self.serv_sock.recvmsg,
+ len(MSG), 0, object())
+
+ msg, ancdata, flags, addr = self.serv_sock.recvmsg(len(MSG), 0, 0)
+ self.assertEqual(msg, MSG)
+ self.checkRecvmsgAddress(addr, self.cli_addr)
+ self.assertEqual(ancdata, [])
+ self.checkFlags(flags, eor=True)
+
+ def _testRecvmsgBadArgs(self):
+ self.sendToServer(MSG)
+
+
+class RecvmsgIntoTests(RecvmsgIntoMixin, RecvmsgGenericTests):
+ # Tests for recvmsg_into() which can use any socket type.
+
+ def testRecvmsgIntoBadArgs(self):
+ # Check that recvmsg_into() rejects invalid arguments.
+ buf = bytearray(len(MSG))
+ self.assertRaises(TypeError, self.serv_sock.recvmsg_into)
+ self.assertRaises(TypeError, self.serv_sock.recvmsg_into,
+ len(MSG), 0, 0)
+ self.assertRaises(TypeError, self.serv_sock.recvmsg_into,
+ buf, 0, 0)
+ self.assertRaises(TypeError, self.serv_sock.recvmsg_into,
+ [object()], 0, 0)
+ self.assertRaises(TypeError, self.serv_sock.recvmsg_into,
+ [b"I'm not writable"], 0, 0)
+ self.assertRaises(TypeError, self.serv_sock.recvmsg_into,
+ [buf, object()], 0, 0)
+ self.assertRaises(ValueError, self.serv_sock.recvmsg_into,
+ [buf], -1, 0)
+ self.assertRaises(TypeError, self.serv_sock.recvmsg_into,
+ [buf], object(), 0)
+ self.assertRaises(TypeError, self.serv_sock.recvmsg_into,
+ [buf], 0, object())
+
+ nbytes, ancdata, flags, addr = self.serv_sock.recvmsg_into([buf], 0, 0)
+ self.assertEqual(nbytes, len(MSG))
+ self.assertEqual(buf, bytearray(MSG))
+ self.checkRecvmsgAddress(addr, self.cli_addr)
+ self.assertEqual(ancdata, [])
+ self.checkFlags(flags, eor=True)
+
+ def _testRecvmsgIntoBadArgs(self):
+ self.sendToServer(MSG)
+
+ def testRecvmsgIntoGenerator(self):
+ # Receive into buffer obtained from a generator (not a sequence).
+ buf = bytearray(len(MSG))
+ nbytes, ancdata, flags, addr = self.serv_sock.recvmsg_into(
+ (o for o in [buf]))
+ self.assertEqual(nbytes, len(MSG))
+ self.assertEqual(buf, bytearray(MSG))
+ self.checkRecvmsgAddress(addr, self.cli_addr)
+ self.assertEqual(ancdata, [])
+ self.checkFlags(flags, eor=True)
+
+ def _testRecvmsgIntoGenerator(self):
+ self.sendToServer(MSG)
+
+ def testRecvmsgIntoArray(self):
+ # Receive into an array rather than the usual bytearray.
+ buf = array.array("B", [0] * len(MSG))
+ nbytes, ancdata, flags, addr = self.serv_sock.recvmsg_into([buf])
+ self.assertEqual(nbytes, len(MSG))
+ self.assertEqual(buf.tobytes(), MSG)
+ self.checkRecvmsgAddress(addr, self.cli_addr)
+ self.assertEqual(ancdata, [])
+ self.checkFlags(flags, eor=True)
+
+ def _testRecvmsgIntoArray(self):
+ self.sendToServer(MSG)
+
+ def testRecvmsgIntoScatter(self):
+ # Receive into multiple buffers (scatter write).
+ b1 = bytearray(b"----")
+ b2 = bytearray(b"0123456789")
+ b3 = bytearray(b"--------------")
+ nbytes, ancdata, flags, addr = self.serv_sock.recvmsg_into(
+ [b1, memoryview(b2)[2:9], b3])
+ self.assertEqual(nbytes, len(b"Mary had a little lamb"))
+ self.assertEqual(b1, bytearray(b"Mary"))
+ self.assertEqual(b2, bytearray(b"01 had a 9"))
+ self.assertEqual(b3, bytearray(b"little lamb---"))
+ self.checkRecvmsgAddress(addr, self.cli_addr)
+ self.assertEqual(ancdata, [])
+ self.checkFlags(flags, eor=True)
+
+ def _testRecvmsgIntoScatter(self):
+ self.sendToServer(b"Mary had a little lamb")
+
+
+class CmsgMacroTests(unittest.TestCase):
+ # Test the functions CMSG_LEN() and CMSG_SPACE(). Tests
+ # assumptions used by sendmsg() and recvmsg[_into](), which share
+ # code with these functions.
+
+ # Match the definition in socketmodule.c
+ socklen_t_limit = min(0x7fffffff, _testcapi.INT_MAX)
+
+ @requireAttrs(socket, "CMSG_LEN")
+ def testCMSG_LEN(self):
+ # Test CMSG_LEN() with various valid and invalid values,
+ # checking the assumptions used by recvmsg() and sendmsg().
+ toobig = self.socklen_t_limit - socket.CMSG_LEN(0) + 1
+ values = list(range(257)) + list(range(toobig - 257, toobig))
+
+ # struct cmsghdr has at least three members, two of which are ints
+ self.assertGreater(socket.CMSG_LEN(0), array.array("i").itemsize * 2)
+ for n in values:
+ ret = socket.CMSG_LEN(n)
+ # This is how recvmsg() calculates the data size
+ self.assertEqual(ret - socket.CMSG_LEN(0), n)
+ self.assertLessEqual(ret, self.socklen_t_limit)
+
+ self.assertRaises(OverflowError, socket.CMSG_LEN, -1)
+ # sendmsg() shares code with these functions, and requires
+ # that it reject values over the limit.
+ self.assertRaises(OverflowError, socket.CMSG_LEN, toobig)
+ self.assertRaises(OverflowError, socket.CMSG_LEN, sys.maxsize)
+
+ @requireAttrs(socket, "CMSG_SPACE")
+ def testCMSG_SPACE(self):
+ # Test CMSG_SPACE() with various valid and invalid values,
+ # checking the assumptions used by sendmsg().
+ toobig = self.socklen_t_limit - socket.CMSG_SPACE(1) + 1
+ values = list(range(257)) + list(range(toobig - 257, toobig))
+
+ last = socket.CMSG_SPACE(0)
+ # struct cmsghdr has at least three members, two of which are ints
+ self.assertGreater(last, array.array("i").itemsize * 2)
+ for n in values:
+ ret = socket.CMSG_SPACE(n)
+ self.assertGreaterEqual(ret, last)
+ self.assertGreaterEqual(ret, socket.CMSG_LEN(n))
+ self.assertGreaterEqual(ret, n + socket.CMSG_LEN(0))
+ self.assertLessEqual(ret, self.socklen_t_limit)
+ last = ret
+
+ self.assertRaises(OverflowError, socket.CMSG_SPACE, -1)
+ # sendmsg() shares code with these functions, and requires
+ # that it reject values over the limit.
+ self.assertRaises(OverflowError, socket.CMSG_SPACE, toobig)
+ self.assertRaises(OverflowError, socket.CMSG_SPACE, sys.maxsize)
+
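
The relationship being checked is that CMSG_LEN(n) is exactly CMSG_LEN(0) plus n, while CMSG_SPACE(n) also reserves trailing padding and is therefore at least as large. A compact restatement of those invariants (Unix-only, where the macros exist):

    import socket

    n = 3 * 4  # room for three 4-byte ints
    assert socket.CMSG_LEN(n) - socket.CMSG_LEN(0) == n
    assert socket.CMSG_SPACE(n) >= socket.CMSG_LEN(n)
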
+
+class SCMRightsTest(SendrecvmsgServerTimeoutBase):
+ # Tests for file descriptor passing on Unix-domain sockets.
+
+ # Invalid file descriptor value that's unlikely to evaluate to a
+ # real FD even if one of its bytes is replaced with a different
+ # value (which shouldn't actually happen).
+ badfd = -0x5555
+
+ def newFDs(self, n):
+ # Return a list of n file descriptors for newly-created files
+ # containing their list indices as ASCII numbers.
+ fds = []
+ for i in range(n):
+ fd, path = tempfile.mkstemp()
+ self.addCleanup(os.unlink, path)
+ self.addCleanup(os.close, fd)
+ os.write(fd, str(i).encode())
+ fds.append(fd)
+ return fds
+
+ def checkFDs(self, fds):
+ # Check that the file descriptors in the given list contain
+ # their correct list indices as ASCII numbers.
+ for n, fd in enumerate(fds):
+ os.lseek(fd, 0, os.SEEK_SET)
+ self.assertEqual(os.read(fd, 1024), str(n).encode())
+
+ def registerRecvmsgResult(self, result):
+ self.addCleanup(self.closeRecvmsgFDs, result)
+
+ def closeRecvmsgFDs(self, recvmsg_result):
+ # Close all file descriptors specified in the ancillary data
+ # of the given return value from recvmsg() or recvmsg_into().
+ for cmsg_level, cmsg_type, cmsg_data in recvmsg_result[1]:
+ if (cmsg_level == socket.SOL_SOCKET and
+ cmsg_type == socket.SCM_RIGHTS):
+ fds = array.array("i")
+ fds.frombytes(cmsg_data[:
+ len(cmsg_data) - (len(cmsg_data) % fds.itemsize)])
+ for fd in fds:
+ os.close(fd)
+
+ def createAndSendFDs(self, n):
+ # Send n new file descriptors created by newFDs() to the
+ # server, with the constant MSG as the non-ancillary data.
+ self.assertEqual(
+ self.sendmsgToServer([MSG],
+ [(socket.SOL_SOCKET,
+ socket.SCM_RIGHTS,
+ array.array("i", self.newFDs(n)))]),
+ len(MSG))
+
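
Underneath the fixture plumbing, descriptor passing needs only a Unix-domain socket, an SCM_RIGHTS control message holding an array of ints, and a large enough ancillary buffer on the receiving side. A standalone, Unix-only sketch of the mechanism these tests exercise:

    import array
    import os
    import socket

    parent, child = socket.socketpair(socket.AF_UNIX, socket.SOCK_STREAM)
    rfd, wfd = os.pipe()
    fds = array.array("i", [rfd])
    parent.sendmsg([b"x"], [(socket.SOL_SOCKET, socket.SCM_RIGHTS, fds)])
    msg, ancdata, flags, addr = child.recvmsg(1, socket.CMSG_LEN(fds.itemsize))
    received = array.array("i", ancdata[0][2])[0]  # duplicate of rfd
    os.write(wfd, b"hi")
    assert os.read(received, 2) == b"hi"
    for fd in (rfd, wfd, received):
        os.close(fd)
    parent.close(); child.close()
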
+ def checkRecvmsgFDs(self, numfds, result, maxcmsgs=1, ignoreflags=0):
+ # Check that constant MSG was received with numfds file
+ # descriptors in a maximum of maxcmsgs control messages (which
+ # must contain only complete integers). By default, check
+ # that MSG_CTRUNC is unset, but ignore any flags in
+ # ignoreflags.
+ msg, ancdata, flags, addr = result
+ self.assertEqual(msg, MSG)
+ self.checkRecvmsgAddress(addr, self.cli_addr)
+ self.checkFlags(flags, eor=True, checkunset=socket.MSG_CTRUNC,
+ ignore=ignoreflags)
+
+ self.assertIsInstance(ancdata, list)
+ self.assertLessEqual(len(ancdata), maxcmsgs)
+ fds = array.array("i")
+ for item in ancdata:
+ self.assertIsInstance(item, tuple)
+ cmsg_level, cmsg_type, cmsg_data = item
+ self.assertEqual(cmsg_level, socket.SOL_SOCKET)
+ self.assertEqual(cmsg_type, socket.SCM_RIGHTS)
+ self.assertIsInstance(cmsg_data, bytes)
+ self.assertEqual(len(cmsg_data) % SIZEOF_INT, 0)
+ fds.frombytes(cmsg_data)
+
+ self.assertEqual(len(fds), numfds)
+ self.checkFDs(fds)
+
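# Stripped of the test harness, the send/receive helpers above boil
# down to this round trip; a sketch assuming an AF_UNIX platform with
# sendmsg(), recvmsg() and SCM_RIGHTS support (e.g. Linux):
import array
import os
import socket

parent, child = socket.socketpair(socket.AF_UNIX, socket.SOCK_STREAM)
rfd, wfd = os.pipe()
# Ship the pipe's read end to the peer as SCM_RIGHTS ancillary data.
parent.sendmsg([b"x"], [(socket.SOL_SOCKET, socket.SCM_RIGHTS,
                         array.array("i", [rfd]))])
msg, ancdata, flags, addr = child.recvmsg(
    1, socket.CMSG_SPACE(array.array("i").itemsize))
fds = array.array("i")
for level, ctype, data in ancdata:
    if level == socket.SOL_SOCKET and ctype == socket.SCM_RIGHTS:
        # Drop any partial trailing integer, as closeRecvmsgFDs() does.
        fds.frombytes(data[:len(data) - (len(data) % fds.itemsize)])
os.write(wfd, b"hello")
assert os.read(fds[0], 5) == b"hello"   # received FD aliases the pipe
for fd in [rfd, wfd] + list(fds):
    os.close(fd)
parent.close()
child.close()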
+ def testFDPassSimple(self):
+ # Pass a single FD (array read from bytes object).
+ self.checkRecvmsgFDs(1, self.doRecvmsg(self.serv_sock,
+ len(MSG), 10240))
+
+ def _testFDPassSimple(self):
+ self.assertEqual(
+ self.sendmsgToServer(
+ [MSG],
+ [(socket.SOL_SOCKET,
+ socket.SCM_RIGHTS,
+ array.array("i", self.newFDs(1)).tobytes())]),
+ len(MSG))
+
+ def testMultipleFDPass(self):
+ # Pass multiple FDs in a single array.
+ self.checkRecvmsgFDs(4, self.doRecvmsg(self.serv_sock,
+ len(MSG), 10240))
+
+ def _testMultipleFDPass(self):
+ self.createAndSendFDs(4)
+
+ @requireAttrs(socket, "CMSG_SPACE")
+ def testFDPassCMSG_SPACE(self):
+ # Test using CMSG_SPACE() to calculate ancillary buffer size.
+ self.checkRecvmsgFDs(
+ 4, self.doRecvmsg(self.serv_sock, len(MSG),
+ socket.CMSG_SPACE(4 * SIZEOF_INT)))
+
+ @testFDPassCMSG_SPACE.client_skip
+ def _testFDPassCMSG_SPACE(self):
+ self.createAndSendFDs(4)
+
+ def testFDPassCMSG_LEN(self):
+ # Test using CMSG_LEN() to calculate ancillary buffer size.
+ self.checkRecvmsgFDs(1,
+ self.doRecvmsg(self.serv_sock, len(MSG),
+ socket.CMSG_LEN(4 * SIZEOF_INT)),
+ # RFC 3542 says implementations may set
+ # MSG_CTRUNC if there isn't enough space
+ # for trailing padding.
+ ignoreflags=socket.MSG_CTRUNC)
+
+ def _testFDPassCMSG_LEN(self):
+ self.createAndSendFDs(1)
+
+ # Issue #12958: The following test has problems on Mac OS X
+ @support.anticipate_failure(sys.platform == "darwin")
+ @requireAttrs(socket, "CMSG_SPACE")
+ def testFDPassSeparate(self):
+ # Pass two FDs in two separate arrays. Arrays may be combined
+ # into a single control message by the OS.
+ self.checkRecvmsgFDs(2,
+ self.doRecvmsg(self.serv_sock, len(MSG), 10240),
+ maxcmsgs=2)
+
+ @testFDPassSeparate.client_skip
+ @support.anticipate_failure(sys.platform == "darwin")
+ def _testFDPassSeparate(self):
+ fd0, fd1 = self.newFDs(2)
+ self.assertEqual(
+ self.sendmsgToServer([MSG], [(socket.SOL_SOCKET,
+ socket.SCM_RIGHTS,
+ array.array("i", [fd0])),
+ (socket.SOL_SOCKET,
+ socket.SCM_RIGHTS,
+ array.array("i", [fd1]))]),
+ len(MSG))
+
+ # Issue #12958: The following test has problems on Mac OS X
+ @support.anticipate_failure(sys.platform == "darwin")
+ @requireAttrs(socket, "CMSG_SPACE")
+ def testFDPassSeparateMinSpace(self):
+ # Pass two FDs in two separate arrays, receiving them into the
+ # minimum space for two arrays.
+ self.checkRecvmsgFDs(2,
+ self.doRecvmsg(self.serv_sock, len(MSG),
+ socket.CMSG_SPACE(SIZEOF_INT) +
+ socket.CMSG_LEN(SIZEOF_INT)),
+ maxcmsgs=2, ignoreflags=socket.MSG_CTRUNC)
+
+ @testFDPassSeparateMinSpace.client_skip
+ @support.anticipate_failure(sys.platform == "darwin")
+ def _testFDPassSeparateMinSpace(self):
+ fd0, fd1 = self.newFDs(2)
+ self.assertEqual(
+ self.sendmsgToServer([MSG], [(socket.SOL_SOCKET,
+ socket.SCM_RIGHTS,
+ array.array("i", [fd0])),
+ (socket.SOL_SOCKET,
+ socket.SCM_RIGHTS,
+ array.array("i", [fd1]))]),
+ len(MSG))
+
+ def sendAncillaryIfPossible(self, msg, ancdata):
+ # Try to send msg and ancdata to server, but if the system
+ # call fails, just send msg with no ancillary data.
+ try:
+ nbytes = self.sendmsgToServer([msg], ancdata)
+ except socket.error as e:
+ # Check that it was the system call that failed
+ self.assertIsInstance(e.errno, int)
+ nbytes = self.sendmsgToServer([msg])
+ self.assertEqual(nbytes, len(msg))
+
+ def testFDPassEmpty(self):
+ # Try to pass an empty FD array. Can receive either no array
+ # or an empty array.
+ self.checkRecvmsgFDs(0, self.doRecvmsg(self.serv_sock,
+ len(MSG), 10240),
+ ignoreflags=socket.MSG_CTRUNC)
+
+ def _testFDPassEmpty(self):
+ self.sendAncillaryIfPossible(MSG, [(socket.SOL_SOCKET,
+ socket.SCM_RIGHTS,
+ b"")])
+
+ def testFDPassPartialInt(self):
+ # Try to pass a truncated FD array.
+ msg, ancdata, flags, addr = self.doRecvmsg(self.serv_sock,
+ len(MSG), 10240)
+ self.assertEqual(msg, MSG)
+ self.checkRecvmsgAddress(addr, self.cli_addr)
+ self.checkFlags(flags, eor=True, ignore=socket.MSG_CTRUNC)
+ self.assertLessEqual(len(ancdata), 1)
+ for cmsg_level, cmsg_type, cmsg_data in ancdata:
+ self.assertEqual(cmsg_level, socket.SOL_SOCKET)
+ self.assertEqual(cmsg_type, socket.SCM_RIGHTS)
+ self.assertLess(len(cmsg_data), SIZEOF_INT)
+
+ def _testFDPassPartialInt(self):
+ self.sendAncillaryIfPossible(
+ MSG,
+ [(socket.SOL_SOCKET,
+ socket.SCM_RIGHTS,
+ array.array("i", [self.badfd]).tobytes()[:-1])])
+
+ @requireAttrs(socket, "CMSG_SPACE")
+ def testFDPassPartialIntInMiddle(self):
+ # Try to pass two FD arrays, the first of which is truncated.
+ msg, ancdata, flags, addr = self.doRecvmsg(self.serv_sock,
+ len(MSG), 10240)
+ self.assertEqual(msg, MSG)
+ self.checkRecvmsgAddress(addr, self.cli_addr)
+ self.checkFlags(flags, eor=True, ignore=socket.MSG_CTRUNC)
+ self.assertLessEqual(len(ancdata), 2)
+ fds = array.array("i")
+ # Arrays may have been combined in a single control message
+ for cmsg_level, cmsg_type, cmsg_data in ancdata:
+ self.assertEqual(cmsg_level, socket.SOL_SOCKET)
+ self.assertEqual(cmsg_type, socket.SCM_RIGHTS)
+ fds.frombytes(cmsg_data[:
+ len(cmsg_data) - (len(cmsg_data) % fds.itemsize)])
+ self.assertLessEqual(len(fds), 2)
+ self.checkFDs(fds)
+
+ @testFDPassPartialIntInMiddle.client_skip
+ def _testFDPassPartialIntInMiddle(self):
+ fd0, fd1 = self.newFDs(2)
+ self.sendAncillaryIfPossible(
+ MSG,
+ [(socket.SOL_SOCKET,
+ socket.SCM_RIGHTS,
+ array.array("i", [fd0, self.badfd]).tobytes()[:-1]),
+ (socket.SOL_SOCKET,
+ socket.SCM_RIGHTS,
+ array.array("i", [fd1]))])
+
+ def checkTruncatedHeader(self, result, ignoreflags=0):
+ # Check that no ancillary data items are returned when data is
+ # truncated inside the cmsghdr structure.
+ msg, ancdata, flags, addr = result
+ self.assertEqual(msg, MSG)
+ self.checkRecvmsgAddress(addr, self.cli_addr)
+ self.assertEqual(ancdata, [])
+ self.checkFlags(flags, eor=True, checkset=socket.MSG_CTRUNC,
+ ignore=ignoreflags)
+
+ def testCmsgTruncNoBufSize(self):
+ # Check that no ancillary data is received when no buffer size
+ # is specified.
+ self.checkTruncatedHeader(self.doRecvmsg(self.serv_sock, len(MSG)),
+ # BSD seems to set MSG_CTRUNC only
+ # if an item has been partially
+ # received.
+ ignoreflags=socket.MSG_CTRUNC)
+
+ def _testCmsgTruncNoBufSize(self):
+ self.createAndSendFDs(1)
+
+ def testCmsgTrunc0(self):
+ # Check that no ancillary data is received when buffer size is 0.
+ self.checkTruncatedHeader(self.doRecvmsg(self.serv_sock, len(MSG), 0),
+ ignoreflags=socket.MSG_CTRUNC)
+
+ def _testCmsgTrunc0(self):
+ self.createAndSendFDs(1)
+
+ # Check that no ancillary data is returned for various non-zero
+ # (but still too small) buffer sizes.
+
+ def testCmsgTrunc1(self):
+ self.checkTruncatedHeader(self.doRecvmsg(self.serv_sock, len(MSG), 1))
+
+ def _testCmsgTrunc1(self):
+ self.createAndSendFDs(1)
+
+ def testCmsgTrunc2Int(self):
+ # The cmsghdr structure has at least three members, two of
+ # which are ints, so we still shouldn't see any ancillary
+ # data.
+ self.checkTruncatedHeader(self.doRecvmsg(self.serv_sock, len(MSG),
+ SIZEOF_INT * 2))
+
+ def _testCmsgTrunc2Int(self):
+ self.createAndSendFDs(1)
+
+ def testCmsgTruncLen0Minus1(self):
+ self.checkTruncatedHeader(self.doRecvmsg(self.serv_sock, len(MSG),
+ socket.CMSG_LEN(0) - 1))
+
+ def _testCmsgTruncLen0Minus1(self):
+ self.createAndSendFDs(1)
+
+ # The following tests try to truncate the control message in the
+ # middle of the FD array.
+
+ def checkTruncatedArray(self, ancbuf, maxdata, mindata=0):
+ # Check that file descriptor data is truncated to between
+ # mindata and maxdata bytes when received with buffer size
+ # ancbuf, and that any complete file descriptor numbers are
+ # valid.
+ msg, ancdata, flags, addr = self.doRecvmsg(self.serv_sock,
+ len(MSG), ancbuf)
+ self.assertEqual(msg, MSG)
+ self.checkRecvmsgAddress(addr, self.cli_addr)
+ self.checkFlags(flags, eor=True, checkset=socket.MSG_CTRUNC)
+
+ if mindata == 0 and ancdata == []:
+ return
+ self.assertEqual(len(ancdata), 1)
+ cmsg_level, cmsg_type, cmsg_data = ancdata[0]
+ self.assertEqual(cmsg_level, socket.SOL_SOCKET)
+ self.assertEqual(cmsg_type, socket.SCM_RIGHTS)
+ self.assertGreaterEqual(len(cmsg_data), mindata)
+ self.assertLessEqual(len(cmsg_data), maxdata)
+ fds = array.array("i")
+ fds.frombytes(cmsg_data[:
+ len(cmsg_data) - (len(cmsg_data) % fds.itemsize)])
+ self.checkFDs(fds)
+
+ def testCmsgTruncLen0(self):
+ self.checkTruncatedArray(ancbuf=socket.CMSG_LEN(0), maxdata=0)
+
+ def _testCmsgTruncLen0(self):
+ self.createAndSendFDs(1)
+
+ def testCmsgTruncLen0Plus1(self):
+ self.checkTruncatedArray(ancbuf=socket.CMSG_LEN(0) + 1, maxdata=1)
+
+ def _testCmsgTruncLen0Plus1(self):
+ self.createAndSendFDs(2)
+
+ def testCmsgTruncLen1(self):
+ self.checkTruncatedArray(ancbuf=socket.CMSG_LEN(SIZEOF_INT),
+ maxdata=SIZEOF_INT)
+
+ def _testCmsgTruncLen1(self):
+ self.createAndSendFDs(2)
+
+ def testCmsgTruncLen2Minus1(self):
+ self.checkTruncatedArray(ancbuf=socket.CMSG_LEN(2 * SIZEOF_INT) - 1,
+ maxdata=(2 * SIZEOF_INT) - 1)
+
+ def _testCmsgTruncLen2Minus1(self):
+ self.createAndSendFDs(2)
+
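# Truncation itself is easy to observe outside the harness; a sketch
# under the same AF_UNIX assumptions as above: an ancillary buffer of
# CMSG_LEN(0) holds the cmsghdr but none of the FD array, so the
# kernel sets MSG_CTRUNC and may drop the descriptor entirely.
import array
import os
import socket

parent, child = socket.socketpair(socket.AF_UNIX, socket.SOCK_STREAM)
fd = os.open(os.devnull, os.O_RDONLY)
parent.sendmsg([b"x"], [(socket.SOL_SOCKET, socket.SCM_RIGHTS,
                         array.array("i", [fd]))])
msg, ancdata, flags, addr = child.recvmsg(1, socket.CMSG_LEN(0))
assert flags & socket.MSG_CTRUNC
os.close(fd)
parent.close()
child.close()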
+
+class RFC3542AncillaryTest(SendrecvmsgServerTimeoutBase):
+ # Test sendmsg() and recvmsg[_into]() using the ancillary data
+ # features of the RFC 3542 Advanced Sockets API for IPv6.
+ # Currently we can only handle certain data items (e.g. traffic
+ # class, hop limit, MTU discovery and fragmentation settings)
+ # without resorting to unportable means such as the struct module,
+ # but the tests here are aimed at testing the ancillary data
+ # handling in sendmsg() and recvmsg() rather than the IPv6 API
+ # itself.
+
+ # Test value to use when setting hop limit of packet
+ hop_limit = 2
+
+ # Test value to use when setting traffic class of packet.
+ # -1 means "use kernel default".
+ traffic_class = -1
+
+ def ancillaryMapping(self, ancdata):
+ # Given ancillary data list ancdata, return a mapping from
+ # pairs (cmsg_level, cmsg_type) to corresponding cmsg_data.
+ # Check that no (level, type) pair appears more than once.
+ d = {}
+ for cmsg_level, cmsg_type, cmsg_data in ancdata:
+ self.assertNotIn((cmsg_level, cmsg_type), d)
+ d[(cmsg_level, cmsg_type)] = cmsg_data
+ return d
+
+ def checkHopLimit(self, ancbufsize, maxhop=255, ignoreflags=0):
+ # Receive hop limit into ancbufsize bytes of ancillary data
+ # space. Check that data is MSG, ancillary data is not
+ # truncated (but ignore any flags in ignoreflags), and hop
+ # limit is between 0 and maxhop inclusive.
+ self.serv_sock.setsockopt(socket.IPPROTO_IPV6,
+ socket.IPV6_RECVHOPLIMIT, 1)
+ self.misc_event.set()
+ msg, ancdata, flags, addr = self.doRecvmsg(self.serv_sock,
+ len(MSG), ancbufsize)
+
+ self.assertEqual(msg, MSG)
+ self.checkRecvmsgAddress(addr, self.cli_addr)
+ self.checkFlags(flags, eor=True, checkunset=socket.MSG_CTRUNC,
+ ignore=ignoreflags)
+
+ self.assertEqual(len(ancdata), 1)
+ self.assertIsInstance(ancdata[0], tuple)
+ cmsg_level, cmsg_type, cmsg_data = ancdata[0]
+ self.assertEqual(cmsg_level, socket.IPPROTO_IPV6)
+ self.assertEqual(cmsg_type, socket.IPV6_HOPLIMIT)
+ self.assertIsInstance(cmsg_data, bytes)
+ self.assertEqual(len(cmsg_data), SIZEOF_INT)
+ a = array.array("i")
+ a.frombytes(cmsg_data)
+ self.assertGreaterEqual(a[0], 0)
+ self.assertLessEqual(a[0], maxhop)
+
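# Condensed to application form, the hop-limit exchange looks like
# this; a sketch assuming IPv6 loopback is usable and the platform
# defines the RFC 3542 names used above:
import array
import socket

recv = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM)
recv.bind(("::1", 0))
recv.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_RECVHOPLIMIT, 1)
send = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM)
send.sendto(b"ping", recv.getsockname())
msg, ancdata, flags, addr = recv.recvmsg(
    1024, socket.CMSG_SPACE(array.array("i").itemsize))
for level, ctype, data in ancdata:
    if level == socket.IPPROTO_IPV6 and ctype == socket.IPV6_HOPLIMIT:
        hop = array.array("i")
        hop.frombytes(data)
        print("hop limit:", hop[0])
send.close()
recv.close()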
+ @requireAttrs(socket, "IPV6_RECVHOPLIMIT", "IPV6_HOPLIMIT")
+ def testRecvHopLimit(self):
+ # Test receiving the packet hop limit as ancillary data.
+ self.checkHopLimit(ancbufsize=10240)
+
+ @testRecvHopLimit.client_skip
+ def _testRecvHopLimit(self):
+ # Need to wait until server has asked to receive ancillary
+ # data, as implementations are not required to buffer it
+ # otherwise.
+ self.assertTrue(self.misc_event.wait(timeout=self.fail_timeout))
+ self.sendToServer(MSG)
+
+ @requireAttrs(socket, "CMSG_SPACE", "IPV6_RECVHOPLIMIT", "IPV6_HOPLIMIT")
+ def testRecvHopLimitCMSG_SPACE(self):
+ # Test receiving hop limit, using CMSG_SPACE to calculate buffer size.
+ self.checkHopLimit(ancbufsize=socket.CMSG_SPACE(SIZEOF_INT))
+
+ @testRecvHopLimitCMSG_SPACE.client_skip
+ def _testRecvHopLimitCMSG_SPACE(self):
+ self.assertTrue(self.misc_event.wait(timeout=self.fail_timeout))
+ self.sendToServer(MSG)
+
+ # Could test receiving into buffer sized using CMSG_LEN, but RFC
+ # 3542 says portable applications must provide space for trailing
+ # padding. Implementations may set MSG_CTRUNC if there isn't
+ # enough space for the padding.
+
+ @requireAttrs(socket.socket, "sendmsg")
+ @requireAttrs(socket, "IPV6_RECVHOPLIMIT", "IPV6_HOPLIMIT")
+ def testSetHopLimit(self):
+ # Test setting hop limit on outgoing packet and receiving it
+ # at the other end.
+ self.checkHopLimit(ancbufsize=10240, maxhop=self.hop_limit)
+
+ @testSetHopLimit.client_skip
+ def _testSetHopLimit(self):
+ self.assertTrue(self.misc_event.wait(timeout=self.fail_timeout))
+ self.assertEqual(
+ self.sendmsgToServer([MSG],
+ [(socket.IPPROTO_IPV6, socket.IPV6_HOPLIMIT,
+ array.array("i", [self.hop_limit]))]),
+ len(MSG))
+
+ def checkTrafficClassAndHopLimit(self, ancbufsize, maxhop=255,
+ ignoreflags=0):
+ # Receive traffic class and hop limit into ancbufsize bytes of
+ # ancillary data space. Check that data is MSG, ancillary
+ # data is not truncated (but ignore any flags in ignoreflags),
+ # and traffic class and hop limit are in range (hop limit no
+ # more than maxhop).
+ self.serv_sock.setsockopt(socket.IPPROTO_IPV6,
+ socket.IPV6_RECVHOPLIMIT, 1)
+ self.serv_sock.setsockopt(socket.IPPROTO_IPV6,
+ socket.IPV6_RECVTCLASS, 1)
+ self.misc_event.set()
+ msg, ancdata, flags, addr = self.doRecvmsg(self.serv_sock,
+ len(MSG), ancbufsize)
+
+ self.assertEqual(msg, MSG)
+ self.checkRecvmsgAddress(addr, self.cli_addr)
+ self.checkFlags(flags, eor=True, checkunset=socket.MSG_CTRUNC,
+ ignore=ignoreflags)
+ self.assertEqual(len(ancdata), 2)
+ ancmap = self.ancillaryMapping(ancdata)
+
+ tcdata = ancmap[(socket.IPPROTO_IPV6, socket.IPV6_TCLASS)]
+ self.assertEqual(len(tcdata), SIZEOF_INT)
+ a = array.array("i")
+ a.frombytes(tcdata)
+ self.assertGreaterEqual(a[0], 0)
+ self.assertLessEqual(a[0], 255)
+
+ hldata = ancmap[(socket.IPPROTO_IPV6, socket.IPV6_HOPLIMIT)]
+ self.assertEqual(len(hldata), SIZEOF_INT)
+ a = array.array("i")
+ a.frombytes(hldata)
+ self.assertGreaterEqual(a[0], 0)
+ self.assertLessEqual(a[0], maxhop)
+
+ @requireAttrs(socket, "IPV6_RECVHOPLIMIT", "IPV6_HOPLIMIT",
+ "IPV6_RECVTCLASS", "IPV6_TCLASS")
+ def testRecvTrafficClassAndHopLimit(self):
+ # Test receiving traffic class and hop limit as ancillary data.
+ self.checkTrafficClassAndHopLimit(ancbufsize=10240)
+
+ @testRecvTrafficClassAndHopLimit.client_skip
+ def _testRecvTrafficClassAndHopLimit(self):
+ self.assertTrue(self.misc_event.wait(timeout=self.fail_timeout))
+ self.sendToServer(MSG)
+
+ @requireAttrs(socket, "CMSG_SPACE", "IPV6_RECVHOPLIMIT", "IPV6_HOPLIMIT",
+ "IPV6_RECVTCLASS", "IPV6_TCLASS")
+ def testRecvTrafficClassAndHopLimitCMSG_SPACE(self):
+ # Test receiving traffic class and hop limit, using
+ # CMSG_SPACE() to calculate buffer size.
+ self.checkTrafficClassAndHopLimit(
+ ancbufsize=socket.CMSG_SPACE(SIZEOF_INT) * 2)
+
+ @testRecvTrafficClassAndHopLimitCMSG_SPACE.client_skip
+ def _testRecvTrafficClassAndHopLimitCMSG_SPACE(self):
+ self.assertTrue(self.misc_event.wait(timeout=self.fail_timeout))
+ self.sendToServer(MSG)
+
+ @requireAttrs(socket.socket, "sendmsg")
+ @requireAttrs(socket, "CMSG_SPACE", "IPV6_RECVHOPLIMIT", "IPV6_HOPLIMIT",
+ "IPV6_RECVTCLASS", "IPV6_TCLASS")
+ def testSetTrafficClassAndHopLimit(self):
+ # Test setting traffic class and hop limit on outgoing packet,
+ # and receiving them at the other end.
+ self.checkTrafficClassAndHopLimit(ancbufsize=10240,
+ maxhop=self.hop_limit)
+
+ @testSetTrafficClassAndHopLimit.client_skip
+ def _testSetTrafficClassAndHopLimit(self):
+ self.assertTrue(self.misc_event.wait(timeout=self.fail_timeout))
+ self.assertEqual(
+ self.sendmsgToServer([MSG],
+ [(socket.IPPROTO_IPV6, socket.IPV6_TCLASS,
+ array.array("i", [self.traffic_class])),
+ (socket.IPPROTO_IPV6, socket.IPV6_HOPLIMIT,
+ array.array("i", [self.hop_limit]))]),
+ len(MSG))
+
+ @requireAttrs(socket.socket, "sendmsg")
+ @requireAttrs(socket, "CMSG_SPACE", "IPV6_RECVHOPLIMIT", "IPV6_HOPLIMIT",
+ "IPV6_RECVTCLASS", "IPV6_TCLASS")
+ def testOddCmsgSize(self):
+ # Try to send ancillary data with first item one byte too
+ # long. Fall back to sending with correct size if this fails,
+ # and check that second item was handled correctly.
+ self.checkTrafficClassAndHopLimit(ancbufsize=10240,
+ maxhop=self.hop_limit)
+
+ @testOddCmsgSize.client_skip
+ def _testOddCmsgSize(self):
+ self.assertTrue(self.misc_event.wait(timeout=self.fail_timeout))
+ try:
+ nbytes = self.sendmsgToServer(
+ [MSG],
+ [(socket.IPPROTO_IPV6, socket.IPV6_TCLASS,
+ array.array("i", [self.traffic_class]).tobytes() + b"\x00"),
+ (socket.IPPROTO_IPV6, socket.IPV6_HOPLIMIT,
+ array.array("i", [self.hop_limit]))])
+ except socket.error as e:
+ self.assertIsInstance(e.errno, int)
+ nbytes = self.sendmsgToServer(
+ [MSG],
+ [(socket.IPPROTO_IPV6, socket.IPV6_TCLASS,
+ array.array("i", [self.traffic_class])),
+ (socket.IPPROTO_IPV6, socket.IPV6_HOPLIMIT,
+ array.array("i", [self.hop_limit]))])
+ self.assertEqual(nbytes, len(MSG))
+
+ # Tests for proper handling of truncated ancillary data
+
+ def checkHopLimitTruncatedHeader(self, ancbufsize, ignoreflags=0):
+ # Receive hop limit into ancbufsize bytes of ancillary data
+ # space, which should be too small to contain the ancillary
+ # data header (if ancbufsize is None, pass no second argument
+ # to recvmsg()). Check that data is MSG, MSG_CTRUNC is set
+ # (unless included in ignoreflags), and no ancillary data is
+ # returned.
+ self.serv_sock.setsockopt(socket.IPPROTO_IPV6,
+ socket.IPV6_RECVHOPLIMIT, 1)
+ self.misc_event.set()
+ args = () if ancbufsize is None else (ancbufsize,)
+ msg, ancdata, flags, addr = self.doRecvmsg(self.serv_sock,
+ len(MSG), *args)
+
+ self.assertEqual(msg, MSG)
+ self.checkRecvmsgAddress(addr, self.cli_addr)
+ self.assertEqual(ancdata, [])
+ self.checkFlags(flags, eor=True, checkset=socket.MSG_CTRUNC,
+ ignore=ignoreflags)
+
+ @requireAttrs(socket, "IPV6_RECVHOPLIMIT", "IPV6_HOPLIMIT")
+ def testCmsgTruncNoBufSize(self):
+ # Check that no ancillary data is received when no ancillary
+ # buffer size is provided.
+ self.checkHopLimitTruncatedHeader(ancbufsize=None,
+ # BSD seems to set
+ # MSG_CTRUNC only if an item
+ # has been partially
+ # received.
+ ignoreflags=socket.MSG_CTRUNC)
+
+ @testCmsgTruncNoBufSize.client_skip
+ def _testCmsgTruncNoBufSize(self):
+ self.assertTrue(self.misc_event.wait(timeout=self.fail_timeout))
+ self.sendToServer(MSG)
+
+ @requireAttrs(socket, "IPV6_RECVHOPLIMIT", "IPV6_HOPLIMIT")
+ def testSingleCmsgTrunc0(self):
+ # Check that no ancillary data is received when ancillary
+ # buffer size is zero.
+ self.checkHopLimitTruncatedHeader(ancbufsize=0,
+ ignoreflags=socket.MSG_CTRUNC)
+
+ @testSingleCmsgTrunc0.client_skip
+ def _testSingleCmsgTrunc0(self):
+ self.assertTrue(self.misc_event.wait(timeout=self.fail_timeout))
+ self.sendToServer(MSG)
+
+ # Check that no ancillary data is returned for various non-zero
+ # (but still too small) buffer sizes.
+
+ @requireAttrs(socket, "IPV6_RECVHOPLIMIT", "IPV6_HOPLIMIT")
+ def testSingleCmsgTrunc1(self):
+ self.checkHopLimitTruncatedHeader(ancbufsize=1)
+
+ @testSingleCmsgTrunc1.client_skip
+ def _testSingleCmsgTrunc1(self):
+ self.assertTrue(self.misc_event.wait(timeout=self.fail_timeout))
+ self.sendToServer(MSG)
+
+ @requireAttrs(socket, "IPV6_RECVHOPLIMIT", "IPV6_HOPLIMIT")
+ def testSingleCmsgTrunc2Int(self):
+ self.checkHopLimitTruncatedHeader(ancbufsize=2 * SIZEOF_INT)
+
+ @testSingleCmsgTrunc2Int.client_skip
+ def _testSingleCmsgTrunc2Int(self):
+ self.assertTrue(self.misc_event.wait(timeout=self.fail_timeout))
+ self.sendToServer(MSG)
+
+ @requireAttrs(socket, "IPV6_RECVHOPLIMIT", "IPV6_HOPLIMIT")
+ def testSingleCmsgTruncLen0Minus1(self):
+ self.checkHopLimitTruncatedHeader(ancbufsize=socket.CMSG_LEN(0) - 1)
+
+ @testSingleCmsgTruncLen0Minus1.client_skip
+ def _testSingleCmsgTruncLen0Minus1(self):
+ self.assertTrue(self.misc_event.wait(timeout=self.fail_timeout))
+ self.sendToServer(MSG)
+
+ @requireAttrs(socket, "IPV6_RECVHOPLIMIT", "IPV6_HOPLIMIT")
+ def testSingleCmsgTruncInData(self):
+ # Test truncation of a control message inside its associated
+ # data. The message may be returned with its data truncated,
+ # or not returned at all.
+ self.serv_sock.setsockopt(socket.IPPROTO_IPV6,
+ socket.IPV6_RECVHOPLIMIT, 1)
+ self.misc_event.set()
+ msg, ancdata, flags, addr = self.doRecvmsg(
+ self.serv_sock, len(MSG), socket.CMSG_LEN(SIZEOF_INT) - 1)
+
+ self.assertEqual(msg, MSG)
+ self.checkRecvmsgAddress(addr, self.cli_addr)
+ self.checkFlags(flags, eor=True, checkset=socket.MSG_CTRUNC)
+
+ self.assertLessEqual(len(ancdata), 1)
+ if ancdata:
+ cmsg_level, cmsg_type, cmsg_data = ancdata[0]
+ self.assertEqual(cmsg_level, socket.IPPROTO_IPV6)
+ self.assertEqual(cmsg_type, socket.IPV6_HOPLIMIT)
+ self.assertLess(len(cmsg_data), SIZEOF_INT)
+
+ @testSingleCmsgTruncInData.client_skip
+ def _testSingleCmsgTruncInData(self):
+ self.assertTrue(self.misc_event.wait(timeout=self.fail_timeout))
+ self.sendToServer(MSG)
+
+ def checkTruncatedSecondHeader(self, ancbufsize, ignoreflags=0):
+ # Receive traffic class and hop limit into ancbufsize bytes of
+ # ancillary data space, which should be large enough to
+ # contain the first item, but too small to contain the header
+ # of the second. Check that data is MSG, MSG_CTRUNC is set
+ # (unless included in ignoreflags), and only one ancillary
+ # data item is returned.
+ self.serv_sock.setsockopt(socket.IPPROTO_IPV6,
+ socket.IPV6_RECVHOPLIMIT, 1)
+ self.serv_sock.setsockopt(socket.IPPROTO_IPV6,
+ socket.IPV6_RECVTCLASS, 1)
+ self.misc_event.set()
+ msg, ancdata, flags, addr = self.doRecvmsg(self.serv_sock,
+ len(MSG), ancbufsize)
+
+ self.assertEqual(msg, MSG)
+ self.checkRecvmsgAddress(addr, self.cli_addr)
+ self.checkFlags(flags, eor=True, checkset=socket.MSG_CTRUNC,
+ ignore=ignoreflags)
+
+ self.assertEqual(len(ancdata), 1)
+ cmsg_level, cmsg_type, cmsg_data = ancdata[0]
+ self.assertEqual(cmsg_level, socket.IPPROTO_IPV6)
+ self.assertIn(cmsg_type, {socket.IPV6_TCLASS, socket.IPV6_HOPLIMIT})
+ self.assertEqual(len(cmsg_data), SIZEOF_INT)
+ a = array.array("i")
+ a.frombytes(cmsg_data)
+ self.assertGreaterEqual(a[0], 0)
+ self.assertLessEqual(a[0], 255)
+
+ # Try the above test with various buffer sizes.
+
+ @requireAttrs(socket, "CMSG_SPACE", "IPV6_RECVHOPLIMIT", "IPV6_HOPLIMIT",
+ "IPV6_RECVTCLASS", "IPV6_TCLASS")
+ def testSecondCmsgTrunc0(self):
+ self.checkTruncatedSecondHeader(socket.CMSG_SPACE(SIZEOF_INT),
+ ignoreflags=socket.MSG_CTRUNC)
+
+ @testSecondCmsgTrunc0.client_skip
+ def _testSecondCmsgTrunc0(self):
+ self.assertTrue(self.misc_event.wait(timeout=self.fail_timeout))
+ self.sendToServer(MSG)
+
+ @requireAttrs(socket, "CMSG_SPACE", "IPV6_RECVHOPLIMIT", "IPV6_HOPLIMIT",
+ "IPV6_RECVTCLASS", "IPV6_TCLASS")
+ def testSecondCmsgTrunc1(self):
+ self.checkTruncatedSecondHeader(socket.CMSG_SPACE(SIZEOF_INT) + 1)
+
+ @testSecondCmsgTrunc1.client_skip
+ def _testSecondCmsgTrunc1(self):
+ self.assertTrue(self.misc_event.wait(timeout=self.fail_timeout))
+ self.sendToServer(MSG)
+
+ @requireAttrs(socket, "CMSG_SPACE", "IPV6_RECVHOPLIMIT", "IPV6_HOPLIMIT",
+ "IPV6_RECVTCLASS", "IPV6_TCLASS")
+ def testSecondCmsgTrunc2Int(self):
+ self.checkTruncatedSecondHeader(socket.CMSG_SPACE(SIZEOF_INT) +
+ 2 * SIZEOF_INT)
+
+ @testSecondCmsgTrunc2Int.client_skip
+ def _testSecondCmsgTrunc2Int(self):
+ self.assertTrue(self.misc_event.wait(timeout=self.fail_timeout))
+ self.sendToServer(MSG)
+
+ @requireAttrs(socket, "CMSG_SPACE", "IPV6_RECVHOPLIMIT", "IPV6_HOPLIMIT",
+ "IPV6_RECVTCLASS", "IPV6_TCLASS")
+ def testSecondCmsgTruncLen0Minus1(self):
+ self.checkTruncatedSecondHeader(socket.CMSG_SPACE(SIZEOF_INT) +
+ socket.CMSG_LEN(0) - 1)
+
+ @testSecondCmsgTruncLen0Minus1.client_skip
+ def _testSecondCmsgTruncLen0Minus1(self):
+ self.assertTrue(self.misc_event.wait(timeout=self.fail_timeout))
+ self.sendToServer(MSG)
+
+ @requireAttrs(socket, "CMSG_SPACE", "IPV6_RECVHOPLIMIT", "IPV6_HOPLIMIT",
+ "IPV6_RECVTCLASS", "IPV6_TCLASS")
+ def testSecondCmsgTruncInData(self):
+ # Test truncation of the second of two control messages inside
+ # its associated data.
+ self.serv_sock.setsockopt(socket.IPPROTO_IPV6,
+ socket.IPV6_RECVHOPLIMIT, 1)
+ self.serv_sock.setsockopt(socket.IPPROTO_IPV6,
+ socket.IPV6_RECVTCLASS, 1)
+ self.misc_event.set()
+ msg, ancdata, flags, addr = self.doRecvmsg(
+ self.serv_sock, len(MSG),
+ socket.CMSG_SPACE(SIZEOF_INT) + socket.CMSG_LEN(SIZEOF_INT) - 1)
+
+ self.assertEqual(msg, MSG)
+ self.checkRecvmsgAddress(addr, self.cli_addr)
+ self.checkFlags(flags, eor=True, checkset=socket.MSG_CTRUNC)
+
+ cmsg_types = {socket.IPV6_TCLASS, socket.IPV6_HOPLIMIT}
+
+ cmsg_level, cmsg_type, cmsg_data = ancdata.pop(0)
+ self.assertEqual(cmsg_level, socket.IPPROTO_IPV6)
+ cmsg_types.remove(cmsg_type)
+ self.assertEqual(len(cmsg_data), SIZEOF_INT)
+ a = array.array("i")
+ a.frombytes(cmsg_data)
+ self.assertGreaterEqual(a[0], 0)
+ self.assertLessEqual(a[0], 255)
+
+ if ancdata:
+ cmsg_level, cmsg_type, cmsg_data = ancdata.pop(0)
+ self.assertEqual(cmsg_level, socket.IPPROTO_IPV6)
+ cmsg_types.remove(cmsg_type)
+ self.assertLess(len(cmsg_data), SIZEOF_INT)
+
+ self.assertEqual(ancdata, [])
+
+ @testSecondCmsgTruncInData.client_skip
+ def _testSecondCmsgTruncInData(self):
+ self.assertTrue(self.misc_event.wait(timeout=self.fail_timeout))
+ self.sendToServer(MSG)
+
+
+# Derive concrete test classes for different socket types.
+
+class SendrecvmsgUDPTestBase(SendrecvmsgDgramFlagsBase,
+ SendrecvmsgConnectionlessBase,
+ ThreadedSocketTestMixin, UDPTestBase):
+ pass
+
+@requireAttrs(socket.socket, "sendmsg")
+@unittest.skipUnless(thread, 'Threading required for this test.')
+class SendmsgUDPTest(SendmsgConnectionlessTests, SendrecvmsgUDPTestBase):
+ pass
+
+@requireAttrs(socket.socket, "recvmsg")
+@unittest.skipUnless(thread, 'Threading required for this test.')
+class RecvmsgUDPTest(RecvmsgTests, SendrecvmsgUDPTestBase):
+ pass
+
+@requireAttrs(socket.socket, "recvmsg_into")
+@unittest.skipUnless(thread, 'Threading required for this test.')
+class RecvmsgIntoUDPTest(RecvmsgIntoTests, SendrecvmsgUDPTestBase):
+ pass
+
+
+class SendrecvmsgUDP6TestBase(SendrecvmsgDgramFlagsBase,
+ SendrecvmsgConnectionlessBase,
+ ThreadedSocketTestMixin, UDP6TestBase):
+ pass
+
+@requireAttrs(socket.socket, "sendmsg")
+@unittest.skipUnless(socket.has_ipv6, "Python not built with IPv6 support")
+@requireSocket("AF_INET6", "SOCK_DGRAM")
+@unittest.skipUnless(thread, 'Threading required for this test.')
+class SendmsgUDP6Test(SendmsgConnectionlessTests, SendrecvmsgUDP6TestBase):
+ pass
+
+@requireAttrs(socket.socket, "recvmsg")
+@unittest.skipUnless(socket.has_ipv6, "Python not built with IPv6 support")
+@requireSocket("AF_INET6", "SOCK_DGRAM")
+@unittest.skipUnless(thread, 'Threading required for this test.')
+class RecvmsgUDP6Test(RecvmsgTests, SendrecvmsgUDP6TestBase):
+ pass
+
+@requireAttrs(socket.socket, "recvmsg_into")
+@unittest.skipUnless(socket.has_ipv6, "Python not built with IPv6 support")
+@requireSocket("AF_INET6", "SOCK_DGRAM")
+@unittest.skipUnless(thread, 'Threading required for this test.')
+class RecvmsgIntoUDP6Test(RecvmsgIntoTests, SendrecvmsgUDP6TestBase):
+ pass
+
+@requireAttrs(socket.socket, "recvmsg")
+@unittest.skipUnless(socket.has_ipv6, "Python not built with IPv6 support")
+@requireAttrs(socket, "IPPROTO_IPV6")
+@requireSocket("AF_INET6", "SOCK_DGRAM")
+@unittest.skipUnless(thread, 'Threading required for this test.')
+class RecvmsgRFC3542AncillaryUDP6Test(RFC3542AncillaryTest,
+ SendrecvmsgUDP6TestBase):
+ pass
+
+@requireAttrs(socket.socket, "recvmsg_into")
+@unittest.skipUnless(socket.has_ipv6, "Python not built with IPv6 support")
+@requireAttrs(socket, "IPPROTO_IPV6")
+@requireSocket("AF_INET6", "SOCK_DGRAM")
+@unittest.skipUnless(thread, 'Threading required for this test.')
+class RecvmsgIntoRFC3542AncillaryUDP6Test(RecvmsgIntoMixin,
+ RFC3542AncillaryTest,
+ SendrecvmsgUDP6TestBase):
+ pass
+
+
+class SendrecvmsgTCPTestBase(SendrecvmsgConnectedBase,
+ ConnectedStreamTestMixin, TCPTestBase):
+ pass
+
+@requireAttrs(socket.socket, "sendmsg")
+@unittest.skipUnless(thread, 'Threading required for this test.')
+class SendmsgTCPTest(SendmsgStreamTests, SendrecvmsgTCPTestBase):
+ pass
+
+@requireAttrs(socket.socket, "recvmsg")
+@unittest.skipUnless(thread, 'Threading required for this test.')
+class RecvmsgTCPTest(RecvmsgTests, RecvmsgGenericStreamTests,
+ SendrecvmsgTCPTestBase):
+ pass
+
+@requireAttrs(socket.socket, "recvmsg_into")
+@unittest.skipUnless(thread, 'Threading required for this test.')
+class RecvmsgIntoTCPTest(RecvmsgIntoTests, RecvmsgGenericStreamTests,
+ SendrecvmsgTCPTestBase):
+ pass
+
+
+class SendrecvmsgSCTPStreamTestBase(SendrecvmsgSCTPFlagsBase,
+ SendrecvmsgConnectedBase,
+ ConnectedStreamTestMixin, SCTPStreamBase):
+ pass
+
+@requireAttrs(socket.socket, "sendmsg")
+@requireSocket("AF_INET", "SOCK_STREAM", "IPPROTO_SCTP")
+@unittest.skipUnless(thread, 'Threading required for this test.')
+class SendmsgSCTPStreamTest(SendmsgStreamTests, SendrecvmsgSCTPStreamTestBase):
+ pass
+
+@requireAttrs(socket.socket, "recvmsg")
+@requireSocket("AF_INET", "SOCK_STREAM", "IPPROTO_SCTP")
+@unittest.skipUnless(thread, 'Threading required for this test.')
+class RecvmsgSCTPStreamTest(RecvmsgTests, RecvmsgGenericStreamTests,
+ SendrecvmsgSCTPStreamTestBase):
+ pass
+
+@requireAttrs(socket.socket, "recvmsg_into")
+@requireSocket("AF_INET", "SOCK_STREAM", "IPPROTO_SCTP")
+@unittest.skipUnless(thread, 'Threading required for this test.')
+class RecvmsgIntoSCTPStreamTest(RecvmsgIntoTests, RecvmsgGenericStreamTests,
+ SendrecvmsgSCTPStreamTestBase):
+ pass
+
+
+class SendrecvmsgUnixStreamTestBase(SendrecvmsgConnectedBase,
+ ConnectedStreamTestMixin, UnixStreamBase):
+ pass
+
+@requireAttrs(socket.socket, "sendmsg")
+@requireAttrs(socket, "AF_UNIX")
+@unittest.skipUnless(thread, 'Threading required for this test.')
+class SendmsgUnixStreamTest(SendmsgStreamTests, SendrecvmsgUnixStreamTestBase):
+ pass
+
+@requireAttrs(socket.socket, "recvmsg")
+@requireAttrs(socket, "AF_UNIX")
+@unittest.skipUnless(thread, 'Threading required for this test.')
+class RecvmsgUnixStreamTest(RecvmsgTests, RecvmsgGenericStreamTests,
+ SendrecvmsgUnixStreamTestBase):
+ pass
+
+@requireAttrs(socket.socket, "recvmsg_into")
+@requireAttrs(socket, "AF_UNIX")
+@unittest.skipUnless(thread, 'Threading required for this test.')
+class RecvmsgIntoUnixStreamTest(RecvmsgIntoTests, RecvmsgGenericStreamTests,
+ SendrecvmsgUnixStreamTestBase):
+ pass
+
+@requireAttrs(socket.socket, "sendmsg", "recvmsg")
+@requireAttrs(socket, "AF_UNIX", "SOL_SOCKET", "SCM_RIGHTS")
+@unittest.skipUnless(thread, 'Threading required for this test.')
+class RecvmsgSCMRightsStreamTest(SCMRightsTest, SendrecvmsgUnixStreamTestBase):
+ pass
+
+@requireAttrs(socket.socket, "sendmsg", "recvmsg_into")
+@requireAttrs(socket, "AF_UNIX", "SOL_SOCKET", "SCM_RIGHTS")
+@unittest.skipUnless(thread, 'Threading required for this test.')
+class RecvmsgIntoSCMRightsStreamTest(RecvmsgIntoMixin, SCMRightsTest,
+ SendrecvmsgUnixStreamTestBase):
+ pass
+
+
+# Test interrupting the interruptible send/receive methods with a
+# signal when a timeout is set. These tests avoid having multiple
+# threads alive during the test so that the OS cannot deliver the
+# signal to the wrong one.
+
+class InterruptedTimeoutBase(unittest.TestCase):
+ # Base class for interrupted send/receive tests. Installs an
+ # empty handler for SIGALRM and removes it on teardown, along with
+ # any scheduled alarms.
+
+ def setUp(self):
+ super().setUp()
+ orig_alrm_handler = signal.signal(signal.SIGALRM,
+ lambda signum, frame: None)
+ self.addCleanup(signal.signal, signal.SIGALRM, orig_alrm_handler)
+ self.addCleanup(self.setAlarm, 0)
+
+ # Timeout for socket operations
+ timeout = 4.0
+
+ # Provide setAlarm() method to schedule delivery of SIGALRM after
+ # given number of seconds, or cancel it if zero, and an
+ # appropriate time value to use. Use setitimer() if available.
+ if hasattr(signal, "setitimer"):
+ alarm_time = 0.05
+
+ def setAlarm(self, seconds):
+ signal.setitimer(signal.ITIMER_REAL, seconds)
+ else:
+ # Old systems may deliver the alarm up to one second early
+ alarm_time = 2
+
+ def setAlarm(self, seconds):
+ signal.alarm(seconds)
+
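# The alarm plumbing reduces to this pattern; a sketch assuming
# signal.setitimer() is available (hence the fallback above to the
# coarser signal.alarm() on systems without it):
import signal

signal.signal(signal.SIGALRM, lambda signum, frame: None)
signal.setitimer(signal.ITIMER_REAL, 0.05)    # SIGALRM fires in 50 ms
try:
    pass  # a blocking socket call here would fail with errno EINTR
finally:
    signal.setitimer(signal.ITIMER_REAL, 0)   # cancel any pending alarm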
+
+# Require siginterrupt() in order to ensure that system calls are
+# interrupted by default.
+@requireAttrs(signal, "siginterrupt")
+@unittest.skipUnless(hasattr(signal, "alarm") or hasattr(signal, "setitimer"),
+ "Don't have signal.alarm or signal.setitimer")
+class InterruptedRecvTimeoutTest(InterruptedTimeoutBase, UDPTestBase):
+ # Test interrupting the recv*() methods with signals when a
+ # timeout is set.
+
+ def setUp(self):
+ super().setUp()
+ self.serv.settimeout(self.timeout)
+
+ def checkInterruptedRecv(self, func, *args, **kwargs):
+ # Check that func(*args, **kwargs) raises socket.error with an
+ # errno of EINTR when interrupted by a signal.
+ self.setAlarm(self.alarm_time)
+ with self.assertRaises(socket.error) as cm:
+ func(*args, **kwargs)
+ self.assertNotIsInstance(cm.exception, socket.timeout)
+ self.assertEqual(cm.exception.errno, errno.EINTR)
+
+ def testInterruptedRecvTimeout(self):
+ self.checkInterruptedRecv(self.serv.recv, 1024)
+
+ def testInterruptedRecvIntoTimeout(self):
+ self.checkInterruptedRecv(self.serv.recv_into, bytearray(1024))
+
+ def testInterruptedRecvfromTimeout(self):
+ self.checkInterruptedRecv(self.serv.recvfrom, 1024)
+
+ def testInterruptedRecvfromIntoTimeout(self):
+ self.checkInterruptedRecv(self.serv.recvfrom_into, bytearray(1024))
+
+ @requireAttrs(socket.socket, "recvmsg")
+ def testInterruptedRecvmsgTimeout(self):
+ self.checkInterruptedRecv(self.serv.recvmsg, 1024)
+
+ @requireAttrs(socket.socket, "recvmsg_into")
+ def testInterruptedRecvmsgIntoTimeout(self):
+ self.checkInterruptedRecv(self.serv.recvmsg_into, [bytearray(1024)])
+
+
+# Require siginterrupt() in order to ensure that system calls are
+# interrupted by default.
+@requireAttrs(signal, "siginterrupt")
+@unittest.skipUnless(hasattr(signal, "alarm") or hasattr(signal, "setitimer"),
+ "Don't have signal.alarm or signal.setitimer")
+@unittest.skipUnless(thread, 'Threading required for this test.')
+class InterruptedSendTimeoutTest(InterruptedTimeoutBase,
+ ThreadSafeCleanupTestCase,
+ SocketListeningTestMixin, TCPTestBase):
+ # Test interrupting the interruptible send*() methods with signals
+ # when a timeout is set.
+
+ def setUp(self):
+ super().setUp()
+ self.serv_conn = self.newSocket()
+ self.addCleanup(self.serv_conn.close)
+ # Use a thread to complete the connection, but wait for it to
+ # terminate before running the test, so that there is only one
+ # thread to accept the signal.
+ cli_thread = threading.Thread(target=self.doConnect)
+ cli_thread.start()
+ self.cli_conn, addr = self.serv.accept()
+ self.addCleanup(self.cli_conn.close)
+ cli_thread.join()
+ self.serv_conn.settimeout(self.timeout)
+
+ def doConnect(self):
+ self.serv_conn.connect(self.serv_addr)
+
+ def checkInterruptedSend(self, func, *args, **kwargs):
+ # Check that func(*args, **kwargs), run in a loop, raises
+ # socket.error with an errno of EINTR when interrupted by a
+ # signal.
+ with self.assertRaises(socket.error) as cm:
+ while True:
+ self.setAlarm(self.alarm_time)
+ func(*args, **kwargs)
+ self.assertNotIsInstance(cm.exception, socket.timeout)
+ self.assertEqual(cm.exception.errno, errno.EINTR)
+
+ # Issue #12958: The following tests have problems on Mac OS X
+ @support.anticipate_failure(sys.platform == "darwin")
+ def testInterruptedSendTimeout(self):
+ self.checkInterruptedSend(self.serv_conn.send, b"a"*512)
+
+ @support.anticipate_failure(sys.platform == "darwin")
+ def testInterruptedSendtoTimeout(self):
+ # Pass an actual address here, since Python's wrapper for
+ # sendto() doesn't allow a zero-length one; POSIX requires
+ # the address to be ignored anyway, as the socket is
+ # connection-mode.
+ self.checkInterruptedSend(self.serv_conn.sendto, b"a"*512,
+ self.serv_addr)
+
+ @support.anticipate_failure(sys.platform == "darwin")
+ @requireAttrs(socket.socket, "sendmsg")
+ def testInterruptedSendmsgTimeout(self):
+ self.checkInterruptedSend(self.serv_conn.sendmsg, [b"a"*512])
+
+
@unittest.skipUnless(thread, 'Threading required for this test.')
class TCPCloserTest(ThreadedTCPSocketTest):
@@ -1068,11 +3537,8 @@ class NonBlockingTCPTests(ThreadedTCPSocketTest):
pass
if hasattr(socket, "SOCK_NONBLOCK"):
+ @support.requires_linux_version(2, 6, 28)
def testInitNonBlocking(self):
- v = linux_version()
- if v < (2, 6, 28):
- self.skipTest("Linux kernel 2.6.28 or higher required, not %s"
- % ".".join(map(str, v)))
# reinit server socket
self.serv.close()
self.serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM |
@@ -1174,7 +3640,7 @@ class FileObjectClassTestCase(SocketConnectedTest):
"""
bufsize = -1 # Use default buffer size
- encoding = 'utf8'
+ encoding = 'utf-8'
errors = 'strict'
newline = None
@@ -1355,7 +3821,7 @@ class FileObjectInterruptedTestCase(unittest.TestCase):
@staticmethod
def _raise_eintr():
- raise socket.error(errno.EINTR)
+ raise socket.error(errno.EINTR, "interrupted")
def _textiowrap_mock_socket(self, mock, buffering=-1):
raw = socket.SocketIO(mock, "r")
@@ -1395,7 +3861,7 @@ class FileObjectInterruptedTestCase(unittest.TestCase):
data = b''
else:
data = ''
- expecting = expecting.decode('utf8')
+ expecting = expecting.decode('utf-8')
while len(data) != len(expecting):
part = fo.read(size)
if not part:
@@ -1557,7 +4023,7 @@ class UnicodeReadFileObjectClassTestCase(FileObjectClassTestCase):
"""Tests for socket.makefile() in text mode (rather than binary)"""
read_mode = 'r'
- read_msg = MSG.decode('utf8')
+ read_msg = MSG.decode('utf-8')
write_mode = 'wb'
write_msg = MSG
newline = ''
@@ -1569,7 +4035,7 @@ class UnicodeWriteFileObjectClassTestCase(FileObjectClassTestCase):
read_mode = 'rb'
read_msg = MSG
write_mode = 'w'
- write_msg = MSG.decode('utf8')
+ write_msg = MSG.decode('utf-8')
newline = ''
@@ -1577,9 +4043,9 @@ class UnicodeReadWriteFileObjectClassTestCase(FileObjectClassTestCase):
"""Tests for socket.makefile() in text mode (rather than binary)"""
read_mode = 'r'
- read_msg = MSG.decode('utf8')
+ read_msg = MSG.decode('utf-8')
write_mode = 'w'
- write_msg = MSG.decode('utf8')
+ write_msg = MSG.decode('utf-8')
newline = ''
@@ -1851,6 +4317,78 @@ class TestLinuxAbstractNamespace(unittest.TestCase):
with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as s:
self.assertRaises(socket.error, s.bind, address)
+ def testStrName(self):
+ # Check that an abstract name can be passed as a string.
+ s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
+ try:
+ s.bind("\x00python\x00test\x00")
+ self.assertEqual(s.getsockname(), b"\x00python\x00test\x00")
+ finally:
+ s.close()
+
+class TestUnixDomain(unittest.TestCase):
+
+ def setUp(self):
+ self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
+
+ def tearDown(self):
+ self.sock.close()
+
+ def encoded(self, path):
+ # Return the given path encoded in the file system encoding,
+ # or skip the test if this is not possible.
+ try:
+ return os.fsencode(path)
+ except UnicodeEncodeError:
+ self.skipTest(
+ "Pathname {0!a} cannot be represented in file "
+ "system encoding {1!r}".format(
+ path, sys.getfilesystemencoding()))
+
+ def bind(self, sock, path):
+ # Bind the socket
+ try:
+ sock.bind(path)
+ except OSError as e:
+ if str(e) == "AF_UNIX path too long":
+ self.skipTest(
+ "Pathname {0!a} is too long to serve as a AF_UNIX path"
+ .format(path))
+ else:
+ raise
+
+ def testStrAddr(self):
+ # Test binding to and retrieving a normal string pathname.
+ path = os.path.abspath(support.TESTFN)
+ self.bind(self.sock, path)
+ self.addCleanup(support.unlink, path)
+ self.assertEqual(self.sock.getsockname(), path)
+
+ def testBytesAddr(self):
+ # Test binding to a bytes pathname.
+ path = os.path.abspath(support.TESTFN)
+ self.bind(self.sock, self.encoded(path))
+ self.addCleanup(support.unlink, path)
+ self.assertEqual(self.sock.getsockname(), path)
+
+ def testSurrogateescapeBind(self):
+ # Test binding to a valid non-ASCII pathname, with the
+ # non-ASCII bytes supplied using surrogateescape encoding.
+ path = os.path.abspath(support.TESTFN_UNICODE)
+ b = self.encoded(path)
+ self.bind(self.sock, b.decode("ascii", "surrogateescape"))
+ self.addCleanup(support.unlink, path)
+ self.assertEqual(self.sock.getsockname(), path)
+
+ def testUnencodableAddr(self):
+ # Test binding to a pathname that cannot be encoded in the
+ # file system encoding.
+ if support.TESTFN_UNENCODABLE is None:
+ self.skipTest("No unencodable filename available")
+ path = os.path.abspath(support.TESTFN_UNENCODABLE)
+ self.bind(self.sock, path)
+ self.addCleanup(support.unlink, path)
+ self.assertEqual(self.sock.getsockname(), path)
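# The surrogateescape round trip used above, in isolation; a minimal
# sketch (the byte value is purely illustrative):
raw = b"caf\xe9"                                 # not decodable as ASCII
name = raw.decode("ascii", "surrogateescape")    # lossless str form
assert name.encode("ascii", "surrogateescape") == raw
# os.fsencode() applies the same error handler, which is why binding
# with the str form and with the raw bytes reaches the same path.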
@unittest.skipUnless(thread, 'Threading required for this test.')
class BufferIOTest(SocketConnectedTest):
@@ -2051,11 +4589,8 @@ class ContextManagersTest(ThreadedTCPSocketTest):
"SOCK_CLOEXEC not defined")
@unittest.skipUnless(fcntl, "module fcntl not available")
class CloexecConstantTest(unittest.TestCase):
+ @support.requires_linux_version(2, 6, 28)
def test_SOCK_CLOEXEC(self):
- v = linux_version()
- if v < (2, 6, 28):
- self.skipTest("Linux kernel 2.6.28 or higher required, not %s"
- % ".".join(map(str, v)))
with socket.socket(socket.AF_INET,
socket.SOCK_STREAM | socket.SOCK_CLOEXEC) as s:
self.assertTrue(s.type & socket.SOCK_CLOEXEC)
@@ -2073,11 +4608,8 @@ class NonblockConstantTest(unittest.TestCase):
self.assertFalse(s.type & socket.SOCK_NONBLOCK)
self.assertEqual(s.gettimeout(), None)
+ @support.requires_linux_version(2, 6, 28)
def test_SOCK_NONBLOCK(self):
- v = linux_version()
- if v < (2, 6, 28):
- self.skipTest("Linux kernel 2.6.28 or higher required, not %s"
- % ".".join(map(str, v)))
# a lot of it seems silly and redundant, but I wanted to test that
# changing back and forth worked ok
with socket.socket(socket.AF_INET,
@@ -2133,11 +4665,40 @@ def test_main():
])
if hasattr(socket, "socketpair"):
tests.append(BasicSocketPairTest)
- if sys.platform == 'linux2':
+ if hasattr(socket, "AF_UNIX"):
+ tests.append(TestUnixDomain)
+ if sys.platform == 'linux':
tests.append(TestLinuxAbstractNamespace)
if isTipcAvailable():
tests.append(TIPCTest)
tests.append(TIPCThreadableTest)
+ tests.extend([BasicCANTest, CANTest])
+ tests.extend([BasicRDSTest, RDSTest])
+ tests.extend([
+ CmsgMacroTests,
+ SendmsgUDPTest,
+ RecvmsgUDPTest,
+ RecvmsgIntoUDPTest,
+ SendmsgUDP6Test,
+ RecvmsgUDP6Test,
+ RecvmsgRFC3542AncillaryUDP6Test,
+ RecvmsgIntoRFC3542AncillaryUDP6Test,
+ RecvmsgIntoUDP6Test,
+ SendmsgTCPTest,
+ RecvmsgTCPTest,
+ RecvmsgIntoTCPTest,
+ SendmsgSCTPStreamTest,
+ RecvmsgSCTPStreamTest,
+ RecvmsgIntoSCTPStreamTest,
+ SendmsgUnixStreamTest,
+ RecvmsgUnixStreamTest,
+ RecvmsgIntoUnixStreamTest,
+ RecvmsgSCMRightsStreamTest,
+ RecvmsgIntoSCMRightsStreamTest,
+ # These are slow when setitimer() is not available
+ InterruptedRecvTimeoutTest,
+ InterruptedSendTimeoutTest,
+ ])
thread_info = support.threading_setup()
support.run_unittest(*tests)
diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py
index 1b08f2e..c6ce075 100644
--- a/Lib/test/test_ssl.py
+++ b/Lib/test/test_ssl.py
@@ -42,6 +42,9 @@ ONLYCERT = data_file("ssl_cert.pem")
ONLYKEY = data_file("ssl_key.pem")
BYTES_ONLYCERT = os.fsencode(ONLYCERT)
BYTES_ONLYKEY = os.fsencode(ONLYKEY)
+CERTFILE_PROTECTED = data_file("keycert.passwd.pem")
+ONLYKEY_PROTECTED = data_file("ssl_key.passwd.pem")
+KEY_PASSWORD = "somepass"
CAPATH = data_file("capath")
BYTES_CAPATH = os.fsencode(CAPATH)
@@ -53,6 +56,8 @@ WRONGCERT = data_file("XXXnonexisting.pem")
BADKEY = data_file("badkey.pem")
NOKIACERT = data_file("nokia.pem")
+DHFILE = data_file("dh512.pem")
+BYTES_DHFILE = os.fsencode(DHFILE)
def handle_error(prefix):
exc_format = ' '.join(traceback.format_exception(*sys.exc_info()))
@@ -95,7 +100,14 @@ class BasicSocketTests(unittest.TestCase):
ssl.CERT_NONE
ssl.CERT_OPTIONAL
ssl.CERT_REQUIRED
+ ssl.OP_CIPHER_SERVER_PREFERENCE
+ ssl.OP_SINGLE_DH_USE
+ if ssl.HAS_ECDH:
+ ssl.OP_SINGLE_ECDH_USE
+ if ssl.OPENSSL_VERSION_INFO >= (1, 0):
+ ssl.OP_NO_COMPRESSION
self.assertIn(ssl.HAS_SNI, {True, False})
+ self.assertIn(ssl.HAS_ECDH, {True, False})
def test_random(self):
v = ssl.RAND_status()
@@ -103,6 +115,16 @@ class BasicSocketTests(unittest.TestCase):
sys.stdout.write("\n RAND_status is %d (%s)\n"
% (v, (v and "sufficient randomness") or
"insufficient randomness"))
+
+ data, is_cryptographic = ssl.RAND_pseudo_bytes(16)
+ self.assertEqual(len(data), 16)
+ self.assertEqual(is_cryptographic, v == 1)
+ if v:
+ data = ssl.RAND_bytes(16)
+ self.assertEqual(len(data), 16)
+ else:
+ self.assertRaises(ssl.SSLError, ssl.RAND_bytes, 16)
+
try:
ssl.RAND_egd(1)
except TypeError:
@@ -337,6 +359,25 @@ class BasicSocketTests(unittest.TestCase):
self.assertRaises(ValueError, ctx.wrap_socket, sock, True,
server_hostname="some.hostname")
+ def test_unknown_channel_binding(self):
+ # should raise ValueError for unknown type
+ s = socket.socket(socket.AF_INET)
+ ss = ssl.wrap_socket(s)
+ with self.assertRaises(ValueError):
+ ss.get_channel_binding("unknown-type")
+
+ @unittest.skipUnless("tls-unique" in ssl.CHANNEL_BINDING_TYPES,
+ "'tls-unique' channel binding not available")
+ def test_tls_unique_channel_binding(self):
+ # unconnected should return None for known type
+ s = socket.socket(socket.AF_INET)
+ ss = ssl.wrap_socket(s)
+ self.assertIsNone(ss.get_channel_binding("tls-unique"))
+ # the same for server-side
+ s = socket.socket(socket.AF_INET)
+ ss = ssl.wrap_socket(s, server_side=True, certfile=CERTFILE)
+ self.assertIsNone(ss.get_channel_binding("tls-unique"))
+
class ContextTests(unittest.TestCase):
@skip_if_broken_ubuntu_ssl
@@ -427,6 +468,60 @@ class ContextTests(unittest.TestCase):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
with self.assertRaisesRegex(ssl.SSLError, "key values mismatch"):
ctx.load_cert_chain(SVN_PYTHON_ORG_ROOT_CERT, ONLYKEY)
+ # Password protected key and cert
+ ctx.load_cert_chain(CERTFILE_PROTECTED, password=KEY_PASSWORD)
+ ctx.load_cert_chain(CERTFILE_PROTECTED, password=KEY_PASSWORD.encode())
+ ctx.load_cert_chain(CERTFILE_PROTECTED,
+ password=bytearray(KEY_PASSWORD.encode()))
+ ctx.load_cert_chain(ONLYCERT, ONLYKEY_PROTECTED, KEY_PASSWORD)
+ ctx.load_cert_chain(ONLYCERT, ONLYKEY_PROTECTED, KEY_PASSWORD.encode())
+ ctx.load_cert_chain(ONLYCERT, ONLYKEY_PROTECTED,
+ bytearray(KEY_PASSWORD.encode()))
+ with self.assertRaisesRegex(TypeError, "should be a string"):
+ ctx.load_cert_chain(CERTFILE_PROTECTED, password=True)
+ with self.assertRaises(ssl.SSLError):
+ ctx.load_cert_chain(CERTFILE_PROTECTED, password="badpass")
+ with self.assertRaisesRegex(ValueError, "cannot be longer"):
+ # openssl has a fixed limit on the password buffer.
+ # PEM_BUFSIZE is generally set to 1kb.
+ # Return a string larger than this.
+ ctx.load_cert_chain(CERTFILE_PROTECTED, password=b'a' * 102400)
+ # Password callback
+ def getpass_unicode():
+ return KEY_PASSWORD
+ def getpass_bytes():
+ return KEY_PASSWORD.encode()
+ def getpass_bytearray():
+ return bytearray(KEY_PASSWORD.encode())
+ def getpass_badpass():
+ return "badpass"
+ def getpass_huge():
+ return b'a' * (1024 * 1024)
+ def getpass_bad_type():
+ return 9
+ def getpass_exception():
+ raise Exception('getpass error')
+ class GetPassCallable:
+ def __call__(self):
+ return KEY_PASSWORD
+ def getpass(self):
+ return KEY_PASSWORD
+ ctx.load_cert_chain(CERTFILE_PROTECTED, password=getpass_unicode)
+ ctx.load_cert_chain(CERTFILE_PROTECTED, password=getpass_bytes)
+ ctx.load_cert_chain(CERTFILE_PROTECTED, password=getpass_bytearray)
+ ctx.load_cert_chain(CERTFILE_PROTECTED, password=GetPassCallable())
+ ctx.load_cert_chain(CERTFILE_PROTECTED,
+ password=GetPassCallable().getpass)
+ with self.assertRaises(ssl.SSLError):
+ ctx.load_cert_chain(CERTFILE_PROTECTED, password=getpass_badpass)
+ with self.assertRaisesRegex(ValueError, "cannot be longer"):
+ ctx.load_cert_chain(CERTFILE_PROTECTED, password=getpass_huge)
+ with self.assertRaisesRegex(TypeError, "must return a string"):
+ ctx.load_cert_chain(CERTFILE_PROTECTED, password=getpass_bad_type)
+ with self.assertRaisesRegex(Exception, "getpass error"):
+ ctx.load_cert_chain(CERTFILE_PROTECTED, password=getpass_exception)
+ # Make sure the password function isn't called if it isn't needed
+ ctx.load_cert_chain(CERTFILE, password=getpass_exception)
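# In application code the callback protocol exercised above is simply
# this; a sketch assuming a PEM key encrypted with the password
# "somepass" lives at the hypothetical path "keycert.passwd.pem":
import ssl

def password_cb():
    # May return str, bytes or bytearray; any exception it raises
    # propagates out of load_cert_chain(), as the tests check.
    return "somepass"

ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
ctx.load_cert_chain("keycert.passwd.pem", password=password_cb)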
def test_load_verify_locations(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
@@ -447,6 +542,19 @@ class ContextTests(unittest.TestCase):
# Issue #10989: crash if the second argument type is invalid
self.assertRaises(TypeError, ctx.load_verify_locations, None, True)
+ def test_load_dh_params(self):
+ ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
+ ctx.load_dh_params(DHFILE)
+ if os.name != 'nt':
+ ctx.load_dh_params(BYTES_DHFILE)
+ self.assertRaises(TypeError, ctx.load_dh_params)
+ self.assertRaises(TypeError, ctx.load_dh_params, None)
+ with self.assertRaises(FileNotFoundError) as cm:
+ ctx.load_dh_params(WRONGCERT)
+ self.assertEqual(cm.exception.errno, errno.ENOENT)
+ with self.assertRaisesRegex(ssl.SSLError, "PEM routines"):
+ ctx.load_dh_params(CERTFILE)
+
@skip_if_broken_ubuntu_ssl
def test_session_stats(self):
for proto in PROTOCOLS:
@@ -471,6 +579,16 @@ class ContextTests(unittest.TestCase):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
ctx.set_default_verify_paths()
+ @unittest.skipUnless(ssl.HAS_ECDH, "ECDH disabled on this OpenSSL build")
+ def test_set_ecdh_curve(self):
+ ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
+ ctx.set_ecdh_curve("prime256v1")
+ ctx.set_ecdh_curve(b"prime256v1")
+ self.assertRaises(TypeError, ctx.set_ecdh_curve)
+ self.assertRaises(TypeError, ctx.set_ecdh_curve, None)
+ self.assertRaises(ValueError, ctx.set_ecdh_curve, "foo")
+ self.assertRaises(ValueError, ctx.set_ecdh_curve, b"foo")
+
class NetworkedTests(unittest.TestCase):
@@ -533,13 +651,10 @@ class NetworkedTests(unittest.TestCase):
try:
s.do_handshake()
break
- except ssl.SSLError as err:
- if err.args[0] == ssl.SSL_ERROR_WANT_READ:
- select.select([s], [], [], 5.0)
- elif err.args[0] == ssl.SSL_ERROR_WANT_WRITE:
- select.select([], [s], [], 5.0)
- else:
- raise
+ except ssl.SSLWantReadError:
+ select.select([s], [], [], 5.0)
+ except ssl.SSLWantWriteError:
+ select.select([], [s], [], 5.0)
# SSL established
self.assertTrue(s.getpeercert())
finally:
@@ -659,37 +774,39 @@ class NetworkedTests(unittest.TestCase):
count += 1
s.do_handshake()
break
- except ssl.SSLError as err:
- if err.args[0] == ssl.SSL_ERROR_WANT_READ:
- select.select([s], [], [])
- elif err.args[0] == ssl.SSL_ERROR_WANT_WRITE:
- select.select([], [s], [])
- else:
- raise
+ except ssl.SSLWantReadError:
+ select.select([s], [], [])
+ except ssl.SSLWantWriteError:
+ select.select([], [s], [])
s.close()
if support.verbose:
sys.stdout.write("\nNeeded %d calls to do_handshake() to establish session.\n" % count)
def test_get_server_certificate(self):
- with support.transient_internet("svn.python.org"):
- pem = ssl.get_server_certificate(("svn.python.org", 443))
- if not pem:
- self.fail("No server certificate on svn.python.org:443!")
+ def _test_get_server_certificate(host, port, cert=None):
+ with support.transient_internet(host):
+ pem = ssl.get_server_certificate((host, port))
+ if not pem:
+ self.fail("No server certificate on %s:%s!" % (host, port))
- try:
- pem = ssl.get_server_certificate(("svn.python.org", 443), ca_certs=CERTFILE)
- except ssl.SSLError as x:
- #should fail
+ try:
+ pem = ssl.get_server_certificate((host, port), ca_certs=CERTFILE)
+ except ssl.SSLError as x:
+ # should fail
+ if support.verbose:
+ sys.stdout.write("%s\n" % x)
+ else:
+ self.fail("Got server certificate %s for %s:%s!" % (pem, host, port))
+
+ pem = ssl.get_server_certificate((host, port), ca_certs=cert)
+ if not pem:
+ self.fail("No server certificate on %s:%s!" % (host, port))
if support.verbose:
- sys.stdout.write("%s\n" % x)
- else:
- self.fail("Got server certificate %s for svn.python.org!" % pem)
+ sys.stdout.write("\nVerified certificate for %s:%s is\n%s\n" % (host, port ,pem))
- pem = ssl.get_server_certificate(("svn.python.org", 443), ca_certs=SVN_PYTHON_ORG_ROOT_CERT)
- if not pem:
- self.fail("No server certificate on svn.python.org:443!")
- if support.verbose:
- sys.stdout.write("\nVerified certificate for svn.python.org:443 is\n%s\n" % pem)
+ _test_get_server_certificate('svn.python.org', 443, SVN_PYTHON_ORG_ROOT_CERT)
+ if support.IPV6_ENABLED:
+ _test_get_server_certificate('ipv6.google.com', 443)
def test_ciphers(self):
remote = ("svn.python.org", 443)
@@ -838,6 +955,11 @@ else:
self.sslconn = None
if support.verbose and self.server.connectionchatty:
sys.stdout.write(" server: connection is now unencrypted...\n")
+ elif stripped == b'CB tls-unique':
+ if support.verbose and self.server.connectionchatty:
+ sys.stdout.write(" server: read CB tls-unique from client, sending our CB data...\n")
+ data = self.sslconn.get_channel_binding("tls-unique")
+ self.write(repr(data).encode("us-ascii") + b"\n")
else:
if (support.verbose and
self.server.connectionchatty):
@@ -946,12 +1068,11 @@ else:
def _do_ssl_handshake(self):
try:
self.socket.do_handshake()
- except ssl.SSLError as err:
- if err.args[0] in (ssl.SSL_ERROR_WANT_READ,
- ssl.SSL_ERROR_WANT_WRITE):
- return
- elif err.args[0] == ssl.SSL_ERROR_EOF:
- return self.handle_close()
+ except (ssl.SSLWantReadError, ssl.SSLWantWriteError):
+ return
+ except ssl.SSLEOFError:
+ return self.handle_close()
+ except ssl.SSLError:
raise
except socket.error as err:
if err.args[0] == errno.ECONNABORTED:
@@ -1099,7 +1220,12 @@ else:
if connectionchatty:
if support.verbose:
sys.stdout.write(" client: closing connection.\n")
+ stats = {
+ 'compression': s.compression(),
+ 'cipher': s.cipher(),
+ }
s.close()
+ return stats
def try_protocol_combo(server_protocol, client_protocol, expect_success,
certsreqs=None, server_options=0, client_options=0):
@@ -1251,7 +1377,8 @@ else:
t.join()
@skip_if_broken_ubuntu_ssl
- @unittest.skipUnless(hasattr(ssl, 'PROTOCOL_SSLv2'), "need SSLv2")
+ @unittest.skipUnless(hasattr(ssl, 'PROTOCOL_SSLv2'),
+ "OpenSSL is compiled without SSLv2 support")
def test_protocol_sslv2(self):
"""Connecting to an SSLv2 server with various client options"""
if support.verbose:
@@ -1557,6 +1684,15 @@ else:
)
# consume data
s.read()
+
+ # Make sure sendmsg et al are disallowed to avoid
+ # inadvertent disclosure of data and/or corruption
+ # of the encrypted data stream
+ self.assertRaises(NotImplementedError, s.sendmsg, [b"data"])
+ self.assertRaises(NotImplementedError, s.recvmsg, 100)
+ self.assertRaises(NotImplementedError,
+ s.recvmsg_into, bytearray(100))
+
s.write(b"over\n")
s.close()
@@ -1625,6 +1761,98 @@ else:
s.connect((HOST, server.port))
self.assertIn("no shared cipher", str(server.conn_errors[0]))
+ @unittest.skipUnless("tls-unique" in ssl.CHANNEL_BINDING_TYPES,
+ "'tls-unique' channel binding not available")
+ def test_tls_unique_channel_binding(self):
+ """Test tls-unique channel binding."""
+ if support.verbose:
+ sys.stdout.write("\n")
+
+ server = ThreadedEchoServer(CERTFILE,
+ certreqs=ssl.CERT_NONE,
+ ssl_version=ssl.PROTOCOL_TLSv1,
+ cacerts=CERTFILE,
+ chatty=True,
+ connectionchatty=False)
+ with server:
+ s = ssl.wrap_socket(socket.socket(),
+ server_side=False,
+ certfile=CERTFILE,
+ ca_certs=CERTFILE,
+ cert_reqs=ssl.CERT_NONE,
+ ssl_version=ssl.PROTOCOL_TLSv1)
+ s.connect((HOST, server.port))
+ # get the data
+ cb_data = s.get_channel_binding("tls-unique")
+ if support.verbose:
+ sys.stdout.write(" got channel binding data: {0!r}\n"
+ .format(cb_data))
+
+ # check if it is sane
+ self.assertIsNotNone(cb_data)
+ self.assertEqual(len(cb_data), 12) # True for TLSv1
+
+ # and compare with the peers version
+ s.write(b"CB tls-unique\n")
+ peer_data_repr = s.read().strip()
+ self.assertEqual(peer_data_repr,
+ repr(cb_data).encode("us-ascii"))
+ s.close()
+
+ # now, again
+ s = ssl.wrap_socket(socket.socket(),
+ server_side=False,
+ certfile=CERTFILE,
+ ca_certs=CERTFILE,
+ cert_reqs=ssl.CERT_NONE,
+ ssl_version=ssl.PROTOCOL_TLSv1)
+ s.connect((HOST, server.port))
+ new_cb_data = s.get_channel_binding("tls-unique")
+ if support.verbose:
+ sys.stdout.write(" got another channel binding data: {0!r}\n"
+ .format(new_cb_data))
+ # check that it is really unique
+ self.assertNotEqual(cb_data, new_cb_data)
+ self.assertIsNotNone(new_cb_data)
+ self.assertEqual(len(new_cb_data), 12) # True for TLSv1
+ s.write(b"CB tls-unique\n")
+ peer_data_repr = s.read().strip()
+ self.assertEqual(peer_data_repr,
+ repr(new_cb_data).encode("us-ascii"))
+ s.close()
+
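
On the client side, reading the RFC 5929 "tls-unique" binding needs nothing
beyond a connected SSLSocket; a minimal sketch (host and port are
placeholders):

    import socket
    import ssl

    def get_tls_unique(host, port):
        # Connect, complete the handshake, and return the "tls-unique"
        # channel binding (the first TLS Finished message).
        sock = ssl.wrap_socket(socket.create_connection((host, port)))
        try:
            return sock.get_channel_binding("tls-unique")
        finally:
            sock.close()
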
+ def test_compression(self):
+ context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
+ context.load_cert_chain(CERTFILE)
+ stats = server_params_test(context, context,
+ chatty=True, connectionchatty=True)
+ if support.verbose:
+ sys.stdout.write(" got compression: {!r}\n".format(stats['compression']))
+ self.assertIn(stats['compression'], { None, 'ZLIB', 'RLE' })
+
+ @unittest.skipUnless(hasattr(ssl, 'OP_NO_COMPRESSION'),
+ "ssl.OP_NO_COMPRESSION needed for this test")
+ def test_compression_disabled(self):
+ context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
+ context.load_cert_chain(CERTFILE)
+ context.options |= ssl.OP_NO_COMPRESSION
+ stats = server_params_test(context, context,
+ chatty=True, connectionchatty=True)
+ self.assertIs(stats['compression'], None)
+
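
SSLSocket.compression() and ssl.OP_NO_COMPRESSION are both new in 3.3; the
whole opt-out is one flag on the context. A hedged sketch:

    import ssl

    ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
    ctx.options |= ssl.OP_NO_COMPRESSION  # refuse TLS-level compression
    # After a handshake on a socket wrapped with ctx, compression()
    # returns None instead of e.g. 'ZLIB'.
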
+ def test_dh_params(self):
+ # Check we can get a connection with ephemeral Diffie-Hellman
+ context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
+ context.load_cert_chain(CERTFILE)
+ context.load_dh_params(DHFILE)
+ context.set_ciphers("kEDH")
+ stats = server_params_test(context, context,
+ chatty=True, connectionchatty=True)
+ cipher = stats["cipher"][0]
+ parts = cipher.split("-")
+ if "ADH" not in parts and "EDH" not in parts and "DHE" not in parts:
+ self.fail("Non-DH cipher: " + cipher[0])
+
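
The DH parameters plus the "kEDH" cipher selection above are the entire
server-side recipe for ephemeral Diffie-Hellman; a sketch, with cert.pem and
dhparams.pem as hypothetical paths:

    import ssl

    ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
    ctx.load_cert_chain("cert.pem")      # hypothetical certificate/key file
    ctx.load_dh_params("dhparams.pem")   # hypothetical DH parameters file
    ctx.set_ciphers("kEDH")              # ephemeral-DH key exchange only
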
def test_main(verbose=False):
if support.verbose:
diff --git a/Lib/test/test_string.py b/Lib/test/test_string.py
index a352ee3..1615732 100644
--- a/Lib/test/test_string.py
+++ b/Lib/test/test_string.py
@@ -26,15 +26,38 @@ class ModuleTest(unittest.TestCase):
self.assertEqual(string.capwords('\taBc\tDeF\t'), 'Abc Def')
self.assertEqual(string.capwords('\taBc\tDeF\t', '\t'), '\tAbc\tDef\t')
- def test_formatter(self):
+ def test_basic_formatter(self):
fmt = string.Formatter()
self.assertEqual(fmt.format("foo"), "foo")
-
self.assertEqual(fmt.format("foo{0}", "bar"), "foobar")
self.assertEqual(fmt.format("foo{1}{0}-{1}", "bar", 6), "foo6bar-6")
+
+ def test_conversion_specifiers(self):
+ fmt = string.Formatter()
self.assertEqual(fmt.format("-{arg!r}-", arg='test'), "-'test'-")
+ self.assertEqual(fmt.format("{0!s}", 'test'), 'test')
+ self.assertRaises(ValueError, fmt.format, "{0!h}", 'test')
+
+ def test_name_lookup(self):
+ fmt = string.Formatter()
+ class AnyAttr:
+ def __getattr__(self, attr):
+ return attr
+ x = AnyAttr()
+ self.assertEqual(fmt.format("{0.lumber}{0.jack}", x), 'lumberjack')
+ with self.assertRaises(AttributeError):
+ fmt.format("{0.lumber}{0.jack}", '')
+
+ def test_index_lookup(self):
+ fmt = string.Formatter()
+ lookup = ["eggs", "and", "spam"]
+ self.assertEqual(fmt.format("{0[2]}{0[0]}", lookup), 'spameggs')
+ with self.assertRaises(IndexError):
+ fmt.format("{0[2]}{0[0]}", [])
+ with self.assertRaises(KeyError):
+ fmt.format("{0[2]}{0[0]}", {})
- # override get_value ############################################
+ def test_override_get_value(self):
class NamespaceFormatter(string.Formatter):
def __init__(self, namespace={}):
string.Formatter.__init__(self)
@@ -54,7 +77,7 @@ class ModuleTest(unittest.TestCase):
self.assertEqual(fmt.format("{greeting}, world!"), 'hello, world!')
- # override format_field #########################################
+ def test_override_format_field(self):
class CallFormatter(string.Formatter):
def format_field(self, value, format_spec):
return format(value(), format_spec)
@@ -63,18 +86,18 @@ class ModuleTest(unittest.TestCase):
self.assertEqual(fmt.format('*{0}*', lambda : 'result'), '*result*')
- # override convert_field ########################################
+ def test_override_convert_field(self):
class XFormatter(string.Formatter):
def convert_field(self, value, conversion):
if conversion == 'x':
return None
- return super(XFormatter, self).convert_field(value, conversion)
+ return super().convert_field(value, conversion)
fmt = XFormatter()
self.assertEqual(fmt.format("{0!r}:{0!x}", 'foo', 'foo'), "'foo':None")
- # override parse ################################################
+ def test_override_parse(self):
class BarFormatter(string.Formatter):
# returns an iterable that contains tuples of the form:
# (literal_text, field_name, format_spec, conversion)
@@ -90,7 +113,7 @@ class ModuleTest(unittest.TestCase):
fmt = BarFormatter()
self.assertEqual(fmt.format('*|+0:^10s|*', 'foo'), '* foo *')
- # test all parameters used
+ def test_check_unused_args(self):
class CheckAllUsedFormatter(string.Formatter):
def check_unused_args(self, used_args, args, kwargs):
# Track which arguments actually got used
@@ -112,28 +135,13 @@ class ModuleTest(unittest.TestCase):
self.assertRaises(ValueError, fmt.format, "{0}", 10, 20, i=100)
self.assertRaises(ValueError, fmt.format, "{i}", 10, 20, i=100)
- def test_vformat_assert(self):
- cls = string.Formatter()
- kwargs = {
- "i": 100
- }
- self.assertRaises(ValueError, cls._vformat,
- cls.format, "{0}", kwargs, set(), -2)
-
- def test_convert_field(self):
- cls = string.Formatter()
- self.assertEqual(cls.format("{0!s}", 'foo'), 'foo')
- self.assertRaises(ValueError, cls.format, "{0!h}", 'foo')
-
- def test_get_field(self):
- cls = string.Formatter()
- class MyClass:
- name = 'lumberjack'
- x = MyClass()
- self.assertEqual(cls.format("{0.name}", x), 'lumberjack')
-
- lookup = ["eggs", "and", "spam"]
- self.assertEqual(cls.format("{0[2]}", lookup), 'spam')
+ def test_vformat_recursion_limit(self):
+ fmt = string.Formatter()
+ args = ()
+ kwargs = dict(i=100)
+ with self.assertRaises(ValueError) as err:
+ fmt._vformat("{i}", args, kwargs, set(), -1)
+ self.assertIn("recursion", str(err.exception))
def test_main():
diff --git a/Lib/test/test_strlit.py b/Lib/test/test_strlit.py
index 6bdc6e4..1f041c8 100644
--- a/Lib/test/test_strlit.py
+++ b/Lib/test/test_strlit.py
@@ -2,10 +2,10 @@ r"""Test correct treatment of various string literals by the parser.
There are four types of string literals:
- 'abc' -- normal str
- r'abc' -- raw str
- b'xyz' -- normal bytes
- br'xyz' -- raw bytes
+ 'abc' -- normal str
+ r'abc' -- raw str
+ b'xyz' -- normal bytes
+ br'xyz' | rb'xyz' -- raw bytes
The difference between normal and raw strings is of course that in a
raw string, \ escapes (while still used to determine the end of the
@@ -103,12 +103,25 @@ class TestLiterals(unittest.TestCase):
def test_eval_bytes_raw(self):
self.assertEqual(eval(""" br'x' """), b'x')
+ self.assertEqual(eval(""" rb'x' """), b'x')
self.assertEqual(eval(r""" br'\x01' """), b'\\' + b'x01')
+ self.assertEqual(eval(r""" rb'\x01' """), b'\\' + b'x01')
self.assertEqual(eval(""" br'\x01' """), byte(1))
+ self.assertEqual(eval(""" rb'\x01' """), byte(1))
self.assertEqual(eval(r""" br'\x81' """), b"\\" + b"x81")
+ self.assertEqual(eval(r""" rb'\x81' """), b"\\" + b"x81")
self.assertRaises(SyntaxError, eval, """ br'\x81' """)
+ self.assertRaises(SyntaxError, eval, """ rb'\x81' """)
self.assertEqual(eval(r""" br'\u1881' """), b"\\" + b"u1881")
+ self.assertEqual(eval(r""" rb'\u1881' """), b"\\" + b"u1881")
self.assertRaises(SyntaxError, eval, """ br'\u1881' """)
+ self.assertRaises(SyntaxError, eval, """ rb'\u1881' """)
+ self.assertRaises(SyntaxError, eval, """ bb'' """)
+ self.assertRaises(SyntaxError, eval, """ rr'' """)
+ self.assertRaises(SyntaxError, eval, """ brr'' """)
+ self.assertRaises(SyntaxError, eval, """ bbr'' """)
+ self.assertRaises(SyntaxError, eval, """ rrb'' """)
+ self.assertRaises(SyntaxError, eval, """ rbb'' """)
def check_encoding(self, encoding, extra=""):
modname = "xx_" + encoding.replace("-", "_")
@@ -131,7 +144,7 @@ class TestLiterals(unittest.TestCase):
self.assertRaises(SyntaxError, self.check_encoding, "utf-8", extra)
def test_file_utf8(self):
- self.check_encoding("utf8")
+ self.check_encoding("utf-8")
def test_file_iso_8859_1(self):
self.check_encoding("iso-8859-1")
diff --git a/Lib/test/test_struct.py b/Lib/test/test_struct.py
index 2ccaad2..dc75858 100644
--- a/Lib/test/test_struct.py
+++ b/Lib/test/test_struct.py
@@ -8,9 +8,19 @@ from test.support import run_unittest
ISBIGENDIAN = sys.byteorder == "big"
IS32BIT = sys.maxsize == 0x7fffffff
-integer_codes = 'b', 'B', 'h', 'H', 'i', 'I', 'l', 'L', 'q', 'Q'
+integer_codes = 'b', 'B', 'h', 'H', 'i', 'I', 'l', 'L', 'q', 'Q', 'n', 'N'
byteorders = '', '@', '=', '<', '>', '!'
+def iter_integer_formats(byteorders=byteorders):
+ for code in integer_codes:
+ for byteorder in byteorders:
+ if (byteorder in ('', '@') and code in ('q', 'Q') and
+ not HAVE_LONG_LONG):
+ continue
+ if (byteorder not in ('', '@') and code in ('n', 'N')):
+ continue
+ yield code, byteorder
+
# Native 'q' packing isn't available on systems that don't have the C
# long long type.
try:
@@ -141,14 +151,13 @@ class StructTest(unittest.TestCase):
}
# standard integer sizes
- for code in integer_codes:
- for byteorder in '=', '<', '>', '!':
- format = byteorder+code
- size = struct.calcsize(format)
- self.assertEqual(size, expected_size[code])
+ for code, byteorder in iter_integer_formats(('=', '<', '>', '!')):
+ format = byteorder+code
+ size = struct.calcsize(format)
+ self.assertEqual(size, expected_size[code])
# native integer sizes
- native_pairs = 'bB', 'hH', 'iI', 'lL'
+ native_pairs = 'bB', 'hH', 'iI', 'lL', 'nN'
if HAVE_LONG_LONG:
native_pairs += 'qQ',
for format_pair in native_pairs:
@@ -166,9 +175,11 @@ class StructTest(unittest.TestCase):
if HAVE_LONG_LONG:
self.assertLessEqual(8, struct.calcsize('q'))
self.assertLessEqual(struct.calcsize('l'), struct.calcsize('q'))
+ self.assertGreaterEqual(struct.calcsize('n'), struct.calcsize('i'))
+ self.assertGreaterEqual(struct.calcsize('n'), struct.calcsize('P'))
def test_integers(self):
- # Integer tests (bBhHiIlLqQ).
+ # Integer tests (bBhHiIlLqQnN).
import binascii
class IntTester(unittest.TestCase):
@@ -182,11 +193,11 @@ class StructTest(unittest.TestCase):
self.byteorder)
self.bytesize = struct.calcsize(format)
self.bitsize = self.bytesize * 8
- if self.code in tuple('bhilq'):
+ if self.code in tuple('bhilqn'):
self.signed = True
self.min_value = -(2**(self.bitsize-1))
self.max_value = 2**(self.bitsize-1) - 1
- elif self.code in tuple('BHILQ'):
+ elif self.code in tuple('BHILQN'):
self.signed = False
self.min_value = 0
self.max_value = 2**self.bitsize - 1
@@ -316,14 +327,23 @@ class StructTest(unittest.TestCase):
struct.pack, self.format,
obj)
- for code in integer_codes:
- for byteorder in byteorders:
- if (byteorder in ('', '@') and code in ('q', 'Q') and
- not HAVE_LONG_LONG):
- continue
+ for code, byteorder in iter_integer_formats():
+ format = byteorder+code
+ t = IntTester(format)
+ t.run()
+
+ def test_nN_code(self):
+ # 'n' and 'N' are only available in native size mode
+ def assertStructError(func, *args, **kwargs):
+ with self.assertRaises(struct.error) as cm:
+ func(*args, **kwargs)
+ self.assertIn("bad char in struct format", str(cm.exception))
+ for code in 'nN':
+ for byteorder in ('=', '<', '>', '!'):
format = byteorder+code
- t = IntTester(format)
- t.run()
+ assertStructError(struct.calcsize, format)
+ assertStructError(struct.pack, format, 0)
+ assertStructError(struct.unpack, format, b"")
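
The new 'n'/'N' codes pack the C ssize_t/size_t types and, as the test
checks, are accepted only in native mode. A quick sketch:

    import struct

    print(struct.calcsize('n'))  # native ssize_t, e.g. 8 on 64-bit builds
    try:
        struct.calcsize('<n')    # rejected in standard (non-native) mode
    except struct.error as exc:
        print(exc)               # bad char in struct format
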
def test_p_code(self):
# Test p ("Pascal string") code.
@@ -377,14 +397,10 @@ class StructTest(unittest.TestCase):
self.assertRaises(OverflowError, struct.pack, ">f", big)
def test_1530559(self):
- for byteorder in '', '@', '=', '<', '>', '!':
- for code in integer_codes:
- if (byteorder in ('', '@') and code in ('q', 'Q') and
- not HAVE_LONG_LONG):
- continue
- format = byteorder + code
- self.assertRaises(struct.error, struct.pack, format, 1.0)
- self.assertRaises(struct.error, struct.pack, format, 1.5)
+ for code, byteorder in iter_integer_formats():
+ format = byteorder + code
+ self.assertRaises(struct.error, struct.pack, format, 1.0)
+ self.assertRaises(struct.error, struct.pack, format, 1.5)
self.assertRaises(struct.error, struct.pack, 'P', 1.0)
self.assertRaises(struct.error, struct.pack, 'P', 1.5)
diff --git a/Lib/test/test_subprocess.py b/Lib/test/test_subprocess.py
index 6150e88..0f8d1ca 100644
--- a/Lib/test/test_subprocess.py
+++ b/Lib/test/test_subprocess.py
@@ -60,6 +60,8 @@ class BaseTestCase(unittest.TestCase):
# shutdown time. That frustrates tests trying to check stderr produced
# from a spawned Python process.
actual = support.strip_python_stderr(stderr)
+ # strip_python_stderr also strips whitespace, so we do too.
+ expected = expected.strip()
self.assertEqual(actual, expected, msg)
@@ -71,6 +73,15 @@ class ProcessTestCase(BaseTestCase):
"import sys; sys.exit(47)"])
self.assertEqual(rc, 47)
+ def test_call_timeout(self):
+ # call() function with timeout argument; we want to test that the child
+ # process gets killed when the timeout expires. If the child isn't
+ # killed, this call will deadlock since subprocess.call waits for the
+ # child.
+ self.assertRaises(subprocess.TimeoutExpired, subprocess.call,
+ [sys.executable, "-c", "while True: pass"],
+ timeout=0.1)
+
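
The timeout argument here is new in 3.3. call() waits for the child; if the
timeout expires it kills the child, reaps it, and re-raises, so nothing
leaks. A minimal sketch:

    import subprocess
    import sys

    try:
        subprocess.call([sys.executable, "-c", "while True: pass"],
                        timeout=1)
    except subprocess.TimeoutExpired:
        print("child exceeded its time budget")  # child already killed
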
def test_check_call_zero(self):
# check_call() function with zero return code
rc = subprocess.check_call([sys.executable, "-c",
@@ -113,6 +124,21 @@ class ProcessTestCase(BaseTestCase):
self.fail("Expected ValueError when stdout arg supplied.")
self.assertIn('stdout', c.exception.args[0])
+ def test_check_output_timeout(self):
+ # check_output() function with timeout arg
+ with self.assertRaises(subprocess.TimeoutExpired) as c:
+ output = subprocess.check_output(
+ [sys.executable, "-c",
+ "import sys, time\n"
+ "sys.stdout.write('BDFL')\n"
+ "sys.stdout.flush()\n"
+ "time.sleep(3600)"],
+ # Some heavily loaded buildbots (sparc Debian 3.x) require
+ # this much time to start and print.
+ timeout=3)
+ self.fail("Expected TimeoutExpired.")
+ self.assertEqual(c.exception.output, b'BDFL')
+
def test_call_kwargs(self):
# call() function with keyword args
newenv = os.environ.copy()
@@ -312,6 +338,31 @@ class ProcessTestCase(BaseTestCase):
rc = subprocess.call([sys.executable, "-c", cmd], stdout=1)
self.assertEqual(rc, 2)
+ def test_stdout_devnull(self):
+ p = subprocess.Popen([sys.executable, "-c",
+ 'for i in range(10240):'
+ 'print("x" * 1024)'],
+ stdout=subprocess.DEVNULL)
+ p.wait()
+ self.assertEqual(p.stdout, None)
+
+ def test_stderr_devnull(self):
+ p = subprocess.Popen([sys.executable, "-c",
+ 'import sys\n'
+ 'for i in range(10240):'
+ 'sys.stderr.write("x" * 1024)'],
+ stderr=subprocess.DEVNULL)
+ p.wait()
+ self.assertEqual(p.stderr, None)
+
+ def test_stdin_devnull(self):
+ p = subprocess.Popen([sys.executable, "-c",
+ 'import sys;'
+ 'sys.stdin.read(1)'],
+ stdin=subprocess.DEVNULL)
+ p.wait()
+ self.assertEqual(p.stdin, None)
+
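
subprocess.DEVNULL (new in 3.3) replaces the old idiom of opening os.devnull
by hand. A sketch:

    import subprocess
    import sys

    # Discard both output streams without managing a file object.
    subprocess.check_call([sys.executable, "-c", "print('noisy')"],
                          stdout=subprocess.DEVNULL,
                          stderr=subprocess.DEVNULL)
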
def test_cwd(self):
tmpdir = tempfile.gettempdir()
# We cannot use os.path.realpath to canonicalize the path,
@@ -400,6 +451,41 @@ class ProcessTestCase(BaseTestCase):
self.assertEqual(stdout, b"banana")
self.assertStderrEqual(stderr, b"pineapple")
+ def test_communicate_timeout(self):
+ p = subprocess.Popen([sys.executable, "-c",
+ 'import sys,os,time;'
+ 'sys.stderr.write("pineapple\\n");'
+ 'time.sleep(1);'
+ 'sys.stderr.write("pear\\n");'
+ 'sys.stdout.write(sys.stdin.read())'],
+ universal_newlines=True,
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ self.assertRaises(subprocess.TimeoutExpired, p.communicate, "banana",
+ timeout=0.3)
+ # Make sure we can keep waiting for it, and that we get the whole output
+ # after it completes.
+ (stdout, stderr) = p.communicate()
+ self.assertEqual(stdout, "banana")
+ self.assertStderrEqual(stderr.encode(), b"pineapple\npear\n")
+
+ def test_communicate_timeout_large_output(self):
+ # Test an expiring timeout while the child is outputting lots of data.
+ p = subprocess.Popen([sys.executable, "-c",
+ 'import sys,os,time;'
+ 'sys.stdout.write("a" * (64 * 1024));'
+ 'time.sleep(0.2);'
+ 'sys.stdout.write("a" * (64 * 1024));'
+ 'time.sleep(0.2);'
+ 'sys.stdout.write("a" * (64 * 1024));'
+ 'time.sleep(0.2);'
+ 'sys.stdout.write("a" * (64 * 1024));'],
+ stdout=subprocess.PIPE)
+ self.assertRaises(subprocess.TimeoutExpired, p.communicate, timeout=0.4)
+ (stdout, _) = p.communicate()
+ self.assertEqual(len(stdout), 4 * 64 * 1024)
+
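
Unlike call(), communicate() does not kill the child when the timeout
fires, which is exactly why the tests above can call communicate() a second
time to collect the full output. The documented usage pattern:

    import subprocess

    # "sleep 60" is a placeholder long-running command.
    proc = subprocess.Popen(["sleep", "60"], stdout=subprocess.PIPE)
    try:
        outs, errs = proc.communicate(timeout=5)
    except subprocess.TimeoutExpired:
        proc.kill()  # the child is *not* killed automatically
        outs, errs = proc.communicate()
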
# Test for the fd leak reported in http://bugs.python.org/issue2791.
def test_communicate_pipe_fd_leak(self):
for stdin_pipe in (False, True):
@@ -436,24 +522,21 @@ class ProcessTestCase(BaseTestCase):
# This test will probably deadlock rather than fail, if
# communicate() does not work properly.
x, y = os.pipe()
- if mswindows:
- pipe_buf = 512
- else:
- pipe_buf = os.fpathconf(x, "PC_PIPE_BUF")
os.close(x)
os.close(y)
p = subprocess.Popen([sys.executable, "-c",
'import sys,os;'
'sys.stdout.write(sys.stdin.read(47));'
- 'sys.stderr.write("xyz"*%d);'
- 'sys.stdout.write(sys.stdin.read())' % pipe_buf],
+ 'sys.stderr.write("x" * %d);'
+ 'sys.stdout.write(sys.stdin.read())' %
+ support.PIPE_MAX_SIZE],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
self.addCleanup(p.stdout.close)
self.addCleanup(p.stderr.close)
self.addCleanup(p.stdin.close)
- string_to_write = b"abc"*pipe_buf
+ string_to_write = b"a" * support.PIPE_MAX_SIZE
(stdout, stderr) = p.communicate(string_to_write)
self.assertEqual(stdout, string_to_write)
@@ -603,30 +686,32 @@ class ProcessTestCase(BaseTestCase):
self.assertEqual(subprocess.list2cmdline(['ab', '']),
'ab ""')
-
def test_poll(self):
- p = subprocess.Popen([sys.executable,
- "-c", "import time; time.sleep(1)"])
- count = 0
- while p.poll() is None:
- time.sleep(0.1)
- count += 1
- # We expect that the poll loop probably went around about 10 times,
- # but, based on system scheduling we can't control, it's possible
- # poll() never returned None. It "should be" very rare that it
- # didn't go around at least twice.
- self.assertGreaterEqual(count, 2)
+ p = subprocess.Popen([sys.executable, "-c",
+ "import os; os.read(0, 1)"],
+ stdin=subprocess.PIPE)
+ self.addCleanup(p.stdin.close)
+ self.assertIsNone(p.poll())
+ os.write(p.stdin.fileno(), b'A')
+ p.wait()
# Subsequent invocations should just return the returncode
self.assertEqual(p.poll(), 0)
-
def test_wait(self):
- p = subprocess.Popen([sys.executable,
- "-c", "import time; time.sleep(2)"])
+ p = subprocess.Popen([sys.executable, "-c", "pass"])
self.assertEqual(p.wait(), 0)
# Subsequent invocations should just return the returncode
self.assertEqual(p.wait(), 0)
+ def test_wait_timeout(self):
+ p = subprocess.Popen([sys.executable,
+ "-c", "import time; time.sleep(0.1)"])
+ with self.assertRaises(subprocess.TimeoutExpired) as c:
+ p.wait(timeout=0.01)
+ self.assertIn("0.01", str(c.exception)) # For coverage of __str__.
+ # Some heavily loaded buildbots (sparc Debian 3.x) require this much
+ # time to start.
+ self.assertEqual(p.wait(timeout=3), 0)
def test_invalid_bufsize(self):
# an invalid type of the bufsize argument should raise
@@ -705,25 +790,29 @@ class ProcessTestCase(BaseTestCase):
p = subprocess.Popen([sys.executable, "-c", 'pass'],
stdin=subprocess.PIPE)
self.addCleanup(p.stdin.close)
- time.sleep(2)
+ p.wait()
p.communicate(b"x" * 2**20)
- @unittest.skipUnless(hasattr(signal, 'SIGALRM'),
- "Requires signal.SIGALRM")
+ @unittest.skipUnless(hasattr(signal, 'SIGUSR1'),
+ "Requires signal.SIGUSR1")
+ @unittest.skipUnless(hasattr(os, 'kill'),
+ "Requires os.kill")
+ @unittest.skipUnless(hasattr(os, 'getppid'),
+ "Requires os.getppid")
def test_communicate_eintr(self):
# Issue #12493: communicate() should handle EINTR
def handler(signum, frame):
pass
- old_handler = signal.signal(signal.SIGALRM, handler)
- self.addCleanup(signal.signal, signal.SIGALRM, old_handler)
+ old_handler = signal.signal(signal.SIGUSR1, handler)
+ self.addCleanup(signal.signal, signal.SIGUSR1, old_handler)
- # the process is running for 2 seconds
- args = [sys.executable, "-c", 'import time; time.sleep(2)']
+ args = [sys.executable, "-c",
+ 'import os, signal;'
+ 'os.kill(os.getppid(), signal.SIGUSR1)']
for stream in ('stdout', 'stderr'):
kw = {stream: subprocess.PIPE}
with subprocess.Popen(args, **kw) as process:
- signal.alarm(1)
- # communicate() will be interrupted by SIGALRM
+ # communicate() will be interrupted by SIGUSR1
process.communicate()
@@ -1254,6 +1343,11 @@ class POSIXProcessTestCase(BaseTestCase):
exitcode = subprocess.call([abs_program, "-c", "pass"])
self.assertEqual(exitcode, 0)
+ # absolute bytes path as a string
+ cmd = b"'" + abs_program + b"' -c pass"
+ exitcode = subprocess.call(cmd, shell=True)
+ self.assertEqual(exitcode, 0)
+
# bytes program, unicode PATH
env = os.environ.copy()
env["PATH"] = path
@@ -1437,7 +1531,7 @@ class POSIXProcessTestCase(BaseTestCase):
stdout, stderr = p.communicate()
self.assertEqual(0, p.returncode, "sigchild_ignore.py exited"
" non-zero with this error:\n%s" %
- stderr.decode('utf8'))
+ stderr.decode('utf-8'))
def test_select_unbuffered(self):
# Issue #11459: bufsize=0 should really set the pipes as
@@ -1684,28 +1778,6 @@ class ProcessTestCaseNoPoll(ProcessTestCase):
ProcessTestCase.tearDown(self)
-@unittest.skipUnless(getattr(subprocess, '_posixsubprocess', False),
- "_posixsubprocess extension module not found.")
-class ProcessTestCasePOSIXPurePython(ProcessTestCase, POSIXProcessTestCase):
- @classmethod
- def setUpClass(cls):
- global subprocess
- assert subprocess._posixsubprocess
- # Reimport subprocess while forcing _posixsubprocess to not exist.
- with support.check_warnings(('.*_posixsubprocess .* not being used.*',
- RuntimeWarning)):
- subprocess = support.import_fresh_module(
- 'subprocess', blocked=['_posixsubprocess'])
- assert not subprocess._posixsubprocess
-
- @classmethod
- def tearDownClass(cls):
- global subprocess
- # Reimport subprocess as it should be, restoring order to the universe.
- subprocess = support.import_fresh_module('subprocess')
- assert subprocess._posixsubprocess
-
-
class HelperFunctionTests(unittest.TestCase):
@unittest.skipIf(mswindows, "errno and EINTR make no sense on windows")
def test_eintr_retry_call(self):
@@ -1814,7 +1886,6 @@ def test_main():
unit_tests = (ProcessTestCase,
POSIXProcessTestCase,
Win32ProcessTestCase,
- ProcessTestCasePOSIXPurePython,
CommandTests,
ProcessTestCaseNoPoll,
HelperFunctionTests,
diff --git a/Lib/test/test_super.py b/Lib/test/test_super.py
index 914216d..298cae0 100644
--- a/Lib/test/test_super.py
+++ b/Lib/test/test_super.py
@@ -81,6 +81,16 @@ class TestSuper(unittest.TestCase):
self.assertEqual(E().f(), 'AE')
+ def test___class___set(self):
+ # See issue #12370
+ class X(A):
+ def f(self):
+ return super().f()
+ __class__ = 413
+ x = X()
+ self.assertEqual(x.f(), 'A')
+ self.assertEqual(x.__class__, 413)
+
def test_main():
support.run_unittest(TestSuper)
diff --git a/Lib/test/test_support.py b/Lib/test/test_support.py
new file mode 100644
index 0000000..394e210
--- /dev/null
+++ b/Lib/test/test_support.py
@@ -0,0 +1,188 @@
+#!/usr/bin/env python
+
+import sys
+import os
+import unittest
+import socket
+import tempfile
+import errno
+from test import support
+
+TESTFN = support.TESTFN
+TESTDIRN = os.path.basename(tempfile.mkdtemp(dir='.'))
+
+
+class TestSupport(unittest.TestCase):
+ def setUp(self):
+ support.unlink(TESTFN)
+ support.rmtree(TESTDIRN)
+ tearDown = setUp
+
+ def test_import_module(self):
+ support.import_module("ftplib")
+ self.assertRaises(unittest.SkipTest, support.import_module, "foo")
+
+ def test_import_fresh_module(self):
+ support.import_fresh_module("ftplib")
+
+ def test_get_attribute(self):
+ self.assertEqual(support.get_attribute(self, "test_get_attribute"),
+ self.test_get_attribute)
+ self.assertRaises(unittest.SkipTest, support.get_attribute, self, "foo")
+
+ @unittest.skip("failing buildbots")
+ def test_get_original_stdout(self):
+ self.assertEqual(support.get_original_stdout(), sys.stdout)
+
+ def test_unload(self):
+ import sched
+ self.assertIn("sched", sys.modules)
+ support.unload("sched")
+ self.assertNotIn("sched", sys.modules)
+
+ def test_unlink(self):
+ with open(TESTFN, "w") as f:
+ pass
+ support.unlink(TESTFN)
+ self.assertFalse(os.path.exists(TESTFN))
+ support.unlink(TESTFN)
+
+ def test_rmtree(self):
+ os.mkdir(TESTDIRN)
+ os.mkdir(os.path.join(TESTDIRN, TESTDIRN))
+ support.rmtree(TESTDIRN)
+ self.assertFalse(os.path.exists(TESTDIRN))
+ support.rmtree(TESTDIRN)
+
+ def test_forget(self):
+ mod_filename = TESTFN + '.py'
+ with open(mod_filename, 'w') as f:
+ print('foo = 1', file=f)
+ sys.path.insert(0, os.curdir)
+ try:
+ mod = __import__(TESTFN)
+ self.assertIn(TESTFN, sys.modules)
+
+ support.forget(TESTFN)
+ self.assertNotIn(TESTFN, sys.modules)
+ finally:
+ del sys.path[0]
+ support.unlink(mod_filename)
+
+ def test_HOST(self):
+ s = socket.socket()
+ s.bind((support.HOST, 0))
+ s.close()
+
+ def test_find_unused_port(self):
+ port = support.find_unused_port()
+ s = socket.socket()
+ s.bind((support.HOST, port))
+ s.close()
+
+ def test_bind_port(self):
+ s = socket.socket()
+ support.bind_port(s)
+ s.listen(1)
+ s.close()
+
+ def test_temp_cwd(self):
+ here = os.getcwd()
+ with support.temp_cwd(name=TESTFN):
+ self.assertEqual(os.path.basename(os.getcwd()), TESTFN)
+ self.assertFalse(os.path.exists(TESTFN))
+ self.assertEqual(os.getcwd(), here)
+
+ def test_sortdict(self):
+ self.assertEqual(support.sortdict({3:3, 2:2, 1:1}), "{1: 1, 2: 2, 3: 3}")
+
+ def test_make_bad_fd(self):
+ fd = support.make_bad_fd()
+ with self.assertRaises(OSError) as cm:
+ os.write(fd, b"foo")
+ self.assertEqual(cm.exception.errno, errno.EBADF)
+
+ def test_check_syntax_error(self):
+ support.check_syntax_error(self, "def class")
+ self.assertRaises(AssertionError, support.check_syntax_error, self, "1")
+
+ def test_CleanImport(self):
+ import importlib
+ with support.CleanImport("asyncore"):
+ importlib.import_module("asyncore")
+
+ def test_DirsOnSysPath(self):
+ with support.DirsOnSysPath('foo', 'bar'):
+ self.assertIn("foo", sys.path)
+ self.assertIn("bar", sys.path)
+ self.assertNotIn("foo", sys.path)
+ self.assertNotIn("bar", sys.path)
+
+ def test_captured_stdout(self):
+ with support.captured_stdout() as s:
+ print("hello")
+ self.assertEqual(s.getvalue(), "hello\n")
+
+ def test_captured_stderr(self):
+ with support.captured_stderr() as s:
+ print("hello", file=sys.stderr)
+ self.assertEqual(s.getvalue(), "hello\n")
+
+ def test_captured_stdin(self):
+ with support.captured_stdin() as s:
+ print("hello", file=sys.stdin)
+ self.assertEqual(s.getvalue(), "hello\n")
+
+ def test_gc_collect(self):
+ support.gc_collect()
+
+ def test_python_is_optimized(self):
+ self.assertIsInstance(support.python_is_optimized(), bool)
+
+ def test_swap_attr(self):
+ class Obj:
+ x = 1
+ obj = Obj()
+ with support.swap_attr(obj, "x", 5):
+ self.assertEqual(obj.x, 5)
+ self.assertEqual(obj.x, 1)
+
+ def test_swap_item(self):
+ D = {"item":1}
+ with support.swap_item(D, "item", 5):
+ self.assertEqual(D["item"], 5)
+ self.assertEqual(D["item"], 1)
+
+ # XXX: the following support APIs are still untested:
+ # make_legacy_pyc
+ # is_resource_enabled
+ # requires
+ # fcmp
+ # umask
+ # findfile
+ # check_warnings
+ # EnvironmentVarGuard
+ # TransientResource
+ # transient_internet
+ # run_with_locale
+ # set_memlimit
+ # bigmemtest
+ # precisionbigmemtest
+ # bigaddrspacetest
+ # requires_resource
+ # run_doctest
+ # threading_cleanup
+ # reap_threads
+ # reap_children
+ # strip_python_stderr
+ # args_from_interpreter_flags
+ # can_symlink
+ # skip_unless_symlink
+
+
+def test_main():
+ tests = [TestSupport]
+ support.run_unittest(*tests)
+
+if __name__ == '__main__':
+ test_main()
diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py
index 3268b1a..2afc261 100644
--- a/Lib/test/test_sys.py
+++ b/Lib/test/test_sys.py
@@ -303,6 +303,7 @@ class SysModuleTest(unittest.TestCase):
self.assertEqual(sys.getdlopenflags(), oldflags+1)
sys.setdlopenflags(oldflags)
+ @test.support.refcount_test
def test_refcount(self):
# n here must be a global in order for this test to pass while
# tracing with a python function. Tracing calls PyFrame_FastToLocals
@@ -342,7 +343,7 @@ class SysModuleTest(unittest.TestCase):
# Test sys._current_frames() in a WITH_THREADS build.
@test.support.reap_threads
def current_frames_with_threads(self):
- import threading, _thread
+ import threading
import traceback
# Spawn a thread that blocks at a known place. Then the main
@@ -356,7 +357,7 @@ class SysModuleTest(unittest.TestCase):
g456()
def g456():
- thread_info.append(_thread.get_ident())
+ thread_info.append(threading.get_ident())
entered_g.set()
leave_g.wait()
@@ -372,7 +373,7 @@ class SysModuleTest(unittest.TestCase):
d = sys._current_frames()
- main_id = _thread.get_ident()
+ main_id = threading.get_ident()
self.assertIn(main_id, d)
self.assertIn(thread_id, d)
@@ -446,6 +447,7 @@ class SysModuleTest(unittest.TestCase):
self.assertIsInstance(sys.maxsize, int)
self.assertIsInstance(sys.maxunicode, int)
+ self.assertEqual(sys.maxunicode, 0x10FFFF)
self.assertIsInstance(sys.platform, str)
self.assertIsInstance(sys.prefix, str)
self.assertIsInstance(sys.version, str)
@@ -473,6 +475,14 @@ class SysModuleTest(unittest.TestCase):
if not sys.platform.startswith('win'):
self.assertIsInstance(sys.abiflags, str)
+ @unittest.skipUnless(hasattr(sys, 'thread_info'),
+ 'Threading required for this test.')
+ def test_thread_info(self):
+ info = sys.thread_info
+ self.assertEqual(len(info), 3)
+ self.assertIn(info.name, ('nt', 'os2', 'pthread', 'solaris', None))
+ self.assertIn(info.lock, ('semaphore', 'mutex+cond', None))
+
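
sys.thread_info is a new-in-3.3 struct sequence with exactly the three
fields counted above:

    import sys

    info = sys.thread_info
    print(info.name)     # e.g. 'pthread' on most Unix builds
    print(info.lock)     # 'semaphore', 'mutex+cond', or None
    print(info.version)  # thread library version string, or None
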
def test_43581(self):
# Can't use sys.stdout, as this is a StringIO object when
# the test runs under regrtest.
@@ -500,7 +510,7 @@ class SysModuleTest(unittest.TestCase):
def test_sys_flags(self):
self.assertTrue(sys.flags)
- attrs = ("debug", "division_warning",
+ attrs = ("debug",
"inspect", "interactive", "optimize", "dont_write_bytecode",
"no_user_site", "no_site", "ignore_environment", "verbose",
"bytes_warning", "quiet", "hash_randomization")
@@ -659,21 +669,21 @@ class SizeofTest(unittest.TestCase):
return inner
check(get_cell().__closure__[0], size(h + 'P'))
# code
- check(get_cell().__code__, size(h + '5i8Pi3P'))
+ check(get_cell().__code__, size(h + '5i9Pi3P'))
# complex
check(complex(0,1), size(h + '2d'))
# method_descriptor (descriptor object)
- check(str.lower, size(h + '2PP'))
+ check(str.lower, size(h + '3PP'))
# classmethod_descriptor (descriptor object)
# XXX
# member_descriptor (descriptor object)
import datetime
- check(datetime.timedelta.days, size(h + '2PP'))
+ check(datetime.timedelta.days, size(h + '3PP'))
# getset_descriptor (descriptor object)
import collections
- check(collections.defaultdict.default_factory, size(h + '2PP'))
+ check(collections.defaultdict.default_factory, size(h + '3PP'))
# wrapper_descriptor (descriptor object)
- check(int.__add__, size(h + '2P2P'))
+ check(int.__add__, size(h + '3P2P'))
# method-wrapper (descriptor object)
check({}.__iter__, size(h + '2P'))
# dict
@@ -723,7 +733,7 @@ class SizeofTest(unittest.TestCase):
check(x, size(vh + '12P3i' + CO_MAXBLOCKS*'3i' + 'P' + extras*'P'))
# function
def func(): pass
- check(func, size(h + '11P'))
+ check(func, size(h + '12P'))
class c():
@staticmethod
def foo():
@@ -732,9 +742,9 @@ class SizeofTest(unittest.TestCase):
def bar(cls):
pass
# staticmethod
- check(foo, size(h + 'P'))
+ check(foo, size(h + 'PP'))
# classmethod
- check(bar, size(h + 'P'))
+ check(bar, size(h + 'PP'))
# generator
def get_gen(): yield 1
check(get_gen(), size(h + 'Pi2P'))
@@ -763,8 +773,8 @@ class SizeofTest(unittest.TestCase):
check(int(PyLong_BASE), size(vh) + 2*self.longdigit)
check(int(PyLong_BASE**2-1), size(vh) + 2*self.longdigit)
check(int(PyLong_BASE**2), size(vh) + 3*self.longdigit)
- # memory
- check(memoryview(b''), size(h + 'PP2P2i7P'))
+ # memoryview
+ check(memoryview(b''), size(h + 'PPiP4P2i5P3cP'))
# module
check(unittest, size(h + '3P'))
# None
@@ -821,19 +831,41 @@ class SizeofTest(unittest.TestCase):
# type
# (PyTypeObject + PyNumberMethods + PyMappingMethods +
# PySequenceMethods + PyBufferProcs)
- s = size(vh + 'P2P15Pl4PP9PP11PI') + size('16Pi17P 3P 10P 2P 2P')
+ s = size(vh + 'P2P15Pl4PP9PP11PI') + size('16Pi17P 3P 10P 2P 3P')
check(int, s)
# class
class newstyleclass(object): pass
check(newstyleclass, s)
# unicode
- usize = len('\0'.encode('unicode-internal'))
- samples = ['', '1'*100]
- # we need to test for both sizes, because we don't know if the string
- # has been cached
+ # each sample is a string whose maximum character determines the
+ # expected storage size; don't put any static strings here, as
+ # they may contain wchar_t or UTF-8 representations
+ samples = ['1'*100, '\xff'*50,
+ '\u0100'*40, '\uffff'*100,
+ '\U00010000'*30, '\U0010ffff'*100]
+ asciifields = h + "PPiP"
+ compactfields = asciifields + "PPP"
+ unicodefields = compactfields + "P"
for s in samples:
- basicsize = size(h + 'PPPiP') + usize * (len(s) + 1)
- check(s, basicsize)
+ maxchar = ord(max(s))
+ if maxchar < 128:
+ L = size(asciifields) + len(s) + 1
+ elif maxchar < 256:
+ L = size(compactfields) + len(s) + 1
+ elif maxchar < 65536:
+ L = size(compactfields) + 2*(len(s) + 1)
+ else:
+ L = size(compactfields) + 4*(len(s) + 1)
+ check(s, L)
+ # verify that the UTF-8 size is accounted for
+ s = chr(0x4000) # 4 bytes canonical representation
+ check(s, size(compactfields) + 4)
+ # compile() will trigger the generation of the UTF-8
+ # representation as a side effect
+ compile(s, "<stdin>", "eval")
+ check(s, size(compactfields) + 4 + 4)
+ # TODO: add check that forces the presence of wchar_t representation
+ # TODO: add check that forces layout of unicodefields
# weakref
import weakref
check(weakref.ref(int), size(h + '2Pl2P'))
@@ -850,7 +882,7 @@ class SizeofTest(unittest.TestCase):
check = self.check_sizeof
# _ast.AST
import _ast
- check(_ast.AST(), size(h + ''))
+ check(_ast.AST(), size(h + 'P'))
# imp.NullImporter
import imp
check(imp.NullImporter(self.file.name), size(h + ''))
diff --git a/Lib/test/test_sys_settrace.py b/Lib/test/test_sys_settrace.py
index cf3976c..578e95d 100644
--- a/Lib/test/test_sys_settrace.py
+++ b/Lib/test/test_sys_settrace.py
@@ -251,6 +251,7 @@ class TraceTestCase(unittest.TestCase):
def setUp(self):
self.using_gc = gc.isenabled()
gc.disable()
+ self.addCleanup(sys.settrace, sys.gettrace())
def tearDown(self):
if self.using_gc:
@@ -389,6 +390,9 @@ class TraceTestCase(unittest.TestCase):
class RaisingTraceFuncTestCase(unittest.TestCase):
+ def setUp(self):
+ self.addCleanup(sys.settrace, sys.gettrace())
+
def trace(self, frame, event, arg):
"""A trace function that raises an exception in response to a
specific trace event."""
@@ -688,6 +692,10 @@ def no_jump_without_trace_function():
class JumpTestCase(unittest.TestCase):
+ def setUp(self):
+ self.addCleanup(sys.settrace, sys.gettrace())
+ sys.settrace(None)
+
def compare_jump_output(self, expected, received):
if received != expected:
self.fail( "Outputs don't match:\n" +
@@ -739,6 +747,8 @@ class JumpTestCase(unittest.TestCase):
def test_18_no_jump_to_non_integers(self):
self.run_test(no_jump_to_non_integers)
def test_19_no_jump_without_trace_function(self):
+ # sys.settrace(None) must be set in setUp(), else the condition
+ # is not triggered.
no_jump_without_trace_function()
def test_20_large_function(self):
diff --git a/Lib/test/test_sysconfig.py b/Lib/test/test_sysconfig.py
index aabb6fa..a2e6fbc 100644
--- a/Lib/test/test_sysconfig.py
+++ b/Lib/test/test_sysconfig.py
@@ -1,25 +1,23 @@
-"""Tests for sysconfig."""
-
import unittest
import sys
import os
import subprocess
import shutil
-from copy import copy, deepcopy
+from copy import copy
from test.support import (run_unittest, TESTFN, unlink,
captured_stdout, skip_unless_symlink)
import sysconfig
from sysconfig import (get_paths, get_platform, get_config_vars,
- get_path, get_path_names, _INSTALL_SCHEMES,
+ get_path, get_path_names, _SCHEMES,
_get_default_scheme, _expand_vars,
get_scheme_names, get_config_var, _main)
+
class TestSysConfig(unittest.TestCase):
def setUp(self):
- """Make a copy of sys.path"""
super(TestSysConfig, self).setUp()
self.sys_path = sys.path[:]
# patching os.uname
@@ -38,11 +36,17 @@ class TestSysConfig(unittest.TestCase):
self.join = os.path.join
self.isabs = os.path.isabs
self.splitdrive = os.path.splitdrive
- self._config_vars = copy(sysconfig._CONFIG_VARS)
- self.old_environ = deepcopy(os.environ)
+ self._config_vars = sysconfig._CONFIG_VARS
+ sysconfig._CONFIG_VARS = copy(sysconfig._CONFIG_VARS)
+ self._added_envvars = []
+ self._changed_envvars = []
+ for var in ('MACOSX_DEPLOYMENT_TARGET', 'PATH'):
+ if var in os.environ:
+ self._changed_envvars.append((var, os.environ[var]))
+ else:
+ self._added_envvars.append(var)
def tearDown(self):
- """Restore sys.path"""
sys.path[:] = self.sys_path
self._cleanup_testfn()
if self.uname is not None:
@@ -56,14 +60,11 @@ class TestSysConfig(unittest.TestCase):
os.path.join = self.join
os.path.isabs = self.isabs
os.path.splitdrive = self.splitdrive
- sysconfig._CONFIG_VARS = copy(self._config_vars)
- for key, value in self.old_environ.items():
- if os.environ.get(key) != value:
- os.environ[key] = value
-
- for key in list(os.environ.keys()):
- if key not in self.old_environ:
- del os.environ[key]
+ sysconfig._CONFIG_VARS = self._config_vars
+ for var, value in self._changed_envvars:
+ os.environ[var] = value
+ for var in self._added_envvars:
+ os.environ.pop(var, None)
super(TestSysConfig, self).tearDown()
@@ -81,27 +82,25 @@ class TestSysConfig(unittest.TestCase):
shutil.rmtree(path)
def test_get_path_names(self):
- self.assertEqual(get_path_names(), sysconfig._SCHEME_KEYS)
+ self.assertEqual(get_path_names(), _SCHEMES.options('posix_prefix'))
def test_get_paths(self):
scheme = get_paths()
default_scheme = _get_default_scheme()
wanted = _expand_vars(default_scheme, None)
- wanted = list(wanted.items())
- wanted.sort()
- scheme = list(scheme.items())
- scheme.sort()
+ wanted = sorted(wanted.items())
+ scheme = sorted(scheme.items())
self.assertEqual(scheme, wanted)
def test_get_path(self):
- # xxx make real tests here
- for scheme in _INSTALL_SCHEMES:
- for name in _INSTALL_SCHEMES[scheme]:
+ # XXX make real tests here
+ for scheme in _SCHEMES:
+ for name in _SCHEMES[scheme]:
res = get_path(name, scheme)
def test_get_config_vars(self):
cvars = get_config_vars()
- self.assertTrue(isinstance(cvars, dict))
+ self.assertIsInstance(cvars, dict)
self.assertTrue(cvars)
def test_get_platform(self):
@@ -135,8 +134,6 @@ class TestSysConfig(unittest.TestCase):
('Darwin Kernel Version 8.11.1: '
'Wed Oct 10 18:23:28 PDT 2007; '
'root:xnu-792.25.20~1/RELEASE_I386'), 'PowerPC'))
-
-
get_config_vars()['MACOSX_DEPLOYMENT_TARGET'] = '10.3'
get_config_vars()['CFLAGS'] = ('-fno-strict-aliasing -DNDEBUG -g '
@@ -151,7 +148,6 @@ class TestSysConfig(unittest.TestCase):
finally:
sys.maxsize = maxint
-
self._set_uname(('Darwin', 'macziade', '8.11.1',
('Darwin Kernel Version 8.11.1: '
'Wed Oct 10 18:23:28 PDT 2007; '
@@ -209,9 +205,9 @@ class TestSysConfig(unittest.TestCase):
get_config_vars()['CFLAGS'] = ('-arch %s -isysroot '
'/Developer/SDKs/MacOSX10.4u.sdk '
'-fno-strict-aliasing -fno-common '
- '-dynamic -DNDEBUG -g -O3'%(arch,))
+ '-dynamic -DNDEBUG -g -O3' % arch)
- self.assertEqual(get_platform(), 'macosx-10.4-%s'%(arch,))
+ self.assertEqual(get_platform(), 'macosx-10.4-%s' % arch)
# linux debian sarge
os.name = 'posix'
@@ -239,8 +235,8 @@ class TestSysConfig(unittest.TestCase):
# On Windows, the EXE needs to know where pythonXY.dll is at so we have
# to add the directory to the path.
if sys.platform == "win32":
- os.environ["Path"] = "{};{}".format(
- os.path.dirname(sys.executable), os.environ["Path"])
+ os.environ["PATH"] = "{};{}".format(
+ os.path.dirname(sys.executable), os.environ["PATH"])
# Issue 7880
def get(python):
@@ -286,7 +282,6 @@ class TestSysConfig(unittest.TestCase):
self.assertIn(ldflags, ldshared)
-
@unittest.skipUnless(sys.platform == "darwin", "test only relevant on MacOSX")
def test_platform_in_subprocess(self):
my_platform = sysconfig.get_platform()
@@ -312,28 +307,29 @@ class TestSysConfig(unittest.TestCase):
self.assertEqual(status, 0)
self.assertEqual(my_platform, test_platform)
-
# Test with MACOSX_DEPLOYMENT_TARGET in the environment, and
# using a value that is unlikely to be the default one.
env = os.environ.copy()
env['MACOSX_DEPLOYMENT_TARGET'] = '10.1'
- p = subprocess.Popen([
- sys.executable, '-c',
- 'import sysconfig; print(sysconfig.get_platform())',
- ],
- stdout=subprocess.PIPE,
- stderr=open('/dev/null'),
- env=env)
- test_platform = p.communicate()[0].strip()
- test_platform = test_platform.decode('utf-8')
- status = p.wait()
+ with open('/dev/null') as dev_null:
+ p = subprocess.Popen([
+ sys.executable, '-c',
+ 'import sysconfig; print(sysconfig.get_platform())',
+ ],
+ stdout=subprocess.PIPE,
+ stderr=dev_null,
+ env=env)
+ test_platform = p.communicate()[0].strip()
+ test_platform = test_platform.decode('utf-8')
+ status = p.wait()
- self.assertEqual(status, 0)
- self.assertEqual(my_platform, test_platform)
+ self.assertEqual(status, 0)
+ self.assertEqual(my_platform, test_platform)
class MakefileTests(unittest.TestCase):
+
@unittest.skipIf(sys.platform.startswith('win'),
'Test is not Windows compatible')
def test_get_makefile_filename(self):
diff --git a/Lib/test/test_tarfile.py b/Lib/test/test_tarfile.py
index 1757e44..ce543df 100644
--- a/Lib/test/test_tarfile.py
+++ b/Lib/test/test_tarfile.py
@@ -21,6 +21,10 @@ try:
import bz2
except ImportError:
bz2 = None
+try:
+ import lzma
+except ImportError:
+ lzma = None
def md5sum(data):
return md5(data).hexdigest()
@@ -29,6 +33,7 @@ TEMPDIR = os.path.abspath(support.TESTFN) + "-tardir"
tarname = support.findfile("testtar.tar")
gzipname = os.path.join(TEMPDIR, "testtar.tar.gz")
bz2name = os.path.join(TEMPDIR, "testtar.tar.bz2")
+xzname = os.path.join(TEMPDIR, "testtar.tar.xz")
tmpname = os.path.join(TEMPDIR, "tmp.tar")
md5_regtype = "65f477c818ad9e15f7feab0c6d37742f"
@@ -82,7 +87,7 @@ class UstarReadTest(ReadTest):
def test_fileobj_iter(self):
self.tar.extract("ustar/regtype", TEMPDIR)
tarinfo = self.tar.getmember("ustar/regtype")
- with open(os.path.join(TEMPDIR, "ustar/regtype"), "rU") as fobj1:
+ with open(os.path.join(TEMPDIR, "ustar/regtype"), "r") as fobj1:
lines1 = fobj1.readlines()
fobj2 = self.tar.extractfile(tarinfo)
try:
@@ -201,13 +206,15 @@ class CommonReadTest(ReadTest):
_open = gzip.GzipFile
elif self.mode.endswith(":bz2"):
_open = bz2.BZ2File
+ elif self.mode.endswith(":xz"):
+ _open = lzma.LZMAFile
else:
- _open = open
+ _open = io.FileIO
for char in (b'\0', b'a'):
# Test if EOFHeaderError ('\0') and InvalidHeaderError ('a')
# are ignored correctly.
- with _open(tmpname, "wb") as fobj:
+ with _open(tmpname, "w") as fobj:
fobj.write(char * 1024)
fobj.write(tarfile.TarInfo("foo").tobuf())
@@ -222,6 +229,10 @@ class CommonReadTest(ReadTest):
class MiscReadTest(CommonReadTest):
def test_no_name_argument(self):
+ if self.mode.endswith(("bz2", "xz")):
+ # BZ2File and LZMAFile have no name attribute.
+ self.skipTest("no name attribute")
+
with open(self.tarname, "rb") as fobj:
tar = tarfile.open(fileobj=fobj, mode=self.mode)
self.assertEqual(tar.name, os.path.abspath(fobj.name))
@@ -262,10 +273,12 @@ class MiscReadTest(CommonReadTest):
_open = gzip.GzipFile
elif self.mode.endswith(":bz2"):
_open = bz2.BZ2File
+ elif self.mode.endswith(":xz"):
+ _open = lzma.LZMAFile
else:
- _open = open
- fobj = _open(self.tarname, "rb")
- try:
+ _open = io.FileIO
+
+ with _open(self.tarname) as fobj:
fobj.seek(offset)
# Test if the tarfile starts with the second member.
@@ -278,8 +291,6 @@ class MiscReadTest(CommonReadTest):
self.assertEqual(tar.extractfile(t).read(), data,
"seek back did not work")
tar.close()
- finally:
- fobj.close()
def test_fail_comp(self):
# For Gzip and Bz2 Tests: fail with a ReadError on an uncompressed file.
@@ -523,6 +534,18 @@ class DetectReadTest(unittest.TestCase):
testfunc(bz2name, "r|*")
testfunc(bz2name, "r|bz2")
+ if lzma:
+ self.assertRaises(tarfile.ReadError, tarfile.open, tarname, mode="r:xz")
+ self.assertRaises(tarfile.ReadError, tarfile.open, tarname, mode="r|xz")
+ self.assertRaises(tarfile.ReadError, tarfile.open, xzname, mode="r:")
+ self.assertRaises(tarfile.ReadError, tarfile.open, xzname, mode="r|")
+
+ testfunc(xzname, "r")
+ testfunc(xzname, "r:*")
+ testfunc(xzname, "r:xz")
+ testfunc(xzname, "r|*")
+ testfunc(xzname, "r|xz")
+
def test_detect_file(self):
self._test_modes(self._testfunc_file)
@@ -720,7 +743,7 @@ class GNUReadTest(LongnameTest):
# Return True if the platform knows the st_blocks stat attribute and
# uses st_blocks units of 512 bytes, and if the filesystem is able to
# store holes in files.
- if sys.platform == "linux2":
+ if sys.platform.startswith("linux"):
# Linux evidently has 512-byte st_blocks units.
name = os.path.join(TEMPDIR, "sparse-test")
with open(name, "wb") as fobj:
@@ -910,7 +933,7 @@ class WriteTest(WriteTestBase):
try:
for name in ("foo", "bar", "baz"):
name = os.path.join(tempdir, name)
- open(name, "wb").close()
+ support.create_empty_file(name)
exclude = os.path.isfile
@@ -937,7 +960,7 @@ class WriteTest(WriteTestBase):
try:
for name in ("foo", "bar", "baz"):
name = os.path.join(tempdir, name)
- open(name, "wb").close()
+ support.create_empty_file(name)
def filter(tarinfo):
if os.path.basename(tarinfo.name) == "bar":
@@ -976,7 +999,7 @@ class WriteTest(WriteTestBase):
# and compare the stored name with the original.
foo = os.path.join(TEMPDIR, "foo")
if not dir:
- open(foo, "w").close()
+ support.create_empty_file(foo)
else:
os.mkdir(foo)
@@ -1093,6 +1116,9 @@ class StreamWriteTest(WriteTestBase):
data = dec.decompress(data)
self.assertTrue(len(dec.unused_data) == 0,
"found trailing data")
+ elif self.mode.endswith("xz"):
+ with lzma.LZMAFile(tmpname) as fobj:
+ data = fobj.read()
else:
with open(tmpname, "rb") as fobj:
data = fobj.read()
@@ -1336,7 +1362,7 @@ class UstarUnicodeTest(unittest.TestCase):
self._test_unicode_filename("utf7")
def test_utf8_filename(self):
- self._test_unicode_filename("utf8")
+ self._test_unicode_filename("utf-8")
def _test_unicode_filename(self, encoding):
tar = tarfile.open(tmpname, "w", format=self.format, encoding=encoding, errors="strict")
@@ -1415,7 +1441,7 @@ class GNUUnicodeTest(UstarUnicodeTest):
def test_bad_pax_header(self):
# Test for issue #8633. GNU tar <= 1.23 creates raw binary fields
# without a hdrcharset=BINARY header.
- for encoding, name in (("utf8", "pax/bad-pax-\udce4\udcf6\udcfc"),
+ for encoding, name in (("utf-8", "pax/bad-pax-\udce4\udcf6\udcfc"),
("iso8859-1", "pax/bad-pax-\xe4\xf6\xfc"),):
with tarfile.open(tarname, encoding=encoding, errors="surrogateescape") as tar:
try:
@@ -1430,7 +1456,7 @@ class PAXUnicodeTest(UstarUnicodeTest):
def test_binary_header(self):
# Test a POSIX.1-2008 compatible header with a hdrcharset=BINARY field.
- for encoding, name in (("utf8", "pax/hdrcharset-\udce4\udcf6\udcfc"),
+ for encoding, name in (("utf-8", "pax/hdrcharset-\udce4\udcf6\udcfc"),
("iso8859-1", "pax/hdrcharset-\xe4\xf6\xfc"),):
with tarfile.open(tarname, encoding=encoding, errors="surrogateescape") as tar:
try:
@@ -1507,6 +1533,12 @@ class AppendTest(unittest.TestCase):
self._create_testtar("w:bz2")
self.assertRaises(tarfile.ReadError, tarfile.open, tmpname, "a")
+ def test_append_lzma(self):
+ if lzma is None:
+ self.skipTest("lzma module not available")
+ self._create_testtar("w:xz")
+ self.assertRaises(tarfile.ReadError, tarfile.open, tmpname, "a")
+
# Append mode is supposed to fail if the tarfile to append to
# does not end with a zero block.
def _test_error(self, data):
@@ -1785,6 +1817,21 @@ class Bz2PartialReadTest(unittest.TestCase):
self._test_partial_input("r:bz2")
+class LzmaMiscReadTest(MiscReadTest):
+ tarname = xzname
+ mode = "r:xz"
+class LzmaUstarReadTest(UstarReadTest):
+ tarname = xzname
+ mode = "r:xz"
+class LzmaStreamReadTest(StreamReadTest):
+ tarname = xzname
+ mode = "r|xz"
+class LzmaWriteTest(WriteTest):
+ mode = "w:xz"
+class LzmaStreamWriteTest(StreamWriteTest):
+ mode = "w|xz"
+
+
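
At the user level the whole lzma feature these subclasses exercise is one
new mode string. A sketch with hypothetical file names:

    import tarfile

    # Requires Python 3.3+ with the lzma module available.
    with tarfile.open("example.tar.xz", "w:xz") as tar:
        tar.add("somefile.txt")  # hypothetical member
    with tarfile.open("example.tar.xz", "r:xz") as tar:
        print(tar.getnames())
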
def test_main():
support.unlink(TEMPDIR)
os.makedirs(TEMPDIR)
@@ -1847,6 +1894,20 @@ def test_main():
Bz2PartialReadTest,
]
+ if lzma:
+ # Create testtar.tar.xz and add lzma-specific tests.
+ support.unlink(xzname)
+ with lzma.LZMAFile(xzname, "w") as tar:
+ tar.write(data)
+
+ tests += [
+ LzmaMiscReadTest,
+ LzmaUstarReadTest,
+ LzmaStreamReadTest,
+ LzmaWriteTest,
+ LzmaStreamWriteTest,
+ ]
+
try:
support.run_unittest(*tests)
finally:
diff --git a/Lib/test/test_telnetlib.py b/Lib/test/test_telnetlib.py
index 87418f5..e58c10f 100644
--- a/Lib/test/test_telnetlib.py
+++ b/Lib/test/test_telnetlib.py
@@ -35,6 +35,7 @@ class GeneralTests(TestCase):
def tearDown(self):
self.thread.join()
+ del self.thread # Clear out any dangling Thread objects.
def testBasic(self):
# connects
diff --git a/Lib/test/test_tempfile.py b/Lib/test/test_tempfile.py
index 50cf3b4..d79f319 100644
--- a/Lib/test/test_tempfile.py
+++ b/Lib/test/test_tempfile.py
@@ -21,7 +21,7 @@ has_spawnl = hasattr(os, 'spawnl')
# TEST_FILES may need to be tweaked for systems depending on the maximum
# number of files that can be opened at one time (see ulimit -n)
-if sys.platform in ('openbsd3', 'openbsd4'):
+if sys.platform.startswith('openbsd'):
TEST_FILES = 48
else:
TEST_FILES = 100
@@ -31,7 +31,7 @@ else:
# threads is not done here.
# Common functionality.
-class TC(unittest.TestCase):
+class BaseTestCase(unittest.TestCase):
str_check = re.compile(r"[a-zA-Z0-9_-]{6}$")
@@ -45,11 +45,6 @@ class TC(unittest.TestCase):
self._warnings_manager.__exit__(None, None, None)
- def failOnException(self, what, ei=None):
- if ei is None:
- ei = sys.exc_info()
- self.fail("%s raised %s: %s" % (what, ei[0], ei[1]))
-
def nameCheck(self, name, dir, pre, suf):
(ndir, nbase) = os.path.split(name)
npre = nbase[:len(pre)]
@@ -68,9 +63,8 @@ class TC(unittest.TestCase):
"random string '%s' does not match /^[a-zA-Z0-9_-]{6}$/"
% nbase)
-test_classes = []
-class test_exports(TC):
+class TestExports(BaseTestCase):
def test_exports(self):
# There are no surprising symbols in the tempfile module
dict = tempfile.__dict__
@@ -97,10 +91,8 @@ class test_exports(TC):
self.assertTrue(len(unexp) == 0,
"unexpected keys: %s" % unexp)
-test_classes.append(test_exports)
-
-class test__RandomNameSequence(TC):
+class TestRandomNameSequence(BaseTestCase):
"""Test the internal iterator object _RandomNameSequence."""
def setUp(self):
@@ -128,13 +120,10 @@ class test__RandomNameSequence(TC):
i = 0
r = self.r
- try:
- for s in r:
- i += 1
- if i == 20:
- break
- except:
- self.failOnException("iteration")
+ for s in r:
+ i += 1
+ if i == 20:
+ break
@unittest.skipUnless(hasattr(os, 'fork'),
"os.fork is required for this test")
@@ -167,10 +156,8 @@ class test__RandomNameSequence(TC):
self.assertNotEqual(child_value, parent_value)
-test_classes.append(test__RandomNameSequence)
-
-class test__candidate_tempdir_list(TC):
+class TestCandidateTempdirList(BaseTestCase):
"""Test the internal function _candidate_tempdir_list."""
def test_nonempty_list(self):
@@ -209,13 +196,11 @@ class test__candidate_tempdir_list(TC):
# Not practical to try to verify the presence of OS-specific
# paths in this list.
-test_classes.append(test__candidate_tempdir_list)
-
# We test _get_default_tempdir by testing gettempdir.
-class test__get_candidate_names(TC):
+class TestGetCandidateNames(BaseTestCase):
"""Test the internal function _get_candidate_names."""
def test_retval(self):
@@ -230,10 +215,8 @@ class test__get_candidate_names(TC):
self.assertTrue(a is b)
-test_classes.append(test__get_candidate_names)
-
-class test__mkstemp_inner(TC):
+class TestMkstempInner(BaseTestCase):
"""Test the internal function _mkstemp_inner."""
class mkstemped:
@@ -258,10 +241,7 @@ class test__mkstemp_inner(TC):
def do_create(self, dir=None, pre="", suf="", bin=1):
if dir is None:
dir = tempfile.gettempdir()
- try:
- file = self.mkstemped(dir, pre, suf, bin)
- except:
- self.failOnException("_mkstemp_inner")
+ file = self.mkstemped(dir, pre, suf, bin)
self.nameCheck(file.name, dir, pre, suf)
return file
@@ -353,10 +333,8 @@ class test__mkstemp_inner(TC):
os.lseek(f.fd, 0, os.SEEK_SET)
self.assertEqual(os.read(f.fd, 20), b"blat")
-test_classes.append(test__mkstemp_inner)
-
-class test_gettempprefix(TC):
+class TestGetTempPrefix(BaseTestCase):
"""Test gettempprefix()."""
def test_sane_template(self):
@@ -376,19 +354,14 @@ class test_gettempprefix(TC):
d = tempfile.mkdtemp(prefix="")
try:
p = os.path.join(d, p)
- try:
- fd = os.open(p, os.O_RDWR | os.O_CREAT)
- except:
- self.failOnException("os.open")
+ fd = os.open(p, os.O_RDWR | os.O_CREAT)
os.close(fd)
os.unlink(p)
finally:
os.rmdir(d)
-test_classes.append(test_gettempprefix)
-
-class test_gettempdir(TC):
+class TestGetTempDir(BaseTestCase):
"""Test gettempdir()."""
def test_directory_exists(self):
@@ -406,12 +379,9 @@ class test_gettempdir(TC):
# sneaky: just instantiate a NamedTemporaryFile, which
# defaults to writing into the directory returned by
# gettempdir.
- try:
- file = tempfile.NamedTemporaryFile()
- file.write(b"blat")
- file.close()
- except:
- self.failOnException("create file in %s" % tempfile.gettempdir())
+ file = tempfile.NamedTemporaryFile()
+ file.write(b"blat")
+ file.close()
def test_same_thing(self):
# gettempdir always returns the same object
@@ -420,23 +390,18 @@ class test_gettempdir(TC):
self.assertTrue(a is b)
-test_classes.append(test_gettempdir)
-
-class test_mkstemp(TC):
+class TestMkstemp(BaseTestCase):
"""Test mkstemp()."""
def do_create(self, dir=None, pre="", suf=""):
if dir is None:
dir = tempfile.gettempdir()
- try:
- (fd, name) = tempfile.mkstemp(dir=dir, prefix=pre, suffix=suf)
- (ndir, nbase) = os.path.split(name)
- adir = os.path.abspath(dir)
- self.assertEqual(adir, ndir,
- "Directory '%s' incorrectly returned as '%s'" % (adir, ndir))
- except:
- self.failOnException("mkstemp")
+ (fd, name) = tempfile.mkstemp(dir=dir, prefix=pre, suffix=suf)
+ (ndir, nbase) = os.path.split(name)
+ adir = os.path.abspath(dir)
+ self.assertEqual(adir, ndir,
+ "Directory '%s' incorrectly returned as '%s'" % (adir, ndir))
try:
self.nameCheck(name, dir, pre, suf)
@@ -461,19 +426,14 @@ class test_mkstemp(TC):
finally:
os.rmdir(dir)
-test_classes.append(test_mkstemp)
-
-class test_mkdtemp(TC):
+class TestMkdtemp(BaseTestCase):
"""Test mkdtemp()."""
def do_create(self, dir=None, pre="", suf=""):
if dir is None:
dir = tempfile.gettempdir()
- try:
- name = tempfile.mkdtemp(dir=dir, prefix=pre, suffix=suf)
- except:
- self.failOnException("mkdtemp")
+ name = tempfile.mkdtemp(dir=dir, prefix=pre, suffix=suf)
try:
self.nameCheck(name, dir, pre, suf)
@@ -528,10 +488,8 @@ class test_mkdtemp(TC):
finally:
os.rmdir(dir)
-test_classes.append(test_mkdtemp)
-
-class test_mktemp(TC):
+class TestMktemp(BaseTestCase):
"""Test mktemp()."""
# For safety, all use of mktemp must occur in a private directory.
@@ -560,10 +518,7 @@ class test_mktemp(TC):
self._unlink(self.name)
def do_create(self, pre="", suf=""):
- try:
- file = self.mktemped(self.dir, pre, suf)
- except:
- self.failOnException("mktemp")
+ file = self.mktemped(self.dir, pre, suf)
self.nameCheck(file.name, self.dir, pre, suf)
return file
@@ -590,23 +545,18 @@ class test_mktemp(TC):
## self.assertRaises(RuntimeWarning,
## tempfile.mktemp, dir=self.dir)
-test_classes.append(test_mktemp)
-
# We test _TemporaryFileWrapper by testing NamedTemporaryFile.
-class test_NamedTemporaryFile(TC):
+class TestNamedTemporaryFile(BaseTestCase):
"""Test NamedTemporaryFile()."""
def do_create(self, dir=None, pre="", suf="", delete=True):
if dir is None:
dir = tempfile.gettempdir()
- try:
- file = tempfile.NamedTemporaryFile(dir=dir, prefix=pre, suffix=suf,
- delete=delete)
- except:
- self.failOnException("NamedTemporaryFile")
+ file = tempfile.NamedTemporaryFile(dir=dir, prefix=pre, suffix=suf,
+ delete=delete)
self.nameCheck(file.name, dir, pre, suf)
return file
@@ -659,11 +609,8 @@ class test_NamedTemporaryFile(TC):
f = tempfile.NamedTemporaryFile()
f.write(b'abc\n')
f.close()
- try:
- f.close()
- f.close()
- except:
- self.failOnException("close")
+ f.close()
+ f.close()
def test_context_manager(self):
# A NamedTemporaryFile can be used as a context manager
@@ -677,18 +624,14 @@ class test_NamedTemporaryFile(TC):
# How to test the mode and bufsize parameters?
-test_classes.append(test_NamedTemporaryFile)
-class test_SpooledTemporaryFile(TC):
+class TestSpooledTemporaryFile(BaseTestCase):
"""Test SpooledTemporaryFile()."""
def do_create(self, max_size=0, dir=None, pre="", suf=""):
if dir is None:
dir = tempfile.gettempdir()
- try:
- file = tempfile.SpooledTemporaryFile(max_size=max_size, dir=dir, prefix=pre, suffix=suf)
- except:
- self.failOnException("SpooledTemporaryFile")
+ file = tempfile.SpooledTemporaryFile(max_size=max_size, dir=dir, prefix=pre, suffix=suf)
return file
@@ -776,11 +719,8 @@ class test_SpooledTemporaryFile(TC):
f.write(b'abc\n')
self.assertFalse(f._rolled)
f.close()
- try:
- f.close()
- f.close()
- except:
- self.failOnException("close")
+ f.close()
+ f.close()
def test_multiple_close_after_rollover(self):
# A SpooledTemporaryFile can be closed many times without error
@@ -788,11 +728,8 @@ class test_SpooledTemporaryFile(TC):
f.write(b'abc\n')
self.assertTrue(f._rolled)
f.close()
- try:
- f.close()
- f.close()
- except:
- self.failOnException("close")
+ f.close()
+ f.close()
def test_bound_methods(self):
# It should be OK to steal a bound method from a SpooledTemporaryFile
@@ -878,66 +815,76 @@ class test_SpooledTemporaryFile(TC):
pass
self.assertRaises(ValueError, use_closed)
+ def test_truncate_with_size_parameter(self):
+ # A SpooledTemporaryFile can be truncated to zero size
+ f = tempfile.SpooledTemporaryFile(max_size=10)
+ f.write(b'abcdefg\n')
+ f.seek(0)
+ f.truncate()
+ self.assertFalse(f._rolled)
+ self.assertEqual(f._file.getvalue(), b'')
+ # A SpooledTemporaryFile can be truncated to a specific size
+ f = tempfile.SpooledTemporaryFile(max_size=10)
+ f.write(b'abcdefg\n')
+ f.truncate(4)
+ self.assertFalse(f._rolled)
+ self.assertEqual(f._file.getvalue(), b'abcd')
+ # A SpooledTemporaryFile rolls over if truncated to large size
+ f = tempfile.SpooledTemporaryFile(max_size=10)
+ f.write(b'abcdefg\n')
+ f.truncate(20)
+ self.assertTrue(f._rolled)
+ if has_stat:
+ self.assertEqual(os.fstat(f.fileno()).st_size, 20)
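The three truncate() cases above pin down the new semantics: truncating with no size, truncating to a smaller size, and rolling over to disk when the requested size exceeds max_size. A short sketch of the rollover case (note that _rolled is an internal attribute, used here only because the test itself relies on it):

    import tempfile

    f = tempfile.SpooledTemporaryFile(max_size=10)
    f.write(b'abcdefg\n')
    f.truncate(20)      # asking for more than max_size forces rollover
    print(f._rolled)    # True: the in-memory buffer became a real temp file
    f.close()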
-test_classes.append(test_SpooledTemporaryFile)
+if tempfile.NamedTemporaryFile is not tempfile.TemporaryFile:
-class test_TemporaryFile(TC):
- """Test TemporaryFile()."""
+ class TestTemporaryFile(BaseTestCase):
+ """Test TemporaryFile()."""
- def test_basic(self):
- # TemporaryFile can create files
- # No point in testing the name params - the file has no name.
- try:
+ def test_basic(self):
+ # TemporaryFile can create files
+ # No point in testing the name params - the file has no name.
tempfile.TemporaryFile()
- except:
- self.failOnException("TemporaryFile")
- def test_has_no_name(self):
- # TemporaryFile creates files with no names (on this system)
- dir = tempfile.mkdtemp()
- f = tempfile.TemporaryFile(dir=dir)
- f.write(b'blat')
+ def test_has_no_name(self):
+ # TemporaryFile creates files with no names (on this system)
+ dir = tempfile.mkdtemp()
+ f = tempfile.TemporaryFile(dir=dir)
+ f.write(b'blat')
- # Sneaky: because this file has no name, it should not prevent
- # us from removing the directory it was created in.
- try:
- os.rmdir(dir)
- except:
- ei = sys.exc_info()
- # cleanup
+ # Sneaky: because this file has no name, it should not prevent
+ # us from removing the directory it was created in.
+ try:
+ os.rmdir(dir)
+ except:
+ # cleanup
+ f.close()
+ os.rmdir(dir)
+ raise
+
+ def test_multiple_close(self):
+ # A TemporaryFile can be closed many times without error
+ f = tempfile.TemporaryFile()
+ f.write(b'abc\n')
f.close()
- os.rmdir(dir)
- self.failOnException("rmdir", ei)
-
- def test_multiple_close(self):
- # A TemporaryFile can be closed many times without error
- f = tempfile.TemporaryFile()
- f.write(b'abc\n')
- f.close()
- try:
f.close()
f.close()
- except:
- self.failOnException("close")
- # How to test the mode and bufsize parameters?
- def test_mode_and_encoding(self):
+ # How to test the mode and bufsize parameters?
+ def test_mode_and_encoding(self):
- def roundtrip(input, *args, **kwargs):
- with tempfile.TemporaryFile(*args, **kwargs) as fileobj:
- fileobj.write(input)
- fileobj.seek(0)
- self.assertEqual(input, fileobj.read())
+ def roundtrip(input, *args, **kwargs):
+ with tempfile.TemporaryFile(*args, **kwargs) as fileobj:
+ fileobj.write(input)
+ fileobj.seek(0)
+ self.assertEqual(input, fileobj.read())
- roundtrip(b"1234", "w+b")
- roundtrip("abdc\n", "w+")
- roundtrip("\u039B", "w+", encoding="utf-16")
- roundtrip("foo\r\n", "w+", newline="")
-
-
-if tempfile.NamedTemporaryFile is not tempfile.TemporaryFile:
- test_classes.append(test_TemporaryFile)
+ roundtrip(b"1234", "w+b")
+ roundtrip("abdc\n", "w+")
+ roundtrip("\u039B", "w+", encoding="utf-16")
+ roundtrip("foo\r\n", "w+", newline="")
# Helper for test_del_on_shutdown
@@ -956,16 +903,13 @@ class NulledModules:
d.clear()
d.update(c)
-class test_TemporaryDirectory(TC):
+class TestTemporaryDirectory(BaseTestCase):
"""Test TemporaryDirectory()."""
def do_create(self, dir=None, pre="", suf="", recurse=1):
if dir is None:
dir = tempfile.gettempdir()
- try:
- tmp = tempfile.TemporaryDirectory(dir=dir, prefix=pre, suffix=suf)
- except:
- self.failOnException("TemporaryDirectory")
+ tmp = tempfile.TemporaryDirectory(dir=dir, prefix=pre, suffix=suf)
self.nameCheck(tmp.name, dir, pre, suf)
# Create a subdirectory and some files
if recurse:
@@ -1089,11 +1033,8 @@ class test_TemporaryDirectory(TC):
# Can be cleaned-up many times without error
d = self.do_create()
d.cleanup()
- try:
- d.cleanup()
- d.cleanup()
- except:
- self.failOnException("cleanup")
+ d.cleanup()
+ d.cleanup()
def test_context_manager(self):
# Can be used as a context manager
@@ -1104,10 +1045,8 @@ class test_TemporaryDirectory(TC):
self.assertFalse(os.path.exists(name))
-test_classes.append(test_TemporaryDirectory)
-
def test_main():
- support.run_unittest(*test_classes)
+ support.run_unittest(__name__)
if __name__ == "__main__":
test_main()
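Dropping the manual test_classes registry works because run_unittest() accepts a module name and collects every TestCase subclass defined in that module. A simplified sketch of that discovery logic, assuming only the standard unittest loader (the real helper lives in test.support):

    import sys
    import unittest

    def run_unittest(module_name):
        # collect every TestCase subclass defined in the module
        module = sys.modules[module_name]
        classes = [obj for obj in vars(module).values()
                   if isinstance(obj, type) and issubclass(obj, unittest.TestCase)]
        loader = unittest.defaultTestLoader
        suite = unittest.TestSuite(loader.loadTestsFromTestCase(cls)
                                   for cls in classes)
        unittest.TextTestRunner().run(suite)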
diff --git a/Lib/test/test_threaded_import.py b/Lib/test/test_threaded_import.py
index 7791935..789920b 100644
--- a/Lib/test/test_threaded_import.py
+++ b/Lib/test/test_threaded_import.py
@@ -11,8 +11,8 @@ import sys
import time
import shutil
import unittest
-from test.support import verbose, import_module, run_unittest, TESTFN
-thread = import_module('_thread')
+from test.support import (
+ verbose, import_module, run_unittest, TESTFN, reap_threads)
threading = import_module('threading')
def task(N, done, done_tasks, errors):
@@ -30,7 +30,7 @@ def task(N, done, done_tasks, errors):
except Exception as e:
errors.append(e.with_traceback(None))
finally:
- done_tasks.append(thread.get_ident())
+ done_tasks.append(threading.get_ident())
finished = len(done_tasks) == N
if finished:
done.set()
@@ -62,7 +62,7 @@ class Finder:
def __init__(self):
self.numcalls = 0
self.x = 0
- self.lock = thread.allocate_lock()
+ self.lock = threading.Lock()
def find_module(self, name, path=None):
# Simulate some thread-unsafe behaviour. If calls to find_module()
@@ -113,7 +113,9 @@ class ThreadedImportTests(unittest.TestCase):
done_tasks = []
done.clear()
for i in range(N):
- thread.start_new_thread(task, (N, done, done_tasks, errors,))
+ t = threading.Thread(target=task,
+ args=(N, done, done_tasks, errors,))
+ t.start()
done.wait(60)
self.assertFalse(errors)
if verbose:
@@ -203,6 +205,7 @@ class ThreadedImportTests(unittest.TestCase):
self.assertEqual(set(results), {'a', 'b'})
+@reap_threads
def test_main():
run_unittest(ThreadedImportTests)
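The new @reap_threads decorator guards against threads leaking out of one test and into the next. A simplified sketch of the idea (the real implementation in test.support is more careful about timeouts and reporting):

    import functools
    import threading
    import time

    def reap_threads(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            before = set(threading.enumerate())
            try:
                return func(*args, **kwargs)
            finally:
                # join any thread the test started but failed to clean up
                deadline = time.time() + 1.0
                for t in set(threading.enumerate()) - before:
                    t.join(max(0.0, deadline - time.time()))
        return wrapper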
diff --git a/Lib/test/test_threading.py b/Lib/test/test_threading.py
index 32637b5..141d961 100644
--- a/Lib/test/test_threading.py
+++ b/Lib/test/test_threading.py
@@ -173,7 +173,7 @@ class ThreadTests(BaseTestCase):
exception = ctypes.py_object(AsyncExc)
# First check it works when setting the exception from the same thread.
- tid = _thread.get_ident()
+ tid = threading.get_ident()
try:
result = set_async_exc(ctypes.c_long(tid), exception)
@@ -202,7 +202,7 @@ class ThreadTests(BaseTestCase):
class Worker(threading.Thread):
def run(self):
- self.id = _thread.get_ident()
+ self.id = threading.get_ident()
self.finished = False
try:
@@ -407,6 +407,14 @@ class ThreadTests(BaseTestCase):
t.daemon = True
self.assertTrue('daemon' in repr(t))
+ def test_daemon_param(self):

+ t = threading.Thread()
+ self.assertFalse(t.daemon)
+ t = threading.Thread(daemon=False)
+ self.assertFalse(t.daemon)
+ t = threading.Thread(daemon=True)
+ self.assertTrue(t.daemon)
+
class ThreadJoinOnShutdown(BaseTestCase):
@@ -712,6 +720,10 @@ class ThreadingExceptionTests(BaseTestCase):
thread.start()
self.assertRaises(RuntimeError, setattr, thread, "daemon", True)
+ def test_releasing_unacquired_lock(self):
+ lock = threading.Lock()
+ self.assertRaises(RuntimeError, lock.release)
+
@unittest.skipUnless(sys.platform == 'darwin', 'test macosx problem')
def test_recursion_limit(self):
# Issue 9670
@@ -773,6 +785,7 @@ class BoundedSemaphoreTests(lock_tests.BoundedSemaphoreTests):
class BarrierTests(lock_tests.BarrierTests):
barriertype = staticmethod(threading.Barrier)
+
def test_main():
test.support.run_unittest(LockTests, PyRLockTests, CRLockTests, EventTests,
ConditionAsRLockTests, ConditionTests,
@@ -780,7 +793,7 @@ def test_main():
ThreadTests,
ThreadJoinOnShutdown,
ThreadingExceptionTests,
- BarrierTests
+ BarrierTests,
)
if __name__ == "__main__":
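Two 3.3 additions are exercised in this file: threading.get_ident() replaces _thread.get_ident(), and Thread() grows a daemon keyword that mirrors the attribute. For example:

    import threading

    t = threading.Thread(target=print, args=('worker',), daemon=True)
    assert t.daemon               # set via the new constructor keyword
    print(threading.get_ident())  # high-level spelling of _thread.get_ident()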
diff --git a/Lib/test/test_threadsignals.py b/Lib/test/test_threadsignals.py
index e0af31d..f975a75 100644
--- a/Lib/test/test_threadsignals.py
+++ b/Lib/test/test_threadsignals.py
@@ -14,10 +14,8 @@ if sys.platform[:3] in ('win', 'os2') or sys.platform=='riscos':
process_pid = os.getpid()
signalled_all=thread.allocate_lock()
-# Issue #11223: Locks are implemented using a mutex and a condition variable of
-# the pthread library on FreeBSD6. POSIX condition variables cannot be
-# interrupted by signals (see pthread_cond_wait manual page).
-USING_PTHREAD_COND = (sys.platform == 'freebsd6')
+USING_PTHREAD_COND = (sys.thread_info.name == 'pthread'
+ and sys.thread_info.lock == 'mutex+cond')
def registerSignals(for_usr1, for_usr2, for_alrm):
usr1 = signal.signal(signal.SIGUSR1, for_usr1)
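The platform sniff for FreeBSD 6 becomes a feature check: sys.thread_info (new in 3.3) reports which thread implementation and lock type the interpreter was built with, so the test can detect the problematic pthread mutex+cond lock directly:

    import sys

    info = sys.thread_info
    print(info.name)   # e.g. 'pthread' or 'nt'
    print(info.lock)   # e.g. 'mutex+cond' or 'semaphore'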
diff --git a/Lib/test/test_time.py b/Lib/test/test_time.py
index ce57d87..28d018a 100644
--- a/Lib/test/test_time.py
+++ b/Lib/test/test_time.py
@@ -4,7 +4,13 @@ import unittest
import locale
import sysconfig
import sys
-import warnings
+import platform
+
+# Max year is only limited by the size of C int.
+SIZEOF_INT = sysconfig.get_config_var('SIZEOF_INT') or 4
+TIME_MAXYEAR = (1 << 8 * SIZEOF_INT - 1) - 1
+TIME_MINYEAR = -TIME_MAXYEAR - 1
+
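The year bounds follow from the two's-complement limits of a C int. Worked out for the common 4-byte case (note that << binds looser than * and -, so the shift amount is 8*4-1 = 31):

    SIZEOF_INT = 4
    TIME_MAXYEAR = (1 << 8 * SIZEOF_INT - 1) - 1   # (1 << 31) - 1 == 2147483647
    TIME_MINYEAR = -TIME_MAXYEAR - 1               # -2147483648
    assert TIME_MAXYEAR == 2**31 - 1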
class TimeTestCase(unittest.TestCase):
@@ -20,6 +26,27 @@ class TimeTestCase(unittest.TestCase):
def test_clock(self):
time.clock()
+ @unittest.skipUnless(hasattr(time, 'clock_gettime'),
+ 'need time.clock_gettime()')
+ def test_clock_realtime(self):
+ time.clock_gettime(time.CLOCK_REALTIME)
+
+ @unittest.skipUnless(hasattr(time, 'clock_gettime'),
+ 'need time.clock_gettime()')
+ @unittest.skipUnless(hasattr(time, 'CLOCK_MONOTONIC'),
+ 'need time.CLOCK_MONOTONIC')
+ def test_clock_monotonic(self):
+ a = time.clock_gettime(time.CLOCK_MONOTONIC)
+ b = time.clock_gettime(time.CLOCK_MONOTONIC)
+ self.assertLessEqual(a, b)
+
+ @unittest.skipUnless(hasattr(time, 'clock_getres'),
+ 'need time.clock_getres()')
+ def test_clock_getres(self):
+ res = time.clock_getres(time.CLOCK_REALTIME)
+ self.assertGreater(res, 0.0)
+ self.assertLessEqual(res, 1.0)
+
def test_conversions(self):
self.assertEqual(time.ctime(self.t),
time.asctime(time.localtime(self.t)))
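time.clock_gettime() and time.clock_getres() (new in 3.3) expose the POSIX clock API; the hasattr guards are needed because neither the functions nor CLOCK_MONOTONIC exist on every platform. Typical use, on a platform that provides both:

    import time

    if hasattr(time, 'clock_gettime'):
        now = time.clock_gettime(time.CLOCK_REALTIME)
        res = time.clock_getres(time.CLOCK_REALTIME)
        print(now, res)   # seconds since the epoch, clock resolution in seconds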
@@ -27,6 +54,8 @@ class TimeTestCase(unittest.TestCase):
int(self.t))
def test_sleep(self):
+ self.assertRaises(ValueError, time.sleep, -2)
+ self.assertRaises(ValueError, time.sleep, -1)
time.sleep(1.2)
def test_strftime(self):
@@ -47,28 +76,34 @@ class TimeTestCase(unittest.TestCase):
with self.assertRaises(ValueError):
time.strftime('%f')
- def _bounds_checking(self, func=time.strftime):
+ def _bounds_checking(self, func):
# Make sure that strftime() checks the bounds of the various parts
- #of the time tuple (0 is valid for *all* values).
+ # of the time tuple (0 is valid for *all* values).
# The year field is tested by other test cases above
# Check month [1, 12] + zero support
+ func((1900, 0, 1, 0, 0, 0, 0, 1, -1))
+ func((1900, 12, 1, 0, 0, 0, 0, 1, -1))
self.assertRaises(ValueError, func,
(1900, -1, 1, 0, 0, 0, 0, 1, -1))
self.assertRaises(ValueError, func,
(1900, 13, 1, 0, 0, 0, 0, 1, -1))
# Check day of month [1, 31] + zero support
+ func((1900, 1, 0, 0, 0, 0, 0, 1, -1))
+ func((1900, 1, 31, 0, 0, 0, 0, 1, -1))
self.assertRaises(ValueError, func,
(1900, 1, -1, 0, 0, 0, 0, 1, -1))
self.assertRaises(ValueError, func,
(1900, 1, 32, 0, 0, 0, 0, 1, -1))
# Check hour [0, 23]
+ func((1900, 1, 1, 23, 0, 0, 0, 1, -1))
self.assertRaises(ValueError, func,
(1900, 1, 1, -1, 0, 0, 0, 1, -1))
self.assertRaises(ValueError, func,
(1900, 1, 1, 24, 0, 0, 0, 1, -1))
# Check minute [0, 59]
+ func((1900, 1, 1, 0, 59, 0, 0, 1, -1))
self.assertRaises(ValueError, func,
(1900, 1, 1, 0, -1, 0, 0, 1, -1))
self.assertRaises(ValueError, func,
@@ -78,15 +113,21 @@ class TimeTestCase(unittest.TestCase):
(1900, 1, 1, 0, 0, -1, 0, 1, -1))
# C99 only requires allowing for one leap second, but Python's docs say
# allow two leap seconds (0..61)
+ func((1900, 1, 1, 0, 0, 60, 0, 1, -1))
+ func((1900, 1, 1, 0, 0, 61, 0, 1, -1))
self.assertRaises(ValueError, func,
(1900, 1, 1, 0, 0, 62, 0, 1, -1))
# No check for upper-bound day of week;
# value forced into range by a ``% 7`` calculation.
# Start check at -2 since gettmarg() increments value before taking
# modulo.
+ self.assertEqual(func((1900, 1, 1, 0, 0, 0, -1, 1, -1)),
+ func((1900, 1, 1, 0, 0, 0, +6, 1, -1)))
self.assertRaises(ValueError, func,
(1900, 1, 1, 0, 0, 0, -2, 1, -1))
# Check day of the year [1, 366] + zero support
+ func((1900, 1, 1, 0, 0, 0, 0, 0, -1))
+ func((1900, 1, 1, 0, 0, 0, 0, 366, -1))
self.assertRaises(ValueError, func,
(1900, 1, 1, 0, 0, 0, 0, -1, -1))
self.assertRaises(ValueError, func,
@@ -96,12 +137,13 @@ class TimeTestCase(unittest.TestCase):
self._bounds_checking(lambda tup: time.strftime('', tup))
def test_default_values_for_zero(self):
- # Make sure that using all zeros uses the proper default values.
- # No test for daylight savings since strftime() does not change output
- # based on its value.
+ # Make sure that using all zeros uses the proper default
+ # values. No test for daylight savings since strftime() does
+ # not change output based on its value and no test for year
+ # because systems vary in their support for year 0.
expected = "2000 01 01 00 00 00 1 001"
with support.check_warnings():
- result = time.strftime("%Y %m %d %H %M %S %w %j", (0,)*9)
+ result = time.strftime("%Y %m %d %H %M %S %w %j", (2000,)+(0,)*8)
self.assertEqual(expected, result)
def test_strptime(self):
@@ -128,11 +170,13 @@ class TimeTestCase(unittest.TestCase):
time.asctime(time.gmtime(self.t))
# Max year is only limited by the size of C int.
- sizeof_int = sysconfig.get_config_var('SIZEOF_INT') or 4
- bigyear = (1 << 8 * sizeof_int - 1) - 1
- asc = time.asctime((bigyear, 6, 1) + (0,)*6)
- self.assertEqual(asc[-len(str(bigyear)):], str(bigyear))
- self.assertRaises(OverflowError, time.asctime, (bigyear + 1,) + (0,)*8)
+ for bigyear in TIME_MAXYEAR, TIME_MINYEAR:
+ asc = time.asctime((bigyear, 6, 1) + (0,) * 6)
+ self.assertEqual(asc[-len(str(bigyear)):], str(bigyear))
+ self.assertRaises(OverflowError, time.asctime,
+ (TIME_MAXYEAR + 1,) + (0,) * 8)
+ self.assertRaises(OverflowError, time.asctime,
+ (TIME_MINYEAR - 1,) + (0,) * 8)
self.assertRaises(TypeError, time.asctime, 0)
self.assertRaises(TypeError, time.asctime, ())
self.assertRaises(TypeError, time.asctime, (0,) * 10)
@@ -155,8 +199,8 @@ class TimeTestCase(unittest.TestCase):
else:
self.assertEqual(time.ctime(testval)[20:], str(year))
- @unittest.skipIf(not hasattr(time, "tzset"),
- "time module has no attribute tzset")
+ @unittest.skipUnless(hasattr(time, "tzset"),
+ "time module has no attribute tzset")
def test_tzset(self):
from os import environ
@@ -208,11 +252,13 @@ class TimeTestCase(unittest.TestCase):
self.assertNotEqual(time.gmtime(xmas2002), time.localtime(xmas2002))
# Issue #11886: Australian Eastern Standard Time (UTC+10) is called
- # "EST" (as Eastern Standard Time, UTC-5) instead of "AEST" on some
- # operating systems (e.g. FreeBSD), which is wrong. See for example
- # this bug: http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=93810
+ # "EST" (as Eastern Standard Time, UTC-5) instead of "AEST"
+ # (non-DST timezone), and "EDT" instead of "AEDT" (DST timezone),
+ # on some operating systems (e.g. FreeBSD), which is wrong. See for
+ # example this bug:
+ # http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=93810
self.assertIn(time.tzname[0], ('AEST', 'EST'), time.tzname[0])
- self.assertTrue(time.tzname[1] == 'AEDT', str(time.tzname[1]))
+ self.assertTrue(time.tzname[1] in ('AEDT', 'EDT'), str(time.tzname[1]))
self.assertEqual(len(time.tzname), 2)
self.assertEqual(time.daylight, 1)
self.assertEqual(time.timezone, -36000)
@@ -235,7 +281,7 @@ class TimeTestCase(unittest.TestCase):
# results!).
for func in time.ctime, time.gmtime, time.localtime:
for unreasonable in -1e200, 1e200:
- self.assertRaises(ValueError, func, unreasonable)
+ self.assertRaises(OverflowError, func, unreasonable)
def test_ctime_without_arg(self):
# Not sure how to check the values, since the clock could tick
@@ -258,6 +304,69 @@ class TimeTestCase(unittest.TestCase):
t1 = time.mktime(lt1)
self.assertAlmostEqual(t1, t0, delta=0.2)
+ def test_mktime(self):
+ # Issue #1726687
+ for t in (-2, -1, 0, 1):
+ try:
+ tt = time.localtime(t)
+ except (OverflowError, OSError):
+ pass
+ else:
+ self.assertEqual(time.mktime(tt), t)
+
+ # Issue #13309: passing extreme values to mktime() or localtime()
+ # borks the glibc's internal timezone data.
+ @unittest.skipUnless(platform.libc_ver()[0] != 'glibc',
+ "disabled because of a bug in glibc. Issue #13309")
+ def test_mktime_error(self):
+ # It may not be possible to reliably make mktime return an error
+ # on all platforms. This makes sure that no exception other
+ # than OverflowError is raised for an extreme value.
+ tt = time.gmtime(self.t)
+ tzname = time.strftime('%Z', tt)
+ self.assertNotEqual(tzname, 'LMT')
+ try:
+ time.mktime((-1, 1, 1, 0, 0, 0, -1, -1, -1))
+ except OverflowError:
+ pass
+ self.assertEqual(time.strftime('%Z', tt), tzname)
+
+ def test_steady(self):
+ t1 = time.steady()
+ time.sleep(0.1)
+ t2 = time.steady()
+ dt = t2 - t1
+ # may fail if the system clock was changed
+ self.assertGreater(t2, t1)
+ self.assertAlmostEqual(dt, 0.1, delta=0.2)
+
+ def test_steady_strict(self):
+ try:
+ t1 = time.steady(strict=True)
+ except OSError as err:
+ self.skipTest("the monotonic clock failed: %s" % err)
+ except NotImplementedError:
+ self.skipTest("no monotonic clock available")
+ t2 = time.steady(strict=True)
+ self.assertGreaterEqual(t2, t1)
+
+ def test_localtime_failure(self):
+ # Issue #13847: check for localtime() failure
+ invalid_time_t = None
+ for time_t in (-1, 2**30, 2**33, 2**60):
+ try:
+ time.localtime(time_t)
+ except OverflowError:
+ self.skipTest("need 64-bit time_t")
+ except OSError:
+ invalid_time_t = time_t
+ break
+ if invalid_time_t is None:
+ self.skipTest("unable to find an invalid time_t value")
+
+ self.assertRaises(OSError, time.localtime, invalid_time_t)
+ self.assertRaises(OSError, time.ctime, invalid_time_t)
+
class TestLocale(unittest.TestCase):
def setUp(self):
self.oldloc = locale.setlocale(locale.LC_ALL)
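The time.steady() tested above was an interim 3.3 name; PEP 418 eventually shipped the function as time.monotonic(), and the strict= flag was dropped. The usage pattern is unchanged:

    import time

    t1 = time.monotonic()
    time.sleep(0.1)
    t2 = time.monotonic()
    assert t2 >= t1   # a monotonic clock never goes backwards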
@@ -276,19 +385,12 @@ class TestLocale(unittest.TestCase):
class _BaseYearTest(unittest.TestCase):
- accept2dyear = None
-
- def setUp(self):
- self.saved_accept2dyear = time.accept2dyear
- time.accept2dyear = self.accept2dyear
-
- def tearDown(self):
- time.accept2dyear = self.saved_accept2dyear
-
def yearstr(self, y):
raise NotImplementedError()
class _TestAsctimeYear:
+ _format = '%d'
+
def yearstr(self, y):
return time.asctime((y,) + (0,) * 8).split()[-1]
@@ -298,114 +400,160 @@ class _TestAsctimeYear:
self.assertEqual(self.yearstr(123456789), '123456789')
class _TestStrftimeYear:
- def yearstr(self, y):
- return time.strftime('%Y', (y,) + (0,) * 8).split()[-1]
- def test_large_year(self):
+ # Issue 13305: For years < 1000, the value is not always
+ # padded to 4 digits across platforms. The C standard
+ # assumes year >= 1900, so it does not specify the number
+ # of digits.
+
+ if time.strftime('%Y', (1,) + (0,) * 8) == '0001':
+ _format = '%04d'
+ else:
+ _format = '%d'
+
+ def yearstr(self, y):
+ return time.strftime('%Y', (y,) + (0,) * 8)
+
+ def test_4dyear(self):
+ # Check that we can return the zero padded value.
+ if self._format == '%04d':
+ self.test_year('%04d')
+ else:
+ def year4d(y):
+ return time.strftime('%4Y', (y,) + (0,) * 8)
+ self.test_year('%04d', func=year4d)
+
+ def skip_if_not_supported(y):
+ msg = "strftime() is limited to [1; 9999] with Visual Studio"
# Check that it doesn't crash for year > 9999
try:
- text = self.yearstr(12345)
+ time.strftime('%Y', (y,) + (0,) * 8)
except ValueError:
- # strftime() is limited to [1; 9999] with Visual Studio
- return
- self.assertEqual(text, '12345')
- self.assertEqual(self.yearstr(123456789), '123456789')
+ cond = False
+ else:
+ cond = True
+ return unittest.skipUnless(cond, msg)
-class _Test2dYear(_BaseYearTest):
- accept2dyear = 1
+ @skip_if_not_supported(10000)
+ def test_large_year(self):
+ return super().test_large_year()
- def test_year(self):
- with support.check_warnings():
- self.assertEqual(self.yearstr(0), '2000')
- self.assertEqual(self.yearstr(69), '1969')
- self.assertEqual(self.yearstr(68), '2068')
- self.assertEqual(self.yearstr(99), '1999')
+ @skip_if_not_supported(0)
+ def test_negative(self):
+ return super().test_negative()
+
+ del skip_if_not_supported
- def test_invalid(self):
- self.assertRaises(ValueError, self.yearstr, -1)
- self.assertRaises(ValueError, self.yearstr, 100)
- self.assertRaises(ValueError, self.yearstr, 999)
class _Test4dYear(_BaseYearTest):
- accept2dyear = 0
+ _format = '%d'
+
+ def test_year(self, fmt=None, func=None):
+ fmt = fmt or self._format
+ func = func or self.yearstr
+ self.assertEqual(func(1), fmt % 1)
+ self.assertEqual(func(68), fmt % 68)
+ self.assertEqual(func(69), fmt % 69)
+ self.assertEqual(func(99), fmt % 99)
+ self.assertEqual(func(999), fmt % 999)
+ self.assertEqual(func(9999), fmt % 9999)
- def test_year(self):
- self.assertIn(self.yearstr(1), ('1', '0001'))
- self.assertIn(self.yearstr(68), ('68', '0068'))
- self.assertIn(self.yearstr(69), ('69', '0069'))
- self.assertIn(self.yearstr(99), ('99', '0099'))
- self.assertIn(self.yearstr(999), ('999', '0999'))
- self.assertEqual(self.yearstr(9999), '9999')
+ def test_large_year(self):
+ self.assertEqual(self.yearstr(12345), '12345')
+ self.assertEqual(self.yearstr(123456789), '123456789')
+ self.assertEqual(self.yearstr(TIME_MAXYEAR), str(TIME_MAXYEAR))
+ self.assertRaises(OverflowError, self.yearstr, TIME_MAXYEAR + 1)
def test_negative(self):
- try:
- text = self.yearstr(-1)
- except ValueError:
- # strftime() is limited to [1; 9999] with Visual Studio
- return
- self.assertIn(text, ('-1', '-001'))
-
+ self.assertEqual(self.yearstr(-1), self._format % -1)
self.assertEqual(self.yearstr(-1234), '-1234')
self.assertEqual(self.yearstr(-123456), '-123456')
+ self.assertEqual(self.yearstr(-123456789), str(-123456789))
+ self.assertEqual(self.yearstr(-1234567890), str(-1234567890))
+ self.assertEqual(self.yearstr(TIME_MINYEAR + 1900), str(TIME_MINYEAR + 1900))
+ # Issue #13312: it may return a wrong value for year < TIME_MINYEAR + 1900
+ # Skip the value test, but check that no error is raised
+ self.yearstr(TIME_MINYEAR)
+ # self.assertEqual(self.yearstr(TIME_MINYEAR), str(TIME_MINYEAR))
+ self.assertRaises(OverflowError, self.yearstr, TIME_MINYEAR - 1)
- def test_mktime(self):
- # Issue #1726687
- for t in (-2, -1, 0, 1):
- try:
- tt = time.localtime(t)
- except (OverflowError, ValueError):
- pass
- else:
- self.assertEqual(time.mktime(tt), t)
- # It may not be possible to reliably make mktime return error
- # on all platfom. This will make sure that no other exception
- # than OverflowError is raised for an extreme value.
- try:
- time.mktime((-1, 1, 1, 0, 0, 0, -1, -1, -1))
- except OverflowError:
- pass
-
-class TestAsctimeAccept2dYear(_TestAsctimeYear, _Test2dYear):
- pass
-
-class TestStrftimeAccept2dYear(_TestStrftimeYear, _Test2dYear):
- pass
-
class TestAsctime4dyear(_TestAsctimeYear, _Test4dYear):
pass
class TestStrftime4dyear(_TestStrftimeYear, _Test4dYear):
pass
-class Test2dyearBool(_TestAsctimeYear, _Test2dYear):
- accept2dyear = True
-class Test4dyearBool(_TestAsctimeYear, _Test4dYear):
- accept2dyear = False
+class TestPytime(unittest.TestCase):
+ def setUp(self):
+ self.invalid_values = (
+ -(2 ** 100), 2 ** 100,
+ -(2.0 ** 100.0), 2.0 ** 100.0,
+ )
+
+ def test_time_t(self):
+ from _testcapi import pytime_object_to_time_t
+ for obj, time_t in (
+ (0, 0),
+ (-1, -1),
+ (-1.0, -1),
+ (-1.9, -1),
+ (1.0, 1),
+ (1.9, 1),
+ ):
+ self.assertEqual(pytime_object_to_time_t(obj), time_t)
+
+ for invalid in self.invalid_values:
+ self.assertRaises(OverflowError, pytime_object_to_time_t, invalid)
+
+ def test_timeval(self):
+ from _testcapi import pytime_object_to_timeval
+ for obj, timeval in (
+ (0, (0, 0)),
+ (-1, (-1, 0)),
+ (-1.0, (-1, 0)),
+ (1e-6, (0, 1)),
+ (-1e-6, (-1, 999999)),
+ (-1.2, (-2, 800000)),
+ (1.1234560, (1, 123456)),
+ (1.1234569, (1, 123456)),
+ (-1.1234560, (-2, 876544)),
+ (-1.1234561, (-2, 876543)),
+ ):
+ self.assertEqual(pytime_object_to_timeval(obj), timeval)
+
+ for invalid in self.invalid_values:
+ self.assertRaises(OverflowError, pytime_object_to_timeval, invalid)
+
+ def test_timespec(self):
+ from _testcapi import pytime_object_to_timespec
+ for obj, timespec in (
+ (0, (0, 0)),
+ (-1, (-1, 0)),
+ (-1.0, (-1, 0)),
+ (1e-9, (0, 1)),
+ (-1e-9, (-1, 999999999)),
+ (-1.2, (-2, 800000000)),
+ (1.1234567890, (1, 123456789)),
+ (1.1234567899, (1, 123456789)),
+ (-1.1234567890, (-2, 876543211)),
+ (-1.1234567891, (-2, 876543210)),
+ ):
+ self.assertEqual(pytime_object_to_timespec(obj), timespec)
+
+ for invalid in self.invalid_values:
+ self.assertRaises(OverflowError, pytime_object_to_timespec, invalid)
-class TestAccept2YearBad(_TestAsctimeYear, _BaseYearTest):
- class X:
- def __bool__(self):
- raise RuntimeError('boo')
- accept2dyear = X()
- def test_2dyear(self):
- pass
- def test_invalid(self):
- self.assertRaises(RuntimeError, self.yearstr, 200)
def test_main():
support.run_unittest(
TimeTestCase,
TestLocale,
- TestAsctimeAccept2dYear,
- TestStrftimeAccept2dYear,
TestAsctime4dyear,
TestStrftime4dyear,
- Test2dyearBool,
- Test4dyearBool,
- TestAccept2YearBad)
+ TestPytime)
if __name__ == "__main__":
test_main()
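The (-1.2, (-2, 800000)) style expectations in TestPytime encode round-toward-negative-infinity behavior: the seconds field is floor(x) and the sub-second field is the non-negative remainder, truncated. A sketch of the timeval case under that assumption (the real conversion is done in C):

    import math

    def to_timeval(x):
        sec = math.floor(x)              # rounds toward negative infinity
        usec = int((x - sec) * 10**6)    # remainder in [0, 1), truncated
        return (sec, usec)

    assert to_timeval(-1.2) == (-2, 800000)
    assert to_timeval(1.1234569) == (1, 123456)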
diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py
index 9e9656c..db87e11 100644
--- a/Lib/test/test_tokenize.py
+++ b/Lib/test/test_tokenize.py
@@ -563,15 +563,28 @@ Non-ascii identifiers
NAME 'grün' (2, 0) (2, 4)
OP '=' (2, 5) (2, 6)
STRING "'green'" (2, 7) (2, 14)
+
+Legacy unicode literals:
+
+ >>> dump_tokens("Örter = u'places'\\ngrün = UR'green'")
+ ENCODING 'utf-8' (0, 0) (0, 0)
+ NAME 'Örter' (1, 0) (1, 5)
+ OP '=' (1, 6) (1, 7)
+ STRING "u'places'" (1, 8) (1, 17)
+ NEWLINE '\\n' (1, 17) (1, 18)
+ NAME 'grün' (2, 0) (2, 4)
+ OP '=' (2, 5) (2, 6)
+ STRING "UR'green'" (2, 7) (2, 16)
"""
from test import support
from tokenize import (tokenize, _tokenize, untokenize, NUMBER, NAME, OP,
- STRING, ENDMARKER, tok_name, detect_encoding,
+ STRING, ENDMARKER, ENCODING, tok_name, detect_encoding,
open as tokenize_open)
from io import BytesIO
from unittest import TestCase
import os, sys, glob
+import token
def dump_tokens(s):
"""Print out the tokens in s in a table format.
@@ -600,7 +613,7 @@ def roundtrip(f):
f.close()
tokens1 = [tok[:2] for tok in token_list]
new_bytes = untokenize(tokens1)
- readline = (line for line in new_bytes.splitlines(1)).__next__
+ readline = (line for line in new_bytes.splitlines(keepends=True)).__next__
tokens2 = [tok[:2] for tok in tokenize(readline)]
return tokens1 == tokens2
@@ -922,6 +935,78 @@ class TestTokenize(TestCase):
self.assertTrue(encoding_used, encoding)
+ def assertExactTypeEqual(self, opstr, *optypes):
+ tokens = list(tokenize(BytesIO(opstr.encode('utf-8')).readline))
+ num_optypes = len(optypes)
+ self.assertEqual(len(tokens), 2 + num_optypes)
+ self.assertEqual(token.tok_name[tokens[0].exact_type],
+ token.tok_name[ENCODING])
+ for i in range(num_optypes):
+ self.assertEqual(token.tok_name[tokens[i + 1].exact_type],
+ token.tok_name[optypes[i]])
+ self.assertEqual(token.tok_name[tokens[1 + num_optypes].exact_type],
+ token.tok_name[token.ENDMARKER])
+
+ def test_exact_type(self):
+ self.assertExactTypeEqual('()', token.LPAR, token.RPAR)
+ self.assertExactTypeEqual('[]', token.LSQB, token.RSQB)
+ self.assertExactTypeEqual(':', token.COLON)
+ self.assertExactTypeEqual(',', token.COMMA)
+ self.assertExactTypeEqual(';', token.SEMI)
+ self.assertExactTypeEqual('+', token.PLUS)
+ self.assertExactTypeEqual('-', token.MINUS)
+ self.assertExactTypeEqual('*', token.STAR)
+ self.assertExactTypeEqual('/', token.SLASH)
+ self.assertExactTypeEqual('|', token.VBAR)
+ self.assertExactTypeEqual('&', token.AMPER)
+ self.assertExactTypeEqual('<', token.LESS)
+ self.assertExactTypeEqual('>', token.GREATER)
+ self.assertExactTypeEqual('=', token.EQUAL)
+ self.assertExactTypeEqual('.', token.DOT)
+ self.assertExactTypeEqual('%', token.PERCENT)
+ self.assertExactTypeEqual('{}', token.LBRACE, token.RBRACE)
+ self.assertExactTypeEqual('==', token.EQEQUAL)
+ self.assertExactTypeEqual('!=', token.NOTEQUAL)
+ self.assertExactTypeEqual('<=', token.LESSEQUAL)
+ self.assertExactTypeEqual('>=', token.GREATEREQUAL)
+ self.assertExactTypeEqual('~', token.TILDE)
+ self.assertExactTypeEqual('^', token.CIRCUMFLEX)
+ self.assertExactTypeEqual('<<', token.LEFTSHIFT)
+ self.assertExactTypeEqual('>>', token.RIGHTSHIFT)
+ self.assertExactTypeEqual('**', token.DOUBLESTAR)
+ self.assertExactTypeEqual('+=', token.PLUSEQUAL)
+ self.assertExactTypeEqual('-=', token.MINEQUAL)
+ self.assertExactTypeEqual('*=', token.STAREQUAL)
+ self.assertExactTypeEqual('/=', token.SLASHEQUAL)
+ self.assertExactTypeEqual('%=', token.PERCENTEQUAL)
+ self.assertExactTypeEqual('&=', token.AMPEREQUAL)
+ self.assertExactTypeEqual('|=', token.VBAREQUAL)
+ self.assertExactTypeEqual('^=', token.CIRCUMFLEXEQUAL)
+ self.assertExactTypeEqual('^=', token.CIRCUMFLEXEQUAL)
+ self.assertExactTypeEqual('<<=', token.LEFTSHIFTEQUAL)
+ self.assertExactTypeEqual('>>=', token.RIGHTSHIFTEQUAL)
+ self.assertExactTypeEqual('**=', token.DOUBLESTAREQUAL)
+ self.assertExactTypeEqual('//', token.DOUBLESLASH)
+ self.assertExactTypeEqual('//=', token.DOUBLESLASHEQUAL)
+ self.assertExactTypeEqual('@', token.AT)
+
+ self.assertExactTypeEqual('a**2+b**2==c**2',
+ NAME, token.DOUBLESTAR, NUMBER,
+ token.PLUS,
+ NAME, token.DOUBLESTAR, NUMBER,
+ token.EQEQUAL,
+ NAME, token.DOUBLESTAR, NUMBER)
+ self.assertExactTypeEqual('{1, 2, 3}',
+ token.LBRACE,
+ token.NUMBER, token.COMMA,
+ token.NUMBER, token.COMMA,
+ token.NUMBER,
+ token.RBRACE)
+ self.assertExactTypeEqual('^(x & 0x1)',
+ token.CIRCUMFLEX,
+ token.LPAR,
+ token.NAME, token.AMPER, token.NUMBER,
+ token.RPAR)
__test__ = {"doctests" : doctests, 'decistmt': decistmt}
diff --git a/Lib/test/test_trace.py b/Lib/test/test_trace.py
index 461d1d8..d9bef38 100644
--- a/Lib/test/test_trace.py
+++ b/Lib/test/test_trace.py
@@ -102,6 +102,7 @@ class TracedClass(object):
class TestLineCounts(unittest.TestCase):
"""White-box testing of line-counting, via runfunc"""
def setUp(self):
+ self.addCleanup(sys.settrace, sys.gettrace())
self.tracer = Trace(count=1, trace=0, countfuncs=0, countcallers=0)
self.my_py_filename = fix_ext_py(__file__)
@@ -192,6 +193,7 @@ class TestRunExecCounts(unittest.TestCase):
"""A simple sanity test of line-counting, via runctx (exec)"""
def setUp(self):
self.my_py_filename = fix_ext_py(__file__)
+ self.addCleanup(sys.settrace, sys.gettrace())
def test_exec_counts(self):
self.tracer = Trace(count=1, trace=0, countfuncs=0, countcallers=0)
@@ -218,6 +220,7 @@ class TestRunExecCounts(unittest.TestCase):
class TestFuncs(unittest.TestCase):
"""White-box testing of funcs tracing"""
def setUp(self):
+ self.addCleanup(sys.settrace, sys.gettrace())
self.tracer = Trace(count=0, trace=0, countfuncs=1)
self.filemod = my_file_and_modname()
@@ -242,6 +245,8 @@ class TestFuncs(unittest.TestCase):
}
self.assertEqual(self.tracer.results().calledfuncs, expected)
+ @unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(),
+ 'pre-existing trace function throws off measurements')
def test_inst_method_calling(self):
obj = TracedClass(20)
self.tracer.runfunc(obj.inst_method_calling, 1)
@@ -257,9 +262,12 @@ class TestFuncs(unittest.TestCase):
class TestCallers(unittest.TestCase):
"""White-box testing of callers tracing"""
def setUp(self):
+ self.addCleanup(sys.settrace, sys.gettrace())
self.tracer = Trace(count=0, trace=0, countcallers=1)
self.filemod = my_file_and_modname()
+ @unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(),
+ 'pre-existing trace function throws off measurements')
def test_loop_caller_importing(self):
self.tracer.runfunc(traced_func_importing_caller, 1)
@@ -280,6 +288,9 @@ class TestCallers(unittest.TestCase):
# Created separately for issue #3821
class TestCoverage(unittest.TestCase):
+ def setUp(self):
+ self.addCleanup(sys.settrace, sys.gettrace())
+
def tearDown(self):
rmtree(TESTFN)
unlink(TESTFN)
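The recurring setUp addition is the standard pattern for tests that install a trace function: capture whatever sys.gettrace() currently returns and register its restoration, so a failing test cannot leave its tracer installed. The pattern in isolation:

    import sys
    import unittest

    class TracingTest(unittest.TestCase):
        def setUp(self):
            # restore the pre-existing trace function on teardown,
            # even if the test body raises
            self.addCleanup(sys.settrace, sys.gettrace())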
diff --git a/Lib/test/test_traceback.py b/Lib/test/test_traceback.py
index 4752d37..5bce2af 100644
--- a/Lib/test/test_traceback.py
+++ b/Lib/test/test_traceback.py
@@ -246,6 +246,21 @@ class BaseExceptionReportingTests:
self.check_zero_div(blocks[0])
self.assertIn('inner_raise() # Marker', blocks[2])
+ def test_context_suppression(self):
+ try:
+ try:
+ raise Exception
+ except:
+ raise ZeroDivisionError from None
+ except ZeroDivisionError as _:
+ e = _
+ lines = self.get_report(e).splitlines()
+ self.assertEqual(len(lines), 4)
+ self.assertTrue(lines[0].startswith('Traceback'))
+ self.assertTrue(lines[1].startswith(' File'))
+ self.assertIn('ZeroDivisionError from None', lines[2])
+ self.assertTrue(lines[3].startswith('ZeroDivisionError'))
+
def test_cause_and_context(self):
# When both a cause and a context are set, only the cause should be
# displayed and the context should be muted.
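test_context_suppression covers PEP 409: raise ... from None suppresses the implicit exception context, so the report shows only the final traceback instead of chaining in the original error. For example:

    import traceback

    try:
        try:
            raise KeyError('inner')
        except KeyError:
            raise ZeroDivisionError from None
    except ZeroDivisionError:
        traceback.print_exc()   # no "During handling of ..." chain is printed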
diff --git a/Lib/test/test_ucn.py b/Lib/test/test_ucn.py
index fd620f0..68a3219 100644
--- a/Lib/test/test_ucn.py
+++ b/Lib/test/test_ucn.py
@@ -8,8 +8,11 @@ Modified for Python 2.0 by Fredrik Lundh (fredrik@pythonware.com)
"""#"
import unittest
+import unicodedata
from test import support
+from http.client import HTTPException
+from test.test_normalization import check_version
class UnicodeNamesTest(unittest.TestCase):
@@ -59,8 +62,6 @@ class UnicodeNamesTest(unittest.TestCase):
)
def test_ascii_letters(self):
- import unicodedata
-
for char in "".join(map(chr, range(ord("a"), ord("z")))):
name = "LATIN SMALL LETTER %s" % char.upper()
code = unicodedata.lookup(name)
@@ -81,7 +82,6 @@ class UnicodeNamesTest(unittest.TestCase):
self.checkletter("HANGUL SYLLABLE HWEOK", "\ud6f8")
self.checkletter("HANGUL SYLLABLE HIH", "\ud7a3")
- import unicodedata
self.assertRaises(ValueError, unicodedata.name, "\ud7a4")
def test_cjk_unified_ideographs(self):
@@ -97,14 +97,11 @@ class UnicodeNamesTest(unittest.TestCase):
self.checkletter("CJK UNIFIED IDEOGRAPH-2B81D", "\U0002B81D")
def test_bmp_characters(self):
- import unicodedata
- count = 0
for code in range(0x10000):
char = chr(code)
name = unicodedata.name(char, None)
if name is not None:
self.assertEqual(unicodedata.lookup(name), char)
- count += 1
def test_misc_symbols(self):
self.checkletter("PILCROW SIGN", "\u00b6")
@@ -112,8 +109,85 @@ class UnicodeNamesTest(unittest.TestCase):
self.checkletter("HALFWIDTH KATAKANA SEMI-VOICED SOUND MARK", "\uFF9F")
self.checkletter("FULLWIDTH LATIN SMALL LETTER A", "\uFF41")
+ def test_aliases(self):
+ # Check that the aliases defined in the NameAliases.txt file work.
+ # This should be updated when new aliases are added or the file
+ # should be downloaded and parsed instead. See #12753.
+ aliases = [
+ ('LATIN CAPITAL LETTER GHA', 0x01A2),
+ ('LATIN SMALL LETTER GHA', 0x01A3),
+ ('KANNADA LETTER LLLA', 0x0CDE),
+ ('LAO LETTER FO FON', 0x0E9D),
+ ('LAO LETTER FO FAY', 0x0E9F),
+ ('LAO LETTER RO', 0x0EA3),
+ ('LAO LETTER LO', 0x0EA5),
+ ('TIBETAN MARK BKA- SHOG GI MGO RGYAN', 0x0FD0),
+ ('YI SYLLABLE ITERATION MARK', 0xA015),
+ ('PRESENTATION FORM FOR VERTICAL RIGHT WHITE LENTICULAR BRACKET', 0xFE18),
+ ('BYZANTINE MUSICAL SYMBOL FTHORA SKLIRON CHROMA VASIS', 0x1D0C5)
+ ]
+ for alias, codepoint in aliases:
+ self.checkletter(alias, chr(codepoint))
+ name = unicodedata.name(chr(codepoint))
+ self.assertNotEqual(name, alias)
+ self.assertEqual(unicodedata.lookup(alias),
+ unicodedata.lookup(name))
+ with self.assertRaises(KeyError):
+ unicodedata.ucd_3_2_0.lookup(alias)
+
+ def test_aliases_names_in_pua_range(self):
+ # Aliases are stored in PUA plane 15, but their names shouldn't leak
+ for cp in range(0xf0000, 0xf0100):
+ with self.assertRaises(ValueError) as cm:
+ unicodedata.name(chr(cp))
+ self.assertEqual(str(cm.exception), 'no such name')
+
+ def test_named_sequences_names_in_pua_range(self):
+ # Named sequences are stored in PUA plane 15, but their names shouldn't leak
+ for cp in range(0xf0100, 0xf0fff):
+ with self.assertRaises(ValueError) as cm:
+ unicodedata.name(chr(cp))
+ self.assertEqual(str(cm.exception), 'no such name')
+
+ def test_named_sequences_sample(self):
+ # Check a few named sequences. See #12753.
+ sequences = [
+ ('LATIN SMALL LETTER R WITH TILDE', '\u0072\u0303'),
+ ('TAMIL SYLLABLE SAI', '\u0BB8\u0BC8'),
+ ('TAMIL SYLLABLE MOO', '\u0BAE\u0BCB'),
+ ('TAMIL SYLLABLE NNOO', '\u0BA3\u0BCB'),
+ ('TAMIL CONSONANT KSS', '\u0B95\u0BCD\u0BB7\u0BCD'),
+ ]
+ for seqname, codepoints in sequences:
+ self.assertEqual(unicodedata.lookup(seqname), codepoints)
+ with self.assertRaises(SyntaxError):
+ self.checkletter(seqname, None)
+ with self.assertRaises(KeyError):
+ unicodedata.ucd_3_2_0.lookup(seqname)
+
+ def test_named_sequences_full(self):
+ # Check all the named sequences
+ url = ("http://www.unicode.org/Public/%s/ucd/NamedSequences.txt" %
+ unicodedata.unidata_version)
+ try:
+ testdata = support.open_urlresource(url, encoding="utf-8",
+ check=check_version)
+ except (IOError, HTTPException):
+ self.skipTest("Could not retrieve " + url)
+ self.addCleanup(testdata.close)
+ for line in testdata:
+ line = line.strip()
+ if not line or line.startswith('#'):
+ continue
+ seqname, codepoints = line.split(';')
+ codepoints = ''.join(chr(int(cp, 16)) for cp in codepoints.split())
+ self.assertEqual(unicodedata.lookup(seqname), codepoints)
+ with self.assertRaises(SyntaxError):
+ self.checkletter(seqname, None)
+ with self.assertRaises(KeyError):
+ unicodedata.ucd_3_2_0.lookup(seqname)
+
def test_errors(self):
- import unicodedata
self.assertRaises(TypeError, unicodedata.name)
self.assertRaises(TypeError, unicodedata.name, 'xx')
self.assertRaises(TypeError, unicodedata.lookup)
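With the new alias and named-sequence support, unicodedata.lookup() resolves both, while name() keeps returning the canonical name and the 3.2.0 snapshot stays alias-free. For instance:

    import unicodedata

    print(ascii(unicodedata.lookup('LATIN SMALL LETTER GHA')))          # '\u01a3'
    print(unicodedata.name('\u01a3'))   # canonical name, not the alias
    print(ascii(unicodedata.lookup('LATIN SMALL LETTER R WITH TILDE'))) # 'r\u0303'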
diff --git a/Lib/test/test_unicode.py b/Lib/test/test_unicode.py
index 19b06a0..813d59f 100644
--- a/Lib/test/test_unicode.py
+++ b/Lib/test/test_unicode.py
@@ -5,17 +5,13 @@ Written by Marc-Andre Lemburg (mal@lemburg.com).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
"""#"
+import _string
import codecs
import struct
import sys
import unittest
import warnings
from test import support, string_tests
-import _string
-
-# decorator to skip tests on narrow builds
-requires_wide_build = unittest.skipIf(sys.maxunicode == 65535,
- 'requires wide build')
# Error handling (bad decoder return)
def search_function(encoding):
@@ -175,6 +171,15 @@ class UnicodeTest(string_tests.CommonTest,
def test_find(self):
string_tests.CommonTest.test_find(self)
+ # test implementation details of the memchr fast path
+ self.checkequal(100, 'a' * 100 + '\u0102', 'find', '\u0102')
+ self.checkequal(-1, 'a' * 100 + '\u0102', 'find', '\u0201')
+ self.checkequal(-1, 'a' * 100 + '\u0102', 'find', '\u0120')
+ self.checkequal(-1, 'a' * 100 + '\u0102', 'find', '\u0220')
+ self.checkequal(100, 'a' * 100 + '\U00100304', 'find', '\U00100304')
+ self.checkequal(-1, 'a' * 100 + '\U00100304', 'find', '\U00100204')
+ self.checkequal(-1, 'a' * 100 + '\U00100304', 'find', '\U00102004')
+ # check mixed argument types
self.checkequalnofix(0, 'abcdefghiabc', 'find', 'abc')
self.checkequalnofix(9, 'abcdefghiabc', 'find', 'abc', 1)
self.checkequalnofix(-1, 'abcdefghiabc', 'find', 'def', 4)
@@ -184,6 +189,14 @@ class UnicodeTest(string_tests.CommonTest,
def test_rfind(self):
string_tests.CommonTest.test_rfind(self)
+ # test implementation details of the memrchr fast path
+ self.checkequal(0, '\u0102' + 'a' * 100 , 'rfind', '\u0102')
+ self.checkequal(-1, '\u0102' + 'a' * 100 , 'rfind', '\u0201')
+ self.checkequal(-1, '\u0102' + 'a' * 100 , 'rfind', '\u0120')
+ self.checkequal(-1, '\u0102' + 'a' * 100 , 'rfind', '\u0220')
+ self.checkequal(0, '\U00100304' + 'a' * 100, 'rfind', '\U00100304')
+ self.checkequal(-1, '\U00100304' + 'a' * 100, 'rfind', '\U00100204')
+ self.checkequal(-1, '\U00100304' + 'a' * 100, 'rfind', '\U00102004')
# check mixed argument types
self.checkequalnofix(9, 'abcdefghiabc', 'rfind', 'abc')
self.checkequalnofix(12, 'abcdefghiabc', 'rfind', '')
@@ -280,6 +293,12 @@ class UnicodeTest(string_tests.CommonTest,
self.checkequalnofix('one@two!three!', 'one!two!three!', 'replace', '!', '@', 1)
self.assertRaises(TypeError, 'replace'.replace, "r", 42)
+ @support.cpython_only
+ def test_replace_id(self):
+ pattern = 'abc'
+ text = 'abc def'
+ self.assertIs(text.replace(pattern, pattern), text)
+
def test_bytes_comparison(self):
with support.check_warnings():
warnings.simplefilter('ignore', BytesWarning)
@@ -350,6 +369,8 @@ class UnicodeTest(string_tests.CommonTest,
def test_islower(self):
string_tests.MixinStrUnicodeUserStringTest.test_islower(self)
self.checkequalnofix(False, '\u1FFc', 'islower')
+ self.assertFalse('\u2167'.islower())
+ self.assertTrue('\u2177'.islower())
# non-BMP, uppercase
self.assertFalse('\U00010401'.islower())
self.assertFalse('\U00010427'.islower())
@@ -364,6 +385,8 @@ class UnicodeTest(string_tests.CommonTest,
string_tests.MixinStrUnicodeUserStringTest.test_isupper(self)
if not sys.platform.startswith('java'):
self.checkequalnofix(False, '\u1FFc', 'isupper')
+ self.assertTrue('\u2167'.isupper())
+ self.assertFalse('\u2177'.isupper())
# non-BMP, uppercase
self.assertTrue('\U00010401'.isupper())
self.assertTrue('\U00010427'.isupper())
@@ -520,7 +543,6 @@ class UnicodeTest(string_tests.CommonTest,
self.assertFalse(meth(s), '%a.%s() is False' % (s, meth_name))
- @requires_wide_build
def test_lower(self):
string_tests.CommonTest.test_lower(self)
self.assertEqual('\U00010427'.lower(), '\U0001044F')
@@ -530,8 +552,27 @@ class UnicodeTest(string_tests.CommonTest,
'\U0001044F\U0001044F')
self.assertEqual('X\U00010427x\U0001044F'.lower(),
'x\U0001044Fx\U0001044F')
+ self.assertEqual('ﬁ'.lower(), 'ﬁ')
+ self.assertEqual('\u0130'.lower(), '\u0069\u0307')
+ # Special case for GREEK CAPITAL LETTER SIGMA U+03A3
+ self.assertEqual('\u03a3'.lower(), '\u03c3')
+ self.assertEqual('\u0345\u03a3'.lower(), '\u0345\u03c3')
+ self.assertEqual('A\u0345\u03a3'.lower(), 'a\u0345\u03c2')
+ self.assertEqual('A\u0345\u03a3a'.lower(), 'a\u0345\u03c3a')
+ self.assertEqual('A\u0345\u03a3'.lower(), 'a\u0345\u03c2')
+ self.assertEqual('A\u03a3\u0345'.lower(), 'a\u03c2\u0345')
+ self.assertEqual('\u03a3\u0345 '.lower(), '\u03c3\u0345 ')
+ self.assertEqual('\U0008fffe'.lower(), '\U0008fffe')
+ self.assertEqual('\u2177'.lower(), '\u2177')
+
+ def test_casefold(self):
+ self.assertEqual('hello'.casefold(), 'hello')
+ self.assertEqual('hELlo'.casefold(), 'hello')
+ self.assertEqual('ß'.casefold(), 'ss')
+ self.assertEqual('ﬁ'.casefold(), 'fi')
+ self.assertEqual('\u03a3'.casefold(), '\u03c3')
+ self.assertEqual('A\u0345\u03a3'.casefold(), 'a\u03b9\u03c3')
- @requires_wide_build
def test_upper(self):
string_tests.CommonTest.test_upper(self)
self.assertEqual('\U0001044F'.upper(), '\U00010427')
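str.casefold() (new in 3.3) applies full Unicode case folding, which is more aggressive than lower() and meant for caseless matching:

    assert 'ß'.lower() == 'ß'        # lower() leaves sharp s alone
    assert 'ß'.casefold() == 'ss'    # casefold() expands it
    assert 'Straße'.casefold() == 'strasse'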
@@ -541,8 +582,14 @@ class UnicodeTest(string_tests.CommonTest,
'\U00010427\U00010427')
self.assertEqual('X\U00010427x\U0001044F'.upper(),
'X\U00010427X\U00010427')
+ self.assertEqual('ﬁ'.upper(), 'FI')
+ self.assertEqual('\u0130'.upper(), '\u0130')
+ self.assertEqual('\u03a3'.upper(), '\u03a3')
+ self.assertEqual('ß'.upper(), 'SS')
+ self.assertEqual('\u1fd2'.upper(), '\u0399\u0308\u0300')
+ self.assertEqual('\U0008fffe'.upper(), '\U0008fffe')
+ self.assertEqual('\u2177'.upper(), '\u2167')
- @requires_wide_build
def test_capitalize(self):
string_tests.CommonTest.test_capitalize(self)
self.assertEqual('\U0001044F'.capitalize(), '\U00010427')
@@ -554,8 +601,12 @@ class UnicodeTest(string_tests.CommonTest,
'\U00010427\U0001044F')
self.assertEqual('X\U00010427x\U0001044F'.capitalize(),
'X\U0001044Fx\U0001044F')
+ self.assertEqual('h\u0130'.capitalize(), 'H\u0069\u0307')
+ exp = '\u0399\u0308\u0300\u0069\u0307'
+ self.assertEqual('\u1fd2\u0130'.capitalize(), exp)
+ self.assertEqual('ﬁnnish'.capitalize(), 'FInnish')
+ self.assertEqual('A\u0345\u03a3'.capitalize(), 'A\u0345\u03c2')
- @requires_wide_build
def test_title(self):
string_tests.MixinStrUnicodeUserStringTest.test_title(self)
self.assertEqual('\U0001044F'.title(), '\U00010427')
@@ -569,8 +620,10 @@ class UnicodeTest(string_tests.CommonTest,
'\U00010427\U0001044F \U00010427\U0001044F')
self.assertEqual('X\U00010427x\U0001044F X\U00010427x\U0001044F'.title(),
'X\U0001044Fx\U0001044F X\U0001044Fx\U0001044F')
+ self.assertEqual('ﬁNNISH'.title(), 'Finnish')
+ self.assertEqual('A\u03a3 \u1fa1xy'.title(), 'A\u03c2 \u1fa9xy')
+ self.assertEqual('A\u03a3A'.title(), 'A\u03c3a')
- @requires_wide_build
def test_swapcase(self):
string_tests.CommonTest.test_swapcase(self)
self.assertEqual('\U0001044F'.swapcase(), '\U00010427')
@@ -583,6 +636,19 @@ class UnicodeTest(string_tests.CommonTest,
'\U00010427\U0001044F')
self.assertEqual('X\U00010427x\U0001044F'.swapcase(),
'x\U0001044FX\U00010427')
+ self.assertEqual('ﬁ'.swapcase(), 'FI')
+ self.assertEqual('\u0130'.swapcase(), '\u0069\u0307')
+ # Special case for GREEK CAPITAL LETTER SIGMA U+03A3
+ self.assertEqual('\u03a3'.swapcase(), '\u03c3')
+ self.assertEqual('\u0345\u03a3'.swapcase(), '\u0399\u03c3')
+ self.assertEqual('A\u0345\u03a3'.swapcase(), 'a\u0399\u03c2')
+ self.assertEqual('A\u0345\u03a3a'.swapcase(), 'a\u0399\u03c3A')
+ self.assertEqual('A\u0345\u03a3'.swapcase(), 'a\u0399\u03c2')
+ self.assertEqual('A\u03a3\u0345'.swapcase(), 'a\u03c2\u0399')
+ self.assertEqual('\u03a3\u0345 '.swapcase(), '\u03c3\u0399 ')
+ self.assertEqual('\u03a3'.swapcase(), '\u03c3')
+ self.assertEqual('ß'.swapcase(), 'SS')
+ self.assertEqual('\u1fd2'.swapcase(), '\u0399\u0308\u0300')
def test_contains(self):
# Testing Unicode contains method
@@ -776,7 +842,7 @@ class UnicodeTest(string_tests.CommonTest,
self.assertEqual('{0!s}'.format(G('data')), 'string is data')
msg = 'object.__format__ with a non-empty format string is deprecated'
- with support.check_warnings((msg, PendingDeprecationWarning)):
+ with support.check_warnings((msg, DeprecationWarning)):
self.assertEqual('{0:^10}'.format(E('data')), ' E(data) ')
self.assertEqual('{0:^10s}'.format(E('data')), ' E(data) ')
self.assertEqual('{0:>15s}'.format(G('data')), ' string is data')
@@ -1014,10 +1080,13 @@ class UnicodeTest(string_tests.CommonTest,
class UnicodeSubclass(str):
pass
- self.assertEqual(
- str(UnicodeSubclass('unicode subclass becomes unicode')),
- 'unicode subclass becomes unicode'
- )
+ for text in ('ascii', '\xe9', '\u20ac', '\U0010FFFF'):
+ subclass = UnicodeSubclass(text)
+ self.assertEqual(str(subclass), text)
+ self.assertEqual(len(subclass), len(text))
+ if text == 'ascii':
+ self.assertEqual(subclass.encode('ascii'), b'ascii')
+ self.assertEqual(subclass.encode('utf-8'), b'ascii')
self.assertEqual(
str('strings are converted to unicode'),
@@ -1123,15 +1192,12 @@ class UnicodeTest(string_tests.CommonTest,
def test_codecs_utf8(self):
self.assertEqual(''.encode('utf-8'), b'')
self.assertEqual('\u20ac'.encode('utf-8'), b'\xe2\x82\xac')
- if sys.maxunicode == 65535:
- self.assertEqual('\ud800\udc02'.encode('utf-8'), b'\xf0\x90\x80\x82')
- self.assertEqual('\ud84d\udc56'.encode('utf-8'), b'\xf0\xa3\x91\x96')
+ self.assertEqual('\U00010002'.encode('utf-8'), b'\xf0\x90\x80\x82')
+ self.assertEqual('\U00023456'.encode('utf-8'), b'\xf0\xa3\x91\x96')
self.assertEqual('\ud800'.encode('utf-8', 'surrogatepass'), b'\xed\xa0\x80')
self.assertEqual('\udc00'.encode('utf-8', 'surrogatepass'), b'\xed\xb0\x80')
- if sys.maxunicode == 65535:
- self.assertEqual(
- ('\ud800\udc02'*1000).encode('utf-8'),
- b'\xf0\x90\x80\x82'*1000)
+ self.assertEqual(('\U00010002'*10).encode('utf-8'),
+ b'\xf0\x90\x80\x82'*10)
self.assertEqual(
'\u6b63\u78ba\u306b\u8a00\u3046\u3068\u7ffb\u8a33\u306f'
'\u3055\u308c\u3066\u3044\u307e\u305b\u3093\u3002\u4e00'
@@ -1369,18 +1435,25 @@ class UnicodeTest(string_tests.CommonTest,
self.assertEqual('hello'.encode('ascii'), b'hello')
self.assertEqual('hello'.encode('utf-7'), b'hello')
self.assertEqual('hello'.encode('utf-8'), b'hello')
- self.assertEqual('hello'.encode('utf8'), b'hello')
+ self.assertEqual('hello'.encode('utf-8'), b'hello')
self.assertEqual('hello'.encode('utf-16-le'), b'h\000e\000l\000l\000o\000')
self.assertEqual('hello'.encode('utf-16-be'), b'\000h\000e\000l\000l\000o')
self.assertEqual('hello'.encode('latin-1'), b'hello')
+ # Default encoding is utf-8
+ self.assertEqual('\u2603'.encode(), b'\xe2\x98\x83')
+
# Roundtrip safety for BMP (just the first 1024 chars)
for c in range(1024):
u = chr(c)
for encoding in ('utf-7', 'utf-8', 'utf-16', 'utf-16-le',
'utf-16-be', 'raw_unicode_escape',
'unicode_escape', 'unicode_internal'):
- self.assertEqual(str(u.encode(encoding),encoding), u)
+ with warnings.catch_warnings():
+ # unicode-internal has been deprecated
+ warnings.simplefilter("ignore", DeprecationWarning)
+
+ self.assertEqual(str(u.encode(encoding),encoding), u)
# Roundtrip safety for BMP (just the first 256 chars)
for c in range(256):
@@ -1395,18 +1468,20 @@ class UnicodeTest(string_tests.CommonTest,
self.assertEqual(str(u.encode(encoding),encoding), u)
# Roundtrip safety for non-BMP (just a few chars)
- u = '\U00010001\U00020002\U00030003\U00040004\U00050005'
- for encoding in ('utf-8', 'utf-16', 'utf-16-le', 'utf-16-be',
- #'raw_unicode_escape',
- 'unicode_escape', 'unicode_internal'):
- self.assertEqual(str(u.encode(encoding),encoding), u)
+ with warnings.catch_warnings():
+ # unicode-internal has been deprecated
+ warnings.simplefilter("ignore", DeprecationWarning)
- # UTF-8 must be roundtrip safe for all UCS-2 code points
- # This excludes surrogates: in the full range, there would be
- # a surrogate pair (\udbff\udc00), which gets converted back
- # to a non-BMP character (\U0010fc00)
- u = ''.join(map(chr, list(range(0,0xd800)) +
- list(range(0xe000,0x10000))))
+ u = '\U00010001\U00020002\U00030003\U00040004\U00050005'
+ for encoding in ('utf-8', 'utf-16', 'utf-16-le', 'utf-16-be',
+ 'raw_unicode_escape',
+ 'unicode_escape', 'unicode_internal'):
+ self.assertEqual(str(u.encode(encoding),encoding), u)
+
+ # UTF-8 must be roundtrip safe for all code points
+ # (except surrogates, which are forbidden).
+ u = ''.join(map(chr, list(range(0, 0xd800)) +
+ list(range(0xe000, 0x110000))))
for encoding in ('utf-8',):
self.assertEqual(str(u.encode(encoding),encoding), u)
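Two points the updated encoder tests rely on: encode() without arguments now means UTF-8, and lone surrogates are rejected by strict UTF-8 but can be forced through with the 'surrogatepass' handler:

    assert '\u2603'.encode() == b'\xe2\x98\x83'   # default encoding is UTF-8

    try:
        '\ud800'.encode('utf-8')                  # lone surrogate: rejected
    except UnicodeEncodeError:
        pass

    assert '\ud800'.encode('utf-8', 'surrogatepass') == b'\xed\xa0\x80'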
@@ -1591,17 +1666,39 @@ class UnicodeTest(string_tests.CommonTest,
return
self.assertRaises(OverflowError, 't\tt\t'.expandtabs, sys.maxsize)
+ @support.cpython_only
+ def test_expandtabs_optimization(self):
+ s = 'abc'
+ self.assertIs(s.expandtabs(), s)
+
def test_raiseMemError(self):
- # Ensure that the freelist contains a consistent object, even
- # when a string allocation fails with a MemoryError.
- # This used to crash the interpreter,
- # or leak references when the number was smaller.
- charwidth = 4 if sys.maxunicode >= 0x10000 else 2
- # Note: sys.maxsize is half of the actual max allocation because of
- # the signedness of Py_ssize_t.
- alloc = lambda: "a" * (sys.maxsize // charwidth * 2)
- self.assertRaises(MemoryError, alloc)
- self.assertRaises(MemoryError, alloc)
+ if struct.calcsize('P') == 8:
+ # 64 bits pointers
+ ascii_struct_size = 48
+ compact_struct_size = 72
+ else:
+ # 32 bits pointers
+ ascii_struct_size = 24
+ compact_struct_size = 36
+
+ for char in ('a', '\xe9', '\u20ac', '\U0010ffff'):
+ code = ord(char)
+ if code < 0x100:
+ char_size = 1 # sizeof(Py_UCS1)
+ struct_size = ascii_struct_size
+ elif code < 0x10000:
+ char_size = 2 # sizeof(Py_UCS2)
+ struct_size = compact_struct_size
+ else:
+ char_size = 4 # sizeof(Py_UCS4)
+ struct_size = compact_struct_size
+ # Note: sys.maxsize is half of the actual max allocation because of
+ # the signedness of Py_ssize_t. Strings of maxlen-1 should in principle
+ # be allocatable, given enough memory.
+ maxlen = ((sys.maxsize - struct_size) // char_size)
+ alloc = lambda: char * maxlen
+ self.assertRaises(MemoryError, alloc)
+ self.assertRaises(MemoryError, alloc)
def test_format_subclass(self):
class S(str):
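The new test_raiseMemError arithmetic reflects the PEP 393 compact string layout: a fixed header plus 1, 2, or 4 bytes per character depending on the widest code point in the string. The effect is visible from Python (header sizes vary by build, so treat the printed numbers as illustrative):

    import sys

    for ch in ('a', '\xe9', '\u20ac', '\U0010ffff'):
        # size grows with the per-character width: 1, 1, 2, then 4 bytes
        print(hex(ord(ch)), sys.getsizeof(ch))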
@@ -1614,11 +1711,10 @@ class UnicodeTest(string_tests.CommonTest,
# Test PyUnicode_FromFormat()
def test_from_format(self):
support.import_module('ctypes')
- from ctypes import pythonapi, py_object, c_int
- if sys.maxunicode == 65535:
- name = "PyUnicodeUCS2_FromFormat"
- else:
- name = "PyUnicodeUCS4_FromFormat"
+ from ctypes import (pythonapi, py_object,
+ c_int, c_long, c_longlong, c_ssize_t,
+ c_uint, c_ulong, c_ulonglong, c_size_t)
+ name = "PyUnicode_FromFormat"
_PyUnicode_FromFormat = getattr(pythonapi, name)
_PyUnicode_FromFormat.restype = py_object
@@ -1639,13 +1735,40 @@ class UnicodeTest(string_tests.CommonTest,
'string, got a non-ASCII byte: 0xe9$',
PyUnicode_FromFormat, b'unicode\xe9=%s', 'ascii')
+ # test "%c"
self.assertEqual(PyUnicode_FromFormat(b'%c', c_int(0xabcd)), '\uabcd')
self.assertEqual(PyUnicode_FromFormat(b'%c', c_int(0x10ffff)), '\U0010ffff')
- # other tests
+ # test "%"
+ self.assertEqual(PyUnicode_FromFormat(b'%'), '%')
+ self.assertEqual(PyUnicode_FromFormat(b'%%'), '%')
+ self.assertEqual(PyUnicode_FromFormat(b'%%s'), '%s')
+ self.assertEqual(PyUnicode_FromFormat(b'[%%]'), '[%]')
+ self.assertEqual(PyUnicode_FromFormat(b'%%%s', b'abc'), '%abc')
+
+ # test integer formats (%i, %d, %u)
+ self.assertEqual(PyUnicode_FromFormat(b'%03i', c_int(10)), '010')
+ self.assertEqual(PyUnicode_FromFormat(b'%0.4i', c_int(10)), '0010')
+ self.assertEqual(PyUnicode_FromFormat(b'%i', c_int(-123)), '-123')
+ self.assertEqual(PyUnicode_FromFormat(b'%li', c_long(-123)), '-123')
+ self.assertEqual(PyUnicode_FromFormat(b'%lli', c_longlong(-123)), '-123')
+ self.assertEqual(PyUnicode_FromFormat(b'%zi', c_ssize_t(-123)), '-123')
+
+ self.assertEqual(PyUnicode_FromFormat(b'%d', c_int(-123)), '-123')
+ self.assertEqual(PyUnicode_FromFormat(b'%ld', c_long(-123)), '-123')
+ self.assertEqual(PyUnicode_FromFormat(b'%lld', c_longlong(-123)), '-123')
+ self.assertEqual(PyUnicode_FromFormat(b'%zd', c_ssize_t(-123)), '-123')
+
+ self.assertEqual(PyUnicode_FromFormat(b'%u', c_uint(123)), '123')
+ self.assertEqual(PyUnicode_FromFormat(b'%lu', c_ulong(123)), '123')
+ self.assertEqual(PyUnicode_FromFormat(b'%llu', c_ulonglong(123)), '123')
+ self.assertEqual(PyUnicode_FromFormat(b'%zu', c_size_t(123)), '123')
+
+ # test %A
text = PyUnicode_FromFormat(b'%%A:%A', 'abc\xe9\uabcd\U0010ffff')
self.assertEqual(text, r"%A:'abc\xe9\uabcd\U0010ffff'")
+ # test %V
text = PyUnicode_FromFormat(b'repr=%V', 'abc', b'xyz')
self.assertEqual(text, 'repr=abc')
@@ -1659,6 +1782,13 @@ class UnicodeTest(string_tests.CommonTest,
text = PyUnicode_FromFormat(b'repr=%V', None, b'abc\xff')
self.assertEqual(text, 'repr=abc\ufffd')
+ # Not supported: the raw format string is copied as-is. These tests are
+ # just here to check for crashes and should not be considered as
+ # specifications.
+ self.assertEqual(PyUnicode_FromFormat(b'%1%s', b'abc'), '%s')
+ self.assertEqual(PyUnicode_FromFormat(b'%1abc'), '%1abc')
+ self.assertEqual(PyUnicode_FromFormat(b'%+i', c_int(10)), '%+i')
+ self.assertEqual(PyUnicode_FromFormat(b'%.%s', b'abc'), '%.%s')
+
# Test PyUnicode_AsWideChar()
def test_aswidechar(self):
from _testcapi import unicode_aswidechar
@@ -1719,6 +1849,51 @@ class UnicodeTest(string_tests.CommonTest,
self.assertEqual(size, nchar)
self.assertEqual(wchar, nonbmp + '\0')
+ def test_subclass_add(self):
+ class S(str):
+ def __add__(self, o):
+ return "3"
+ self.assertEqual(S("4") + S("5"), "3")
+ class S(str):
+ def __iadd__(self, o):
+ return "3"
+ s = S("1")
+ s += "4"
+ self.assertEqual(s, "3")
+
+ def test_encode_decimal(self):
+ from _testcapi import unicode_encodedecimal
+ self.assertEqual(unicode_encodedecimal('123'),
+ b'123')
+ self.assertEqual(unicode_encodedecimal('\u0663.\u0661\u0664'),
+ b'3.14')
+ self.assertEqual(unicode_encodedecimal("\N{EM SPACE}3.14\N{EN SPACE}"),
+ b' 3.14 ')
+ self.assertRaises(UnicodeEncodeError,
+ unicode_encodedecimal, "123\u20ac", "strict")
+ self.assertRaisesRegex(
+ ValueError,
+ "^'decimal' codec can't encode character",
+ unicode_encodedecimal, "123\u20ac", "replace")
+
+ def test_transform_decimal(self):
+ from _testcapi import unicode_transformdecimaltoascii as transform_decimal
+ self.assertEqual(transform_decimal('123'),
+ '123')
+ self.assertEqual(transform_decimal('\u0663.\u0661\u0664'),
+ '3.14')
+ self.assertEqual(transform_decimal("\N{EM SPACE}3.14\N{EN SPACE}"),
+ "\N{EM SPACE}3.14\N{EN SPACE}")
+ self.assertEqual(transform_decimal('123\u20ac'),
+ '123\u20ac')
+
+ def test_getnewargs(self):
+ text = 'abc'
+ args = text.__getnewargs__()
+ self.assertIsNot(args[0], text)
+ self.assertEqual(args[0], text)
+ self.assertEqual(len(args), 1)
+
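As a side note, the reason test_getnewargs insists on a copy: __getnewargs__ feeds pickle and copy, and handing back a plain str keeps subclasses reconstructible. A small sketch of that standard behaviour (not specific to this patch):

    import pickle

    class Tagged(str):
        pass

    t = Tagged('abc')
    # For str, __getnewargs__ is a 1-tuple holding a plain-str copy.
    args = t.__getnewargs__()
    assert args == ('abc',) and type(args[0]) is str
    # pickle rebuilds the subclass through __new__ with those args.
    assert type(pickle.loads(pickle.dumps(t))) is Tagged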
class StringModuleTest(unittest.TestCase):
def test_formatter_parser(self):
@@ -1770,42 +1945,6 @@ class StringModuleTest(unittest.TestCase):
]])
self.assertRaises(TypeError, _string.formatter_field_name_split, 1)
- def test_encode_decimal(self):
- from _testcapi import unicode_encodedecimal
- self.assertEqual(unicode_encodedecimal('123'),
- b'123')
- self.assertEqual(unicode_encodedecimal('\u0663.\u0661\u0664'),
- b'3.14')
- self.assertEqual(unicode_encodedecimal("\N{EM SPACE}3.14\N{EN SPACE}"),
- b' 3.14 ')
- self.assertRaises(UnicodeEncodeError,
- unicode_encodedecimal, "123\u20ac", "strict")
- self.assertEqual(unicode_encodedecimal("123\u20ac", "replace"),
- b'123?')
- self.assertEqual(unicode_encodedecimal("123\u20ac", "ignore"),
- b'123')
- self.assertEqual(unicode_encodedecimal("123\u20ac", "xmlcharrefreplace"),
- b'123&#8364;')
- self.assertEqual(unicode_encodedecimal("123\u20ac", "backslashreplace"),
- b'123\\u20ac')
- self.assertEqual(unicode_encodedecimal("123\u20ac\N{EM SPACE}", "replace"),
- b'123? ')
- self.assertEqual(unicode_encodedecimal("123\u20ac\u20ac", "replace"),
- b'123??')
- self.assertEqual(unicode_encodedecimal("123\u20ac\u0660", "replace"),
- b'123?0')
-
- def test_transform_decimal(self):
- from _testcapi import unicode_transformdecimaltoascii as transform_decimal
- self.assertEqual(transform_decimal('123'),
- '123')
- self.assertEqual(transform_decimal('\u0663.\u0661\u0664'),
- '3.14')
- self.assertEqual(transform_decimal("\N{EM SPACE}3.14\N{EN SPACE}"),
- "\N{EM SPACE}3.14\N{EN SPACE}")
- self.assertEqual(transform_decimal('123\u20ac'),
- '123\u20ac')
-
def test_main():
support.run_unittest(__name__)
diff --git a/Lib/test/test_unicode_file.py b/Lib/test/test_unicode_file.py
index 6c2011a..faa8da3 100644
--- a/Lib/test/test_unicode_file.py
+++ b/Lib/test/test_unicode_file.py
@@ -6,7 +6,7 @@ import unicodedata
import unittest
from test.support import (run_unittest, rmtree,
- TESTFN_ENCODING, TESTFN_UNICODE, TESTFN_UNENCODABLE)
+ TESTFN_ENCODING, TESTFN_UNICODE, TESTFN_UNENCODABLE, create_empty_file)
if not os.path.supports_unicode_filenames:
try:
@@ -56,16 +56,20 @@ class TestUnicodeFiles(unittest.TestCase):
# Should be able to rename the file using either name.
self.assertTrue(os.path.isfile(filename1)) # must exist.
os.rename(filename1, filename2 + ".new")
- self.assertTrue(os.path.isfile(filename1+".new"))
+ self.assertFalse(os.path.isfile(filename2))
+ self.assertTrue(os.path.isfile(filename1 + '.new'))
os.rename(filename1 + ".new", filename2)
+ self.assertFalse(os.path.isfile(filename1 + '.new'))
self.assertTrue(os.path.isfile(filename2))
shutil.copy(filename1, filename2 + ".new")
os.unlink(filename1 + ".new") # remove using equiv name.
# And a couple of moves, one using each name.
shutil.move(filename1, filename2 + ".new")
- self.assertTrue(not os.path.exists(filename2))
+ self.assertFalse(os.path.exists(filename2))
+ self.assertTrue(os.path.exists(filename1 + '.new'))
shutil.move(filename1 + ".new", filename2)
+ self.assertFalse(os.path.exists(filename2 + '.new'))
self.assertTrue(os.path.exists(filename1))
# Note - due to the implementation of shutil.move,
# it tries a rename first. This only fails on Windows when on
@@ -73,10 +77,12 @@ class TestUnicodeFiles(unittest.TestCase):
# So we test the shutil.copy2 function, which is the thing most
# likely to fail.
shutil.copy2(filename1, filename2 + ".new")
+ self.assertTrue(os.path.isfile(filename1 + '.new'))
os.unlink(filename1 + ".new")
+ self.assertFalse(os.path.exists(filename2 + '.new'))
def _do_directory(self, make_name, chdir_name):
- cwd = os.getcwdb()
+ cwd = os.getcwd()
if os.path.isdir(make_name):
rmtree(make_name)
os.mkdir(make_name)
@@ -99,8 +105,7 @@ class TestUnicodeFiles(unittest.TestCase):
# top-level 'test' functions would be if they could take params
def _test_single(self, filename):
remove_if_exists(filename)
- f = open(filename, "w")
- f.close()
+ create_empty_file(filename)
try:
self._do_single(filename)
finally:
diff --git a/Lib/test/test_unicodedata.py b/Lib/test/test_unicodedata.py
index 9744256..99aa003 100644
--- a/Lib/test/test_unicodedata.py
+++ b/Lib/test/test_unicodedata.py
@@ -21,7 +21,7 @@ errors = 'surrogatepass'
class UnicodeMethodsTest(unittest.TestCase):
# update this, if the database changes
- expectedchecksum = '21b90f1aed00081b81ca7942b22196af090015a0'
+ expectedchecksum = 'bf7a78f1a532421b5033600102e23a92044dbba9'
def test_method_checksum(self):
h = hashlib.sha1()
@@ -80,7 +80,7 @@ class UnicodeDatabaseTest(unittest.TestCase):
class UnicodeFunctionsTest(UnicodeDatabaseTest):
# update this, if the database changes
- expectedchecksum = 'c23dfc0b5eaf3ca2aad32d733de96bb182ccda50'
+ expectedchecksum = '17fe2f12b788e4fff5479b469c4404bb6ecf841f'
def test_function_checksum(self):
data = []
h = hashlib.sha1()
@@ -108,6 +108,7 @@ class UnicodeFunctionsTest(UnicodeDatabaseTest):
self.assertEqual(self.db.digit('\u215b', None), None)
self.assertEqual(self.db.digit('\u2468'), 9)
self.assertEqual(self.db.digit('\U00020000', None), None)
+ self.assertEqual(self.db.digit('\U0001D7FD'), 7)
self.assertRaises(TypeError, self.db.digit)
self.assertRaises(TypeError, self.db.digit, 'xx')
@@ -120,6 +121,7 @@ class UnicodeFunctionsTest(UnicodeDatabaseTest):
self.assertEqual(self.db.numeric('\u2468'), 9.0)
self.assertEqual(self.db.numeric('\ua627'), 7.0)
self.assertEqual(self.db.numeric('\U00020000', None), None)
+ self.assertEqual(self.db.numeric('\U0001012A'), 9000)
self.assertRaises(TypeError, self.db.numeric)
self.assertRaises(TypeError, self.db.numeric, 'xx')
@@ -131,6 +133,7 @@ class UnicodeFunctionsTest(UnicodeDatabaseTest):
self.assertEqual(self.db.decimal('\u215b', None), None)
self.assertEqual(self.db.decimal('\u2468', None), None)
self.assertEqual(self.db.decimal('\U00020000', None), None)
+ self.assertEqual(self.db.decimal('\U0001D7FD'), 7)
self.assertRaises(TypeError, self.db.decimal)
self.assertRaises(TypeError, self.db.decimal, 'xx')
@@ -141,6 +144,7 @@ class UnicodeFunctionsTest(UnicodeDatabaseTest):
self.assertEqual(self.db.category('a'), 'Ll')
self.assertEqual(self.db.category('A'), 'Lu')
self.assertEqual(self.db.category('\U00020000'), 'Lo')
+ self.assertEqual(self.db.category('\U0001012A'), 'No')
self.assertRaises(TypeError, self.db.category)
self.assertRaises(TypeError, self.db.category, 'xx')
@@ -308,14 +312,6 @@ class UnicodeMiscTest(UnicodeDatabaseTest):
self.assertEqual(len(lines), 1,
r"\u%.4x should not be a linebreak" % i)
- def test_UCS4(self):
- # unicodedata should work with code points outside the BMP
- # even on a narrow Unicode build
- self.assertEqual(self.db.category("\U0001012A"), "No")
- self.assertEqual(self.db.numeric("\U0001012A"), 9000)
- self.assertEqual(self.db.decimal("\U0001D7FD"), 7)
- self.assertEqual(self.db.digit("\U0001D7FD"), 7)
-
def test_main():
test.support.run_unittest(
UnicodeMiscTest,
diff --git a/Lib/test/test_urllib.py b/Lib/test/test_urllib.py
index c6f6f61..22ada56 100644
--- a/Lib/test/test_urllib.py
+++ b/Lib/test/test_urllib.py
@@ -298,6 +298,10 @@ Content-Type: text/html; charset=iso-8859-1
finally:
self.unfakehttp()
+ def test_URLopener_deprecation(self):
+ with support.check_warnings(('', DeprecationWarning)):
+ urllib.request.URLopener()
+
class urlretrieve_FileTests(unittest.TestCase):
"""Test urllib.urlretrieve() on local files"""
@@ -331,7 +335,7 @@ class urlretrieve_FileTests(unittest.TestCase):
def constructLocalFileUrl(self, filePath):
filePath = os.path.abspath(filePath)
try:
- filePath.encode("utf8")
+ filePath.encode("utf-8")
except UnicodeEncodeError:
raise unittest.SkipTest("filePath is not encodable to utf8")
return "file://%s" % urllib.request.pathname2url(filePath)
@@ -384,11 +388,11 @@ class urlretrieve_FileTests(unittest.TestCase):
def test_reporthook(self):
# Make sure that the reporthook works.
- def hooktester(count, block_size, total_size, count_holder=[0]):
- self.assertIsInstance(count, int)
- self.assertIsInstance(block_size, int)
- self.assertIsInstance(total_size, int)
- self.assertEqual(count, count_holder[0])
+ def hooktester(block_count, block_read_size, file_size, count_holder=[0]):
+ self.assertIsInstance(block_count, int)
+ self.assertIsInstance(block_read_size, int)
+ self.assertIsInstance(file_size, int)
+ self.assertEqual(block_count, count_holder[0])
count_holder[0] = count_holder[0] + 1
second_temp = "%s.2" % support.TESTFN
self.registerFileForCleanUp(second_temp)
@@ -399,8 +403,8 @@ class urlretrieve_FileTests(unittest.TestCase):
def test_reporthook_0_bytes(self):
# Test on zero length file. Should call reporthook only 1 time.
report = []
- def hooktester(count, block_size, total_size, _report=report):
- _report.append((count, block_size, total_size))
+ def hooktester(block_count, block_read_size, file_size, _report=report):
+ _report.append((block_count, block_read_size, file_size))
srcFileName = self.createNewTempFile()
urllib.request.urlretrieve(self.constructLocalFileUrl(srcFileName),
support.TESTFN, hooktester)
@@ -410,31 +414,31 @@ class urlretrieve_FileTests(unittest.TestCase):
def test_reporthook_5_bytes(self):
# Test on 5 byte file. Should call reporthook only 2 times (once when
# the "network connection" is established and once when the block is
- # read). Since the block size is 8192 bytes, only one block read is
- # required to read the entire file.
+ # read).
report = []
- def hooktester(count, block_size, total_size, _report=report):
- _report.append((count, block_size, total_size))
+ def hooktester(block_count, block_read_size, file_size, _report=report):
+ _report.append((block_count, block_read_size, file_size))
srcFileName = self.createNewTempFile(b"x" * 5)
urllib.request.urlretrieve(self.constructLocalFileUrl(srcFileName),
support.TESTFN, hooktester)
self.assertEqual(len(report), 2)
- self.assertEqual(report[0][1], 8192)
- self.assertEqual(report[0][2], 5)
+ self.assertEqual(report[0][1], 0)
+ self.assertEqual(report[1][1], 5)
def test_reporthook_8193_bytes(self):
# Test on 8193 byte file. Should call reporthook only 3 times (once
# when the "network connection" is established, once for the next 8192
# bytes, and once for the last byte).
report = []
- def hooktester(count, block_size, total_size, _report=report):
- _report.append((count, block_size, total_size))
+ def hooktester(block_count, block_read_size, file_size, _report=report):
+ _report.append((block_count, block_read_size, file_size))
srcFileName = self.createNewTempFile(b"x" * 8193)
urllib.request.urlretrieve(self.constructLocalFileUrl(srcFileName),
support.TESTFN, hooktester)
self.assertEqual(len(report), 3)
- self.assertEqual(report[0][1], 8192)
- self.assertEqual(report[0][2], 8193)
+ self.assertEqual(report[0][1], 0)
+ self.assertEqual(report[1][1], 8192)
+ self.assertEqual(report[2][1], 1)
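The convention these assertions pin down — a first call reporting 0 bytes read when the "connection" opens, then one call per block — makes cumulative accounting the safe way to compute progress. A sketch of such a hook (the commented-out URL is a placeholder):

    import urllib.request

    totals = {'read': 0}
    def progress(block_count, block_read_size, file_size):
        # First call: block_count == 0 and block_read_size == 0, i.e. the
        # transfer has started but nothing has been read yet.
        totals['read'] += block_read_size
        if file_size > 0:
            print('%d/%d bytes' % (totals['read'], file_size))

    # urllib.request.urlretrieve('http://www.example.com/f', 'f', progress)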
class urlretrieve_HttpTests(unittest.TestCase, FakeHTTPMixin):
@@ -1245,6 +1249,28 @@ class URLopener_Tests(unittest.TestCase):
# self.assertEqual(ftp.ftp.sock.gettimeout(), 30)
# ftp.close()
+class RequestTests(unittest.TestCase):
+ """Unit tests for urllib.request.Request."""
+
+ def test_default_values(self):
+ Request = urllib.request.Request
+ request = Request("http://www.python.org")
+ self.assertEqual(request.get_method(), 'GET')
+ request = Request("http://www.python.org", {})
+ self.assertEqual(request.get_method(), 'POST')
+
+ def test_with_method_arg(self):
+ Request = urllib.request.Request
+ request = Request("http://www.python.org", method='HEAD')
+ self.assertEqual(request.method, 'HEAD')
+ self.assertEqual(request.get_method(), 'HEAD')
+ request = Request("http://www.python.org", {}, method='HEAD')
+ self.assertEqual(request.method, 'HEAD')
+ self.assertEqual(request.get_method(), 'HEAD')
+ request = Request("http://www.python.org", method='GET')
+ self.assertEqual(request.get_method(), 'GET')
+ request.method = 'HEAD'
+ self.assertEqual(request.get_method(), 'HEAD')
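In application code the new keyword reads as below (a sketch; the URL is a placeholder):

    import urllib.request

    # Without data the default method is GET, passing data makes it POST,
    # and an explicit method= overrides both.
    req = urllib.request.Request('http://www.example.com', method='HEAD')
    assert req.get_method() == 'HEAD'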
def test_main():
@@ -1261,6 +1287,7 @@ def test_main():
Utility_Tests,
URLopener_Tests,
#FTPWrapperTests,
+ RequestTests,
)
diff --git a/Lib/test/test_urllib2.py b/Lib/test/test_urllib2.py
index 3d80e01..632524c 100644
--- a/Lib/test/test_urllib2.py
+++ b/Lib/test/test_urllib2.py
@@ -5,6 +5,7 @@ import os
import io
import socket
import array
+import sys
import urllib.request
# The proxy bypass method imported below has logic specific to the OSX
@@ -18,6 +19,22 @@ import urllib.error
# parse_keqv_list, parse_http_list, HTTPDigestAuthHandler
class TrivialTests(unittest.TestCase):
+
+ def test___all__(self):
+ # Verify which names are exposed
+ for module in 'request', 'response', 'parse', 'error', 'robotparser':
+ context = {}
+ exec('from urllib.%s import *' % module, context)
+ del context['__builtins__']
+ if module == 'request' and os.name == 'nt':
+ u, p = context.pop('url2pathname'), context.pop('pathname2url')
+ self.assertEqual(u.__module__, 'nturl2path')
+ self.assertEqual(p.__module__, 'nturl2path')
+ for k, v in context.items():
+ self.assertEqual(v.__module__, 'urllib.%s' % module,
+ "%r is exposed in 'urllib.%s' but defined in %r" %
+ (k, module, v.__module__))
+
def test_trivial(self):
# A couple trivial tests
@@ -536,10 +553,6 @@ class OpenerDirectorTests(unittest.TestCase):
self.assertRaises(urllib.error.URLError, o.open, req)
self.assertEqual(o.calls, [(handlers[0], "http_open", (req,), {})])
-## def test_error(self):
-## # XXX this doesn't actually seem to be used in standard library,
-## # but should really be tested anyway...
-
def test_http_error(self):
# XXX http_error_default
# http errors are a special case
@@ -567,6 +580,7 @@ class OpenerDirectorTests(unittest.TestCase):
self.assertEqual((handler, method_name), got[:2])
self.assertEqual(args, got[2])
+
def test_processors(self):
# *_request / *_response methods get called appropriately
o = OpenerDirector()
@@ -602,10 +616,26 @@ class OpenerDirectorTests(unittest.TestCase):
self.assertTrue(args[1] is None or
isinstance(args[1], MockResponse))
+ def test_method_deprecations(self):
+ req = Request("http://www.example.com")
+ with support.check_warnings(('', DeprecationWarning)):
+ req.add_data("data")
+ with support.check_warnings(('', DeprecationWarning)):
+ req.has_data()
+ with support.check_warnings(('', DeprecationWarning)):
+ req.get_data()
+ with support.check_warnings(('', DeprecationWarning)):
+ req.get_host()
+ with support.check_warnings(('', DeprecationWarning)):
+ req.get_selector()
+ with support.check_warnings(('', DeprecationWarning)):
+ req.is_unverifiable()
+ with support.check_warnings(('', DeprecationWarning)):
+ req.get_origin_req_host()
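check_warnings pairs a message pattern with an expected category and fails if no matching warning is raised; a minimal sketch of the idiom used above (it assumes test.support is importable, as it is inside the test suite):

    import warnings
    from test import support

    def legacy():
        warnings.warn('use the new API instead', DeprecationWarning)

    # ('', DeprecationWarning): any message text, but the category must match.
    with support.check_warnings(('', DeprecationWarning)):
        legacy()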
def sanepathname2url(path):
try:
- path.encode("utf8")
+ path.encode("utf-8")
except UnicodeEncodeError:
raise unittest.SkipTest("path is not encodable to utf8")
urlpath = urllib.request.pathname2url(path)
@@ -1179,6 +1209,8 @@ class HandlerTests(unittest.TestCase):
self.assertEqual(req.get_host(), "proxy.example.com:3128")
self.assertEqual(req.get_header("Proxy-authorization"),"FooBar")
+ # This proxy bypass test is specific to the OSX implementation.
+ @unittest.skipUnless(sys.platform == 'darwin', "only relevant for OSX")
def test_osx_proxy_bypass(self):
bypass = {
'exclude_simple': False,
@@ -1282,6 +1314,26 @@ class HandlerTests(unittest.TestCase):
# _test_basic_auth called .open() twice)
self.assertEqual(opener.recorded, ["digest", "basic"]*2)
+ def test_unsupported_auth_digest_handler(self):
+ opener = OpenerDirector()
+ # While using DigestAuthHandler
+ digest_auth_handler = urllib.request.HTTPDigestAuthHandler(None)
+ http_handler = MockHTTPHandler(
+ 401, 'WWW-Authenticate: Kerberos\r\n\r\n')
+ opener.add_handler(digest_auth_handler)
+ opener.add_handler(http_handler)
+ self.assertRaises(ValueError, opener.open, "http://www.example.com")
+
+ def test_unsupported_auth_basic_handler(self):
+ # While using BasicAuthHandler
+ opener = OpenerDirector()
+ basic_auth_handler = urllib.request.HTTPBasicAuthHandler(None)
+ http_handler = MockHTTPHandler(
+ 401, 'WWW-Authenticate: NTLM\r\n\r\n')
+ opener.add_handler(basic_auth_handler)
+ opener.add_handler(http_handler)
+ self.assertRaises(ValueError, opener.open, "http://www.example.com")
+
def _test_basic_auth(self, opener, auth_handler, auth_header,
realm, http_handler, password_manager,
request_url, protected_url):
@@ -1319,6 +1371,7 @@ class HandlerTests(unittest.TestCase):
self.assertEqual(len(http_handler.requests), 1)
self.assertFalse(http_handler.requests[0].has_header(auth_header))
+
class MiscTests(unittest.TestCase):
def test_build_opener(self):
@@ -1427,7 +1480,9 @@ def test_HTTPError_interface():
Issue 13211 reveals that HTTPError didn't implement the URLError
interface even though HTTPError is a subclass of URLError.
- >>> err = urllib.error.HTTPError(msg='something bad happened', url=None, code=None, hdrs=None, fp=None)
+ >>> msg = 'something bad happened'
+ >>> url = code = hdrs = fp = None
+ >>> err = urllib.error.HTTPError(url, code, msg, hdrs, fp)
>>> assert hasattr(err, 'reason')
>>> err.reason
'something bad happened'
diff --git a/Lib/test/test_urllib2net.py b/Lib/test/test_urllib2net.py
index 5fcb4cb..fc5527e 100644
--- a/Lib/test/test_urllib2net.py
+++ b/Lib/test/test_urllib2net.py
@@ -83,12 +83,13 @@ class CloseSocketTest(unittest.TestCase):
def test_close(self):
# calling .close() on urllib2's response objects should close the
# underlying socket
-
- response = _urlopen_with_retry("http://www.python.org/")
- sock = response.fp
- self.assertTrue(not sock.closed)
- response.close()
- self.assertTrue(sock.closed)
+ url = "http://www.python.org/"
+ with support.transient_internet(url):
+ response = _urlopen_with_retry(url)
+ sock = response.fp
+ self.assertTrue(not sock.closed)
+ response.close()
+ self.assertTrue(sock.closed)
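transient_internet() converts flaky network failures inside its block (DNS errors, resets, timeouts) into skips rather than test failures; a sketch of the idiom (requires network access):

    from urllib.request import urlopen
    from test import support

    url = 'http://www.python.org/'
    with support.transient_internet(url):
        response = urlopen(url)
        response.read(64)
        response.close()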
class OtherNetworkTests(unittest.TestCase):
def setUp(self):
diff --git a/Lib/test/test_userlist.py b/Lib/test/test_userlist.py
index 868ed24..6381070 100644
--- a/Lib/test/test_userlist.py
+++ b/Lib/test/test_userlist.py
@@ -52,6 +52,12 @@ class UserListTest(list_tests.CommonTest):
return str(key) + '!!!'
self.assertEqual(next(iter(T((1,2)))), "0!!!")
+ def test_userlist_copy(self):
+ u = self.type2test([6, 8, 1, 9, 1])
+ v = u.copy()
+ self.assertEqual(u, v)
+ self.assertEqual(type(u), type(v))
+
def test_main():
support.run_unittest(UserListTest)
diff --git a/Lib/test/test_userstring.py b/Lib/test/test_userstring.py
index 7a8b932..d5d0c89 100755
--- a/Lib/test/test_userstring.py
+++ b/Lib/test/test_userstring.py
@@ -17,11 +17,11 @@ class UserStringTest(
# Overwrite the three testing methods, because UserString
# can't cope with arguments propagated to UserString
# (and we don't test with subclasses)
- def checkequal(self, result, object, methodname, *args):
+ def checkequal(self, result, object, methodname, *args, **kwargs):
result = self.fixtype(result)
object = self.fixtype(object)
# we don't fix the arguments, because UserString can't cope with it
- realresult = getattr(object, methodname)(*args)
+ realresult = getattr(object, methodname)(*args, **kwargs)
self.assertEqual(
result,
realresult
diff --git a/Lib/test/test_uuid.py b/Lib/test/test_uuid.py
index 43fa656..7bc59ed 100644
--- a/Lib/test/test_uuid.py
+++ b/Lib/test/test_uuid.py
@@ -471,14 +471,14 @@ class TestUUID(TestCase):
if pid == 0:
os.close(fds[0])
value = uuid.uuid4()
- os.write(fds[1], value.hex.encode('latin1'))
+ os.write(fds[1], value.hex.encode('latin-1'))
os._exit(0)
else:
os.close(fds[1])
parent_value = uuid.uuid4().hex
os.waitpid(pid, 0)
- child_value = os.read(fds[0], 100).decode('latin1')
+ child_value = os.read(fds[0], 100).decode('latin-1')
self.assertNotEqual(parent_value, child_value)
diff --git a/Lib/test/test_wait3.py b/Lib/test/test_wait3.py
index 786e60b..bd06c8d 100644
--- a/Lib/test/test_wait3.py
+++ b/Lib/test/test_wait3.py
@@ -19,13 +19,16 @@ except AttributeError:
class Wait3Test(ForkWait):
def wait_impl(self, cpid):
- for i in range(10):
+ # This many iterations can be required, since some previously run
+ # tests (e.g. test_ctypes) could have spawned a lot of children
+ # very quickly.
+ for i in range(30):
# wait3() shouldn't hang, but some of the buildbots seem to hang
# in the forking tests. This is an attempt to fix the problem.
spid, status, rusage = os.wait3(os.WNOHANG)
if spid == cpid:
break
- time.sleep(1.0)
+ time.sleep(0.1)
self.assertEqual(spid, cpid)
self.assertEqual(status, 0, "cause = %d, exit = %d" % (status&0xff, status>>8))
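The non-blocking wait idiom the fix leans on, as a standalone sketch (POSIX-only, since fork() and wait3() are unavailable on Windows):

    import os, time

    pid = os.fork()
    if pid == 0:
        os._exit(0)          # child: exit immediately
    for _ in range(30):
        # With WNOHANG, wait3() returns a pid of 0 while the child is
        # still running instead of blocking, so we poll and sleep.
        spid, status, rusage = os.wait3(os.WNOHANG)
        if spid == pid:
            break
        time.sleep(0.1)
    assert spid == pid and status == 0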
diff --git a/Lib/test/test_warnings.py b/Lib/test/test_warnings.py
index 79be835..953b282 100644
--- a/Lib/test/test_warnings.py
+++ b/Lib/test/test_warnings.py
@@ -512,12 +512,11 @@ class _WarningsTests(BaseTest):
def test_showwarning_not_callable(self):
with original_warnings.catch_warnings(module=self.module):
self.module.filterwarnings("always", category=UserWarning)
- old_showwarning = self.module.showwarning
+ self.module.showwarning = print
+ with support.captured_output('stdout'):
+ self.module.warn('Warning!')
self.module.showwarning = 23
- try:
- self.assertRaises(TypeError, self.module.warn, "Warning!")
- finally:
- self.module.showwarning = old_showwarning
+ self.assertRaises(TypeError, self.module.warn, "Warning!")
def test_show_warning_output(self):
# With showarning() missing, make sure that output is okay.
@@ -547,10 +546,13 @@ class _WarningsTests(BaseTest):
globals_dict = globals()
oldfile = globals_dict['__file__']
try:
- with original_warnings.catch_warnings(module=self.module) as w:
+ catch = original_warnings.catch_warnings(record=True,
+ module=self.module)
+ with catch as w:
self.module.filterwarnings("always", category=UserWarning)
globals_dict['__file__'] = None
original_warnings.warn('test', UserWarning)
+ self.assertTrue(len(w))
finally:
globals_dict['__file__'] = oldfile
diff --git a/Lib/test/test_xml_etree.py b/Lib/test/test_xml_etree.py
index b7a996c..50e5196 100644
--- a/Lib/test/test_xml_etree.py
+++ b/Lib/test/test_xml_etree.py
@@ -1,10 +1,13 @@
# xml.etree test. This file contains enough tests to make sure that
# all included components work as they should.
# Large parts are extracted from the upstream test suite.
-
-# IMPORTANT: the same doctests are run from "test_xml_etree_c" in
-# order to ensure consistency between the C implementation and the
-# Python implementation.
+#
+# PLEASE write all new tests using the standard unittest infrastructure and
+# not doctest.
+#
+# IMPORTANT: the same tests are run from "test_xml_etree_c" in order
+# to ensure consistency between the C implementation and the Python
+# implementation.
#
# For this purpose, the module-level "ET" symbol is temporarily
# monkey-patched when running the "test_xml_etree_c" test suite.
@@ -13,16 +16,17 @@
import sys
import html
+import io
import unittest
from test import support
-from test.support import findfile
+from test.support import findfile, import_fresh_module
-from xml.etree import ElementTree as ET
+pyET = import_fresh_module('xml.etree.ElementTree', blocked=['_elementtree'])
SIMPLE_XMLFILE = findfile("simple.xml", subdir="xmltestdata")
try:
- SIMPLE_XMLFILE.encode("utf8")
+ SIMPLE_XMLFILE.encode("utf-8")
except UnicodeEncodeError:
raise unittest.SkipTest("filename is not encodable to utf8")
SIMPLE_NS_XMLFILE = findfile("simple-ns.xml", subdir="xmltestdata")
@@ -275,7 +279,7 @@ def simplefind():
"""
Test find methods using the elementpath fallback.
- >>> from xml.etree import ElementTree
+ >>> ElementTree = pyET
>>> CurrentElementPath = ElementTree.ElementPath
>>> ElementTree.ElementPath = ElementTree._SimpleElementPath()
@@ -460,17 +464,19 @@ def path_cache():
"""
Check that the path cache behaves sanely.
+ >>> from xml.etree import ElementPath
+
>>> elem = ET.XML(SAMPLE_XML)
>>> for i in range(10): ET.ElementTree(elem).find('./'+str(i))
- >>> cache_len_10 = len(ET.ElementPath._cache)
+ >>> cache_len_10 = len(ElementPath._cache)
>>> for i in range(10): ET.ElementTree(elem).find('./'+str(i))
- >>> len(ET.ElementPath._cache) == cache_len_10
+ >>> len(ElementPath._cache) == cache_len_10
True
>>> for i in range(20): ET.ElementTree(elem).find('./'+str(i))
- >>> len(ET.ElementPath._cache) > cache_len_10
+ >>> len(ElementPath._cache) > cache_len_10
True
>>> for i in range(600): ET.ElementTree(elem).find('./'+str(i))
- >>> len(ET.ElementPath._cache) < 500
+ >>> len(ElementPath._cache) < 500
True
"""
@@ -1049,26 +1055,6 @@ def entity():
'<document>text</document>'
"""
-def error(xml):
- """
-
- Test error handling.
-
- >>> issubclass(ET.ParseError, SyntaxError)
- True
- >>> error("foo").position
- (1, 0)
- >>> error("<tag>&foo;</tag>").position
- (1, 5)
- >>> error("foobar<").position
- (1, 6)
-
- """
- try:
- ET.XML(xml)
- except ET.ParseError:
- return sys.exc_info()[1]
-
def namespace():
"""
Test namespace issues.
@@ -1256,8 +1242,8 @@ def processinginstruction():
>>> ET.tostring(ET.PI('test', '<testing&>'))
b'<?test <testing&>?>'
- >>> ET.tostring(ET.PI('test', '<testing&>\xe3'), 'latin1')
- b"<?xml version='1.0' encoding='latin1'?>\\n<?test <testing&>\\xe3?>"
+ >>> ET.tostring(ET.PI('test', '<testing&>\xe3'), 'latin-1')
+ b"<?xml version='1.0' encoding='latin-1'?>\\n<?test <testing&>\\xe3?>"
"""
#
@@ -1340,7 +1326,7 @@ def xinclude_loader(href, parse="xml", encoding=None):
try:
data = XINCLUDE[href]
except KeyError:
- raise IOError("resource not found")
+ raise OSError("resource not found")
if parse == "xml":
from xml.etree.ElementTree import XML
return XML(data)
@@ -1350,7 +1336,6 @@ def xinclude():
r"""
Basic inclusion example (XInclude C.1)
- >>> from xml.etree import ElementTree as ET
>>> from xml.etree import ElementInclude
>>> document = xinclude_loader("C1.xml")
@@ -1405,7 +1390,7 @@ def xinclude():
>>> document = xinclude_loader("C5.xml")
>>> ElementInclude.include(document, xinclude_loader)
Traceback (most recent call last):
- IOError: resource not found
+ OSError: resource not found
>>> # print(serialize(document)) # C5
"""
@@ -1612,7 +1597,7 @@ def bug_xmltoolkit55():
class ExceptionFile:
def read(self, x):
- raise IOError
+ raise OSError
def xmltoolkit60():
"""
@@ -1620,7 +1605,7 @@ def xmltoolkit60():
Handle crash in stream source.
>>> tree = ET.parse(ExceptionFile())
Traceback (most recent call last):
- IOError
+ OSError
"""
@@ -1854,6 +1839,215 @@ def check_issue10777():
# --------------------------------------------------------------------
+class ElementTreeTest(unittest.TestCase):
+
+ def test_istype(self):
+ self.assertIsInstance(ET.ParseError, type)
+ self.assertIsInstance(ET.QName, type)
+ self.assertIsInstance(ET.ElementTree, type)
+ self.assertIsInstance(ET.Element, type)
+ # XXX issue 14128 with C ElementTree
+ # self.assertIsInstance(ET.TreeBuilder, type)
+ # self.assertIsInstance(ET.XMLParser, type)
+
+ def test_Element_subclass_trivial(self):
+ class MyElement(ET.Element):
+ pass
+
+ mye = MyElement('foo')
+ self.assertIsInstance(mye, ET.Element)
+ self.assertIsInstance(mye, MyElement)
+ self.assertEqual(mye.tag, 'foo')
+
+ def test_Element_subclass_constructor(self):
+ class MyElement(ET.Element):
+ def __init__(self, tag, attrib={}, **extra):
+ super(MyElement, self).__init__(tag + '__', attrib, **extra)
+
+ mye = MyElement('foo', {'a': 1, 'b': 2}, c=3, d=4)
+ self.assertEqual(mye.tag, 'foo__')
+ self.assertEqual(sorted(mye.items()),
+ [('a', 1), ('b', 2), ('c', 3), ('d', 4)])
+
+ def test_Element_subclass_new_method(self):
+ class MyElement(ET.Element):
+ def newmethod(self):
+ return self.tag
+
+ mye = MyElement('joe')
+ self.assertEqual(mye.newmethod(), 'joe')
+
+
+class TreeBuilderTest(unittest.TestCase):
+
+ sample1 = ('<!DOCTYPE html PUBLIC'
+ ' "-//W3C//DTD XHTML 1.0 Transitional//EN"'
+ ' "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">'
+ '<html>text</html>')
+
+ def test_dummy_builder(self):
+ class BaseDummyBuilder:
+ def close(self):
+ return 42
+
+ class DummyBuilder(BaseDummyBuilder):
+ data = start = end = lambda *a: None
+
+ parser = ET.XMLParser(target=DummyBuilder())
+ parser.feed(self.sample1)
+ self.assertEqual(parser.close(), 42)
+
+ parser = ET.XMLParser(target=BaseDummyBuilder())
+ parser.feed(self.sample1)
+ self.assertEqual(parser.close(), 42)
+
+ parser = ET.XMLParser(target=object())
+ parser.feed(self.sample1)
+ self.assertIsNone(parser.close())
+
+ # XXX in _elementtree, the constructor of TreeBuilder expects no
+ # arguments
+ @unittest.expectedFailure
+ def test_element_factory(self):
+ tb = ET.TreeBuilder(element_factory=lambda: ET.Element())
+
+ @unittest.expectedFailure # XXX issue 14007 with C ElementTree
+ def test_doctype(self):
+ class DoctypeParser:
+ _doctype = None
+
+ def doctype(self, name, pubid, system):
+ self._doctype = (name, pubid, system)
+
+ def close(self):
+ return self._doctype
+
+ parser = ET.XMLParser(target=DoctypeParser())
+ parser.feed(self.sample1)
+
+ self.assertEqual(parser.close(),
+ ('html', '-//W3C//DTD XHTML 1.0 Transitional//EN',
+ 'http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd'))
+
+
+class NoAcceleratorTest(unittest.TestCase):
+
+ # Test that the C accelerator was not imported for pyET
+ def test_correct_import_pyET(self):
+ self.assertEqual(pyET.Element.__module__, 'xml.etree.ElementTree')
+ self.assertEqual(pyET.SubElement.__module__, 'xml.etree.ElementTree')
+
+
+class ElementSlicingTest(unittest.TestCase):
+ def _elem_tags(self, elemlist):
+ return [e.tag for e in elemlist]
+
+ def _subelem_tags(self, elem):
+ return self._elem_tags(list(elem))
+
+ def _make_elem_with_children(self, numchildren):
+ """Create an Element with a tag 'a', with the given amount of children
+ named 'a0', 'a1' ... and so on.
+
+ """
+ e = ET.Element('a')
+ for i in range(numchildren):
+ ET.SubElement(e, 'a%s' % i)
+ return e
+
+ def test_getslice_single_index(self):
+ e = self._make_elem_with_children(10)
+
+ self.assertEqual(e[1].tag, 'a1')
+ self.assertEqual(e[-2].tag, 'a8')
+
+ self.assertRaises(IndexError, lambda: e[12])
+
+ def test_getslice_range(self):
+ e = self._make_elem_with_children(6)
+
+ self.assertEqual(self._elem_tags(e[3:]), ['a3', 'a4', 'a5'])
+ self.assertEqual(self._elem_tags(e[3:6]), ['a3', 'a4', 'a5'])
+ self.assertEqual(self._elem_tags(e[3:16]), ['a3', 'a4', 'a5'])
+ self.assertEqual(self._elem_tags(e[3:5]), ['a3', 'a4'])
+ self.assertEqual(self._elem_tags(e[3:-1]), ['a3', 'a4'])
+ self.assertEqual(self._elem_tags(e[:2]), ['a0', 'a1'])
+
+ def test_getslice_steps(self):
+ e = self._make_elem_with_children(10)
+
+ self.assertEqual(self._elem_tags(e[8:10:1]), ['a8', 'a9'])
+ self.assertEqual(self._elem_tags(e[::3]), ['a0', 'a3', 'a6', 'a9'])
+ self.assertEqual(self._elem_tags(e[::8]), ['a0', 'a8'])
+ self.assertEqual(self._elem_tags(e[1::8]), ['a1', 'a9'])
+
+ def test_getslice_negative_steps(self):
+ e = self._make_elem_with_children(4)
+
+ self.assertEqual(self._elem_tags(e[::-1]), ['a3', 'a2', 'a1', 'a0'])
+ self.assertEqual(self._elem_tags(e[::-2]), ['a3', 'a1'])
+
+ def test_delslice(self):
+ e = self._make_elem_with_children(4)
+ del e[0:2]
+ self.assertEqual(self._subelem_tags(e), ['a2', 'a3'])
+
+ e = self._make_elem_with_children(4)
+ del e[0:]
+ self.assertEqual(self._subelem_tags(e), [])
+
+ e = self._make_elem_with_children(4)
+ del e[::-1]
+ self.assertEqual(self._subelem_tags(e), [])
+
+ e = self._make_elem_with_children(4)
+ del e[::-2]
+ self.assertEqual(self._subelem_tags(e), ['a0', 'a2'])
+
+ e = self._make_elem_with_children(4)
+ del e[1::2]
+ self.assertEqual(self._subelem_tags(e), ['a0', 'a2'])
+
+ e = self._make_elem_with_children(2)
+ del e[::2]
+ self.assertEqual(self._subelem_tags(e), ['a1'])
+
+
+class StringIOTest(unittest.TestCase):
+ def test_read_from_stringio(self):
+ tree = ET.ElementTree()
+ stream = io.StringIO()
+ stream.write('''<?xml version="1.0"?><site></site>''')
+ stream.seek(0)
+ tree.parse(stream)
+
+ self.assertEqual(tree.getroot().tag, 'site')
+
+
+class ParseErrorTest(unittest.TestCase):
+ def test_subclass(self):
+ self.assertIsInstance(ET.ParseError(), SyntaxError)
+
+ def _get_error(self, s):
+ try:
+ ET.fromstring(s)
+ except ET.ParseError as e:
+ return e
+
+ def test_error_position(self):
+ self.assertEqual(self._get_error('foo').position, (1, 0))
+ self.assertEqual(self._get_error('<tag>&foo;</tag>').position, (1, 5))
+ self.assertEqual(self._get_error('foobar<').position, (1, 6))
+
+ def test_error_code(self):
+ import xml.parsers.expat.errors as ERRORS
+ self.assertEqual(self._get_error('foo').code,
+ ERRORS.codes[ERRORS.XML_ERROR_SYNTAX])
+
+
+# --------------------------------------------------------------------
+
+
class CleanContext(object):
"""Provide default namespace mapping and path cache."""
checkwarnings = None
@@ -1872,44 +2066,49 @@ class CleanContext(object):
("This method will be removed in future versions. "
"Use .+ instead.", DeprecationWarning),
("This method will be removed in future versions. "
- "Use .+ instead.", PendingDeprecationWarning),
- # XMLParser.doctype() is deprecated.
- ("This method of XMLParser is deprecated. Define doctype.. "
- "method on the TreeBuilder target.", DeprecationWarning))
+ "Use .+ instead.", PendingDeprecationWarning))
self.checkwarnings = support.check_warnings(*deprecations, quiet=quiet)
def __enter__(self):
- from xml.etree import ElementTree
- self._nsmap = ElementTree._namespace_map
- self._path_cache = ElementTree.ElementPath._cache
+ from xml.etree import ElementPath
+ self._nsmap = ET.register_namespace._namespace_map
# Copy the default namespace mapping
- ElementTree._namespace_map = self._nsmap.copy()
+ self._nsmap_copy = self._nsmap.copy()
# Copy the path cache (should be empty)
- ElementTree.ElementPath._cache = self._path_cache.copy()
+ self._path_cache = ElementPath._cache
+ ElementPath._cache = self._path_cache.copy()
self.checkwarnings.__enter__()
def __exit__(self, *args):
- from xml.etree import ElementTree
+ from xml.etree import ElementPath
# Restore mapping and path cache
- ElementTree._namespace_map = self._nsmap
- ElementTree.ElementPath._cache = self._path_cache
+ self._nsmap.clear()
+ self._nsmap.update(self._nsmap_copy)
+ ElementPath._cache = self._path_cache
self.checkwarnings.__exit__(*args)
-def test_main(module_name='xml.etree.ElementTree'):
+def test_main(module=pyET):
from test import test_xml_etree
- use_py_module = (module_name == 'xml.etree.ElementTree')
-
# The same doctests are used for both the Python and the C implementations
- assert test_xml_etree.ET.__name__ == module_name
+ test_xml_etree.ET = module
+
+ test_classes = [
+ ElementSlicingTest,
+ StringIOTest,
+ ParseErrorTest,
+ ElementTreeTest,
+ TreeBuilderTest]
+ if module is pyET:
+ # Run the tests specific to the Python implementation
+ test_classes += [NoAcceleratorTest]
+
+ support.run_unittest(*test_classes)
# XXX the C module should give the same warnings as the Python module
- with CleanContext(quiet=not use_py_module):
+ with CleanContext(quiet=(module is not pyET)):
support.run_doctest(test_xml_etree, verbosity=True)
- # The module should not be changed by the tests
- assert test_xml_etree.ET.__name__ == module_name
-
if __name__ == '__main__':
test_main()
diff --git a/Lib/test/test_xml_etree_c.py b/Lib/test/test_xml_etree_c.py
index 2ff118f..10416d2 100644
--- a/Lib/test/test_xml_etree_c.py
+++ b/Lib/test/test_xml_etree_c.py
@@ -1,10 +1,11 @@
# xml.etree test for cElementTree
from test import support
-from test.support import bigmemtest, _2G
+from test.support import import_fresh_module
import unittest
-cET = support.import_module('xml.etree.cElementTree')
+cET = import_fresh_module('xml.etree.ElementTree', fresh=['_elementtree'])
+cET_alias = import_fresh_module('xml.etree.cElementTree', fresh=['_elementtree', 'xml.etree'])
# cElementTree specific tests
@@ -13,10 +14,9 @@ def sanity():
r"""
Import sanity.
- >>> from xml.etree import cElementTree
-
Issue #6697.
+ >>> cElementTree = cET
>>> e = cElementTree.Element('a')
>>> getattr(e, '\uD800') # doctest: +ELLIPSIS
Traceback (most recent call last):
@@ -46,28 +46,37 @@ class MiscTests(unittest.TestCase):
finally:
data = None
+@unittest.skipUnless(cET, 'requires _elementtree')
+class TestAliasWorking(unittest.TestCase):
+ # Test that the cET alias module is alive
+ def test_alias_working(self):
+ e = cET_alias.Element('foo')
+ self.assertEqual(e.tag, 'foo')
+
+@unittest.skipUnless(cET, 'requires _elementtree')
+class TestAcceleratorImported(unittest.TestCase):
+ # Test that the C accelerator was imported, as expected
+ def test_correct_import_cET(self):
+ self.assertEqual(cET.SubElement.__module__, '_elementtree')
+
+ def test_correct_import_cET_alias(self):
+ self.assertEqual(cET_alias.SubElement.__module__, '_elementtree')
+
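import_fresh_module() is what makes it possible to hold both implementations in one process; a sketch of the two calls used here:

    from test.support import import_fresh_module

    # Pure-Python copy: the C accelerator is blocked during the import.
    py_impl = import_fresh_module('xml.etree.ElementTree',
                                  blocked=['_elementtree'])
    # Accelerated copy: _elementtree is freshly imported alongside it
    # (None when the accelerator is unavailable on this build).
    c_impl = import_fresh_module('xml.etree.ElementTree',
                                 fresh=['_elementtree'])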
def test_main():
from test import test_xml_etree, test_xml_etree_c
# Run the tests specific to the C implementation
support.run_doctest(test_xml_etree_c, verbosity=True)
+ support.run_unittest(
+ MiscTests,
+ TestAliasWorking,
+ TestAcceleratorImported
+ )
+
+ # Run the same test suite as the Python module
+ test_xml_etree.test_main(module=cET)
- support.run_unittest(MiscTests)
-
- # Assign the C implementation before running the doctests
- # Patch the __name__, to prevent confusion with the pure Python test
- pyET = test_xml_etree.ET
- py__name__ = test_xml_etree.__name__
- test_xml_etree.ET = cET
- if __name__ != '__main__':
- test_xml_etree.__name__ = __name__
- try:
- # Run the same test suite as xml.etree.ElementTree
- test_xml_etree.test_main(module_name='xml.etree.cElementTree')
- finally:
- test_xml_etree.ET = pyET
- test_xml_etree.__name__ = py__name__
if __name__ == '__main__':
test_main()
diff --git a/Lib/test/test_xmlrpc.py b/Lib/test/test_xmlrpc.py
index 3814191..afd8c51 100644
--- a/Lib/test/test_xmlrpc.py
+++ b/Lib/test/test_xmlrpc.py
@@ -24,6 +24,8 @@ alist = [{'astring': 'foo@bar.baz.spam',
'ashortlong': 2,
'anotherlist': ['.zyx.41'],
'abase64': xmlrpclib.Binary(b"my dog has fleas"),
+ 'b64bytes': b"my dog has fleas",
+ 'b64bytearray': bytearray(b"my dog has fleas"),
'boolean': False,
'unicode': '\u4000\u6000\u8000',
'ukey\u4000': 'regular value',
@@ -44,27 +46,54 @@ class XMLRPCTestCase(unittest.TestCase):
def test_dump_bare_datetime(self):
# This checks that an unwrapped datetime.date object can be handled
# by the marshalling code. This can't be done via test_dump_load()
- # since with use_datetime set to 1 the unmarshaller would create
+ # since with use_builtin_types enabled the unmarshaller would create
# datetime objects for the 'datetime[123]' keys as well
dt = datetime.datetime(2005, 2, 10, 11, 41, 23)
+ self.assertEqual(dt, xmlrpclib.DateTime('20050210T11:41:23'))
s = xmlrpclib.dumps((dt,))
- (newdt,), m = xmlrpclib.loads(s, use_datetime=1)
+
+ result, m = xmlrpclib.loads(s, use_builtin_types=True)
+ (newdt,) = result
self.assertEqual(newdt, dt)
- self.assertEqual(m, None)
+ self.assertIs(type(newdt), datetime.datetime)
+ self.assertIsNone(m)
+
+ result, m = xmlrpclib.loads(s, use_builtin_types=False)
+ (newdt,) = result
+ self.assertEqual(newdt, dt)
+ self.assertIs(type(newdt), xmlrpclib.DateTime)
+ self.assertIsNone(m)
+
+ result, m = xmlrpclib.loads(s, use_datetime=True)
+ (newdt,) = result
+ self.assertEqual(newdt, dt)
+ self.assertIs(type(newdt), datetime.datetime)
+ self.assertIsNone(m)
+
+ result, m = xmlrpclib.loads(s, use_datetime=False)
+ (newdt,) = result
+ self.assertEqual(newdt, dt)
+ self.assertIs(type(newdt), xmlrpclib.DateTime)
+ self.assertIsNone(m)
- (newdt,), m = xmlrpclib.loads(s, use_datetime=0)
- self.assertEqual(newdt, xmlrpclib.DateTime('20050210T11:41:23'))
def test_datetime_before_1900(self):
# same as before but with a date before 1900
dt = datetime.datetime(1, 2, 10, 11, 41, 23)
+ self.assertEqual(dt, xmlrpclib.DateTime('00010210T11:41:23'))
s = xmlrpclib.dumps((dt,))
- (newdt,), m = xmlrpclib.loads(s, use_datetime=1)
+
+ result, m = xmlrpclib.loads(s, use_builtin_types=True)
+ (newdt,) = result
self.assertEqual(newdt, dt)
- self.assertEqual(m, None)
+ self.assertIs(type(newdt), datetime.datetime)
+ self.assertIsNone(m)
- (newdt,), m = xmlrpclib.loads(s, use_datetime=0)
- self.assertEqual(newdt, xmlrpclib.DateTime('00010210T11:41:23'))
+ result, m = xmlrpclib.loads(s, use_builtin_types=False)
+ (newdt,) = result
+ self.assertEqual(newdt, dt)
+ self.assertIs(type(newdt), xmlrpclib.DateTime)
+ self.assertIsNone(m)
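Condensed into a sketch, the round trip these branches exercise looks like this:

    import datetime
    import xmlrpc.client as xmlrpclib

    payload = xmlrpclib.dumps((datetime.datetime(2005, 2, 10, 11, 41, 23),))
    # use_builtin_types=True decodes <dateTime.iso8601> values into
    # datetime.datetime (and <base64> into bytes) ...
    (value,), _ = xmlrpclib.loads(payload, use_builtin_types=True)
    assert type(value) is datetime.datetime
    # ... while the default keeps the xmlrpclib wrapper types.
    (value,), _ = xmlrpclib.loads(payload)
    assert type(value) is xmlrpclib.DateTime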
def test_bug_1164912 (self):
d = xmlrpclib.DateTime()
@@ -133,6 +162,25 @@ class XMLRPCTestCase(unittest.TestCase):
xmlrpclib.loads(strg)[0][0])
self.assertRaises(TypeError, xmlrpclib.dumps, (arg1,))
+ def test_dump_bytes(self):
+ sample = b"my dog has fleas"
+ self.assertEqual(sample, xmlrpclib.Binary(sample))
+ for type_ in bytes, bytearray, xmlrpclib.Binary:
+ value = type_(sample)
+ s = xmlrpclib.dumps((value,))
+
+ result, m = xmlrpclib.loads(s, use_builtin_types=True)
+ (newvalue,) = result
+ self.assertEqual(newvalue, sample)
+ self.assertIs(type(newvalue), bytes)
+ self.assertIsNone(m)
+
+ result, m = xmlrpclib.loads(s, use_builtin_types=False)
+ (newvalue,) = result
+ self.assertEqual(newvalue, sample)
+ self.assertIs(type(newvalue), xmlrpclib.Binary)
+ self.assertIsNone(m)
+
def test_get_host_info(self):
# see bug #3613, this raised a TypeError
transp = xmlrpc.client.Transport()
@@ -140,9 +188,6 @@ class XMLRPCTestCase(unittest.TestCase):
('host.tld',
[('Authorization', 'Basic dXNlcg==')], {}))
- def test_dump_bytes(self):
- self.assertRaises(TypeError, xmlrpclib.dumps, (b"my dog has fleas",))
-
def test_ssl_presence(self):
try:
import ssl
@@ -980,10 +1025,44 @@ class CGIHandlerTestCase(unittest.TestCase):
len(content))
+class UseBuiltinTypesTestCase(unittest.TestCase):
+
+ def test_use_builtin_types(self):
+ # SimpleXMLRPCDispatcher.__init__ accepts use_builtin_types, which
+ # makes all dispatch of binary data as bytes instances, and all
+ # dispatch of datetime argument as datetime.datetime instances.
+ self.log = []
+ expected_bytes = b"my dog has fleas"
+ expected_date = datetime.datetime(2008, 5, 26, 18, 25, 12)
+ marshaled = xmlrpclib.dumps((expected_bytes, expected_date), 'foobar')
+ def foobar(*args):
+ self.log.extend(args)
+ handler = xmlrpc.server.SimpleXMLRPCDispatcher(
+ allow_none=True, encoding=None, use_builtin_types=True)
+ handler.register_function(foobar)
+ handler._marshaled_dispatch(marshaled)
+ self.assertEqual(len(self.log), 2)
+ mybytes, mydate = self.log
+ self.assertEqual(self.log, [expected_bytes, expected_date])
+ self.assertIs(type(mydate), datetime.datetime)
+ self.assertIs(type(mybytes), bytes)
+
+ def test_cgihandler_has_use_builtin_types_flag(self):
+ handler = xmlrpc.server.CGIXMLRPCRequestHandler(use_builtin_types=True)
+ self.assertTrue(handler.use_builtin_types)
+
+ def test_xmlrpcserver_has_use_builtin_types_flag(self):
+ server = xmlrpc.server.SimpleXMLRPCServer(("localhost", 0),
+ use_builtin_types=True)
+ server.server_close()
+ self.assertTrue(server.use_builtin_types)
+
+
@support.reap_threads
def test_main():
xmlrpc_tests = [XMLRPCTestCase, HelperTestCase, DateTimeTestCase,
BinaryTestCase, FaultTestCase]
+ xmlrpc_tests.append(UseBuiltinTypesTestCase)
xmlrpc_tests.append(SimpleServerTestCase)
xmlrpc_tests.append(KeepaliveServerTestCase1)
xmlrpc_tests.append(KeepaliveServerTestCase2)
diff --git a/Lib/test/test_xmlrpc_net.py b/Lib/test/test_xmlrpc_net.py
index 9ed8f8d..9ba89b2 100644
--- a/Lib/test/test_xmlrpc_net.py
+++ b/Lib/test/test_xmlrpc_net.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python3
-import collections
+import collections.abc
import errno
import socket
import sys
@@ -49,7 +49,7 @@ class CurrentTimeTest(unittest.TestCase):
# Perform a minimal sanity check on the result, just to be sure
# the request means what we think it means.
- self.assertIsInstance(builders, collections.Sequence)
+ self.assertIsInstance(builders, collections.abc.Sequence)
self.assertTrue([x for x in builders if "3.x" in x], builders)
diff --git a/Lib/test/test_zipfile.py b/Lib/test/test_zipfile.py
index bb0d79a..0b3a694 100644
--- a/Lib/test/test_zipfile.py
+++ b/Lib/test/test_zipfile.py
@@ -1,9 +1,3 @@
-# We can test part of the module without zlib.
-try:
- import zlib
-except ImportError:
- zlib = None
-
import io
import os
import sys
@@ -19,7 +13,7 @@ from tempfile import TemporaryFile
from random import randint, random
from unittest import skipUnless
-from test.support import TESTFN, run_unittest, findfile, unlink
+from test.support import TESTFN, run_unittest, findfile, unlink, requires_zlib
TESTFN2 = TESTFN + "2"
TESTFNDIR = TESTFN + "d"
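requires_zlib replaces the per-module try/import dance; roughly (an approximation, not the exact test.support source) it amounts to:

    import unittest

    try:
        import zlib
    except ImportError:
        zlib = None

    # Skip the decorated test or class when the zlib module is missing.
    requires_zlib = unittest.skipUnless(zlib, 'requires zlib')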
@@ -269,44 +263,44 @@ class TestsWithSourceFile(unittest.TestCase):
for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
self.zip_iterlines_test(f, zipfile.ZIP_STORED)
- @skipUnless(zlib, "requires zlib")
+ @requires_zlib
def test_deflated(self):
for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
self.zip_test(f, zipfile.ZIP_DEFLATED)
- @skipUnless(zlib, "requires zlib")
+ @requires_zlib
def test_open_deflated(self):
for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
self.zip_open_test(f, zipfile.ZIP_DEFLATED)
- @skipUnless(zlib, "requires zlib")
+ @requires_zlib
def test_random_open_deflated(self):
for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
self.zip_random_open_test(f, zipfile.ZIP_DEFLATED)
- @skipUnless(zlib, "requires zlib")
+ @requires_zlib
def test_readline_read_deflated(self):
# Issue #7610: calls to readline() interleaved with calls to read().
for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
self.zip_readline_read_test(f, zipfile.ZIP_DEFLATED)
- @skipUnless(zlib, "requires zlib")
+ @requires_zlib
def test_readline_deflated(self):
for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
self.zip_readline_test(f, zipfile.ZIP_DEFLATED)
- @skipUnless(zlib, "requires zlib")
+ @requires_zlib
def test_readlines_deflated(self):
for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
self.zip_readlines_test(f, zipfile.ZIP_DEFLATED)
- @skipUnless(zlib, "requires zlib")
+ @requires_zlib
def test_iterlines_deflated(self):
for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
self.zip_iterlines_test(f, zipfile.ZIP_DEFLATED)
- @skipUnless(zlib, "requires zlib")
+ @requires_zlib
def test_low_compression(self):
"""Check for cases where compressed data is larger than original."""
# Create the ZIP archive
@@ -377,7 +371,7 @@ class TestsWithSourceFile(unittest.TestCase):
with open(TESTFN, "rb") as f:
self.assertEqual(zipfp.read(TESTFN), f.read())
- @skipUnless(zlib, "requires zlib")
+ @requires_zlib
def test_per_file_compression(self):
"""Check that files within a Zip archive can have different
compression options."""
@@ -446,19 +440,18 @@ class TestsWithSourceFile(unittest.TestCase):
# remove the test file subdirectories
shutil.rmtree(os.path.join(os.getcwd(), 'ziptest2dir'))
- def test_writestr_compression(self):
+ def test_writestr_compression_stored(self):
zipfp = zipfile.ZipFile(TESTFN2, "w")
zipfp.writestr("a.txt", "hello world", compress_type=zipfile.ZIP_STORED)
- if zlib:
- zipfp.writestr("b.txt", "hello world", compress_type=zipfile.ZIP_DEFLATED)
-
info = zipfp.getinfo('a.txt')
self.assertEqual(info.compress_type, zipfile.ZIP_STORED)
- if zlib:
- info = zipfp.getinfo('b.txt')
- self.assertEqual(info.compress_type, zipfile.ZIP_DEFLATED)
-
+ @requires_zlib
+ def test_writestr_compression_deflated(self):
+ zipfp = zipfile.ZipFile(TESTFN2, "w")
+ zipfp.writestr("b.txt", "hello world", compress_type=zipfile.ZIP_DEFLATED)
+ info = zipfp.getinfo('b.txt')
+ self.assertEqual(info.compress_type, zipfile.ZIP_DEFLATED)
def zip_test_writestr_permissions(self, f, compression):
# Make sure that writestr creates files with mode 0600,
@@ -514,7 +507,12 @@ class TestsWithSourceFile(unittest.TestCase):
self.assertRaises(ValueError, zipfp.write, TESTFN)
- @skipUnless(zlib, "requires zlib")
+ @requires_zlib
def test_unicode_filenames(self):
# bug #10801
fname = findfile('zip_cp437_header.zip')
@@ -623,7 +621,7 @@ class TestZip64InSmallFiles(unittest.TestCase):
for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
self.zip_test(f, zipfile.ZIP_STORED)
- @skipUnless(zlib, "requires zlib")
+ @requires_zlib
def test_deflated(self):
for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
self.zip_test(f, zipfile.ZIP_DEFLATED)
@@ -983,7 +981,7 @@ class OtherTests(unittest.TestCase):
def test_testzip_with_bad_crc_stored(self):
self.check_testzip_with_bad_crc(zipfile.ZIP_STORED)
- @skipUnless(zlib, "requires zlib")
+ @requires_zlib
def test_testzip_with_bad_crc_deflated(self):
self.check_testzip_with_bad_crc(zipfile.ZIP_DEFLATED)
@@ -1011,7 +1009,7 @@ class OtherTests(unittest.TestCase):
def test_read_with_bad_crc_stored(self):
self.check_read_with_bad_crc(zipfile.ZIP_STORED)
- @skipUnless(zlib, "requires zlib")
+ @requires_zlib
def test_read_with_bad_crc_deflated(self):
self.check_read_with_bad_crc(zipfile.ZIP_DEFLATED)
@@ -1031,7 +1029,7 @@ class OtherTests(unittest.TestCase):
def test_read_return_size_stored(self):
self.check_read_return_size(zipfile.ZIP_STORED)
- @skipUnless(zlib, "requires zlib")
+ @requires_zlib
def test_read_return_size_deflated(self):
self.check_read_return_size(zipfile.ZIP_DEFLATED)
@@ -1121,7 +1119,7 @@ class DecryptionTests(unittest.TestCase):
self.zip2.setpassword(b"perl")
self.assertRaises(RuntimeError, self.zip2.read, "zero")
- @skipUnless(zlib, "requires zlib")
+ @requires_zlib
def test_good_password(self):
self.zip.setpassword(b"python")
self.assertEqual(self.zip.read("test.txt"), self.plain)
@@ -1171,7 +1169,7 @@ class TestsWithRandomBinaryFiles(unittest.TestCase):
for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
self.zip_test(f, zipfile.ZIP_STORED)
- @skipUnless(zlib, "requires zlib")
+ @requires_zlib
def test_deflated(self):
for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
self.zip_test(f, zipfile.ZIP_DEFLATED)
@@ -1211,7 +1209,7 @@ class TestsWithRandomBinaryFiles(unittest.TestCase):
for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
self.zip_open_test(f, zipfile.ZIP_STORED)
- @skipUnless(zlib, "requires zlib")
+ @requires_zlib
def test_open_deflated(self):
for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
self.zip_open_test(f, zipfile.ZIP_DEFLATED)
@@ -1239,13 +1237,13 @@ class TestsWithRandomBinaryFiles(unittest.TestCase):
for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
self.zip_random_open_test(f, zipfile.ZIP_STORED)
- @skipUnless(zlib, "requires zlib")
+ @requires_zlib
def test_random_open_deflated(self):
for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
self.zip_random_open_test(f, zipfile.ZIP_DEFLATED)
-@skipUnless(zlib, "requires zlib")
+@requires_zlib
class TestsWithMultipleOpens(unittest.TestCase):
def setUp(self):
# Create the ZIP archive
@@ -1437,28 +1435,28 @@ class UniversalNewlineTests(unittest.TestCase):
for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
self.iterlines_test(f, zipfile.ZIP_STORED)
- @skipUnless(zlib, "requires zlib")
+ @requires_zlib
def test_read_deflated(self):
for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
self.read_test(f, zipfile.ZIP_DEFLATED)
- @skipUnless(zlib, "requires zlib")
+ @requires_zlib
def test_readline_read_deflated(self):
# Issue #7610: calls to readline() interleaved with calls to read().
for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
self.readline_read_test(f, zipfile.ZIP_DEFLATED)
- @skipUnless(zlib, "requires zlib")
+ @requires_zlib
def test_readline_deflated(self):
for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
self.readline_test(f, zipfile.ZIP_DEFLATED)
- @skipUnless(zlib, "requires zlib")
+ @requires_zlib
def test_readlines_deflated(self):
for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
self.readlines_test(f, zipfile.ZIP_DEFLATED)
- @skipUnless(zlib, "requires zlib")
+ @requires_zlib
def test_iterlines_deflated(self):
for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
self.iterlines_test(f, zipfile.ZIP_DEFLATED)
diff --git a/Lib/test/test_zipfile64.py b/Lib/test/test_zipfile64.py
index 0e7d73f..a8fb7ab 100644
--- a/Lib/test/test_zipfile64.py
+++ b/Lib/test/test_zipfile64.py
@@ -11,12 +11,6 @@ support.requires(
'test requires loads of disk-space bytes and a long time to run'
)
-# We can test part of the module without zlib.
-try:
- import zlib
-except ImportError:
- zlib = None
-
import zipfile, os, unittest
import time
import sys
@@ -24,7 +18,7 @@ import sys
from io import StringIO
from tempfile import TemporaryFile
-from test.support import TESTFN, run_unittest
+from test.support import TESTFN, run_unittest, requires_zlib
TESTFN2 = TESTFN + "2"
@@ -81,12 +75,12 @@ class TestsWithSourceFile(unittest.TestCase):
for f in TemporaryFile(), TESTFN2:
self.zipTest(f, zipfile.ZIP_STORED)
- if zlib:
- def testDeflated(self):
- # Try the temp file first. If we do TESTFN2 first, then it hogs
- # gigabytes of disk space for the duration of the test.
- for f in TemporaryFile(), TESTFN2:
- self.zipTest(f, zipfile.ZIP_DEFLATED)
+ @requires_zlib
+ def testDeflated(self):
+ # Try the temp file first. If we do TESTFN2 first, then it hogs
+ # gigabytes of disk space for the duration of the test.
+ for f in TemporaryFile(), TESTFN2:
+ self.zipTest(f, zipfile.ZIP_DEFLATED)
def tearDown(self):
for fname in TESTFN, TESTFN2:
diff --git a/Lib/test/test_zipimport.py b/Lib/test/test_zipimport.py
index df5ff9d..f7cb8b9 100644
--- a/Lib/test/test_zipimport.py
+++ b/Lib/test/test_zipimport.py
@@ -9,12 +9,6 @@ import unittest
from test import support
from test.test_importhooks import ImportHooksBaseTestCase, test_src, test_co
-# some tests can be ran even without zlib
-try:
- import zlib
-except ImportError:
- zlib = None
-
from zipfile import ZipFile, ZipInfo, ZIP_STORED, ZIP_DEFLATED
import zipimport
@@ -25,7 +19,7 @@ import io
from traceback import extract_tb, extract_stack, print_tb
raise_src = 'def do_raise(): raise TypeError\n'
-def make_pyc(co, mtime):
+def make_pyc(co, mtime, size):
data = marshal.dumps(co)
if type(mtime) is type(0.0):
# Mac mtimes need a bit of special casing
@@ -33,14 +27,14 @@ def make_pyc(co, mtime):
mtime = int(mtime)
else:
mtime = int(-0x100000000 + int(mtime))
- pyc = imp.get_magic() + struct.pack("<i", int(mtime)) + data
+ pyc = imp.get_magic() + struct.pack("<ii", int(mtime), size & 0xFFFFFFFF) + data
return pyc
def module_path_to_dotted_name(path):
return path.replace(os.sep, '.')
NOW = time.time()
-test_pyc = make_pyc(test_co, NOW)
+test_pyc = make_pyc(test_co, NOW, len(test_src))
TESTMOD = "ziptestmodule"
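The widened header make_pyc now writes — magic, mtime, then source size — can be picked apart the same way; a sketch of the 3.3-era layout (parse_pyc is a hypothetical helper, not part of the test):

    import imp, marshal, struct, time

    def parse_pyc(data):
        # 4 bytes magic + 4 bytes mtime + 4 bytes source size, followed
        # by the marshalled code object.
        assert data[:4] == imp.get_magic()
        mtime, size = struct.unpack('<ii', data[4:12])
        return mtime, size, marshal.loads(data[12:])

    code = compile('x = 1', '<test>', 'exec')
    pyc = (imp.get_magic() + struct.pack('<ii', int(time.time()), 5)
           + marshal.dumps(code))
    assert parse_pyc(pyc)[1] == 5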
@@ -211,6 +205,10 @@ class UncompressedZipImportTestCase(ImportHooksBaseTestCase):
mod = zi.load_module(TESTPACK)
self.assertEqual(zi.get_filename(TESTPACK), mod.__file__)
+ existing_pack_path = __import__(TESTPACK).__path__[0]
+ expected_path = os.path.join(TEMP_ZIP, TESTPACK)
+ self.assertEqual(existing_pack_path, expected_path)
+
self.assertEqual(zi.is_package(packdir + '__init__'), False)
self.assertEqual(zi.is_package(packdir + TESTPACK2), True)
self.assertEqual(zi.is_package(packdir2 + TESTMOD), False)
@@ -299,7 +297,7 @@ class UncompressedZipImportTestCase(ImportHooksBaseTestCase):
return __file__
if __loader__.get_data("some.data") != b"some data":
raise AssertionError("bad data")\n"""
- pyc = make_pyc(compile(src, "<???>", "exec"), NOW)
+ pyc = make_pyc(compile(src, "<???>", "exec"), NOW, len(src))
files = {TESTMOD + pyc_ext: (NOW, pyc),
"some.data": (NOW, "some data")}
self.doTest(pyc_ext, files, TESTMOD)
@@ -319,7 +317,7 @@ class UncompressedZipImportTestCase(ImportHooksBaseTestCase):
self.doTest(".py", files, TESTMOD, call=self.assertModuleSource)
def testGetCompiledSource(self):
- pyc = make_pyc(compile(test_src, "<???>", "exec"), NOW)
+ pyc = make_pyc(compile(test_src, "<???>", "exec"), NOW, len(test_src))
files = {TESTMOD + ".py": (NOW, test_src),
TESTMOD + pyc_ext: (NOW, pyc)}
self.doTest(pyc_ext, files, TESTMOD, call=self.assertModuleSource)
@@ -392,7 +390,7 @@ class UncompressedZipImportTestCase(ImportHooksBaseTestCase):
os.remove(filename)
-@unittest.skipUnless(zlib, "requires zlib")
+@support.requires_zlib
class CompressedZipImportTestCase(UncompressedZipImportTestCase):
compression = ZIP_DEFLATED
@@ -417,7 +415,7 @@ class BadFileZipImportTestCase(unittest.TestCase):
def testEmptyFile(self):
support.unlink(TESTMOD)
- open(TESTMOD, 'w+').close()
+ support.create_empty_file(TESTMOD)
self.assertZipFailure(TESTMOD)
def testFileUnreadable(self):
diff --git a/Lib/test/test_zipimport_support.py b/Lib/test/test_zipimport_support.py
index a558d7d..0c93a8c 100644
--- a/Lib/test/test_zipimport_support.py
+++ b/Lib/test/test_zipimport_support.py
@@ -156,20 +156,19 @@ class ZipSupportTests(unittest.TestCase):
test_zipped_doctest.test_DocTestRunner.verbose_flag,
test_zipped_doctest.test_Example,
test_zipped_doctest.test_debug,
- test_zipped_doctest.test_pdb_set_trace,
- test_zipped_doctest.test_pdb_set_trace_nested,
test_zipped_doctest.test_testsource,
test_zipped_doctest.test_trailing_space_in_test,
test_zipped_doctest.test_DocTestSuite,
test_zipped_doctest.test_DocTestFinder,
]
- # These remaining tests are the ones which need access
+ # These tests are the ones which need access
# to the data files, so we don't run them
fail_due_to_missing_data_files = [
test_zipped_doctest.test_DocFileSuite,
test_zipped_doctest.test_testfile,
test_zipped_doctest.test_unittest_reportflags,
]
+
for obj in known_good_tests:
_run_object_doctest(obj, test_zipped_doctest)
finally:
diff --git a/Lib/test/test_zlib.py b/Lib/test/test_zlib.py
index 60081e2..3c982c6 100644
--- a/Lib/test/test_zlib.py
+++ b/Lib/test/test_zlib.py
@@ -7,10 +7,16 @@ from test.support import bigmemtest, _1G, _4G
zlib = support.import_module('zlib')
-try:
- import mmap
-except ImportError:
- mmap = None
+
+class VersionTestCase(unittest.TestCase):
+
+ def test_library_version(self):
+ # Test that the major version of the actual library in use matches the
+ # major version that we were compiled against. We can't guarantee that
+ # the minor versions will match (even on the machine on which the module
+ # was compiled), and the API is stable between minor versions, so
+ # testing only the major versions avoids spurious failures.
+ self.assertEqual(zlib.ZLIB_RUNTIME_VERSION[0], zlib.ZLIB_VERSION[0])
class ChecksumTestCase(unittest.TestCase):
@@ -173,10 +179,8 @@ class CompressTestCase(BaseCompressTestCase, unittest.TestCase):
def test_big_decompress_buffer(self, size):
self.check_big_decompress_buffer(size, zlib.decompress)
- @bigmemtest(size=_4G + 100, memuse=1)
+ @bigmemtest(size=_4G + 100, memuse=1, dry_run=False)
def test_length_overflow(self, size):
- if size < _4G + 100:
- self.skipTest("not enough free memory, need at least 4 GB")
data = b'x' * size
try:
self.assertRaises(OverflowError, zlib.compress, data, 1)
@@ -434,6 +438,26 @@ class CompressObjectTestCase(BaseCompressTestCase, unittest.TestCase):
y += dco.flush()
self.assertEqual(y, b'foo')
+ def test_decompress_eof(self):
+ x = b'x\x9cK\xcb\xcf\x07\x00\x02\x82\x01E' # 'foo'
+ dco = zlib.decompressobj()
+ self.assertFalse(dco.eof)
+ dco.decompress(x[:-5])
+ self.assertFalse(dco.eof)
+ dco.decompress(x[-5:])
+ self.assertTrue(dco.eof)
+ dco.flush()
+ self.assertTrue(dco.eof)
+
+ def test_decompress_eof_incomplete_stream(self):
+ x = b'x\x9cK\xcb\xcf\x07\x00\x02\x82\x01E' # 'foo'
+ dco = zlib.decompressobj()
+ self.assertFalse(dco.eof)
+ dco.decompress(x[:-5])
+ self.assertFalse(dco.eof)
+ dco.flush()
+ self.assertFalse(dco.eof)
+
if hasattr(zlib.compressobj(), "copy"):
def test_compresscopy(self):
# Test copying a compression object
@@ -510,10 +534,8 @@ class CompressObjectTestCase(BaseCompressTestCase, unittest.TestCase):
decompress = lambda s: d.decompress(s) + d.flush()
self.check_big_decompress_buffer(size, decompress)
- @bigmemtest(size=_4G + 100, memuse=1)
+ @bigmemtest(size=_4G + 100, memuse=1, dry_run=False)
def test_length_overflow(self, size):
- if size < _4G + 100:
- self.skipTest("not enough free memory, need at least 4 GB")
data = b'x' * size
c = zlib.compressobj(1)
d = zlib.decompressobj()
@@ -614,6 +636,7 @@ LAERTES
def test_main():
support.run_unittest(
+ VersionTestCase,
ChecksumTestCase,
ChecksumBigBufferTestCase,
ExceptionTestCase,
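The added tests exercise the new eof attribute on decompression objects: it becomes true only once the complete DEFLATE stream has been fed, and flushing an incomplete stream leaves it false. A quick standalone demonstration:

    import zlib

    compressed = zlib.compress(b"foo")
    dco = zlib.decompressobj()
    dco.decompress(compressed[:-5])
    assert not dco.eof               # stream not finished yet
    dco.decompress(compressed[-5:])
    assert dco.eof                   # end-of-stream marker consumed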
diff --git a/Lib/test/threaded_import_hangers.py b/Lib/test/threaded_import_hangers.py
index adf03e3..5484e60 100644
--- a/Lib/test/threaded_import_hangers.py
+++ b/Lib/test/threaded_import_hangers.py
@@ -35,8 +35,11 @@ for name, func, args in [
("os.path.abspath", os.path.abspath, ('.',)),
]:
- t = Worker(func, args)
- t.start()
- t.join(TIMEOUT)
- if t.is_alive():
- errors.append("%s appeared to hang" % name)
+ try:
+ t = Worker(func, args)
+ t.start()
+ t.join(TIMEOUT)
+ if t.is_alive():
+ errors.append("%s appeared to hang" % name)
+ finally:
+ del t
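The try/finally guarantees the Worker reference is dropped even if start() or join() raises, so no half-started thread object lingers in the module namespace. The same pattern with a plain threading.Thread (appears_to_hang and TIMEOUT are illustrative names, not part of the patch):

    import threading

    TIMEOUT = 10  # seconds

    def appears_to_hang(func, *args):
        t = threading.Thread(target=func, args=args)
        try:
            t.start()
            t.join(TIMEOUT)
            return t.is_alive()      # still running after the timeout
        finally:
            del t                    # drop the reference promptly

    print(appears_to_hang(sum, [1, 2, 3]))   # False: sum returns at once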
diff --git a/Lib/test/tokenize_tests.txt b/Lib/test/tokenize_tests.txt
index 06c83b0..2c5fb10 100644
--- a/Lib/test/tokenize_tests.txt
+++ b/Lib/test/tokenize_tests.txt
@@ -114,8 +114,12 @@ x = b'abc' + B'ABC'
y = b"abc" + B"ABC"
x = br'abc' + Br'ABC' + bR'ABC' + BR'ABC'
y = br"abc" + Br"ABC" + bR"ABC" + BR"ABC"
+x = rb'abc' + rB'ABC' + Rb'ABC' + RB'ABC'
+y = rb"abc" + rB"ABC" + Rb"ABC" + RB"ABC"
x = br'\\' + BR'\\'
+x = rb'\\' + RB'\\'
x = br'\'' + ''
+x = rb'\'' + ''
y = br'''
foo bar \\
baz''' + BR'''
@@ -124,6 +128,10 @@ y = Br"""foo
bar \\ baz
""" + bR'''spam
'''
+y = rB"""foo
+bar \\ baz
+""" + Rb'''spam
+'''
# Indentation
if 1:
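The new lines exercise the raw-bytes string prefix with the two letters in either order and either case, accepted from Python 3.3 onward; every spelling denotes the same bytes object:

    assert rb'abc' == Rb'abc' == rB'abc' == RB'abc' == br'abc' == b'abc'
    assert rb'\\' == br'\\' == b'\\\\'   # raw: two literal backslash bytes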
diff --git a/Lib/textwrap.py b/Lib/textwrap.py
index dfb4005..0aeba3f 100644
--- a/Lib/textwrap.py
+++ b/Lib/textwrap.py
@@ -5,7 +5,7 @@
# Copyright (C) 2002, 2003 Python Software Foundation.
# Written by Greg Ward <gward@python.net>
-import string, re
+import re
__all__ = ['TextWrapper', 'wrap', 'fill', 'dedent']
diff --git a/Lib/threading.py b/Lib/threading.py
index 69b7763..197dec4 100644
--- a/Lib/threading.py
+++ b/Lib/threading.py
@@ -3,7 +3,7 @@
import sys as _sys
import _thread
-from time import time as _time, sleep as _sleep
+from time import steady as _time, sleep as _sleep
from traceback import format_exc as _format_exc
from _weakrefset import WeakSet
@@ -19,12 +19,12 @@ from _weakrefset import WeakSet
__all__ = ['active_count', 'Condition', 'current_thread', 'enumerate', 'Event',
'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', 'Thread', 'Barrier',
- 'Timer', 'setprofile', 'settrace', 'local', 'stack_size']
+ 'Timer', 'ThreadError', 'setprofile', 'settrace', 'local', 'stack_size']
# Rename some stuff so "from threading import *" is safe
_start_new_thread = _thread.start_new_thread
_allocate_lock = _thread.allocate_lock
-_get_ident = _thread.get_ident
+get_ident = _thread.get_ident
ThreadError = _thread.error
try:
_CRLock = _thread.RLock
@@ -34,40 +34,6 @@ TIMEOUT_MAX = _thread.TIMEOUT_MAX
del _thread
-# Debug support (adapted from ihooks.py).
-
-_VERBOSE = False
-
-if __debug__:
-
- class _Verbose(object):
-
- def __init__(self, verbose=None):
- if verbose is None:
- verbose = _VERBOSE
- self._verbose = verbose
-
- def _note(self, format, *args):
- if self._verbose:
- format = format % args
- # Issue #4188: calling current_thread() can incur an infinite
- # recursion if it has to create a DummyThread on the fly.
- ident = _get_ident()
- try:
- name = _active[ident].name
- except KeyError:
- name = "<OS thread %d>" % ident
- format = "%s: %s\n" % (name, format)
- _sys.stderr.write(format)
-
-else:
- # Disable this when using "python -O"
- class _Verbose(object):
- def __init__(self, verbose=None):
- pass
- def _note(self, *args):
- pass
-
# Support for profile and trace hooks
_profile_hook = None
@@ -85,17 +51,14 @@ def settrace(func):
Lock = _allocate_lock
-def RLock(verbose=None, *args, **kwargs):
- if verbose is None:
- verbose = _VERBOSE
- if (__debug__ and verbose) or _CRLock is None:
- return _PyRLock(verbose, *args, **kwargs)
+def RLock(*args, **kwargs):
+ if _CRLock is None:
+ return _PyRLock(*args, **kwargs)
return _CRLock(*args, **kwargs)
-class _RLock(_Verbose):
+class _RLock:
- def __init__(self, verbose=None):
- _Verbose.__init__(self, verbose)
+ def __init__(self):
self._block = _allocate_lock()
self._owner = None
self._count = 0
@@ -110,37 +73,25 @@ class _RLock(_Verbose):
self.__class__.__name__, owner, self._count)
def acquire(self, blocking=True, timeout=-1):
- me = _get_ident()
+ me = get_ident()
if self._owner == me:
self._count = self._count + 1
- if __debug__:
- self._note("%s.acquire(%s): recursive success", self, blocking)
return 1
rc = self._block.acquire(blocking, timeout)
if rc:
self._owner = me
self._count = 1
- if __debug__:
- self._note("%s.acquire(%s): initial success", self, blocking)
- else:
- if __debug__:
- self._note("%s.acquire(%s): failure", self, blocking)
return rc
__enter__ = acquire
def release(self):
- if self._owner != _get_ident():
+ if self._owner != get_ident():
raise RuntimeError("cannot release un-acquired lock")
self._count = count = self._count - 1
if not count:
self._owner = None
self._block.release()
- if __debug__:
- self._note("%s.release(): final release", self)
- else:
- if __debug__:
- self._note("%s.release(): non-final release", self)
def __exit__(self, t, v, tb):
self.release()
@@ -150,12 +101,10 @@ class _RLock(_Verbose):
def _acquire_restore(self, state):
self._block.acquire()
self._count, self._owner = state
- if __debug__:
- self._note("%s._acquire_restore()", self)
def _release_save(self):
- if __debug__:
- self._note("%s._release_save()", self)
+ if self._count == 0:
+ raise RuntimeError("cannot release un-acquired lock")
count = self._count
self._count = 0
owner = self._owner
@@ -164,18 +113,14 @@ class _RLock(_Verbose):
return (count, owner)
def _is_owned(self):
- return self._owner == _get_ident()
+ return self._owner == get_ident()
_PyRLock = _RLock
-def Condition(*args, **kwargs):
- return _Condition(*args, **kwargs)
-
-class _Condition(_Verbose):
+class Condition:
- def __init__(self, lock=None, verbose=None):
- _Verbose.__init__(self, verbose)
+ def __init__(self, lock=None):
if lock is None:
lock = RLock()
self._lock = lock
@@ -234,23 +179,16 @@ class _Condition(_Verbose):
if timeout is None:
waiter.acquire()
gotit = True
- if __debug__:
- self._note("%s.wait(): got it", self)
else:
if timeout > 0:
gotit = waiter.acquire(True, timeout)
else:
gotit = waiter.acquire(False)
if not gotit:
- if __debug__:
- self._note("%s.wait(%s): timed out", self, timeout)
try:
self._waiters.remove(waiter)
except ValueError:
pass
- else:
- if __debug__:
- self._note("%s.wait(%s): got it", self, timeout)
return gotit
finally:
self._acquire_restore(saved_state)
@@ -266,19 +204,9 @@ class _Condition(_Verbose):
else:
waittime = endtime - _time()
if waittime <= 0:
- if __debug__:
- self._note("%s.wait_for(%r, %r): Timed out.",
- self, predicate, timeout)
break
- if __debug__:
- self._note("%s.wait_for(%r, %r): Waiting with timeout=%s.",
- self, predicate, timeout, waittime)
self.wait(waittime)
result = predicate()
- else:
- if __debug__:
- self._note("%s.wait_for(%r, %r): Success.",
- self, predicate, timeout)
return result
def notify(self, n=1):
@@ -287,11 +215,7 @@ class _Condition(_Verbose):
__waiters = self._waiters
waiters = __waiters[:n]
if not waiters:
- if __debug__:
- self._note("%s.notify(): no waiters", self)
return
- self._note("%s.notify(): notifying %d waiter%s", self, n,
- n!=1 and "s" or "")
for waiter in waiters:
waiter.release()
try:
@@ -305,17 +229,13 @@ class _Condition(_Verbose):
notifyAll = notify_all
-def Semaphore(*args, **kwargs):
- return _Semaphore(*args, **kwargs)
-
-class _Semaphore(_Verbose):
+class Semaphore:
# After Tim Peters' semaphore class, but not quite the same (no maximum)
- def __init__(self, value=1, verbose=None):
+ def __init__(self, value=1):
if value < 0:
raise ValueError("semaphore initial value must be >= 0")
- _Verbose.__init__(self, verbose)
self._cond = Condition(Lock())
self._value = value
@@ -328,9 +248,6 @@ class _Semaphore(_Verbose):
while self._value == 0:
if not blocking:
break
- if __debug__:
- self._note("%s.acquire(%s): blocked waiting, value=%s",
- self, blocking, self._value)
if timeout is not None:
if endtime is None:
endtime = _time() + timeout
@@ -341,9 +258,6 @@ class _Semaphore(_Verbose):
self._cond.wait(timeout)
else:
self._value = self._value - 1
- if __debug__:
- self._note("%s.acquire: success, value=%s",
- self, self._value)
rc = True
self._cond.release()
return rc
@@ -353,9 +267,6 @@ class _Semaphore(_Verbose):
def release(self):
self._cond.acquire()
self._value = self._value + 1
- if __debug__:
- self._note("%s.release: success, value=%s",
- self, self._value)
self._cond.notify()
self._cond.release()
@@ -363,30 +274,23 @@ class _Semaphore(_Verbose):
self.release()
-def BoundedSemaphore(*args, **kwargs):
- return _BoundedSemaphore(*args, **kwargs)
-
-class _BoundedSemaphore(_Semaphore):
+class BoundedSemaphore(Semaphore):
"""Semaphore that checks that # releases is <= # acquires"""
- def __init__(self, value=1, verbose=None):
- _Semaphore.__init__(self, value, verbose)
+ def __init__(self, value=1):
+ Semaphore.__init__(self, value)
self._initial_value = value
def release(self):
if self._value >= self._initial_value:
raise ValueError("Semaphore released too many times")
- return _Semaphore.release(self)
+ return Semaphore.release(self)
-def Event(*args, **kwargs):
- return _Event(*args, **kwargs)
-
-class _Event(_Verbose):
+class Event:
# After Tim Peters' event class (without is_posted())
- def __init__(self, verbose=None):
- _Verbose.__init__(self, verbose)
+ def __init__(self):
self._cond = Condition(Lock())
self._flag = False
@@ -436,13 +340,13 @@ class _Event(_Verbose):
# since the previous cycle. In addition, a 'resetting' state exists which is
# similar to 'draining' except that threads leave with a BrokenBarrierError,
# and a 'broken' state in which all threads get the exception.
-class Barrier(_Verbose):
+class Barrier:
"""
Barrier. Useful for synchronizing a fixed number of threads
at known synchronization points. Threads block on 'wait()' and are
simultaneously awoken once they have all made that call.
"""
- def __init__(self, parties, action=None, timeout=None, verbose=None):
+ def __init__(self, parties, action=None, timeout=None):
"""
Create a barrier, initialised to 'parties' threads.
'action' is a callable which, when supplied, will be called
@@ -451,7 +355,6 @@ class Barrier(_Verbose):
If a 'timeout' is provided, it is used as the default for
all subsequent 'wait()' calls.
"""
- _Verbose.__init__(self, verbose)
self._cond = Condition(Lock())
self._action = action
self._timeout = timeout
@@ -612,7 +515,7 @@ _dangling = WeakSet()
# Main class for threads
-class Thread(_Verbose):
+class Thread:
__initialized = False
# Need to store a reference to sys.exc_info for printing
@@ -625,16 +528,18 @@ class Thread(_Verbose):
#XXX __exc_clear = _sys.exc_clear
def __init__(self, group=None, target=None, name=None,
- args=(), kwargs=None, verbose=None):
+ args=(), kwargs=None, *, daemon=None):
assert group is None, "group argument must be None for now"
- _Verbose.__init__(self, verbose)
if kwargs is None:
kwargs = {}
self._target = target
self._name = str(name or _newname())
self._args = args
self._kwargs = kwargs
- self._daemonic = self._set_daemon()
+ if daemon is not None:
+ self._daemonic = daemon
+ else:
+ self._daemonic = current_thread().daemon
self._ident = None
self._started = Event()
self._stopped = False
@@ -652,10 +557,6 @@ class Thread(_Verbose):
self._block.__init__()
self._started._reset_internal_locks()
- def _set_daemon(self):
- # Overridden in _MainThread and _DummyThread
- return current_thread().daemon
-
def __repr__(self):
assert self._initialized, "Thread.__init__() was not called"
status = "initial"
@@ -675,8 +576,6 @@ class Thread(_Verbose):
if self._started.is_set():
raise RuntimeError("threads can only be started once")
- if __debug__:
- self._note("%s.start(): starting thread", self)
with _active_limbo_lock:
_limbo[self] = self
try:
@@ -717,7 +616,7 @@ class Thread(_Verbose):
raise
def _set_ident(self):
- self._ident = _get_ident()
+ self._ident = get_ident()
def _bootstrap_inner(self):
try:
@@ -726,24 +625,17 @@ class Thread(_Verbose):
with _active_limbo_lock:
_active[self._ident] = self
del _limbo[self]
- if __debug__:
- self._note("%s._bootstrap(): thread started", self)
if _trace_hook:
- self._note("%s._bootstrap(): registering trace hook", self)
_sys.settrace(_trace_hook)
if _profile_hook:
- self._note("%s._bootstrap(): registering profile hook", self)
_sys.setprofile(_profile_hook)
try:
self.run()
except SystemExit:
- if __debug__:
- self._note("%s._bootstrap(): raised SystemExit", self)
+ pass
except:
- if __debug__:
- self._note("%s._bootstrap(): unhandled exception", self)
# If sys.stderr is no more (most likely from interpreter
# shutdown) use self._stderr. Otherwise still use sys (as in
# _sys) in case sys.stderr was redefined since the creation of
@@ -774,9 +666,6 @@ class Thread(_Verbose):
# hog; deleting everything else is just for thoroughness
finally:
del exc_type, exc_value, exc_tb
- else:
- if __debug__:
- self._note("%s._bootstrap(): normal return", self)
finally:
# Prevent a race in
# test_threading.test_no_refcycle_through_target when
@@ -790,7 +679,7 @@ class Thread(_Verbose):
try:
# We don't call self._delete() because it also
# grabs _active_limbo_lock.
- del _active[_get_ident()]
+ del _active[get_ident()]
except:
pass
@@ -826,7 +715,7 @@ class Thread(_Verbose):
try:
with _active_limbo_lock:
- del _active[_get_ident()]
+ del _active[get_ident()]
# There must not be any python code between the previous line
# and after the lock is released. Otherwise a tracing function
# could try to acquire the lock again in the same thread, (in
@@ -843,29 +732,18 @@ class Thread(_Verbose):
if self is current_thread():
raise RuntimeError("cannot join current thread")
- if __debug__:
- if not self._stopped:
- self._note("%s.join(): waiting until thread stops", self)
-
self._block.acquire()
try:
if timeout is None:
while not self._stopped:
self._block.wait()
- if __debug__:
- self._note("%s.join(): thread stopped", self)
else:
deadline = _time() + timeout
while not self._stopped:
delay = deadline - _time()
if delay <= 0:
- if __debug__:
- self._note("%s.join(): timed out", self)
break
self._block.wait(delay)
- else:
- if __debug__:
- self._note("%s.join(): thread stopped", self)
finally:
self._block.release()
@@ -917,10 +795,7 @@ class Thread(_Verbose):
# The timer class was contributed by Itamar Shtull-Trauring
-def Timer(*args, **kwargs):
- return _Timer(*args, **kwargs)
-
-class _Timer(Thread):
+class Timer(Thread):
"""Call a function after a specified number of seconds:
t = Timer(30.0, f, args=[], kwargs={})
@@ -952,26 +827,18 @@ class _Timer(Thread):
class _MainThread(Thread):
def __init__(self):
- Thread.__init__(self, name="MainThread")
+ Thread.__init__(self, name="MainThread", daemon=False)
self._started.set()
self._set_ident()
with _active_limbo_lock:
_active[self._ident] = self
- def _set_daemon(self):
- return False
-
def _exitfunc(self):
self._stop()
t = _pickSomeNonDaemonThread()
- if t:
- if __debug__:
- self._note("%s: waiting for other threads", self)
while t:
t.join()
t = _pickSomeNonDaemonThread()
- if __debug__:
- self._note("%s: exiting", self)
self._delete()
def _pickSomeNonDaemonThread():
@@ -992,7 +859,7 @@ def _pickSomeNonDaemonThread():
class _DummyThread(Thread):
def __init__(self):
- Thread.__init__(self, name=_newname("Dummy-%d"))
+ Thread.__init__(self, name=_newname("Dummy-%d"), daemon=True)
# Thread._block consumes an OS-level locking primitive, which
# can never be used by a _DummyThread. Since a _DummyThread
@@ -1004,9 +871,6 @@ class _DummyThread(Thread):
with _active_limbo_lock:
_active[self._ident] = self
- def _set_daemon(self):
- return True
-
def join(self, timeout=None):
assert False, "cannot join a dummy thread"
@@ -1015,9 +879,8 @@ class _DummyThread(Thread):
def current_thread():
try:
- return _active[_get_ident()]
+ return _active[get_ident()]
except KeyError:
- ##print "current_thread(): no current thread for", _get_ident()
return _DummyThread()
currentThread = current_thread
@@ -1074,7 +937,7 @@ def _after_fork():
if thread is current:
# There is only one active thread. We reset the ident to
# its new value since it can have changed.
- ident = _get_ident()
+ ident = get_ident()
thread._ident = ident
new_active[ident] = thread
else:
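Taken together, the threading changes delete the _Verbose debug plumbing, promote the Condition, Semaphore, BoundedSemaphore, Event and Timer factory functions to real classes, expose get_ident() as a public name, and add a keyword-only daemon argument to Thread. A short sketch of the two user-visible additions:

    import threading

    def work():
        print("running in thread", threading.get_ident())  # now public

    # daemon is settable at construction time, not only via t.daemon = True
    t = threading.Thread(target=work, daemon=True)
    t.start()
    t.join()

    # Event is a class now, so it can be subclassed directly
    class LoggedEvent(threading.Event):
        def set(self):
            print("event set")
            super().set()

    e = LoggedEvent()
    e.set()
    assert e.is_set()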
diff --git a/Lib/timeit.py b/Lib/timeit.py
index 1ae59e0..fdc0bbb 100644
--- a/Lib/timeit.py
+++ b/Lib/timeit.py
@@ -79,10 +79,10 @@ else:
# being indented 8 spaces.
template = """
def inner(_it, _timer):
- %(setup)s
+ {setup}
_t0 = _timer()
for _i in _it:
- %(stmt)s
+ {stmt}
_t1 = _timer()
return _t1 - _t0
"""
@@ -126,9 +126,9 @@ class Timer:
stmt = reindent(stmt, 8)
if isinstance(setup, str):
setup = reindent(setup, 4)
- src = template % {'stmt': stmt, 'setup': setup}
+ src = template.format(stmt=stmt, setup=setup)
elif callable(setup):
- src = template % {'stmt': stmt, 'setup': '_setup()'}
+ src = template.format(stmt=stmt, setup='_setup()')
ns['_setup'] = setup
else:
raise ValueError("setup is neither a string nor callable")
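The inner-function template now uses str.format() placeholders instead of %-interpolation; the statement and setup strings are substituted as plain values either way, so Timer behaves as before:

    import timeit

    t = timeit.Timer(stmt="total = sum(range(100))", setup="pass")
    print(t.timeit(number=1000))     # seconds for 1000 iterations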
diff --git a/Lib/tkinter/__init__.py b/Lib/tkinter/__init__.py
index 8af5065..ef082bf 100644
--- a/Lib/tkinter/__init__.py
+++ b/Lib/tkinter/__init__.py
@@ -30,8 +30,6 @@ button.pack(side=BOTTOM)
tk.mainloop()
"""
-__version__ = "$Revision$"
-
import sys
if sys.platform == "win32":
# Attempt to configure Tcl/Tk without requiring PATH
@@ -157,6 +155,7 @@ class Variable:
Subclasses StringVar, IntVar, DoubleVar, BooleanVar are specializations
that constrain the type of the value returned from get()."""
_default = ""
+ _tk = None
def __init__(self, master=None, value=None, name=None):
"""Construct a variable
@@ -167,6 +166,11 @@ class Variable:
If NAME matches an existing variable and VALUE is omitted
then the existing value is retained.
"""
+ # check for type of NAME parameter to override weird error message
+ # raised from Modules/_tkinter.c:SetVar like:
+ # TypeError: setvar() takes exactly 3 arguments (2 given)
+ if name is not None and not isinstance(name, str):
+ raise TypeError("name must be a string")
global _varnum
if not master:
master = _default_root
@@ -178,18 +182,21 @@ class Variable:
self._name = 'PY_VAR' + repr(_varnum)
_varnum += 1
if value is not None:
- self.set(value)
+ self.initialize(value)
elif not self._tk.call("info", "exists", self._name):
- self.set(self._default)
+ self.initialize(self._default)
def __del__(self):
"""Unset the variable in Tcl."""
- self._tk.globalunsetvar(self._name)
+ if (self._tk is not None and self._tk.call("info", "exists",
+ self._name)):
+ self._tk.globalunsetvar(self._name)
def __str__(self):
"""Return the name of the variable in Tcl."""
return self._name
def set(self, value):
"""Set the variable to VALUE."""
return self._tk.globalsetvar(self._name, value)
+ initialize = set
def get(self):
"""Return value of variable."""
return self._tk.globalgetvar(self._name)
@@ -264,12 +271,6 @@ class IntVar(Variable):
"""
Variable.__init__(self, master, value, name)
- def set(self, value):
- """Set the variable to value, converting booleans to integers."""
- if isinstance(value, bool):
- value = int(value)
- return Variable.set(self, value)
-
def get(self):
"""Return the value of the variable as an integer."""
return getint(self._tk.globalgetvar(self._name))
@@ -310,7 +311,10 @@ class BooleanVar(Variable):
def get(self):
"""Return the value of the variable as a bool."""
- return self._tk.getboolean(self._tk.globalgetvar(self._name))
+ try:
+ return self._tk.getboolean(self._tk.globalgetvar(self._name))
+ except TclError:
+ raise ValueError("invalid literal for getboolean()")
def mainloop(n=0):
"""Run the main loop of Tcl."""
@@ -322,7 +326,10 @@ getdouble = float
def getboolean(s):
"""Convert true and false to integer values 1 and 0."""
- return _default_root.tk.getboolean(s)
+ try:
+ return _default_root.tk.getboolean(s)
+ except TclError:
+ raise ValueError("invalid literal for getboolean()")
# Methods defined on both toplevel and interior widgets
class Misc:
@@ -412,7 +419,10 @@ class Misc:
getdouble = float
def getboolean(self, s):
"""Return a boolean value for Tcl boolean values true and false given as parameter."""
- return self.tk.getboolean(s)
+ try:
+ return self.tk.getboolean(s)
+ except TclError:
+ raise ValueError("invalid literal for getboolean()")
def focus_set(self):
"""Direct input focus to this widget.
@@ -1161,7 +1171,6 @@ class Misc:
return (e,)
def _report_exception(self):
"""Internal function."""
- import sys
exc, val, tb = sys.exc_info()
root = self._root()
root.report_callback_exception(exc, val, tb)
@@ -1236,6 +1245,13 @@ class Misc:
self.tk.call(
'place', 'slaves', self._w))]
# Grid methods that apply to the master
+ def grid_anchor(self, anchor=None): # new in Tk 8.5
+ """The anchor value controls how to place the grid within the
+ master when no row/column has any weight.
+
+ The default anchor is nw."""
+ self.tk.call('grid', 'anchor', self._w, anchor)
+ anchor = grid_anchor
def grid_bbox(self, column=None, row=None, col2=None, row2=None):
"""Return a tuple of integer coordinates for the bounding
box of this widget controlled by the geometry manager grid.
@@ -1254,7 +1270,6 @@ class Misc:
if col2 is not None and row2 is not None:
args = args + (col2, row2)
return self._getints(self.tk.call(*args)) or None
-
bbox = grid_bbox
def _grid_configure(self, command, index, cnf, kw):
"""Internal function."""
@@ -1513,6 +1528,14 @@ class Wm:
the focus. Return current focus model if MODEL is None."""
return self.tk.call('wm', 'focusmodel', self._w, model)
focusmodel = wm_focusmodel
+ def wm_forget(self, window): # new in Tk 8.5
+ """The window will be unmapped from the screen and will no longer
+ be managed by wm. Toplevel windows will be treated like frame
+ windows once they are no longer managed by wm, however, the menu
+ option configuration will be remembered and the menus will return
+ once the widget is managed again."""
+ self.tk.call('wm', 'forget', window)
+ forget = wm_forget
def wm_frame(self):
"""Return identifier for decorative frame of this widget if present."""
return self.tk.call('wm', 'frame', self._w)
@@ -1566,6 +1589,31 @@ class Wm:
None is given."""
return self.tk.call('wm', 'iconname', self._w, newName)
iconname = wm_iconname
+ def wm_iconphoto(self, default=False, *args): # new in Tk 8.5
+ """Sets the titlebar icon for this window based on the named photo
+ images passed through args. If default is True, this is applied to
+ all future created toplevels as well.
+
+ The data in the images is taken as a snapshot at the time of
+ invocation. If the images are later changed, this is not reflected
+ in the titlebar icons. Multiple images are accepted to allow
+ different image sizes to be provided. The window manager may scale
+ provided icons to an appropriate size.
+
+ On Windows, the images are packed into a Windows icon structure.
+ This will override an icon specified to wm_iconbitmap, and vice
+ versa.
+
+ On X, the images are arranged into the _NET_WM_ICON X property,
+ which most modern window managers support. An icon specified by
+ wm_iconbitmap may exist simultaneously.
+
+ On Macintosh, this currently does nothing."""
+ if default:
+ self.tk.call('wm', 'iconphoto', self._w, "-default", *args)
+ else:
+ self.tk.call('wm', 'iconphoto', self._w, *args)
+ iconphoto = wm_iconphoto
def wm_iconposition(self, x=None, y=None):
"""Set the position of the icon of this widget to X and Y. Return
a tuple of the current values of X and X if None is given."""
@@ -1577,6 +1625,12 @@ class Wm:
value if None is given."""
return self.tk.call('wm', 'iconwindow', self._w, pathName)
iconwindow = wm_iconwindow
+ def wm_manage(self, widget): # new in Tk 8.5
+ """The widget specified will become a stand-alone top-level window.
+ The window will be decorated with the window manager's title bar,
+ etc."""
+ self.tk.call('wm', 'manage', widget)
+ manage = wm_manage
def wm_maxsize(self, width=None, height=None):
"""Set max WIDTH and HEIGHT for this widget. If the window is gridded
the values are given in grid units. Return the current values if None
@@ -1665,7 +1719,7 @@ class Tk(Misc, Wm):
# ensure that self.tk is always _something_.
self.tk = None
if baseName is None:
- import sys, os
+ import os
baseName = os.path.basename(sys.argv[0])
baseName, ext = os.path.splitext(baseName)
if ext not in ('.py', '.pyc', '.pyo'):
@@ -1739,7 +1793,7 @@ class Tk(Misc, Wm):
exec(open(base_py).read(), dir)
def report_callback_exception(self, exc, val, tb):
"""Internal function. It reports exception on sys.stderr."""
- import traceback, sys
+ import traceback
sys.stderr.write("Exception in Tkinter callback\n")
sys.last_type = exc
sys.last_value = val
@@ -2668,6 +2722,10 @@ class Menu(Widget):
def unpost(self):
"""Unmap a menu."""
self.tk.call(self._w, 'unpost')
+ def xposition(self, index): # new in Tk 8.5
+ """Return the x-position of the leftmost pixel of the menu item
+ at INDEX."""
+ return getint(self.tk.call(self._w, 'xposition', index))
def yposition(self, index):
"""Return the y-position of the topmost pixel of the menu item at INDEX."""
return getint(self.tk.call(
@@ -2827,6 +2885,25 @@ class Text(Widget, XView, YView):
relation OP is satisfied. OP is one of <, <=, ==, >=, >, or !=."""
return self.tk.getboolean(self.tk.call(
self._w, 'compare', index1, op, index2))
+ def count(self, index1, index2, *args): # new in Tk 8.5
+ """Counts the number of relevant things between the two indices.
+ If index1 is after index2, the result will be a negative number
+ (and this holds for each of the possible options).
+
+ The actual items which are counted depend on the options given by
+ args. The result is a list of integers, one for the result of each
+ counting option given. Valid counting options are "chars",
+ "displaychars", "displayindices", "displaylines", "indices",
+ "lines", "xpixels" and "ypixels". There is an additional possible
+ option "update"; if it is given, all subsequent options ensure
+ that any possibly out-of-date information is recalculated."""
+ args = ['-%s' % arg for arg in args if not arg.startswith('-')]
+ args += [index1, index2]
+ res = self.tk.call(self._w, 'count', *args) or None
+ if res is not None and len(args) <= 3:
+ return (res, )
+ else:
+ return res
def debug(self, boolean=None):
"""Turn on the internal consistency checks of the B-Tree inside the text
widget according to BOOLEAN."""
@@ -2989,6 +3066,24 @@ class Text(Widget, XView, YView):
def mark_previous(self, index):
"""Return the name of the previous mark before INDEX."""
return self.tk.call(self._w, 'mark', 'previous', index) or None
+ def peer_create(self, newPathName, cnf={}, **kw): # new in Tk 8.5
+ """Creates a peer text widget with the given newPathName, and any
+ optional standard configuration options. By default the peer will
+ have the same start and end line as the parent widget, but
+ these can be overridden with the standard configuration options."""
+ self.tk.call(self._w, 'peer', 'create', newPathName,
+ *self._options(cnf, kw))
+ def peer_names(self): # new in Tk 8.5
+ """Returns a list of peers of this widget (this does not include
+ the widget itself)."""
+ return self.tk.splitlist(self.tk.call(self._w, 'peer', 'names'))
+ def replace(self, index1, index2, chars, *args): # new in Tk 8.5
+ """Replaces the range of characters between index1 and index2 with
+ the given characters and tags specified by args.
+
+ See the method insert for some more information about args, and the
+ method delete for information about the indices."""
+ self.tk.call(self._w, 'replace', index1, index2, chars, *args)
def scan_mark(self, x, y):
"""Remember the current X, Y coordinates."""
self.tk.call(self._w, 'scan', 'mark', x, y)
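The Text widget gains thin wrappers over Tk 8.5 commands (count, replace, peer_create, peer_names), alongside grid_anchor, wm_forget, wm_manage, wm_iconphoto and Menu.xposition elsewhere in the file. A small sketch of two of the Text wrappers (requires a running display):

    import tkinter

    root = tkinter.Tk()
    text = tkinter.Text(root)
    text.insert("1.0", "hello world")
    text.replace("1.0", "1.5", "HELLO")        # new Text.replace wrapper
    print(text.count("1.0", "end", "chars"))   # new Text.count wrapper, e.g. (12,)
    root.destroy()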
diff --git a/Lib/tkinter/filedialog.py b/Lib/tkinter/filedialog.py
index 98d2d5c..3ffb252 100644
--- a/Lib/tkinter/filedialog.py
+++ b/Lib/tkinter/filedialog.py
@@ -306,7 +306,6 @@ class _Dialog(commondialog.Dialog):
def _fixresult(self, widget, result):
if result:
# keep directory and filename until next time
- import os
# convert Tcl path objects to strings
try:
result = result.string
@@ -333,7 +332,6 @@ class Open(_Dialog):
# multiple results:
result = tuple([getattr(r, "string", r) for r in result])
if result:
- import os
path, file = os.path.split(result[0])
self.options["initialdir"] = path
# don't set initialfile or filename, as we have multiple of these
diff --git a/Lib/tkinter/test/test_tkinter/test_variables.py b/Lib/tkinter/test/test_tkinter/test_variables.py
new file mode 100644
index 0000000..8db7aca
--- /dev/null
+++ b/Lib/tkinter/test/test_tkinter/test_variables.py
@@ -0,0 +1,165 @@
+import unittest
+
+from tkinter import Variable, StringVar, IntVar, DoubleVar, BooleanVar, Tk
+
+
+class Var(Variable):
+
+ _default = "default"
+ side_effect = False
+
+ def set(self, value):
+ self.side_effect = True
+ super().set(value)
+
+
+class TestBase(unittest.TestCase):
+
+ def setUp(self):
+ self.root = Tk()
+
+ def tearDown(self):
+ self.root.destroy()
+
+
+class TestVariable(TestBase):
+
+ def test_default(self):
+ v = Variable(self.root)
+ self.assertEqual("", v.get())
+ self.assertRegex(str(v), r"^PY_VAR(\d+)$")
+
+ def test_name_and_value(self):
+ v = Variable(self.root, "sample string", "varname")
+ self.assertEqual("sample string", v.get())
+ self.assertEqual("varname", str(v))
+
+ def test___del__(self):
+ self.assertFalse(self.root.call("info", "exists", "varname"))
+ v = Variable(self.root, "sample string", "varname")
+ self.assertTrue(self.root.call("info", "exists", "varname"))
+ del v
+ self.assertFalse(self.root.call("info", "exists", "varname"))
+
+ def test_dont_unset_not_existing(self):
+ self.assertFalse(self.root.call("info", "exists", "varname"))
+ v1 = Variable(self.root, name="name")
+ v2 = Variable(self.root, name="name")
+ del v1
+ self.assertFalse(self.root.call("info", "exists", "name"))
+ # shouldn't raise exception
+ del v2
+ self.assertFalse(self.root.call("info", "exists", "name"))
+
+ def test___eq__(self):
+ # values don't matter, only class and name are checked
+ v1 = Variable(self.root, name="abc")
+ v2 = Variable(self.root, name="abc")
+ self.assertEqual(v1, v2)
+
+ v3 = Variable(self.root, name="abc")
+ v4 = StringVar(self.root, name="abc")
+ self.assertNotEqual(v3, v4)
+
+ def test_invalid_name(self):
+ with self.assertRaises(TypeError):
+ Variable(self.root, name=123)
+
+ def test_initialize(self):
+ v = Var()
+ self.assertFalse(v.side_effect)
+ v.set("value")
+ self.assertTrue(v.side_effect)
+
+
+class TestStringVar(TestBase):
+
+ def test_default(self):
+ v = StringVar(self.root)
+ self.assertEqual("", v.get())
+
+ def test_get(self):
+ v = StringVar(self.root, "abc", "name")
+ self.assertEqual("abc", v.get())
+ self.root.globalsetvar("name", True)
+ self.assertEqual("1", v.get())
+
+
+class TestIntVar(TestBase):
+
+ def test_default(self):
+ v = IntVar(self.root)
+ self.assertEqual(0, v.get())
+
+ def test_get(self):
+ v = IntVar(self.root, 123, "name")
+ self.assertEqual(123, v.get())
+ self.root.globalsetvar("name", "345")
+ self.assertEqual(345, v.get())
+
+ def test_invalid_value(self):
+ v = IntVar(self.root, name="name")
+ self.root.globalsetvar("name", "value")
+ with self.assertRaises(ValueError):
+ v.get()
+ self.root.globalsetvar("name", "345.0")
+ with self.assertRaises(ValueError):
+ v.get()
+
+
+class TestDoubleVar(TestBase):
+
+ def test_default(self):
+ v = DoubleVar(self.root)
+ self.assertEqual(0.0, v.get())
+
+ def test_get(self):
+ v = DoubleVar(self.root, 1.23, "name")
+ self.assertAlmostEqual(1.23, v.get())
+ self.root.globalsetvar("name", "3.45")
+ self.assertAlmostEqual(3.45, v.get())
+
+ def test_get_from_int(self):
+ v = DoubleVar(self.root, 1.23, "name")
+ self.assertAlmostEqual(1.23, v.get())
+ self.root.globalsetvar("name", "3.45")
+ self.assertAlmostEqual(3.45, v.get())
+ self.root.globalsetvar("name", "456")
+ self.assertAlmostEqual(456, v.get())
+
+ def test_invalid_value(self):
+ v = DoubleVar(self.root, name="name")
+ self.root.globalsetvar("name", "value")
+ with self.assertRaises(ValueError):
+ v.get()
+
+
+class TestBooleanVar(TestBase):
+
+ def test_default(self):
+ v = BooleanVar(self.root)
+ self.assertEqual(False, v.get())
+
+ def test_get(self):
+ v = BooleanVar(self.root, True, "name")
+ self.assertAlmostEqual(True, v.get())
+ self.root.globalsetvar("name", "0")
+ self.assertAlmostEqual(False, v.get())
+
+ def test_invalid_value_domain(self):
+ v = BooleanVar(self.root, name="name")
+ self.root.globalsetvar("name", "value")
+ with self.assertRaises(ValueError):
+ v.get()
+ self.root.globalsetvar("name", "1.0")
+ with self.assertRaises(ValueError):
+ v.get()
+
+
+tests_gui = (TestVariable, TestStringVar, TestIntVar,
+ TestDoubleVar, TestBooleanVar)
+
+
+if __name__ == "__main__":
+ from test.support import run_unittest
+ run_unittest(*tests_gui)
diff --git a/Lib/tokenize.py b/Lib/tokenize.py
index f575e9b..741417a 100644
--- a/Lib/tokenize.py
+++ b/Lib/tokenize.py
@@ -45,6 +45,51 @@ tok_name[NL] = 'NL'
ENCODING = N_TOKENS + 2
tok_name[ENCODING] = 'ENCODING'
N_TOKENS += 3
+EXACT_TOKEN_TYPES = {
+ '(': LPAR,
+ ')': RPAR,
+ '[': LSQB,
+ ']': RSQB,
+ ':': COLON,
+ ',': COMMA,
+ ';': SEMI,
+ '+': PLUS,
+ '-': MINUS,
+ '*': STAR,
+ '/': SLASH,
+ '|': VBAR,
+ '&': AMPER,
+ '<': LESS,
+ '>': GREATER,
+ '=': EQUAL,
+ '.': DOT,
+ '%': PERCENT,
+ '{': LBRACE,
+ '}': RBRACE,
+ '==': EQEQUAL,
+ '!=': NOTEQUAL,
+ '<=': LESSEQUAL,
+ '>=': GREATEREQUAL,
+ '~': TILDE,
+ '^': CIRCUMFLEX,
+ '<<': LEFTSHIFT,
+ '>>': RIGHTSHIFT,
+ '**': DOUBLESTAR,
+ '+=': PLUSEQUAL,
+ '-=': MINEQUAL,
+ '*=': STAREQUAL,
+ '/=': SLASHEQUAL,
+ '%=': PERCENTEQUAL,
+ '&=': AMPEREQUAL,
+ '|=': VBAREQUAL,
+ '^=': CIRCUMFLEXEQUAL,
+ '<<=': LEFTSHIFTEQUAL,
+ '>>=': RIGHTSHIFTEQUAL,
+ '**=': DOUBLESTAREQUAL,
+ '//': DOUBLESLASH,
+ '//=': DOUBLESLASHEQUAL,
+ '@': AT
+}
class TokenInfo(collections.namedtuple('TokenInfo', 'type string start end line')):
def __repr__(self):
@@ -52,6 +97,13 @@ class TokenInfo(collections.namedtuple('TokenInfo', 'type string start end line'
return ('TokenInfo(type=%s, string=%r, start=%r, end=%r, line=%r)' %
self._replace(type=annotated_type))
+ @property
+ def exact_type(self):
+ if self.type == OP and self.string in EXACT_TOKEN_TYPES:
+ return EXACT_TOKEN_TYPES[self.string]
+ else:
+ return self.type
+
def group(*choices): return '(' + '|'.join(choices) + ')'
def any(*choices): return group(*choices) + '*'
def maybe(*choices): return group(*choices) + '?'
@@ -75,6 +127,8 @@ Floatnumber = group(Pointfloat, Expfloat)
Imagnumber = group(r'[0-9]+[jJ]', Floatnumber + r'[jJ]')
Number = group(Imagnumber, Floatnumber, Intnumber)
+StringPrefix = r'(?:[uU][rR]?|[bB][rR]|[rR][bB]|[rR]|[uU])?'
+
# Tail end of ' string.
Single = r"[^'\\]*(?:\\.[^'\\]*)*'"
# Tail end of " string.
@@ -83,10 +137,10 @@ Double = r'[^"\\]*(?:\\.[^"\\]*)*"'
Single3 = r"[^'\\]*(?:(?:\\.|'(?!''))[^'\\]*)*'''"
# Tail end of """ string.
Double3 = r'[^"\\]*(?:(?:\\.|"(?!""))[^"\\]*)*"""'
-Triple = group("[bB]?[rR]?'''", '[bB]?[rR]?"""')
+Triple = group(StringPrefix + "'''", StringPrefix + '"""')
# Single-line ' or " string.
-String = group(r"[bB]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*'",
- r'[bB]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*"')
+String = group(StringPrefix + r"'[^\n'\\]*(?:\\.[^\n'\\]*)*'",
+ StringPrefix + r'"[^\n"\\]*(?:\\.[^\n"\\]*)*"')
# Because of leftmost-then-longest match semantics, be sure to put the
# longest operators first (e.g., if = came before ==, == would get
@@ -104,9 +158,9 @@ PlainToken = group(Number, Funny, String, Name)
Token = Ignore + PlainToken
# First (or only) line of ' or " string.
-ContStr = group(r"[bB]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*" +
+ContStr = group(StringPrefix + r"'[^\n'\\]*(?:\\.[^\n'\\]*)*" +
group("'", r'\\\r?\n'),
- r'[bB]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*' +
+ StringPrefix + r'"[^\n"\\]*(?:\\.[^\n"\\]*)*' +
group('"', r'\\\r?\n'))
PseudoExtras = group(r'\\\r?\n', Comment, Triple)
PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name)
@@ -114,37 +168,55 @@ PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name)
def _compile(expr):
return re.compile(expr, re.UNICODE)
-tokenprog, pseudoprog, single3prog, double3prog = map(
- _compile, (Token, PseudoToken, Single3, Double3))
-endprogs = {"'": _compile(Single), '"': _compile(Double),
- "'''": single3prog, '"""': double3prog,
- "r'''": single3prog, 'r"""': double3prog,
- "b'''": single3prog, 'b"""': double3prog,
- "br'''": single3prog, 'br"""': double3prog,
- "R'''": single3prog, 'R"""': double3prog,
- "B'''": single3prog, 'B"""': double3prog,
- "bR'''": single3prog, 'bR"""': double3prog,
- "Br'''": single3prog, 'Br"""': double3prog,
- "BR'''": single3prog, 'BR"""': double3prog,
- 'r': None, 'R': None, 'b': None, 'B': None}
+endpats = {"'": Single, '"': Double,
+ "'''": Single3, '"""': Double3,
+ "r'''": Single3, 'r"""': Double3,
+ "b'''": Single3, 'b"""': Double3,
+ "R'''": Single3, 'R"""': Double3,
+ "B'''": Single3, 'B"""': Double3,
+ "br'''": Single3, 'br"""': Double3,
+ "bR'''": Single3, 'bR"""': Double3,
+ "Br'''": Single3, 'Br"""': Double3,
+ "BR'''": Single3, 'BR"""': Double3,
+ "rb'''": Single3, 'rb"""': Double3,
+ "Rb'''": Single3, 'Rb"""': Double3,
+ "rB'''": Single3, 'rB"""': Double3,
+ "RB'''": Single3, 'RB"""': Double3,
+ "u'''": Single3, 'u"""': Double3,
+ "ur'''": Single3, 'ur"""': Double3,
+ "U'''": Single3, 'U"""': Double3,
+ "uR'''": Single3, 'uR"""': Double3,
+ "Ur'''": Single3, 'Ur"""': Double3,
+ "UR'''": Single3, 'UR"""': Double3,
+ 'r': None, 'R': None, 'b': None, 'B': None,
+ 'u': None, 'U': None}
triple_quoted = {}
for t in ("'''", '"""',
"r'''", 'r"""', "R'''", 'R"""',
"b'''", 'b"""', "B'''", 'B"""',
"br'''", 'br"""', "Br'''", 'Br"""',
- "bR'''", 'bR"""', "BR'''", 'BR"""'):
+ "bR'''", 'bR"""', "BR'''", 'BR"""',
+ "rb'''", 'rb"""', "rB'''", 'rB"""',
+ "Rb'''", 'Rb"""', "RB'''", 'RB"""',
+ "u'''", 'u"""', "U'''", 'U"""',
+ "ur'''", 'ur"""', "Ur'''", 'Ur"""',
+ "uR'''", 'uR"""', "UR'''", 'UR"""'):
triple_quoted[t] = t
single_quoted = {}
for t in ("'", '"',
"r'", 'r"', "R'", 'R"',
"b'", 'b"', "B'", 'B"',
"br'", 'br"', "Br'", 'Br"',
- "bR'", 'bR"', "BR'", 'BR"' ):
+ "bR'", 'bR"', "BR'", 'BR"' ,
+ "rb'", 'rb"', "rB'", 'rB"',
+ "Rb'", 'Rb"', "RB'", 'RB"' ,
+ "u'", 'u"', "U'", 'U"',
+ "ur'", 'ur"', "Ur'", 'Ur"',
+ "uR'", 'uR"', "UR'", 'UR"' ):
single_quoted[t] = t
-del _compile
-
tabsize = 8
class TokenError(Exception): pass
@@ -466,7 +538,7 @@ def _tokenize(readline, encoding):
continued = 0
while pos < max:
- pseudomatch = pseudoprog.match(line, pos)
+ pseudomatch = _compile(PseudoToken).match(line, pos)
if pseudomatch: # scan for tokens
start, end = pseudomatch.span(1)
spos, epos, pos = (lnum, start), (lnum, end), end
@@ -482,7 +554,7 @@ def _tokenize(readline, encoding):
assert not token.endswith("\n")
yield TokenInfo(COMMENT, token, spos, epos, line)
elif token in triple_quoted:
- endprog = endprogs[token]
+ endprog = _compile(endpats[token])
endmatch = endprog.match(line, pos)
if endmatch: # all on one line
pos = endmatch.end(0)
@@ -498,8 +570,9 @@ def _tokenize(readline, encoding):
token[:3] in single_quoted:
if token[-1] == '\n': # continued string
strstart = (lnum, start)
- endprog = (endprogs[initial] or endprogs[token[1]] or
- endprogs[token[2]])
+ endprog = _compile(endpats[initial] or
+ endpats[token[1]] or
+ endpats[token[2]])
contstr, needcont = line[start:], 1
contline = line
break
@@ -530,27 +603,65 @@ def _tokenize(readline, encoding):
def generate_tokens(readline):
return _tokenize(readline, None)
+def main():
+ import argparse
+
+ # Helper error handling routines
+ def perror(message):
+ print(message, file=sys.stderr)
+
+ def error(message, filename=None, location=None):
+ if location:
+ args = (filename,) + location + (message,)
+ perror("%s:%d:%d: error: %s" % args)
+ elif filename:
+ perror("%s: error: %s" % (filename, message))
+ else:
+ perror("error: %s" % message)
+ sys.exit(1)
+
+ # Parse the arguments and options
+ parser = argparse.ArgumentParser(prog='python -m tokenize')
+ parser.add_argument(dest='filename', nargs='?',
+ metavar='filename.py',
+ help='the file to tokenize; defaults to stdin')
+ parser.add_argument('-e', '--exact', dest='exact', action='store_true',
+ help='display token names using the exact type')
+ args = parser.parse_args()
+
+ try:
+ # Tokenize the input
+ if args.filename:
+ filename = args.filename
+ with builtins.open(filename, 'rb') as f:
+ tokens = list(tokenize(f.readline))
+ else:
+ filename = "<stdin>"
+ tokens = _tokenize(sys.stdin.readline, None)
+
+ # Output the tokenization
+ for token in tokens:
+ token_type = token.type
+ if args.exact:
+ token_type = token.exact_type
+ token_range = "%d,%d-%d,%d:" % (token.start + token.end)
+ print("%-20s%-15s%-15r" %
+ (token_range, tok_name[token_type], token.string))
+ except IndentationError as err:
+ line, column = err.args[1][1:3]
+ error(err.args[0], filename, (line, column))
+ except TokenError as err:
+ line, column = err.args[1]
+ error(err.args[0], filename, (line, column))
+ except SyntaxError as err:
+ error(err, filename)
+ except IOError as err:
+ error(err)
+ except KeyboardInterrupt:
+ print("interrupted\n")
+ except Exception as err:
+ perror("unexpected error: %s" % err)
+ raise
+
if __name__ == "__main__":
- # Quick sanity check
- s = b'''def parseline(self, line):
- """Parse the line into a command name and a string containing
- the arguments. Returns a tuple containing (command, args, line).
- 'command' and 'args' may be None if the line couldn't be parsed.
- """
- line = line.strip()
- if not line:
- return None, None, line
- elif line[0] == '?':
- line = 'help ' + line[1:]
- elif line[0] == '!':
- if hasattr(self, 'do_shell'):
- line = 'shell ' + line[1:]
- else:
- return None, None, line
- i, n = 0, len(line)
- while i < n and line[i] in self.identchars: i = i+1
- cmd, arg = line[:i], line[i:].strip()
- return cmd, arg, line
- '''
- for tok in tokenize(iter(s.splitlines()).__next__):
- print(tok)
+ main()
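The new exact_type property resolves operator tokens, which all share the generic OP type, to their specific constants via EXACT_TOKEN_TYPES, and the module gains a command-line entry point (python -m tokenize [-e] [filename.py]). For example:

    import io
    import tokenize
    from token import OP

    for tok in tokenize.tokenize(io.BytesIO(b"1+2\n").readline):
        if tok.type == OP:
            # type says only OP; exact_type pins it down to PLUS
            print(tok.string, tokenize.tok_name[tok.exact_type])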
diff --git a/Lib/traceback.py b/Lib/traceback.py
index 8d4e96e..35858af 100644
--- a/Lib/traceback.py
+++ b/Lib/traceback.py
@@ -120,14 +120,14 @@ def _iter_chain(exc, custom_tb=None, seen=None):
seen.add(exc)
its = []
cause = exc.__cause__
- if cause is not None and cause not in seen:
- its.append(_iter_chain(cause, None, seen))
- its.append([(_cause_message, None)])
- else:
+ if cause is Ellipsis:
context = exc.__context__
if context is not None and context not in seen:
its.append(_iter_chain(context, None, seen))
its.append([(_context_message, None)])
+ elif cause is not None and cause not in seen:
+ its.append(_iter_chain(cause, False, seen))
+ its.append([(_cause_message, None)])
its.append([(exc, custom_tb or exc.__traceback__)])
# itertools.chain is in an extension module and may be unavailable
for it in its:
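The reordered branches follow the PEP 409 convention of this era: an unset __cause__ is the Ellipsis sentinel, in which case the implicit __context__ chain is printed, while an explicitly set cause (raise ... from ...) takes precedence. Both attributes in action:

    try:
        try:
            1 / 0
        except ZeroDivisionError as inner:
            raise KeyError("lookup failed") from inner
    except KeyError as exc:
        print(type(exc.__cause__).__name__)     # ZeroDivisionError (explicit)
        print(type(exc.__context__).__name__)   # ZeroDivisionError (implicit)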
diff --git a/Lib/turtle.py b/Lib/turtle.py
index ac0c32c..a447433 100644
--- a/Lib/turtle.py
+++ b/Lib/turtle.py
@@ -108,7 +108,6 @@ import tkinter as TK
import types
import math
import time
-import os
import inspect
from os.path import isfile, split, join
diff --git a/Lib/unittest/case.py b/Lib/unittest/case.py
index 3133907..5bed868 100644
--- a/Lib/unittest/case.py
+++ b/Lib/unittest/case.py
@@ -9,8 +9,7 @@ import warnings
import collections
from . import result
-from .util import (strclass, safe_repr, sorted_list_difference,
- unorderable_list_difference, _count_diff_all_purpose,
+from .util import (strclass, safe_repr, _count_diff_all_purpose,
_count_diff_hashable)
__unittest = True
@@ -104,9 +103,9 @@ def expectedFailure(func):
class _AssertRaisesBaseContext(object):
def __init__(self, expected, test_case, callable_obj=None,
- expected_regex=None):
+ expected_regex=None):
self.expected = expected
- self.failureException = test_case.failureException
+ self.test_case = test_case
if callable_obj is not None:
try:
self.obj_name = callable_obj.__name__
@@ -117,6 +116,24 @@ class _AssertRaisesBaseContext(object):
if isinstance(expected_regex, (bytes, str)):
expected_regex = re.compile(expected_regex)
self.expected_regex = expected_regex
+ self.msg = None
+
+ def _raiseFailure(self, standardMsg):
+ msg = self.test_case._formatMessage(self.msg, standardMsg)
+ raise self.test_case.failureException(msg)
+
+ def handle(self, name, callable_obj, args, kwargs):
+ """
+ If callable_obj is None, assertRaises/Warns is being used as a
+ context manager, so check for a 'msg' kwarg and return self.
+ If callable_obj is not None, call it passing args and kwargs.
+ """
+ if callable_obj is None:
+ self.msg = kwargs.pop('msg', None)
+ return self
+ with self:
+ callable_obj(*args, **kwargs)
+
class _AssertRaisesContext(_AssertRaisesBaseContext):
@@ -132,11 +149,10 @@ class _AssertRaisesContext(_AssertRaisesBaseContext):
except AttributeError:
exc_name = str(self.expected)
if self.obj_name:
- raise self.failureException("{0} not raised by {1}"
- .format(exc_name, self.obj_name))
+ self._raiseFailure("{} not raised by {}".format(exc_name,
+ self.obj_name))
else:
- raise self.failureException("{0} not raised"
- .format(exc_name))
+ self._raiseFailure("{} not raised".format(exc_name))
if not issubclass(exc_type, self.expected):
# let unexpected exceptions pass through
return False
@@ -147,8 +163,8 @@ class _AssertRaisesContext(_AssertRaisesBaseContext):
expected_regex = self.expected_regex
if not expected_regex.search(str(exc_value)):
- raise self.failureException('"%s" does not match "%s"' %
- (expected_regex.pattern, str(exc_value)))
+ self._raiseFailure('"{}" does not match "{}"'.format(
+ expected_regex.pattern, str(exc_value)))
return True
@@ -192,14 +208,13 @@ class _AssertWarnsContext(_AssertRaisesBaseContext):
return
# Now we simply try to choose a helpful failure message
if first_matching is not None:
- raise self.failureException('"%s" does not match "%s"' %
- (self.expected_regex.pattern, str(first_matching)))
+ self._raiseFailure('"{}" does not match "{}"'.format(
+ self.expected_regex.pattern, str(first_matching)))
if self.obj_name:
- raise self.failureException("{0} not triggered by {1}"
- .format(exc_name, self.obj_name))
+ self._raiseFailure("{} not triggered by {}".format(exc_name,
+ self.obj_name))
else:
- raise self.failureException("{0} not triggered"
- .format(exc_name))
+ self._raiseFailure("{} not triggered".format(exc_name))
class TestCase(object):
@@ -452,7 +467,7 @@ class TestCase(object):
warnings.warn("TestResult has no addExpectedFailure method, reporting as passes",
RuntimeWarning)
result.addSuccess(self)
-
+ return result
finally:
result.stopTest(self)
if orig_result is None:
@@ -526,7 +541,6 @@ class TestCase(object):
except UnicodeDecodeError:
return '%s : %s' % (safe_repr(standardMsg), safe_repr(msg))
-
def assertRaises(self, excClass, callableObj=None, *args, **kwargs):
"""Fail unless an exception of class excClass is thrown
by callableObj when invoked with arguments args and keyword
@@ -541,6 +555,9 @@ class TestCase(object):
with self.assertRaises(SomeException):
do_something()
+ An optional keyword argument 'msg' can be provided when assertRaises
+ is used as a context object.
+
The context manager keeps a reference to the exception as
the 'exception' attribute. This allows you to inspect the
exception after the assertion::
@@ -551,25 +568,25 @@ class TestCase(object):
self.assertEqual(the_exception.error_code, 3)
"""
context = _AssertRaisesContext(excClass, self, callableObj)
- if callableObj is None:
- return context
- with context:
- callableObj(*args, **kwargs)
+ return context.handle('assertRaises', callableObj, args, kwargs)
def assertWarns(self, expected_warning, callable_obj=None, *args, **kwargs):
"""Fail unless a warning of class warnClass is triggered
- by callableObj when invoked with arguments args and keyword
+ by callable_obj when invoked with arguments args and keyword
arguments kwargs. If a different type of warning is
triggered, it will not be handled: depending on the other
warning filtering rules in effect, it might be silenced, printed
out, or raised as an exception.
- If called with callableObj omitted or None, will return a
+ If called with callable_obj omitted or None, will return a
context object used like this::
with self.assertWarns(SomeWarning):
do_something()
+ An optional keyword argument 'msg' can be provided when assertWarns
+ is used as a context object.
+
The context manager keeps a reference to the first matching
warning as the 'warning' attribute; similarly, the 'filename'
and 'lineno' attributes give you information about the line
@@ -582,10 +599,7 @@ class TestCase(object):
self.assertEqual(the_warning.some_attribute, 147)
"""
context = _AssertWarnsContext(expected_warning, self, callable_obj)
- if callable_obj is None:
- return context
- with context:
- callable_obj(*args, **kwargs)
+ return context.handle('assertWarns', callable_obj, args, kwargs)
def _getAssertEqualityFunc(self, first, second):
"""Get a detailed comparison function for the types of the two args.
@@ -951,48 +965,6 @@ class TestCase(object):
self.fail(self._formatMessage(msg, standardMsg))
- def assertSameElements(self, expected_seq, actual_seq, msg=None):
- """An unordered sequence specific comparison.
-
- Raises with an error message listing which elements of expected_seq
- are missing from actual_seq and vice versa if any.
-
- Duplicate elements are ignored when comparing *expected_seq* and
- *actual_seq*. It is the equivalent of ``assertEqual(set(expected),
- set(actual))`` but it works with sequences of unhashable objects as
- well.
- """
- warnings.warn('assertSameElements is deprecated',
- DeprecationWarning)
- try:
- expected = set(expected_seq)
- actual = set(actual_seq)
- missing = sorted(expected.difference(actual))
- unexpected = sorted(actual.difference(expected))
- except TypeError:
- # Fall back to slower list-compare if any of the objects are
- # not hashable.
- expected = list(expected_seq)
- actual = list(actual_seq)
- try:
- expected.sort()
- actual.sort()
- except TypeError:
- missing, unexpected = unorderable_list_difference(expected,
- actual)
- else:
- missing, unexpected = sorted_list_difference(expected, actual)
- errors = []
- if missing:
- errors.append('Expected, but missing:\n %s' %
- safe_repr(missing))
- if unexpected:
- errors.append('Unexpected, but present:\n %s' %
- safe_repr(unexpected))
- if errors:
- standardMsg = '\n'.join(errors)
- self.fail(self._formatMessage(msg, standardMsg))
-
def assertCountEqual(self, first, second, msg=None):
"""An unordered sequence comparison asserting that the same elements,
@@ -1037,8 +1009,8 @@ class TestCase(object):
if (len(first) > self._diffThreshold or
len(second) > self._diffThreshold):
self._baseAssertEqual(first, second, msg)
- firstlines = first.splitlines(True)
- secondlines = second.splitlines(True)
+ firstlines = first.splitlines(keepends=True)
+ secondlines = second.splitlines(keepends=True)
if len(firstlines) == 1 and first.strip('\r\n') == first:
firstlines = [first + '\n']
secondlines = [second + '\n']
@@ -1106,15 +1078,15 @@ class TestCase(object):
expected_regex: Regex (re pattern object or string) expected
to be found in error message.
callable_obj: Function to be called.
+ msg: Optional message used in case of failure. Can only be used
+ when assertRaisesRegex is used as a context manager.
args: Extra args.
kwargs: Extra kwargs.
"""
context = _AssertRaisesContext(expected_exception, self, callable_obj,
expected_regex)
- if callable_obj is None:
- return context
- with context:
- callable_obj(*args, **kwargs)
+
+ return context.handle('assertRaisesRegex', callable_obj, args, kwargs)
def assertWarnsRegex(self, expected_warning, expected_regex,
callable_obj=None, *args, **kwargs):
@@ -1128,15 +1100,14 @@ class TestCase(object):
expected_regex: Regex (re pattern object or string) expected
to be found in error message.
callable_obj: Function to be called.
+ msg: Optional message used in case of failure. Can only be used
+ when assertWarnsRegex is used as a context manager.
args: Extra args.
kwargs: Extra kwargs.
"""
context = _AssertWarnsContext(expected_warning, self, callable_obj,
expected_regex)
- if callable_obj is None:
- return context
- with context:
- callable_obj(*args, **kwargs)
+ return context.handle('assertWarnsRegex', callable_obj, args, kwargs)
def assertRegex(self, text, expected_regex, msg=None):
"""Fail the test unless the text matches the regular expression."""
diff --git a/Lib/unittest/main.py b/Lib/unittest/main.py
index 55d4e4b..a25a2f8 100644
--- a/Lib/unittest/main.py
+++ b/Lib/unittest/main.py
@@ -1,8 +1,8 @@
"""Unittest main program"""
import sys
+import optparse
import os
-import types
from . import loader, runner
from .signals import installHandler
@@ -77,6 +77,7 @@ def _convert_name(name):
def _convert_names(names):
return [_convert_name(name) for name in names]
+
class TestProgram(object):
"""A command-line program that runs a set of tests; this is primarily
for making test modules conveniently executable.
@@ -143,33 +144,9 @@ class TestProgram(object):
self._do_discovery(argv[2:])
return
- import getopt
- long_opts = ['help', 'verbose', 'quiet', 'failfast', 'catch', 'buffer']
- try:
- options, args = getopt.getopt(argv[1:], 'hHvqfcb', long_opts)
- except getopt.error as msg:
- self.usageExit(msg)
- return
-
- for opt, value in options:
- if opt in ('-h','-H','--help'):
- self.usageExit()
- if opt in ('-q','--quiet'):
- self.verbosity = 0
- if opt in ('-v','--verbose'):
- self.verbosity = 2
- if opt in ('-f','--failfast'):
- if self.failfast is None:
- self.failfast = True
- # Should this raise an exception if -f is not valid?
- if opt in ('-c','--catch'):
- if self.catchbreak is None:
- self.catchbreak = True
- # Should this raise an exception if -c is not valid?
- if opt in ('-b','--buffer'):
- if self.buffer is None:
- self.buffer = True
- # Should this raise an exception if -b is not valid?
+ parser = self._getOptParser()
+ options, args = parser.parse_args(argv[1:])
+ self._setAttributesFromOptions(options)
if len(args) == 0 and self.module is None:
# this allows "python -m unittest -v" to still work for
@@ -197,14 +174,14 @@ class TestProgram(object):
self.test = self.testLoader.loadTestsFromNames(self.testNames,
self.module)
- def _do_discovery(self, argv, Loader=loader.TestLoader):
- # handle command line args for test discovery
- self.progName = '%s discover' % self.progName
- import optparse
+ def _getOptParser(self):
parser = optparse.OptionParser()
parser.prog = self.progName
parser.add_option('-v', '--verbose', dest='verbose', default=False,
help='Verbose output', action='store_true')
+ parser.add_option('-q', '--quiet', dest='quiet', default=False,
+ help='Quiet output', action='store_true')
+
if self.failfast != False:
parser.add_option('-f', '--failfast', dest='failfast', default=False,
help='Stop on first fail or error',
@@ -217,6 +194,24 @@ class TestProgram(object):
parser.add_option('-b', '--buffer', dest='buffer', default=False,
help='Buffer stdout and stderr during tests',
action='store_true')
+ return parser
+
+ def _setAttributesFromOptions(self, options):
+ # only set options from the parsing here
+ # if they weren't set explicitly in the constructor
+ if self.failfast is None:
+ self.failfast = options.failfast
+ if self.catchbreak is None:
+ self.catchbreak = options.catchbreak
+ if self.buffer is None:
+ self.buffer = options.buffer
+
+ if options.verbose:
+ self.verbosity = 2
+ elif options.quiet:
+ self.verbosity = 0
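+ # Note: -v/-q only adjust verbosity here; failfast, catchbreak and
+ # buffer keep any value that was set explicitly in the constructor.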
+
+ def _addDiscoveryOptions(self, parser):
parser.add_option('-s', '--start-directory', dest='start', default='.',
help="Directory to start discovery ('.' default)")
parser.add_option('-p', '--pattern', dest='pattern', default='test*.py',
@@ -224,6 +219,12 @@ class TestProgram(object):
parser.add_option('-t', '--top-level-directory', dest='top', default=None,
help='Top level directory of project (defaults to start directory)')
+ def _do_discovery(self, argv, Loader=loader.TestLoader):
+ # handle command line args for test discovery
+ self.progName = '%s discover' % self.progName
+ parser = self._getOptParser()
+ self._addDiscoveryOptions(parser)
+
options, args = parser.parse_args(argv)
if len(args) > 3:
self.usageExit()
@@ -231,17 +232,7 @@ class TestProgram(object):
for name, value in zip(('start', 'pattern', 'top'), args):
setattr(options, name, value)
- # only set options from the parsing here
- # if they weren't set explicitly in the constructor
- if self.failfast is None:
- self.failfast = options.failfast
- if self.catchbreak is None:
- self.catchbreak = options.catchbreak
- if self.buffer is None:
- self.buffer = options.buffer
-
- if options.verbose:
- self.verbosity = 2
+ self._setAttributesFromOptions(options)
start_dir = options.start
pattern = options.pattern
diff --git a/Lib/unittest/mock.py b/Lib/unittest/mock.py
new file mode 100644
index 0000000..e6b103d
--- /dev/null
+++ b/Lib/unittest/mock.py
@@ -0,0 +1,2137 @@
+# mock.py
+# Test tools for mocking and patching.
+# Maintained by Michael Foord
+# Backport for other versions of Python available from
+# http://pypi.python.org/pypi/mock
+
+__all__ = (
+ 'Mock',
+ 'MagicMock',
+ 'patch',
+ 'sentinel',
+ 'DEFAULT',
+ 'ANY',
+ 'call',
+ 'create_autospec',
+ 'FILTER_DIR',
+ 'NonCallableMock',
+ 'NonCallableMagicMock',
+ 'mock_open',
+ 'PropertyMock',
+)
+
+
+__version__ = '1.0'
+
+
+import inspect
+import pprint
+import sys
+from functools import wraps
+
+
+BaseExceptions = (BaseException,)
+if 'java' in sys.platform:
+ # jython
+ import java
+ BaseExceptions = (BaseException, java.lang.Throwable)
+
+
+FILTER_DIR = True
+
+
+def _is_instance_mock(obj):
+ # can't use isinstance on Mock objects because they override __class__
+ # The base class for all mocks is NonCallableMock
+ return issubclass(type(obj), NonCallableMock)
+
+
+def _is_exception(obj):
+ return (
+ isinstance(obj, BaseExceptions) or
+ isinstance(obj, type) and issubclass(obj, BaseExceptions)
+ )
+
+
+class _slotted(object):
+ __slots__ = ['a']
+
+
+DescriptorTypes = (
+ type(_slotted.a),
+ property,
+)
+
+
+def _getsignature(func, skipfirst, instance=False):
+ if isinstance(func, type) and not instance:
+ try:
+ func = func.__init__
+ except AttributeError:
+ return
+ skipfirst = True
+ elif not isinstance(func, FunctionTypes):
+ # for classes where instance is True we end up here too
+ try:
+ func = func.__call__
+ except AttributeError:
+ return
+
+ try:
+ regargs, varargs, varkwargs, defaults = inspect.getargspec(func)
+ except TypeError:
+ # C function / method, possibly inherited object().__init__
+ return
+
+ # instance methods and classmethods need to lose the self argument
+ if getattr(func, '__self__', None) is not None:
+ regargs = regargs[1:]
+ if skipfirst:
+ # this condition and the above one are never both True - why?
+ regargs = regargs[1:]
+
+ signature = inspect.formatargspec(regargs, varargs, varkwargs, defaults,
+ formatvalue=lambda value: "")
+ return signature[1:-1], func
+
+
+def _check_signature(func, mock, skipfirst, instance=False):
+ if not _callable(func):
+ return
+
+ result = _getsignature(func, skipfirst, instance)
+ if result is None:
+ return
+ signature, func = result
+
+ # can't use "self" as the parameter name here because "self" is a
+ # common argument name, and unfortunately not always in the first position
+ src = "lambda _mock_self, %s: None" % signature
+ checksig = eval(src, {})
+ _copy_func_details(func, checksig)
+ type(mock)._mock_check_sig = checksig
+
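+# Note: _check_signature works by eval-ing a lambda with the same argument
+# list as the original and installing it as _mock_check_sig; __call__ invokes
+# it before recording the call, so mismatched arguments raise a TypeError.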
+
+def _copy_func_details(func, funcopy):
+ funcopy.__name__ = func.__name__
+ funcopy.__doc__ = func.__doc__
+ # we explicitly don't copy func.__dict__ into this copy as it would
+ # expose original attributes that should be mocked
+ funcopy.__module__ = func.__module__
+ funcopy.__defaults__ = func.__defaults__
+ funcopy.__kwdefaults__ = func.__kwdefaults__
+
+
+def _callable(obj):
+ if isinstance(obj, type):
+ return True
+ if getattr(obj, '__call__', None) is not None:
+ return True
+ return False
+
+
+def _is_list(obj):
+ # checks for list or tuples
+ # XXXX badly named!
+ return type(obj) in (list, tuple)
+
+
+def _instance_callable(obj):
+ """Given an object, return True if the object is callable.
+ For classes, return True if instances would be callable."""
+ if not isinstance(obj, type):
+ # already an instance
+ return getattr(obj, '__call__', None) is not None
+
+ # *could* be broken by a class overriding __mro__ or __dict__ via
+ # a metaclass
+ for base in (obj,) + obj.__mro__:
+ if base.__dict__.get('__call__') is not None:
+ return True
+ return False
+
+
+def _set_signature(mock, original, instance=False):
+ # creates a function with signature (*args, **kwargs) that delegates to a
+ # mock. It still does signature checking by calling a lambda with the same
+ # signature as the original.
+ if not _callable(original):
+ return
+
+ skipfirst = isinstance(original, type)
+ result = _getsignature(original, skipfirst, instance)
+ if result is None:
+ # was a C function (e.g. object().__init__ ) that can't be mocked
+ return
+
+ signature, func = result
+
+ src = "lambda %s: None" % signature
+ context = {'_mock_': mock}
+ checksig = eval(src, context)
+ _copy_func_details(func, checksig)
+
+ name = original.__name__
+ if not name.isidentifier():
+ name = 'funcopy'
+ context = {'checksig': checksig, 'mock': mock}
+ src = """def %s(*args, **kwargs):
+ checksig(*args, **kwargs)
+ return mock(*args, **kwargs)""" % name
+ exec (src, context)
+ funcopy = context[name]
+ _setup_func(funcopy, mock)
+ return funcopy
+
+
+def _setup_func(funcopy, mock):
+ funcopy.mock = mock
+
+ # can't use isinstance with mocks
+ if not _is_instance_mock(mock):
+ return
+
+ def assert_called_with(*args, **kwargs):
+ return mock.assert_called_with(*args, **kwargs)
+ def assert_called_once_with(*args, **kwargs):
+ return mock.assert_called_once_with(*args, **kwargs)
+ def assert_has_calls(*args, **kwargs):
+ return mock.assert_has_calls(*args, **kwargs)
+ def assert_any_call(*args, **kwargs):
+ return mock.assert_any_call(*args, **kwargs)
+ def reset_mock():
+ funcopy.method_calls = _CallList()
+ funcopy.mock_calls = _CallList()
+ mock.reset_mock()
+ ret = funcopy.return_value
+ if _is_instance_mock(ret) and ret is not mock:
+ ret.reset_mock()
+
+ funcopy.called = False
+ funcopy.call_count = 0
+ funcopy.call_args = None
+ funcopy.call_args_list = _CallList()
+ funcopy.method_calls = _CallList()
+ funcopy.mock_calls = _CallList()
+
+ funcopy.return_value = mock.return_value
+ funcopy.side_effect = mock.side_effect
+ funcopy._mock_children = mock._mock_children
+
+ funcopy.assert_called_with = assert_called_with
+ funcopy.assert_called_once_with = assert_called_once_with
+ funcopy.assert_has_calls = assert_has_calls
+ funcopy.assert_any_call = assert_any_call
+ funcopy.reset_mock = reset_mock
+
+ mock._mock_delegate = funcopy
+
+
+def _is_magic(name):
+ return '__%s__' % name[2:-2] == name
+
+
+class _SentinelObject(object):
+ "A unique, named, sentinel object."
+ def __init__(self, name):
+ self.name = name
+
+ def __repr__(self):
+ return 'sentinel.%s' % self.name
+
+
+class _Sentinel(object):
+ """Access attributes to return a named object, usable as a sentinel."""
+ def __init__(self):
+ self._sentinels = {}
+
+ def __getattr__(self, name):
+ if name == '__bases__':
+ # Without this help(unittest.mock) raises an exception
+ raise AttributeError
+ return self._sentinels.setdefault(name, _SentinelObject(name))
+
+
+sentinel = _Sentinel()
+
+DEFAULT = sentinel.DEFAULT
+_missing = sentinel.MISSING
+_deleted = sentinel.DELETED
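+
+# Example (illustrative): sentinel attributes are created on first access
+# and are identity-unique:
+#
+# >>> sentinel.some_object is sentinel.some_object
+# True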
+
+
+def _copy(value):
+ if type(value) in (dict, list, tuple, set):
+ return type(value)(value)
+ return value
+
+
+_allowed_names = set(
+ [
+ 'return_value', '_mock_return_value', 'side_effect',
+ '_mock_side_effect', '_mock_parent', '_mock_new_parent',
+ '_mock_name', '_mock_new_name'
+ ]
+)
+
+
+def _delegating_property(name):
+ _allowed_names.add(name)
+ _the_name = '_mock_' + name
+ def _get(self, name=name, _the_name=_the_name):
+ sig = self._mock_delegate
+ if sig is None:
+ return getattr(self, _the_name)
+ return getattr(sig, name)
+ def _set(self, value, name=name, _the_name=_the_name):
+ sig = self._mock_delegate
+ if sig is None:
+ self.__dict__[_the_name] = value
+ else:
+ setattr(sig, name, value)
+
+ return property(_get, _set)
+
+
+
+class _CallList(list):
+
+ def __contains__(self, value):
+ if not isinstance(value, list):
+ return list.__contains__(self, value)
+ len_value = len(value)
+ len_self = len(self)
+ if len_value > len_self:
+ return False
+
+ for i in range(0, len_self - len_value + 1):
+ sub_list = self[i:i+len_value]
+ if sub_list == value:
+ return True
+ return False
+
+ def __repr__(self):
+ return pprint.pformat(list(self))
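+
+ # Note: __contains__ above treats a list argument as a contiguous
+ # sub-sequence to search for; assert_has_calls relies on this to match
+ # a run of consecutive calls in mock_calls.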
+
+
+def _check_and_set_parent(parent, value, name, new_name):
+ if not _is_instance_mock(value):
+ return False
+ if ((value._mock_name or value._mock_new_name) or
+ (value._mock_parent is not None) or
+ (value._mock_new_parent is not None)):
+ return False
+
+ _parent = parent
+ while _parent is not None:
+ # setting a mock (value) as a child or return value of itself
+ # should not modify the mock
+ if _parent is value:
+ return False
+ _parent = _parent._mock_new_parent
+
+ if new_name:
+ value._mock_new_parent = parent
+ value._mock_new_name = new_name
+ if name:
+ value._mock_parent = parent
+ value._mock_name = name
+ return True
+
+
+
+class Base(object):
+ _mock_return_value = DEFAULT
+ _mock_side_effect = None
+ def __init__(self, *args, **kwargs):
+ pass
+
+
+
+class NonCallableMock(Base):
+ """A non-callable version of `Mock`"""
+
+ def __new__(cls, *args, **kw):
+ # every instance has its own class
+ # so we can create magic methods on the
+ # class without stomping on other mocks
+ new = type(cls.__name__, (cls,), {'__doc__': cls.__doc__})
+ instance = object.__new__(new)
+ return instance
+
+
+ def __init__(
+ self, spec=None, wraps=None, name=None, spec_set=None,
+ parent=None, _spec_state=None, _new_name='', _new_parent=None,
+ **kwargs
+ ):
+ if _new_parent is None:
+ _new_parent = parent
+
+ __dict__ = self.__dict__
+ __dict__['_mock_parent'] = parent
+ __dict__['_mock_name'] = name
+ __dict__['_mock_new_name'] = _new_name
+ __dict__['_mock_new_parent'] = _new_parent
+
+ if spec_set is not None:
+ spec = spec_set
+ spec_set = True
+
+ self._mock_add_spec(spec, spec_set)
+
+ __dict__['_mock_children'] = {}
+ __dict__['_mock_wraps'] = wraps
+ __dict__['_mock_delegate'] = None
+
+ __dict__['_mock_called'] = False
+ __dict__['_mock_call_args'] = None
+ __dict__['_mock_call_count'] = 0
+ __dict__['_mock_call_args_list'] = _CallList()
+ __dict__['_mock_mock_calls'] = _CallList()
+
+ __dict__['method_calls'] = _CallList()
+
+ if kwargs:
+ self.configure_mock(**kwargs)
+
+ super(NonCallableMock, self).__init__(
+ spec, wraps, name, spec_set, parent,
+ _spec_state
+ )
+
+
+ def attach_mock(self, mock, attribute):
+ """
+ Attach a mock as an attribute of this one, replacing its name and
+ parent. Calls to the attached mock will be recorded in the
+ `method_calls` and `mock_calls` attributes of this one."""
+ mock._mock_parent = None
+ mock._mock_new_parent = None
+ mock._mock_name = ''
+ mock._mock_new_name = None
+
+ setattr(self, attribute, mock)
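+ # e.g. after parent.attach_mock(Mock(), 'child'), calling
+ # parent.child() is recorded as call.child() in parent.mock_calls.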
+
+
+ def mock_add_spec(self, spec, spec_set=False):
+ """Add a spec to a mock. `spec` can either be an object or a
+ list of strings. Only attributes on the `spec` can be fetched as
+ attributes from the mock.
+
+ If `spec_set` is True then only attributes on the spec can be set."""
+ self._mock_add_spec(spec, spec_set)
+
+
+ def _mock_add_spec(self, spec, spec_set):
+ _spec_class = None
+
+ if spec is not None and not _is_list(spec):
+ if isinstance(spec, type):
+ _spec_class = spec
+ else:
+ _spec_class = _get_class(spec)
+
+ spec = dir(spec)
+
+ __dict__ = self.__dict__
+ __dict__['_spec_class'] = _spec_class
+ __dict__['_spec_set'] = spec_set
+ __dict__['_mock_methods'] = spec
+
+
+ def __get_return_value(self):
+ ret = self._mock_return_value
+ if self._mock_delegate is not None:
+ ret = self._mock_delegate.return_value
+
+ if ret is DEFAULT:
+ ret = self._get_child_mock(
+ _new_parent=self, _new_name='()'
+ )
+ self.return_value = ret
+ return ret
+
+
+ def __set_return_value(self, value):
+ if self._mock_delegate is not None:
+ self._mock_delegate.return_value = value
+ else:
+ self._mock_return_value = value
+ _check_and_set_parent(self, value, None, '()')
+
+ __return_value_doc = "The value to be returned when the mock is called."
+ return_value = property(__get_return_value, __set_return_value,
+ __return_value_doc)
+
+
+ @property
+ def __class__(self):
+ if self._spec_class is None:
+ return type(self)
+ return self._spec_class
+
+ called = _delegating_property('called')
+ call_count = _delegating_property('call_count')
+ call_args = _delegating_property('call_args')
+ call_args_list = _delegating_property('call_args_list')
+ mock_calls = _delegating_property('mock_calls')
+
+
+ def __get_side_effect(self):
+ delegated = self._mock_delegate
+ if delegated is None:
+ return self._mock_side_effect
+ return delegated.side_effect
+
+ def __set_side_effect(self, value):
+ value = _try_iter(value)
+ delegated = self._mock_delegate
+ if delegated is None:
+ self._mock_side_effect = value
+ else:
+ delegated.side_effect = value
+
+ side_effect = property(__get_side_effect, __set_side_effect)
+
+
+ def reset_mock(self):
+ "Restore the mock object to its initial state."
+ self.called = False
+ self.call_args = None
+ self.call_count = 0
+ self.mock_calls = _CallList()
+ self.call_args_list = _CallList()
+ self.method_calls = _CallList()
+
+ for child in self._mock_children.values():
+ child.reset_mock()
+
+ ret = self._mock_return_value
+ if _is_instance_mock(ret) and ret is not self:
+ ret.reset_mock()
+
+
+ def configure_mock(self, **kwargs):
+ """Set attributes on the mock through keyword arguments.
+
+ Attributes plus return values and side effects can be set on child
+ mocks using standard dot notation and unpacking a dictionary in the
+ method call:
+
+ >>> attrs = {'method.return_value': 3, 'other.side_effect': KeyError}
+ >>> mock.configure_mock(**attrs)"""
+ for arg, val in sorted(kwargs.items(),
+ # we sort on the number of dots so that
+ # attributes are set before we set attributes on
+ # attributes
+ key=lambda entry: entry[0].count('.')):
+ args = arg.split('.')
+ final = args.pop()
+ obj = self
+ for entry in args:
+ obj = getattr(obj, entry)
+ setattr(obj, final, val)
+
+
+ def __getattr__(self, name):
+ if name == '_mock_methods':
+ raise AttributeError(name)
+ elif self._mock_methods is not None:
+ if name not in self._mock_methods or name in _all_magics:
+ raise AttributeError("Mock object has no attribute %r" % name)
+ elif _is_magic(name):
+ raise AttributeError(name)
+
+ result = self._mock_children.get(name)
+ if result is _deleted:
+ raise AttributeError(name)
+ elif result is None:
+ wraps = None
+ if self._mock_wraps is not None:
+ # XXXX should we get the attribute without triggering code
+ # execution?
+ wraps = getattr(self._mock_wraps, name)
+
+ result = self._get_child_mock(
+ parent=self, name=name, wraps=wraps, _new_name=name,
+ _new_parent=self
+ )
+ self._mock_children[name] = result
+
+ elif isinstance(result, _SpecState):
+ result = create_autospec(
+ result.spec, result.spec_set, result.instance,
+ result.parent, result.name
+ )
+ self._mock_children[name] = result
+
+ return result
+
+
+ def __repr__(self):
+ _name_list = [self._mock_new_name]
+ _parent = self._mock_new_parent
+ last = self
+
+ dot = '.'
+ if _name_list == ['()']:
+ dot = ''
+ seen = set()
+ while _parent is not None:
+ last = _parent
+
+ _name_list.append(_parent._mock_new_name + dot)
+ dot = '.'
+ if _parent._mock_new_name == '()':
+ dot = ''
+
+ _parent = _parent._mock_new_parent
+
+ # use ids here so as not to call __hash__ on the mocks
+ if id(_parent) in seen:
+ break
+ seen.add(id(_parent))
+
+ _name_list = list(reversed(_name_list))
+ _first = last._mock_name or 'mock'
+ if len(_name_list) > 1:
+ if _name_list[1] not in ('()', '().'):
+ _first += '.'
+ _name_list[0] = _first
+ name = ''.join(_name_list)
+
+ name_string = ''
+ if name not in ('mock', 'mock.'):
+ name_string = ' name=%r' % name
+
+ spec_string = ''
+ if self._spec_class is not None:
+ spec_string = ' spec=%r'
+ if self._spec_set:
+ spec_string = ' spec_set=%r'
+ spec_string = spec_string % self._spec_class.__name__
+ return "<%s%s%s id='%s'>" % (
+ type(self).__name__,
+ name_string,
+ spec_string,
+ id(self)
+ )
+
+
+ def __dir__(self):
+ """Filter the output of `dir(mock)` to only useful members."""
+ extras = self._mock_methods or []
+ from_type = dir(type(self))
+ from_dict = list(self.__dict__)
+
+ if FILTER_DIR:
+ from_type = [e for e in from_type if not e.startswith('_')]
+ from_dict = [e for e in from_dict if not e.startswith('_') or
+ _is_magic(e)]
+ return sorted(set(extras + from_type + from_dict +
+ list(self._mock_children)))
+
+
+ def __setattr__(self, name, value):
+ if name in _allowed_names:
+ # property setters go through here
+ return object.__setattr__(self, name, value)
+ elif (self._spec_set and self._mock_methods is not None and
+ name not in self._mock_methods and
+ name not in self.__dict__):
+ raise AttributeError("Mock object has no attribute '%s'" % name)
+ elif name in _unsupported_magics:
+ msg = 'Attempting to set unsupported magic method %r.' % name
+ raise AttributeError(msg)
+ elif name in _all_magics:
+ if self._mock_methods is not None and name not in self._mock_methods:
+ raise AttributeError("Mock object has no attribute '%s'" % name)
+
+ if not _is_instance_mock(value):
+ setattr(type(self), name, _get_method(name, value))
+ original = value
+ value = lambda *args, **kw: original(self, *args, **kw)
+ else:
+ # only set _new_name and not name so that mock_calls is tracked
+ # but not method calls
+ _check_and_set_parent(self, value, None, name)
+ setattr(type(self), name, value)
+ elif name == '__class__':
+ self._spec_class = value
+ return
+ else:
+ if _check_and_set_parent(self, value, name, name):
+ self._mock_children[name] = value
+ return object.__setattr__(self, name, value)
+
+
+ def __delattr__(self, name):
+ if name in _all_magics and name in type(self).__dict__:
+ delattr(type(self), name)
+ if name not in self.__dict__:
+ # for magic methods that are still MagicProxy objects and
+ # not set on the instance itself
+ return
+
+ if name in self.__dict__:
+ object.__delattr__(self, name)
+
+ obj = self._mock_children.get(name, _missing)
+ if obj is _deleted:
+ raise AttributeError(name)
+ if obj is not _missing:
+ del self._mock_children[name]
+ self._mock_children[name] = _deleted
+
+
+
+ def _format_mock_call_signature(self, args, kwargs):
+ name = self._mock_name or 'mock'
+ return _format_call_signature(name, args, kwargs)
+
+
+ def _format_mock_failure_message(self, args, kwargs):
+ message = 'Expected call: %s\nActual call: %s'
+ expected_string = self._format_mock_call_signature(args, kwargs)
+ call_args = self.call_args
+ if len(call_args) == 3:
+ call_args = call_args[1:]
+ actual_string = self._format_mock_call_signature(*call_args)
+ return message % (expected_string, actual_string)
+
+
+ def assert_called_with(_mock_self, *args, **kwargs):
+ """assert that the mock was called with the specified arguments.
+
+ Raises an AssertionError if the args and keyword args passed in are
+ different to the last call to the mock."""
+ self = _mock_self
+ if self.call_args is None:
+ expected = self._format_mock_call_signature(args, kwargs)
+ raise AssertionError('Expected call: %s\nNot called' % (expected,))
+
+ if self.call_args != (args, kwargs):
+ msg = self._format_mock_failure_message(args, kwargs)
+ raise AssertionError(msg)
+
+
+ def assert_called_once_with(_mock_self, *args, **kwargs):
+ """assert that the mock was called exactly once and with the specified
+ arguments."""
+ self = _mock_self
+ if self.call_count != 1:
+ msg = ("Expected to be called once. Called %s times." %
+ self.call_count)
+ raise AssertionError(msg)
+ return self.assert_called_with(*args, **kwargs)
+
+
+ def assert_has_calls(self, calls, any_order=False):
+ """assert the mock has been called with the specified calls.
+ The `mock_calls` list is checked for the calls.
+
+ If `any_order` is False (the default) then the calls must be
+ sequential. There can be extra calls before or after the
+ specified calls.
+
+ If `any_order` is True then the calls can be in any order, but
+ they must all appear in `mock_calls`."""
+ if not any_order:
+ if calls not in self.mock_calls:
+ raise AssertionError(
+ 'Calls not found.\nExpected: %r\n'
+ 'Actual: %r' % (calls, self.mock_calls)
+ )
+ return
+
+ all_calls = list(self.mock_calls)
+
+ not_found = []
+ for kall in calls:
+ try:
+ all_calls.remove(kall)
+ except ValueError:
+ not_found.append(kall)
+ if not_found:
+ raise AssertionError(
+ '%r not all found in call list' % (tuple(not_found),)
+ )
+
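+ # Illustrative example: after m = Mock(); m(1); m(2); m(3), the check
+ # m.assert_has_calls([call(2), call(3)]) passes because the two calls
+ # appear consecutively in mock_calls.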
+
+ def assert_any_call(self, *args, **kwargs):
+ """assert the mock has been called with the specified arguments.
+
+ The assert passes if the mock has *ever* been called, unlike
+ `assert_called_with` and `assert_called_once_with` that only pass if
+ the call is the most recent one."""
+ kall = call(*args, **kwargs)
+ if kall not in self.call_args_list:
+ expected_string = self._format_mock_call_signature(args, kwargs)
+ raise AssertionError(
+ '%s call not found' % expected_string
+ )
+
+
+ def _get_child_mock(self, **kw):
+ """Create the child mocks for attributes and return value.
+ By default child mocks will be the same type as the parent.
+ Subclasses of Mock may want to override this to customize the way
+ child mocks are made.
+
+ For non-callable mocks the callable variant will be used (rather than
+ any custom subclass)."""
+ _type = type(self)
+ if not issubclass(_type, CallableMixin):
+ if issubclass(_type, NonCallableMagicMock):
+ klass = MagicMock
+ elif issubclass(_type, NonCallableMock):
+ klass = Mock
+ else:
+ klass = _type.__mro__[1]
+ return klass(**kw)
+
+
+
+def _try_iter(obj):
+ if obj is None:
+ return obj
+ if _is_exception(obj):
+ return obj
+ if _callable(obj):
+ return obj
+ try:
+ return iter(obj)
+ except TypeError:
+ # XXXX backwards compatibility
+ # but this will blow up on first call - so maybe we should fail early?
+ return obj
+
+
+
+class CallableMixin(Base):
+
+ def __init__(self, spec=None, side_effect=None, return_value=DEFAULT,
+ wraps=None, name=None, spec_set=None, parent=None,
+ _spec_state=None, _new_name='', _new_parent=None, **kwargs):
+ self.__dict__['_mock_return_value'] = return_value
+
+ super(CallableMixin, self).__init__(
+ spec, wraps, name, spec_set, parent,
+ _spec_state, _new_name, _new_parent, **kwargs
+ )
+
+ self.side_effect = side_effect
+
+
+ def _mock_check_sig(self, *args, **kwargs):
+ # stub method that can be replaced with one with a specific signature
+ pass
+
+
+ def __call__(_mock_self, *args, **kwargs):
+ # can't use self, in case a function / method we are mocking uses
+ # self in its signature
+ _mock_self._mock_check_sig(*args, **kwargs)
+ return _mock_self._mock_call(*args, **kwargs)
+
+
+ def _mock_call(_mock_self, *args, **kwargs):
+ self = _mock_self
+ self.called = True
+ self.call_count += 1
+ self.call_args = _Call((args, kwargs), two=True)
+ self.call_args_list.append(_Call((args, kwargs), two=True))
+
+ _new_name = self._mock_new_name
+ _new_parent = self._mock_new_parent
+ self.mock_calls.append(_Call(('', args, kwargs)))
+
+ seen = set()
+ skip_next_dot = _new_name == '()'
+ do_method_calls = self._mock_parent is not None
+ name = self._mock_name
+ while _new_parent is not None:
+ this_mock_call = _Call((_new_name, args, kwargs))
+ if _new_parent._mock_new_name:
+ dot = '.'
+ if skip_next_dot:
+ dot = ''
+
+ skip_next_dot = False
+ if _new_parent._mock_new_name == '()':
+ skip_next_dot = True
+
+ _new_name = _new_parent._mock_new_name + dot + _new_name
+
+ if do_method_calls:
+ if _new_name == name:
+ this_method_call = this_mock_call
+ else:
+ this_method_call = _Call((name, args, kwargs))
+ _new_parent.method_calls.append(this_method_call)
+
+ do_method_calls = _new_parent._mock_parent is not None
+ if do_method_calls:
+ name = _new_parent._mock_name + '.' + name
+
+ _new_parent.mock_calls.append(this_mock_call)
+ _new_parent = _new_parent._mock_new_parent
+
+ # use ids here so as not to call __hash__ on the mocks
+ _new_parent_id = id(_new_parent)
+ if _new_parent_id in seen:
+ break
+ seen.add(_new_parent_id)
+
+ ret_val = DEFAULT
+ effect = self.side_effect
+ if effect is not None:
+ if _is_exception(effect):
+ raise effect
+
+ if not _callable(effect):
+ return next(effect)
+
+ ret_val = effect(*args, **kwargs)
+ if ret_val is DEFAULT:
+ ret_val = self.return_value
+
+ if (self._mock_wraps is not None and
+ self._mock_return_value is DEFAULT):
+ return self._mock_wraps(*args, **kwargs)
+ if ret_val is DEFAULT:
+ ret_val = self.return_value
+ return ret_val
+
+
+
+class Mock(CallableMixin, NonCallableMock):
+ """
+ Create a new `Mock` object. `Mock` takes several optional arguments
+ that specify the behaviour of the Mock object:
+
+ * `spec`: This can be either a list of strings or an existing object (a
+ class or instance) that acts as the specification for the mock object. If
+ you pass in an object then a list of strings is formed by calling dir on
+ the object (excluding unsupported magic attributes and methods). Accessing
+ any attribute not in this list will raise an `AttributeError`.
+
+ If `spec` is an object (rather than a list of strings) then
+ `mock.__class__` returns the class of the spec object. This allows mocks
+ to pass `isinstance` tests.
+
+ * `spec_set`: A stricter variant of `spec`. If used, attempting to *set*
+ or get an attribute on the mock that isn't on the object passed as
+ `spec_set` will raise an `AttributeError`.
+
+ * `side_effect`: A function to be called whenever the Mock is called. See
+ the `side_effect` attribute. Useful for raising exceptions or
+ dynamically changing return values. The function is called with the same
+ arguments as the mock, and unless it returns `DEFAULT`, the return
+ value of this function is used as the return value.
+
+ Alternatively `side_effect` can be an exception class or instance. In
+ this case the exception will be raised when the mock is called.
+
+ If `side_effect` is an iterable then each call to the mock will return
+ the next value from the iterable.
+
+ * `return_value`: The value returned when the mock is called. By default
+ this is a new Mock (created on first access). See the
+ `return_value` attribute.
+
+ * `wraps`: Item for the mock object to wrap. If `wraps` is not None
+ then calling the Mock will pass the call through to the wrapped object
+ (returning the real result and ignoring `return_value`). Attribute
+ access on the mock will return a Mock object that wraps the corresponding
+ attribute of the wrapped object (so attempting to access an attribute that
+ doesn't exist will raise an `AttributeError`).
+
+ If the mock has an explicit `return_value` set then calls are not passed
+ to the wrapped object and the `return_value` is returned instead.
+
+ * `name`: If the mock has a name then it will be used in the repr of the
+ mock. This can be useful for debugging. The name is propagated to child
+ mocks.
+
+ Mocks can also be called with arbitrary keyword arguments. These will be
+ used to set attributes on the mock after it is created.
+ """
+
+
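+# Example usage (illustrative sketch):
+#
+# >>> m = Mock(return_value=3, foo='bar')
+# >>> m(), m.foo
+# (3, 'bar')
+# >>> m = Mock(side_effect=[1, 2])
+# >>> m(), m()
+# (1, 2)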
+
+def _dot_lookup(thing, comp, import_path):
+ try:
+ return getattr(thing, comp)
+ except AttributeError:
+ __import__(import_path)
+ return getattr(thing, comp)
+
+
+def _importer(target):
+ components = target.split('.')
+ import_path = components.pop(0)
+ thing = __import__(import_path)
+
+ for comp in components:
+ import_path += ".%s" % comp
+ thing = _dot_lookup(thing, comp, import_path)
+ return thing
+
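+# For example, _importer('os.path') imports the 'os' package and returns the
+# os.path module object.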
+
+def _is_started(patcher):
+ # XXXX horrible
+ return hasattr(patcher, 'is_local')
+
+
+class _patch(object):
+
+ attribute_name = None
+
+ def __init__(
+ self, getter, attribute, new, spec, create,
+ spec_set, autospec, new_callable, kwargs
+ ):
+ if new_callable is not None:
+ if new is not DEFAULT:
+ raise ValueError(
+ "Cannot use 'new' and 'new_callable' together"
+ )
+ if autospec is not False:
+ raise ValueError(
+ "Cannot use 'autospec' and 'new_callable' together"
+ )
+
+ self.getter = getter
+ self.attribute = attribute
+ self.new = new
+ self.new_callable = new_callable
+ self.spec = spec
+ self.create = create
+ self.has_local = False
+ self.spec_set = spec_set
+ self.autospec = autospec
+ self.kwargs = kwargs
+ self.additional_patchers = []
+
+
+ def copy(self):
+ patcher = _patch(
+ self.getter, self.attribute, self.new, self.spec,
+ self.create, self.spec_set,
+ self.autospec, self.new_callable, self.kwargs
+ )
+ patcher.attribute_name = self.attribute_name
+ patcher.additional_patchers = [
+ p.copy() for p in self.additional_patchers
+ ]
+ return patcher
+
+
+ def __call__(self, func):
+ if isinstance(func, type):
+ return self.decorate_class(func)
+ return self.decorate_callable(func)
+
+
+ def decorate_class(self, klass):
+ for attr in dir(klass):
+ if not attr.startswith(patch.TEST_PREFIX):
+ continue
+
+ attr_value = getattr(klass, attr)
+ if not hasattr(attr_value, "__call__"):
+ continue
+
+ patcher = self.copy()
+ setattr(klass, attr, patcher(attr_value))
+ return klass
+
+
+ def decorate_callable(self, func):
+ if hasattr(func, 'patchings'):
+ func.patchings.append(self)
+ return func
+
+ @wraps(func)
+ def patched(*args, **keywargs):
+ extra_args = []
+ entered_patchers = []
+
+ try:
+ for patching in patched.patchings:
+ arg = patching.__enter__()
+ entered_patchers.append(patching)
+ if patching.attribute_name is not None:
+ keywargs.update(arg)
+ elif patching.new is DEFAULT:
+ extra_args.append(arg)
+
+ args += tuple(extra_args)
+ return func(*args, **keywargs)
+ except:
+ if (patching not in entered_patchers and
+ _is_started(patching)):
+ # the patcher may have been started, but an exception
+ # raised whilst entering one of its additional_patchers
+ entered_patchers.append(patching)
+ # re-raise the exception
+ raise
+ finally:
+ for patching in reversed(entered_patchers):
+ patching.__exit__()
+
+ patched.patchings = [self]
+ if hasattr(func, 'func_code'):
+ # 'func_code' exists only on Python 2 function objects
+ patched.compat_co_firstlineno = getattr(
+ func, "compat_co_firstlineno",
+ func.func_code.co_firstlineno
+ )
+ return patched
+
+
+ def get_original(self):
+ target = self.getter()
+ name = self.attribute
+
+ original = DEFAULT
+ local = False
+
+ try:
+ original = target.__dict__[name]
+ except (AttributeError, KeyError):
+ original = getattr(target, name, DEFAULT)
+ else:
+ local = True
+
+ if not self.create and original is DEFAULT:
+ raise AttributeError(
+ "%s does not have the attribute %r" % (target, name)
+ )
+ return original, local
+
+
+ def __enter__(self):
+ """Perform the patch."""
+ new, spec, spec_set = self.new, self.spec, self.spec_set
+ autospec, kwargs = self.autospec, self.kwargs
+ new_callable = self.new_callable
+ self.target = self.getter()
+
+ original, local = self.get_original()
+
+ if new is DEFAULT and autospec is False:
+ inherit = False
+ if spec_set == True:
+ spec_set = original
+ elif spec == True:
+ # set spec to the object we are replacing
+ spec = original
+
+ if (spec or spec_set) is not None:
+ if isinstance(original, type):
+ # If we're patching out a class and there is a spec
+ inherit = True
+
+ Klass = MagicMock
+ _kwargs = {}
+ if new_callable is not None:
+ Klass = new_callable
+ elif (spec or spec_set) is not None:
+ if not _callable(spec or spec_set):
+ Klass = NonCallableMagicMock
+
+ if spec is not None:
+ _kwargs['spec'] = spec
+ if spec_set is not None:
+ _kwargs['spec_set'] = spec_set
+
+ # add a name to mocks
+ if (isinstance(Klass, type) and
+ issubclass(Klass, NonCallableMock) and self.attribute):
+ _kwargs['name'] = self.attribute
+
+ _kwargs.update(kwargs)
+ new = Klass(**_kwargs)
+
+ if inherit and _is_instance_mock(new):
+ # we can only tell if the instance should be callable if the
+ # spec is not a list
+ if (not _is_list(spec or spec_set) and not
+ _instance_callable(spec or spec_set)):
+ Klass = NonCallableMagicMock
+
+ _kwargs.pop('name')
+ new.return_value = Klass(_new_parent=new, _new_name='()',
+ **_kwargs)
+ elif autospec is not False:
+ # spec is ignored, new *must* be default, spec_set is treated
+ # as a boolean. Should we check spec is not None and that spec_set
+ # is a bool?
+ if new is not DEFAULT:
+ raise TypeError(
+ "autospec creates the mock for you. Can't specify "
+ "autospec and new."
+ )
+ spec_set = bool(spec_set)
+ if autospec is True:
+ autospec = original
+
+ new = create_autospec(autospec, spec_set=spec_set,
+ _name=self.attribute, **kwargs)
+ elif kwargs:
+ # can't set keyword args when we aren't creating the mock
+ # XXXX If new is a Mock we could call new.configure_mock(**kwargs)
+ raise TypeError("Can't pass kwargs to a mock we aren't creating")
+
+ new_attr = new
+
+ self.temp_original = original
+ self.is_local = local
+ setattr(self.target, self.attribute, new_attr)
+ if self.attribute_name is not None:
+ extra_args = {}
+ if self.new is DEFAULT:
+ extra_args[self.attribute_name] = new
+ for patching in self.additional_patchers:
+ arg = patching.__enter__()
+ if patching.new is DEFAULT:
+ extra_args.update(arg)
+ return extra_args
+
+ return new
+
+
+ def __exit__(self, *_):
+ """Undo the patch."""
+ if not _is_started(self):
+ raise RuntimeError('stop called on unstarted patcher')
+
+ if self.is_local and self.temp_original is not DEFAULT:
+ setattr(self.target, self.attribute, self.temp_original)
+ else:
+ delattr(self.target, self.attribute)
+ if not self.create and not hasattr(self.target, self.attribute):
+ # needed for proxy objects like django settings
+ setattr(self.target, self.attribute, self.temp_original)
+
+ del self.temp_original
+ del self.is_local
+ del self.target
+ for patcher in reversed(self.additional_patchers):
+ if _is_started(patcher):
+ patcher.__exit__()
+
+ start = __enter__
+ stop = __exit__
+
+
+
+def _get_target(target):
+ try:
+ target, attribute = target.rsplit('.', 1)
+ except (TypeError, ValueError):
+ raise TypeError("Need a valid target to patch. You supplied: %r" %
+ (target,))
+ getter = lambda: _importer(target)
+ return getter, attribute
+
+
+def _patch_object(
+ target, attribute, new=DEFAULT, spec=None,
+ create=False, spec_set=None, autospec=False,
+ new_callable=None, **kwargs
+ ):
+ """
+ patch.object(target, attribute, new=DEFAULT, spec=None, create=False,
+ spec_set=None, autospec=False,
+ new_callable=None, **kwargs)
+
+ patch the named member (`attribute`) on an object (`target`) with a mock
+ object.
+
+ `patch.object` can be used as a decorator, class decorator or a context
+ manager. Arguments `new`, `spec`, `create`, `spec_set`,
+ `autospec` and `new_callable` have the same meaning as for `patch`. Like
+ `patch`, `patch.object` takes arbitrary keyword arguments for configuring
+ the mock object it creates.
+
+ When used as a class decorator `patch.object` honours `patch.TEST_PREFIX`
+ for choosing which methods to wrap.
+ """
+ getter = lambda: target
+ return _patch(
+ getter, attribute, new, spec, create,
+ spec_set, autospec, new_callable, kwargs
+ )
+
+
+def _patch_multiple(target, spec=None, create=False,
+ spec_set=None, autospec=False,
+ new_callable=None, **kwargs
+ ):
+ """Perform multiple patches in a single call. It takes the object to be
+ patched (either as an object or a string to fetch the object by importing)
+ and keyword arguments for the patches::
+
+ with patch.multiple(settings, FIRST_PATCH='one', SECOND_PATCH='two'):
+ ...
+
+ Use `DEFAULT` as the value if you want `patch.multiple` to create
+ mocks for you. In this case the created mocks are passed into a decorated
+ function by keyword, and a dictionary is returned when `patch.multiple` is
+ used as a context manager.
+
+ `patch.multiple` can be used as a decorator, class decorator or a context
+ manager. The arguments `spec`, `spec_set`, `create`,
+ `autospec` and `new_callable` have the same meaning as for `patch`. These
+ arguments will be applied to *all* patches done by `patch.multiple`.
+
+ When used as a class decorator `patch.multiple` honours `patch.TEST_PREFIX`
+ for choosing which methods to wrap.
+ """
+ if type(target) is str:
+ getter = lambda: _importer(target)
+ else:
+ getter = lambda: target
+
+ if not kwargs:
+ raise ValueError(
+ 'Must supply at least one keyword argument with patch.multiple'
+ )
+ # need to wrap in a list for python 3, where items is a view
+ items = list(kwargs.items())
+ attribute, new = items[0]
+ patcher = _patch(
+ getter, attribute, new, spec, create, spec_set,
+ autospec, new_callable, {}
+ )
+ patcher.attribute_name = attribute
+ for attribute, new in items[1:]:
+ this_patcher = _patch(
+ getter, attribute, new, spec, create, spec_set,
+ autospec, new_callable, {}
+ )
+ this_patcher.attribute_name = attribute
+ patcher.additional_patchers.append(this_patcher)
+ return patcher
+
+
+def patch(
+ target, new=DEFAULT, spec=None, create=False,
+ spec_set=None, autospec=False,
+ new_callable=None, **kwargs
+ ):
+ """
+ `patch` acts as a function decorator, class decorator or a context
+ manager. Inside the body of the function or with statement, the `target`
+ (specified in the form `'package.module.ClassName'`) is patched
+ with a `new` object. When the function/with statement exits the patch is
+ undone.
+
+ The `target` is imported and the specified attribute patched with the new
+ object, so it must be importable from the environment you are calling the
+ decorator from. The target is imported when the decorated function is
+ executed, not at decoration time.
+
+ If `new` is omitted, then a new `MagicMock` is created and passed in as an
+ extra argument to the decorated function.
+
+ The `spec` and `spec_set` keyword arguments are passed to the `MagicMock`
+ if patch is creating one for you.
+
+ In addition you can pass `spec=True` or `spec_set=True`, which causes
+ patch to pass in the object being mocked as the spec/spec_set object.
+
+ `new_callable` allows you to specify a different class, or callable object,
+ that will be called to create the `new` object. By default `MagicMock` is
+ used.
+
+ A more powerful form of `spec` is `autospec`. If you set `autospec=True`
+ then the mock will be created with a spec from the object being replaced.
+ All attributes of the mock will also have the spec of the corresponding
+ attribute of the object being replaced. Methods and functions being
+ mocked will have their arguments checked and will raise a `TypeError` if
+ they are called with the wrong signature. For mocks replacing a class,
+ their return value (the 'instance') will have the same spec as the class.
+
+ Instead of `autospec=True` you can pass `autospec=some_object` to use an
+ arbitrary object as the spec instead of the one being replaced.
+
+ By default `patch` will fail to replace attributes that don't exist. If
+ you pass in `create=True`, and the attribute doesn't exist, patch will
+ create the attribute for you when the patched function is called, and
+ delete it again afterwards. This is useful for writing tests against
+ attributes that your production code creates at runtime. It is off by
+ default because it can be dangerous. With it switched on you can write
+ passing tests against APIs that don't actually exist!
+
+ Patch can be used as a `TestCase` class decorator. It works by
+ decorating each test method in the class. This reduces the boilerplate
+ code when your test methods share a common set of patches. `patch` finds
+ tests by looking for method names that start with `patch.TEST_PREFIX`.
+ By default this is `test`, which matches the way `unittest` finds tests.
+ You can specify an alternative prefix by setting `patch.TEST_PREFIX`.
+
+ Patch can be used as a context manager, with the with statement. Here the
+ patching applies to the indented block after the with statement. If you
+ use "as" then the patched object will be bound to the name after the
+ "as"; very useful if `patch` is creating a mock object for you.
+
+ `patch` takes arbitrary keyword arguments. These will be passed to
+ the `Mock` (or `new_callable`) on construction.
+
+ `patch.dict(...)`, `patch.multiple(...)` and `patch.object(...)` are
+ available for alternate use-cases.
+ """
+ getter, attribute = _get_target(target)
+ return _patch(
+ getter, attribute, new, spec, create,
+ spec_set, autospec, new_callable, kwargs
+ )
+
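+# Example usage (illustrative sketch):
+#
+# >>> import os
+# >>> with patch('os.getcwd', return_value='/tmp') as mocked:
+# ... os.getcwd()
+# '/tmp'
+# >>> mocked.assert_called_once_with()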
+
+class _patch_dict(object):
+ """
+ Patch a dictionary, or dictionary like object, and restore the dictionary
+ to its original state after the test.
+
+ `in_dict` can be a dictionary or a mapping like container. If it is a
+ mapping then it must at least support getting, setting and deleting items
+ plus iterating over keys.
+
+ `in_dict` can also be a string specifying the name of the dictionary, which
+ will then be fetched by importing it.
+
+ `values` can be a dictionary of values to set in the dictionary. `values`
+ can also be an iterable of `(key, value)` pairs.
+
+ If `clear` is True then the dictionary will be cleared before the new
+ values are set.
+
+ `patch.dict` can also be called with arbitrary keyword arguments to set
+ values in the dictionary::
+
+ with patch.dict('sys.modules', mymodule=Mock(), other_module=Mock()):
+ ...
+
+ `patch.dict` can be used as a context manager, decorator or class
+ decorator. When used as a class decorator `patch.dict` honours
+ `patch.TEST_PREFIX` for choosing which methods to wrap.
+ """
+
+ def __init__(self, in_dict, values=(), clear=False, **kwargs):
+ if isinstance(in_dict, str):
+ in_dict = _importer(in_dict)
+ self.in_dict = in_dict
+ # support any argument supported by dict(...) constructor
+ self.values = dict(values)
+ self.values.update(kwargs)
+ self.clear = clear
+ self._original = None
+
+
+ def __call__(self, f):
+ if isinstance(f, type):
+ return self.decorate_class(f)
+ @wraps(f)
+ def _inner(*args, **kw):
+ self._patch_dict()
+ try:
+ return f(*args, **kw)
+ finally:
+ self._unpatch_dict()
+
+ return _inner
+
+
+ def decorate_class(self, klass):
+ for attr in dir(klass):
+ attr_value = getattr(klass, attr)
+ if (attr.startswith(patch.TEST_PREFIX) and
+ hasattr(attr_value, "__call__")):
+ decorator = _patch_dict(self.in_dict, self.values, self.clear)
+ decorated = decorator(attr_value)
+ setattr(klass, attr, decorated)
+ return klass
+
+
+ def __enter__(self):
+ """Patch the dict."""
+ self._patch_dict()
+
+
+ def _patch_dict(self):
+ values = self.values
+ in_dict = self.in_dict
+ clear = self.clear
+
+ try:
+ original = in_dict.copy()
+ except AttributeError:
+ # dict like object with no copy method
+ # must support iteration over keys
+ original = {}
+ for key in in_dict:
+ original[key] = in_dict[key]
+ self._original = original
+
+ if clear:
+ _clear_dict(in_dict)
+
+ try:
+ in_dict.update(values)
+ except AttributeError:
+ # dict like object with no update method
+ for key in values:
+ in_dict[key] = values[key]
+
+
+ def _unpatch_dict(self):
+ in_dict = self.in_dict
+ original = self._original
+
+ _clear_dict(in_dict)
+
+ try:
+ in_dict.update(original)
+ except AttributeError:
+ for key in original:
+ in_dict[key] = original[key]
+
+
+ def __exit__(self, *args):
+ """Unpatch the dict."""
+ self._unpatch_dict()
+ return False
+
+ start = __enter__
+ stop = __exit__
+
+
+def _clear_dict(in_dict):
+ try:
+ in_dict.clear()
+ except AttributeError:
+ keys = list(in_dict)
+ for key in keys:
+ del in_dict[key]
+
+
+patch.object = _patch_object
+patch.dict = _patch_dict
+patch.multiple = _patch_multiple
+patch.TEST_PREFIX = 'test'
+
+magic_methods = (
+ "lt le gt ge eq ne "
+ "getitem setitem delitem "
+ "len contains iter "
+ "hash str sizeof "
+ "enter exit "
+ "divmod neg pos abs invert "
+ "complex int float index "
+ "trunc floor ceil "
+ "bool next "
+)
+
+numerics = "add sub mul div floordiv mod lshift rshift and xor or pow "
+inplace = ' '.join('i%s' % n for n in numerics.split())
+right = ' '.join('r%s' % n for n in numerics.split())
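+
+# i.e. each numeric method also gets an in-place ("__iadd__") and a
+# reflected ("__radd__") variant.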
+
+# not including __prepare__, __instancecheck__, __subclasscheck__
+# (as they are metaclass methods)
+# __del__ is not supported at all as it causes problems if it exists
+
+_non_defaults = set('__%s__' % method for method in [
+ 'cmp', 'getslice', 'setslice', 'coerce', 'subclasses',
+ 'format', 'get', 'set', 'delete', 'reversed',
+ 'missing', 'reduce', 'reduce_ex', 'getinitargs',
+ 'getnewargs', 'getstate', 'setstate', 'getformat',
+ 'setformat', 'repr', 'dir'
+])
+
+
+def _get_method(name, func):
+ "Turns a callable object (like a mock) into a real function"
+ def method(self, *args, **kw):
+ return func(self, *args, **kw)
+ method.__name__ = name
+ return method
+
+
+_magics = set(
+ '__%s__' % method for method in
+ ' '.join([magic_methods, numerics, inplace, right]).split()
+)
+
+_all_magics = _magics | _non_defaults
+
+_unsupported_magics = set([
+ '__getattr__', '__setattr__',
+ '__init__', '__new__', '__prepare__',
+ '__instancecheck__', '__subclasscheck__',
+ '__del__'
+])
+
+_calculate_return_value = {
+ '__hash__': lambda self: object.__hash__(self),
+ '__str__': lambda self: object.__str__(self),
+ '__sizeof__': lambda self: object.__sizeof__(self),
+}
+
+_return_values = {
+ '__int__': 1,
+ '__contains__': False,
+ '__len__': 0,
+ '__exit__': False,
+ '__complex__': 1j,
+ '__float__': 1.0,
+ '__bool__': True,
+ '__index__': 1,
+}
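+
+# These defaults keep an unconfigured MagicMock usable in common protocols:
+# e.g. len(m) == 0, bool(m) is True and (x in m) is False for any x.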
+
+
+def _get_eq(self):
+ def __eq__(other):
+ ret_val = self.__eq__._mock_return_value
+ if ret_val is not DEFAULT:
+ return ret_val
+ return self is other
+ return __eq__
+
+def _get_ne(self):
+ def __ne__(other):
+ if self.__ne__._mock_return_value is not DEFAULT:
+ return DEFAULT
+ return self is not other
+ return __ne__
+
+def _get_iter(self):
+ def __iter__():
+ ret_val = self.__iter__._mock_return_value
+ if ret_val is DEFAULT:
+ return iter([])
+ # if ret_val was already an iterator, then calling iter on it should
+ # return the iterator unchanged
+ return iter(ret_val)
+ return __iter__
+
+_side_effect_methods = {
+ '__eq__': _get_eq,
+ '__ne__': _get_ne,
+ '__iter__': _get_iter,
+}
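+
+# These functions are installed as side_effect handlers (see
+# _set_return_value); returning DEFAULT from one of them makes _mock_call
+# fall back to the mock's configured return_value.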
+
+
+
+def _set_return_value(mock, method, name):
+ fixed = _return_values.get(name, DEFAULT)
+ if fixed is not DEFAULT:
+ method.return_value = fixed
+ return
+
+ return_calculator = _calculate_return_value.get(name)
+ if return_calculator is not None:
+ try:
+ return_value = return_calculator(mock)
+ except AttributeError:
+ # XXXX why do we return AttributeError here?
+ # set it as a side_effect instead?
+ return_value = AttributeError(name)
+ method.return_value = return_value
+ return
+
+ side_effector = _side_effect_methods.get(name)
+ if side_effector is not None:
+ method.side_effect = side_effector(mock)
+
+
+
+class MagicMixin(object):
+ def __init__(self, *args, **kw):
+ super(MagicMixin, self).__init__(*args, **kw)
+ self._mock_set_magics()
+
+
+ def _mock_set_magics(self):
+ these_magics = _magics
+
+ if self._mock_methods is not None:
+ these_magics = _magics.intersection(self._mock_methods)
+
+ remove_magics = set()
+ remove_magics = _magics - these_magics
+
+ for entry in remove_magics:
+ if entry in type(self).__dict__:
+ # remove unneeded magic methods
+ delattr(self, entry)
+
+ # don't overwrite existing attributes if called a second time
+ these_magics = these_magics - set(type(self).__dict__)
+
+ _type = type(self)
+ for entry in these_magics:
+ setattr(_type, entry, MagicProxy(entry, self))
+
+
+
+class NonCallableMagicMock(MagicMixin, NonCallableMock):
+ """A version of `MagicMock` that isn't callable."""
+ def mock_add_spec(self, spec, spec_set=False):
+ """Add a spec to a mock. `spec` can either be an object or a
+ list of strings. Only attributes on the `spec` can be fetched as
+ attributes from the mock.
+
+ If `spec_set` is True then only attributes on the spec can be set."""
+ self._mock_add_spec(spec, spec_set)
+ self._mock_set_magics()
+
+
+
+class MagicMock(MagicMixin, Mock):
+ """
+ MagicMock is a subclass of Mock with default implementations
+ of most of the magic methods. You can use MagicMock without having to
+ configure the magic methods yourself.
+
+ If you use the `spec` or `spec_set` arguments then *only* magic
+ methods that exist in the spec will be created.
+
+ Attributes and the return value of a `MagicMock` will also be `MagicMocks`.
+ """
+ def mock_add_spec(self, spec, spec_set=False):
+ """Add a spec to a mock. `spec` can either be an object or a
+ list of strings. Only attributes on the `spec` can be fetched as
+ attributes from the mock.
+
+ If `spec_set` is True then only attributes on the spec can be set."""
+ self._mock_add_spec(spec, spec_set)
+ self._mock_set_magics()
+
+
+
+class MagicProxy(object):
+ def __init__(self, name, parent):
+ self.name = name
+ self.parent = parent
+
+ def __call__(self, *args, **kwargs):
+ m = self.create_mock()
+ return m(*args, **kwargs)
+
+ def create_mock(self):
+ entry = self.name
+ parent = self.parent
+ m = parent._get_child_mock(name=entry, _new_name=entry,
+ _new_parent=parent)
+ setattr(parent, entry, m)
+ _set_return_value(parent, m, entry)
+ return m
+
+ def __get__(self, obj, _type=None):
+ return self.create_mock()
+
+
+
+class _ANY(object):
+ "A helper object that compares equal to everything."
+
+ def __eq__(self, other):
+ return True
+
+ def __ne__(self, other):
+ return False
+
+ def __repr__(self):
+ return '<ANY>'
+
+ANY = _ANY()
+
+
+
+def _format_call_signature(name, args, kwargs):
+ message = '%s(%%s)' % name
+ formatted_args = ''
+ args_string = ', '.join([repr(arg) for arg in args])
+ kwargs_string = ', '.join([
+ '%s=%r' % (key, value) for key, value in kwargs.items()
+ ])
+ if args_string:
+ formatted_args = args_string
+ if kwargs_string:
+ if formatted_args:
+ formatted_args += ', '
+ formatted_args += kwargs_string
+
+ return message % formatted_args
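+
+# e.g. _format_call_signature('mock', (1,), {'a': 2}) returns "mock(1, a=2)".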
+
+
+
+class _Call(tuple):
+ """
+ A tuple for holding the results of a call to a mock, either in the form
+ `(args, kwargs)` or `(name, args, kwargs)`.
+
+ If args or kwargs are empty then a call tuple will compare equal to
+ a tuple without those values. This makes comparisons less verbose::
+
+ _Call(('name', (), {})) == ('name',)
+ _Call(('name', (1,), {})) == ('name', (1,))
+ _Call(((), {'a': 'b'})) == ({'a': 'b'},)
+
+ The `_Call` object provides a useful shortcut for comparing with call::
+
+ _Call(((1, 2), {'a': 3})) == call(1, 2, a=3)
+ _Call(('foo', (1, 2), {'a': 3})) == call.foo(1, 2, a=3)
+
+ If the _Call has no name then it will match any name.
+ """
+ def __new__(cls, value=(), name=None, parent=None, two=False,
+ from_kall=True):
+ name = ''
+ args = ()
+ kwargs = {}
+ _len = len(value)
+ if _len == 3:
+ name, args, kwargs = value
+ elif _len == 2:
+ first, second = value
+ if isinstance(first, str):
+ name = first
+ if isinstance(second, tuple):
+ args = second
+ else:
+ kwargs = second
+ else:
+ args, kwargs = first, second
+ elif _len == 1:
+ value, = value
+ if isinstance(value, str):
+ name = value
+ elif isinstance(value, tuple):
+ args = value
+ else:
+ kwargs = value
+
+ if two:
+ return tuple.__new__(cls, (args, kwargs))
+
+ return tuple.__new__(cls, (name, args, kwargs))
+
+
+ def __init__(self, value=(), name=None, parent=None, two=False,
+ from_kall=True):
+ self.name = name
+ self.parent = parent
+ self.from_kall = from_kall
+
+
+ def __eq__(self, other):
+ if other is ANY:
+ return True
+ try:
+ len_other = len(other)
+ except TypeError:
+ return False
+
+ self_name = ''
+ if len(self) == 2:
+ self_args, self_kwargs = self
+ else:
+ self_name, self_args, self_kwargs = self
+
+ other_name = ''
+ if len_other == 0:
+ other_args, other_kwargs = (), {}
+ elif len_other == 3:
+ other_name, other_args, other_kwargs = other
+ elif len_other == 1:
+ value, = other
+ if isinstance(value, tuple):
+ other_args = value
+ other_kwargs = {}
+ elif isinstance(value, str):
+ other_name = value
+ other_args, other_kwargs = (), {}
+ else:
+ other_args = ()
+ other_kwargs = value
+ else:
+ # len 2
+ # could be (name, args) or (name, kwargs) or (args, kwargs)
+ first, second = other
+ if isinstance(first, str):
+ other_name = first
+ if isinstance(second, tuple):
+ other_args, other_kwargs = second, {}
+ else:
+ other_args, other_kwargs = (), second
+ else:
+ other_args, other_kwargs = first, second
+
+ if self_name and other_name != self_name:
+ return False
+
+ # this order is important for ANY to work!
+ return (other_args, other_kwargs) == (self_args, self_kwargs)
+
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+
+ def __call__(self, *args, **kwargs):
+ if self.name is None:
+ return _Call(('', args, kwargs), name='()')
+
+ name = self.name + '()'
+ return _Call((self.name, args, kwargs), name=name, parent=self)
+
+
+ def __getattr__(self, attr):
+ if self.name is None:
+ return _Call(name=attr, from_kall=False)
+ name = '%s.%s' % (self.name, attr)
+ return _Call(name=name, parent=self, from_kall=False)
+
+
+ def __repr__(self):
+ if not self.from_kall:
+ name = self.name or 'call'
+ if name.startswith('()'):
+ name = 'call%s' % name
+ return name
+
+ if len(self) == 2:
+ name = 'call'
+ args, kwargs = self
+ else:
+ name, args, kwargs = self
+ if not name:
+ name = 'call'
+ elif not name.startswith('()'):
+ name = 'call.%s' % name
+ else:
+ name = 'call%s' % name
+ return _format_call_signature(name, args, kwargs)
+
+
+ def call_list(self):
+ """For a call object that represents multiple calls, `call_list`
+ returns a list of all the intermediate calls as well as the
+ final call."""
+ vals = []
+ thing = self
+ while thing is not None:
+ if thing.from_kall:
+ vals.append(thing)
+ thing = thing.parent
+ return _CallList(reversed(vals))
+
+
+call = _Call(from_kall=False)
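+
+# Example (illustrative): call objects compare equal to recorded calls;
+# after m = MagicMock(); m.method(1, a=2), the expression
+# m.mock_calls == [call.method(1, a=2)] is True.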
+
+
+
+def create_autospec(spec, spec_set=False, instance=False, _parent=None,
+ _name=None, **kwargs):
+ """Create a mock object using another object as a spec. Attributes on the
+ mock will use the corresponding attribute on the `spec` object as their
+ spec.
+
+ Functions or methods being mocked will have their arguments checked
+ to check that they are called with the correct signature.
+
+ If `spec_set` is True then attempting to set attributes that don't exist
+ on the spec object will raise an `AttributeError`.
+
+ If a class is used as a spec then the return value of the mock (the
+ instance of the class) will have the same spec. You can use a class as the
+ spec for an instance object by passing `instance=True`. The returned mock
+ will only be callable if instances of the mock are callable.
+
+ `create_autospec` also takes arbitrary keyword arguments that are passed to
+ the constructor of the created mock."""
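+ # Illustrative sketch (SomeClass refers to the fixture defined in
+ # testhelpers.py later in this patch):
+ #     m = create_autospec(SomeClass)
+ #     m.one(1, 2)   # matches the spec'd signature
+ #     m.one(1)      # raises TypeError (wrong arity)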
+ if _is_list(spec):
+ # can't pass a list instance to the mock constructor as it will be
+ # interpreted as a list of strings
+ spec = type(spec)
+
+ is_type = isinstance(spec, type)
+
+ _kwargs = {'spec': spec}
+ if spec_set:
+ _kwargs = {'spec_set': spec}
+ elif spec is None:
+ # a spec of None means an ordinary mock, with no spec constraint
+ _kwargs = {}
+
+ _kwargs.update(kwargs)
+
+ Klass = MagicMock
+ if type(spec) in DescriptorTypes:
+ # descriptors don't have a spec
+ # because we don't know what type they return
+ _kwargs = {}
+ elif not _callable(spec):
+ Klass = NonCallableMagicMock
+ elif is_type and instance and not _instance_callable(spec):
+ Klass = NonCallableMagicMock
+
+ _new_name = _name
+ if _parent is None:
+ # for a top level object no _new_name should be set
+ _new_name = ''
+
+ mock = Klass(parent=_parent, _new_parent=_parent, _new_name=_new_name,
+ name=_name, **_kwargs)
+
+ if isinstance(spec, FunctionTypes):
+ # should only happen at the top level because we don't
+ # recurse for functions
+ mock = _set_signature(mock, spec)
+ else:
+ _check_signature(spec, mock, is_type, instance)
+
+ if _parent is not None and not instance:
+ _parent._mock_children[_name] = mock
+
+ if is_type and not instance and 'return_value' not in kwargs:
+ # XXXX could give a name to the return_value mock?
+ mock.return_value = create_autospec(spec, spec_set, instance=True,
+ _name='()', _parent=mock)
+
+ for entry in dir(spec):
+ if _is_magic(entry):
+ # MagicMock already does the useful magic methods for us
+ continue
+
+ if isinstance(spec, FunctionTypes) and entry in FunctionAttributes:
+ # allow a mock to actually be a function
+ continue
+
+ # XXXX do we need a better way of getting attributes without
+ # triggering code execution? Probably not - we need the actual
+ # object to mock it, so we would rather trigger a property than mock
+ # the property descriptor. Likewise we want to mock out dynamically
+ # provided attributes.
+ # XXXX what about attributes that raise exceptions when fetched?
+ # we could be resilient against that, or catch the exception and
+ # re-raise it when the attribute is fetched from the mock
+ original = getattr(spec, entry)
+
+ kwargs = {'spec': original}
+ if spec_set:
+ kwargs = {'spec_set': original}
+
+ if not isinstance(original, FunctionTypes):
+ new = _SpecState(original, spec_set, mock, entry, instance)
+ mock._mock_children[entry] = new
+ else:
+ parent = mock
+ if isinstance(spec, FunctionTypes):
+ parent = mock.mock
+
+ new = MagicMock(parent=parent, name=entry, _new_name=entry,
+ _new_parent=parent, **kwargs)
+ mock._mock_children[entry] = new
+ skipfirst = _must_skip(spec, entry, is_type)
+ _check_signature(original, new, skipfirst=skipfirst)
+
+ # so functions created with _set_signature become instance attributes,
+ # *plus* their underlying mock exists in _mock_children of the parent
+ # mock. Adding to _mock_children may be unnecessary where we are also
+ # setting as an instance attribute?
+ if isinstance(new, FunctionTypes):
+ setattr(mock, entry, new)
+
+ return mock
+
+
+def _must_skip(spec, entry, is_type):
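+ # Decide whether signature checks should skip the first argument for
+ # `entry` looked up on `spec`: ordinary methods found on a class still
+ # expect `self`, while staticmethods, classmethods and instance
+ # attributes do not.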
+ if not isinstance(spec, type):
+ if entry in getattr(spec, '__dict__', {}):
+ # instance attribute - shouldn't skip
+ return False
+ spec = spec.__class__
+
+ for klass in spec.__mro__:
+ result = klass.__dict__.get(entry, DEFAULT)
+ if result is DEFAULT:
+ continue
+ if isinstance(result, (staticmethod, classmethod)):
+ return False
+ return is_type
+
+ # shouldn't get here unless function is a dynamically provided attribute
+ # XXXX untested behaviour
+ return is_type
+
+
+def _get_class(obj):
+ try:
+ return obj.__class__
+ except AttributeError:
+ # in Python 2, _sre.SRE_Pattern objects have no __class__
+ return type(obj)
+
+
+class _SpecState(object):
+
+ def __init__(self, spec, spec_set=False, parent=None,
+ name=None, ids=None, instance=False):
+ self.spec = spec
+ self.ids = ids
+ self.spec_set = spec_set
+ self.parent = parent
+ self.instance = instance
+ self.name = name
+
+
+FunctionTypes = (
+ # python function
+ type(create_autospec),
+ # instance method
+ type(ANY.__eq__),
+ # unbound method
+ type(_ANY.__eq__),
+)
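+# (on Python 3 an "unbound method" is just a plain function, so the
+# last entry duplicates the first; the overlap is harmless)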
+
+FunctionAttributes = set([
+ 'func_closure',
+ 'func_code',
+ 'func_defaults',
+ 'func_dict',
+ 'func_doc',
+ 'func_globals',
+ 'func_name',
+])
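+# note: these are the Python 2 attribute names; Python 3 functions
+# expose __closure__, __code__, etc. instead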
+
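+# the attributes of the file objects returned by open() come from the
+# _io implementation classes; their union is used below as the spec for
+# the handles produced by mock_open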
+import _io
+file_spec = list(set(dir(_io.TextIOWrapper)).union(set(dir(_io.BytesIO))))
+
+
+def mock_open(mock=None, read_data=None):
+ if mock is None:
+ mock = MagicMock(spec=file_spec)
+
+ handle = MagicMock(spec=file_spec)
+ handle.write.return_value = None
+ handle.__enter__.return_value = handle
+
+ if read_data is not None:
+ handle.read.return_value = read_data
+
+ mock.return_value = handle
+ return mock
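+
+# Illustrative sketch of mock_open usage (a minimal example mirroring the
+# standard mock documentation; not exercised by the tests in this patch):
+#     m = mock_open(read_data='data')
+#     with patch('builtins.open', m):
+#         with open('somefile') as h:
+#             assert h.read() == 'data'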
+
+
+class PropertyMock(Mock):
+ """A Mock variant with __get__ and __set__ methods to act as a property"""
+ def __get__(self, obj, obj_type):
+ return self()
+ def __set__(self, obj, val):
+ self(val)
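+
+# Illustrative sketch of PropertyMock in use (assumes patching with
+# new_callable=PropertyMock, as exercised by test_propertymock in
+# testhelpers.py later in this patch):
+#     with patch.object(SomeClass, 'one', new_callable=PropertyMock) as m:
+#         m.return_value = 3
+#         SomeClass().one        # returns 3, recorded as call()
+#         SomeClass().one = 6    # recorded as call(6)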
diff --git a/Lib/unittest/result.py b/Lib/unittest/result.py
index 44bf186..97e5426 100644
--- a/Lib/unittest/result.py
+++ b/Lib/unittest/result.py
@@ -1,6 +1,5 @@
"""Test result object"""
-import os
import io
import sys
import traceback
diff --git a/Lib/unittest/test/__init__.py b/Lib/unittest/test/__init__.py
index 99b730b..cdae8a7 100644
--- a/Lib/unittest/test/__init__.py
+++ b/Lib/unittest/test/__init__.py
@@ -14,6 +14,7 @@ def suite():
__import__(modname)
module = sys.modules[modname]
suite.addTest(loader.loadTestsFromModule(module))
+ suite.addTest(loader.loadTestsFromName('unittest.test.testmock'))
return suite
diff --git a/Lib/unittest/test/_test_warnings.py b/Lib/unittest/test/_test_warnings.py
index d0be18d..5cbfb53 100644
--- a/Lib/unittest/test/_test_warnings.py
+++ b/Lib/unittest/test/_test_warnings.py
@@ -10,7 +10,6 @@ combinations of warnings args and -W flags and check that the output is correct.
See #10535.
"""
-import io
import sys
import unittest
import warnings
diff --git a/Lib/unittest/test/test_assertions.py b/Lib/unittest/test/test_assertions.py
index a1d20eb..d43fe5a 100644
--- a/Lib/unittest/test/test_assertions.py
+++ b/Lib/unittest/test/test_assertions.py
@@ -1,6 +1,7 @@
import datetime
import warnings
import unittest
+from itertools import product
class Test_Assertions(unittest.TestCase):
@@ -145,6 +146,14 @@ class TestLongMessage(unittest.TestCase):
self.testableTrue._formatMessage(one, '\uFFFD')
def assertMessages(self, methodName, args, errors):
+ """
+ Check that methodName(*args) raises the correct error messages.
+ errors should be a list of 4 regex that match the error when:
+ 1) longMessage = False and no msg passed;
+ 2) longMessage = False and msg passed;
+ 3) longMessage = True and no msg passed;
+ 4) longMessage = True and msg passed;
+ """
def getMethod(i):
useTestableFalse = i < 2
if useTestableFalse:
@@ -284,3 +293,67 @@ class TestLongMessage(unittest.TestCase):
["^unexpectedly identical: None$", "^oops$",
"^unexpectedly identical: None$",
"^unexpectedly identical: None : oops$"])
+
+
+ def assertMessagesCM(self, methodName, args, func, errors):
+ """
+ Check that the correct error messages are raised while executing:
+ with method(*args):
+ func()
+ *errors* should be a list of 4 regex that match the error when:
+ 1) longMessage = False and no msg passed;
+ 2) longMessage = False and msg passed;
+ 3) longMessage = True and no msg passed;
+ 4) longMessage = True and msg passed;
+ """
+ p = product((self.testableFalse, self.testableTrue),
+ ({}, {"msg": "oops"}))
+ for (cls, kwargs), err in zip(p, errors):
+ method = getattr(cls, methodName)
+ with self.assertRaisesRegex(cls.failureException, err):
+ with method(*args, **kwargs) as cm:
+ func()
+
+ def testAssertRaises(self):
+ self.assertMessagesCM('assertRaises', (TypeError,), lambda: None,
+ ['^TypeError not raised$', '^oops$',
+ '^TypeError not raised$',
+ '^TypeError not raised : oops$'])
+
+ def testAssertRaisesRegex(self):
+ # test error not raised
+ self.assertMessagesCM('assertRaisesRegex', (TypeError, 'unused regex'),
+ lambda: None,
+ ['^TypeError not raised$', '^oops$',
+ '^TypeError not raised$',
+ '^TypeError not raised : oops$'])
+ # test error raised but with wrong message
+ def raise_wrong_message():
+ raise TypeError('foo')
+ self.assertMessagesCM('assertRaisesRegex', (TypeError, 'regex'),
+ raise_wrong_message,
+ ['^"regex" does not match "foo"$', '^oops$',
+ '^"regex" does not match "foo"$',
+ '^"regex" does not match "foo" : oops$'])
+
+ def testAssertWarns(self):
+ self.assertMessagesCM('assertWarns', (UserWarning,), lambda: None,
+ ['^UserWarning not triggered$', '^oops$',
+ '^UserWarning not triggered$',
+ '^UserWarning not triggered : oops$'])
+
+ def testAssertWarnsRegex(self):
+ # test error not raised
+ self.assertMessagesCM('assertWarnsRegex', (UserWarning, 'unused regex'),
+ lambda: None,
+ ['^UserWarning not triggered$', '^oops$',
+ '^UserWarning not triggered$',
+ '^UserWarning not triggered : oops$'])
+ # test warning raised but with wrong message
+ def raise_wrong_message():
+ warnings.warn('foo')
+ self.assertMessagesCM('assertWarnsRegex', (UserWarning, 'regex'),
+ raise_wrong_message,
+ ['^"regex" does not match "foo"$', '^oops$',
+ '^"regex" does not match "foo"$',
+ '^"regex" does not match "foo" : oops$'])
diff --git a/Lib/unittest/test/test_case.py b/Lib/unittest/test/test_case.py
index c74a539..fdb2e78 100644
--- a/Lib/unittest/test/test_case.py
+++ b/Lib/unittest/test/test_case.py
@@ -4,6 +4,7 @@ import pickle
import re
import sys
import warnings
+import weakref
import inspect
from copy import deepcopy
@@ -386,27 +387,62 @@ class Test_TestCase(unittest.TestCase, TestEquality, TestHashing):
self.assertIsInstance(Foo().id(), str)
- # "If result is omitted or None, a temporary result object is created
- # and used, but is not made available to the caller. As TestCase owns the
+ # "If result is omitted or None, a temporary result object is created,
+ # used, and is made available to the caller. As TestCase owns the
# temporary result startTestRun and stopTestRun are called.
def test_run__uses_defaultTestResult(self):
events = []
+ defaultResult = LoggingResult(events)
class Foo(unittest.TestCase):
def test(self):
events.append('test')
def defaultTestResult(self):
- return LoggingResult(events)
+ return defaultResult
# Make run() find a result object on its own
- Foo('test').run()
+ result = Foo('test').run()
+ self.assertIs(result, defaultResult)
expected = ['startTestRun', 'startTest', 'test', 'addSuccess',
'stopTest', 'stopTestRun']
self.assertEqual(events, expected)
+
+ # "The result object is returned to run's caller"
+ def test_run__returns_given_result(self):
+
+ class Foo(unittest.TestCase):
+ def test(self):
+ pass
+
+ result = unittest.TestResult()
+
+ retval = Foo('test').run(result)
+ self.assertIs(retval, result)
+
+
+ # "The same effect [as method run] may be had by simply calling the
+ # TestCase instance."
+ def test_call__invoking_an_instance_delegates_to_run(self):
+ resultIn = unittest.TestResult()
+ resultOut = unittest.TestResult()
+
+ class Foo(unittest.TestCase):
+ def test(self):
+ pass
+
+ def run(self, result):
+ self.assertIs(result, resultIn)
+ return resultOut
+
+ retval = Foo('test')(resultIn)
+
+ self.assertIs(retval, resultOut)
+
+
def testShortDescriptionWithoutDocstring(self):
self.assertIsNone(self.shortDescription())
@@ -1140,7 +1176,6 @@ test case
(self.assert_, (True,)),
(self.failUnlessRaises, (TypeError, lambda _: 3.14 + 'spam')),
(self.failIf, (False,)),
- (self.assertSameElements, ([1, 1, 2, 3], [1, 2, 3])),
(self.assertDictContainsSubset, (dict(a=1, b=2), dict(a=1, b=2, c=3))),
(self.assertRaisesRegexp, (KeyError, 'foo', lambda: {}['foo'])),
(self.assertRegexpMatches, ('bar', 'bar')),
@@ -1149,18 +1184,20 @@ test case
with self.assertWarns(DeprecationWarning):
meth(*args)
- def testDeprecatedFailMethods(self):
- """Test that the deprecated fail* methods get removed in 3.3"""
+ # disable this test for now. When the version where the fail* methods will
+ # be removed is decided, re-enable it and update the version
+ def _testDeprecatedFailMethods(self):
+ """Test that the deprecated fail* methods get removed in 3.x"""
if sys.version_info[:2] < (3, 3):
return
deprecated_names = [
'failIfEqual', 'failUnlessEqual', 'failUnlessAlmostEqual',
'failIfAlmostEqual', 'failUnless', 'failUnlessRaises', 'failIf',
- 'assertSameElements', 'assertDictContainsSubset',
+ 'assertDictContainsSubset',
]
for deprecated_name in deprecated_names:
with self.assertRaises(AttributeError):
- getattr(self, deprecated_name) # remove these in 3.3
+ getattr(self, deprecated_name) # remove these in 3.x
def testDeepcopy(self):
# Issue: 5660
@@ -1268,3 +1305,11 @@ test case
klass('test_something').run(result)
self.assertEqual(len(result.errors), 1)
self.assertEqual(result.testsRun, 1)
+
+ @support.cpython_only
+ def testNoCycles(self):
+ case = unittest.TestCase()
+ wr = weakref.ref(case)
+ with support.disable_gc():
+ del case
+ self.assertFalse(wr())
diff --git a/Lib/unittest/test/test_loader.py b/Lib/unittest/test/test_loader.py
index f7e31a5..d1b9ef5 100644
--- a/Lib/unittest/test/test_loader.py
+++ b/Lib/unittest/test/test_loader.py
@@ -239,7 +239,7 @@ class Test_TestLoader(unittest.TestCase):
try:
loader.loadTestsFromName('sdasfasfasdf')
except ImportError as e:
- self.assertEqual(str(e), "No module named sdasfasfasdf")
+ self.assertEqual(str(e), "No module named 'sdasfasfasdf'")
else:
self.fail("TestLoader.loadTestsFromName failed to raise ImportError")
@@ -619,7 +619,7 @@ class Test_TestLoader(unittest.TestCase):
try:
loader.loadTestsFromNames(['sdasfasfasdf'])
except ImportError as e:
- self.assertEqual(str(e), "No module named sdasfasfasdf")
+ self.assertEqual(str(e), "No module named 'sdasfasfasdf'")
else:
self.fail("TestLoader.loadTestsFromNames failed to raise ImportError")
diff --git a/Lib/unittest/test/test_program.py b/Lib/unittest/test/test_program.py
index d5d0f5a..9794868 100644
--- a/Lib/unittest/test/test_program.py
+++ b/Lib/unittest/test/test_program.py
@@ -131,23 +131,6 @@ class TestCommandLineArgs(unittest.TestCase):
FakeRunner.test = None
FakeRunner.raiseError = False
- def testHelpAndUnknown(self):
- program = self.program
- def usageExit(msg=None):
- program.msg = msg
- program.exit = True
- program.usageExit = usageExit
-
- for opt in '-h', '-H', '--help':
- program.exit = False
- program.parseArgs([None, opt])
- self.assertTrue(program.exit)
- self.assertIsNone(program.msg)
-
- program.parseArgs([None, '-$'])
- self.assertTrue(program.exit)
- self.assertIsNotNone(program.msg)
-
def testVerbosity(self):
program = self.program
diff --git a/Lib/unittest/test/testmock/__init__.py b/Lib/unittest/test/testmock/__init__.py
new file mode 100644
index 0000000..87d7ae9
--- /dev/null
+++ b/Lib/unittest/test/testmock/__init__.py
@@ -0,0 +1,17 @@
+import os
+import sys
+import unittest
+
+
+here = os.path.dirname(__file__)
+loader = unittest.defaultTestLoader
+
+def load_tests(*args):
+ suite = unittest.TestSuite()
+ for fn in os.listdir(here):
+ if fn.startswith("test") and fn.endswith(".py"):
+ modname = "unittest.test.testmock." + fn[:-3]
+ __import__(modname)
+ module = sys.modules[modname]
+ suite.addTest(loader.loadTestsFromModule(module))
+ return suite
diff --git a/Lib/unittest/test/testmock/support.py b/Lib/unittest/test/testmock/support.py
new file mode 100644
index 0000000..f473879
--- /dev/null
+++ b/Lib/unittest/test/testmock/support.py
@@ -0,0 +1,23 @@
+from warnings import catch_warnings
+
+def is_instance(obj, klass):
+ """Version of is_instance that doesn't access __class__"""
+ return issubclass(type(obj), klass)
+
+
+class SomeClass(object):
+ class_attribute = None
+
+ def wibble(self):
+ pass
+
+
+class X(object):
+ pass
+
+
+def examine_warnings(func):
+ def wrapper():
+ with catch_warnings(record=True) as ws:
+ func(ws)
+ return wrapper
diff --git a/Lib/unittest/test/testmock/testcallable.py b/Lib/unittest/test/testmock/testcallable.py
new file mode 100644
index 0000000..7b2dd00
--- /dev/null
+++ b/Lib/unittest/test/testmock/testcallable.py
@@ -0,0 +1,147 @@
+# Copyright (C) 2007-2012 Michael Foord & the mock team
+# E-mail: fuzzyman AT voidspace DOT org DOT uk
+# http://www.voidspace.org.uk/python/mock/
+
+import unittest
+from unittest.test.testmock.support import is_instance, X, SomeClass
+
+from unittest.mock import (
+ Mock, MagicMock, NonCallableMagicMock,
+ NonCallableMock, patch, create_autospec,
+ CallableMixin
+)
+
+
+
+class TestCallable(unittest.TestCase):
+
+ def assertNotCallable(self, mock):
+ self.assertTrue(is_instance(mock, NonCallableMagicMock))
+ self.assertFalse(is_instance(mock, CallableMixin))
+
+
+ def test_non_callable(self):
+ for mock in NonCallableMagicMock(), NonCallableMock():
+ self.assertRaises(TypeError, mock)
+ self.assertFalse(hasattr(mock, '__call__'))
+ self.assertIn(mock.__class__.__name__, repr(mock))
+
+
+ def test_hierarchy(self):
+ self.assertTrue(issubclass(MagicMock, Mock))
+ self.assertTrue(issubclass(NonCallableMagicMock, NonCallableMock))
+
+
+ def test_attributes(self):
+ one = NonCallableMock()
+ self.assertTrue(issubclass(type(one.one), Mock))
+
+ two = NonCallableMagicMock()
+ self.assertTrue(issubclass(type(two.two), MagicMock))
+
+
+ def test_subclasses(self):
+ class MockSub(Mock):
+ pass
+
+ one = MockSub()
+ self.assertTrue(issubclass(type(one.one), MockSub))
+
+ class MagicSub(MagicMock):
+ pass
+
+ two = MagicSub()
+ self.assertTrue(issubclass(type(two.two), MagicSub))
+
+
+ def test_patch_spec(self):
+ patcher = patch('%s.X' % __name__, spec=True)
+ mock = patcher.start()
+ self.addCleanup(patcher.stop)
+
+ instance = mock()
+ mock.assert_called_once_with()
+
+ self.assertNotCallable(instance)
+ self.assertRaises(TypeError, instance)
+
+
+ def test_patch_spec_set(self):
+ patcher = patch('%s.X' % __name__, spec_set=True)
+ mock = patcher.start()
+ self.addCleanup(patcher.stop)
+
+ instance = mock()
+ mock.assert_called_once_with()
+
+ self.assertNotCallable(instance)
+ self.assertRaises(TypeError, instance)
+
+
+ def test_patch_spec_instance(self):
+ patcher = patch('%s.X' % __name__, spec=X())
+ mock = patcher.start()
+ self.addCleanup(patcher.stop)
+
+ self.assertNotCallable(mock)
+ self.assertRaises(TypeError, mock)
+
+
+ def test_patch_spec_set_instance(self):
+ patcher = patch('%s.X' % __name__, spec_set=X())
+ mock = patcher.start()
+ self.addCleanup(patcher.stop)
+
+ self.assertNotCallable(mock)
+ self.assertRaises(TypeError, mock)
+
+
+ def test_patch_spec_callable_class(self):
+ class CallableX(X):
+ def __call__(self):
+ pass
+
+ class Sub(CallableX):
+ pass
+
+ class Multi(SomeClass, Sub):
+ pass
+
+ for arg in 'spec', 'spec_set':
+ for Klass in CallableX, Sub, Multi:
+ with patch('%s.X' % __name__, **{arg: Klass}) as mock:
+ instance = mock()
+ mock.assert_called_once_with()
+
+ self.assertTrue(is_instance(instance, MagicMock))
+ # inherited spec
+ self.assertRaises(AttributeError, getattr, instance,
+ 'foobarbaz')
+
+ result = instance()
+ # instance is callable, result has no spec
+ instance.assert_called_once_with()
+
+ result(3, 2, 1)
+ result.assert_called_once_with(3, 2, 1)
+ result.foo(3, 2, 1)
+ result.foo.assert_called_once_with(3, 2, 1)
+
+
+ def test_create_autospec(self):
+ mock = create_autospec(X)
+ instance = mock()
+ self.assertRaises(TypeError, instance)
+
+ mock = create_autospec(X())
+ self.assertRaises(TypeError, mock)
+
+
+ def test_create_autospec_instance(self):
+ mock = create_autospec(SomeClass, instance=True)
+
+ self.assertRaises(TypeError, mock)
+ mock.wibble()
+ mock.wibble.assert_called_once_with()
+
+ self.assertRaises(TypeError, mock.wibble, 'some', 'args')
diff --git a/Lib/unittest/test/testmock/testhelpers.py b/Lib/unittest/test/testmock/testhelpers.py
new file mode 100644
index 0000000..3674778
--- /dev/null
+++ b/Lib/unittest/test/testmock/testhelpers.py
@@ -0,0 +1,835 @@
+import unittest
+
+from unittest.mock import (
+ call, _Call, create_autospec, MagicMock,
+ Mock, ANY, _CallList, patch, PropertyMock
+)
+
+from datetime import datetime
+
+class SomeClass(object):
+ def one(self, a, b):
+ pass
+ def two(self):
+ pass
+ def three(self, a=None):
+ pass
+
+
+
+class AnyTest(unittest.TestCase):
+
+ def test_any(self):
+ self.assertEqual(ANY, object())
+
+ mock = Mock()
+ mock(ANY)
+ mock.assert_called_with(ANY)
+
+ mock = Mock()
+ mock(foo=ANY)
+ mock.assert_called_with(foo=ANY)
+
+ def test_repr(self):
+ self.assertEqual(repr(ANY), '<ANY>')
+ self.assertEqual(str(ANY), '<ANY>')
+
+
+ def test_any_and_datetime(self):
+ mock = Mock()
+ mock(datetime.now(), foo=datetime.now())
+
+ mock.assert_called_with(ANY, foo=ANY)
+
+
+ def test_any_mock_calls_comparison_order(self):
+ mock = Mock()
+ d = datetime.now()
+ class Foo(object):
+ def __eq__(self, other):
+ return False
+ def __ne__(self, other):
+ return True
+
+ for d in datetime.now(), Foo():
+ mock.reset_mock()
+
+ mock(d, foo=d, bar=d)
+ mock.method(d, zinga=d, alpha=d)
+ mock().method(a1=d, z99=d)
+
+ expected = [
+ call(ANY, foo=ANY, bar=ANY),
+ call.method(ANY, zinga=ANY, alpha=ANY),
+ call(), call().method(a1=ANY, z99=ANY)
+ ]
+ self.assertEqual(expected, mock.mock_calls)
+ self.assertEqual(mock.mock_calls, expected)
+
+
+
+class CallTest(unittest.TestCase):
+
+ def test_call_with_call(self):
+ kall = _Call()
+ self.assertEqual(kall, _Call())
+ self.assertEqual(kall, _Call(('',)))
+ self.assertEqual(kall, _Call(((),)))
+ self.assertEqual(kall, _Call(({},)))
+ self.assertEqual(kall, _Call(('', ())))
+ self.assertEqual(kall, _Call(('', {})))
+ self.assertEqual(kall, _Call(('', (), {})))
+ self.assertEqual(kall, _Call(('foo',)))
+ self.assertEqual(kall, _Call(('bar', ())))
+ self.assertEqual(kall, _Call(('baz', {})))
+ self.assertEqual(kall, _Call(('spam', (), {})))
+
+ kall = _Call(((1, 2, 3),))
+ self.assertEqual(kall, _Call(((1, 2, 3),)))
+ self.assertEqual(kall, _Call(('', (1, 2, 3))))
+ self.assertEqual(kall, _Call(((1, 2, 3), {})))
+ self.assertEqual(kall, _Call(('', (1, 2, 3), {})))
+
+ kall = _Call(((1, 2, 4),))
+ self.assertNotEqual(kall, _Call(('', (1, 2, 3))))
+ self.assertNotEqual(kall, _Call(('', (1, 2, 3), {})))
+
+ kall = _Call(('foo', (1, 2, 4),))
+ self.assertNotEqual(kall, _Call(('', (1, 2, 4))))
+ self.assertNotEqual(kall, _Call(('', (1, 2, 4), {})))
+ self.assertNotEqual(kall, _Call(('bar', (1, 2, 4))))
+ self.assertNotEqual(kall, _Call(('bar', (1, 2, 4), {})))
+
+ kall = _Call(({'a': 3},))
+ self.assertEqual(kall, _Call(('', (), {'a': 3})))
+ self.assertEqual(kall, _Call(('', {'a': 3})))
+ self.assertEqual(kall, _Call(((), {'a': 3})))
+ self.assertEqual(kall, _Call(({'a': 3},)))
+
+
+ def test_empty__Call(self):
+ args = _Call()
+
+ self.assertEqual(args, ())
+ self.assertEqual(args, ('foo',))
+ self.assertEqual(args, ((),))
+ self.assertEqual(args, ('foo', ()))
+ self.assertEqual(args, ('foo', (), {}))
+ self.assertEqual(args, ('foo', {}))
+ self.assertEqual(args, ({},))
+
+
+ def test_named_empty_call(self):
+ args = _Call(('foo', (), {}))
+
+ self.assertEqual(args, ('foo',))
+ self.assertEqual(args, ('foo', ()))
+ self.assertEqual(args, ('foo', (), {}))
+ self.assertEqual(args, ('foo', {}))
+
+ self.assertNotEqual(args, ((),))
+ self.assertNotEqual(args, ())
+ self.assertNotEqual(args, ({},))
+ self.assertNotEqual(args, ('bar',))
+ self.assertNotEqual(args, ('bar', ()))
+ self.assertNotEqual(args, ('bar', {}))
+
+
+ def test_call_with_args(self):
+ args = _Call(((1, 2, 3), {}))
+
+ self.assertEqual(args, ((1, 2, 3),))
+ self.assertEqual(args, ('foo', (1, 2, 3)))
+ self.assertEqual(args, ('foo', (1, 2, 3), {}))
+ self.assertEqual(args, ((1, 2, 3), {}))
+
+
+ def test_named_call_with_args(self):
+ args = _Call(('foo', (1, 2, 3), {}))
+
+ self.assertEqual(args, ('foo', (1, 2, 3)))
+ self.assertEqual(args, ('foo', (1, 2, 3), {}))
+
+ self.assertNotEqual(args, ((1, 2, 3),))
+ self.assertNotEqual(args, ((1, 2, 3), {}))
+
+
+ def test_call_with_kwargs(self):
+ args = _Call(((), dict(a=3, b=4)))
+
+ self.assertEqual(args, (dict(a=3, b=4),))
+ self.assertEqual(args, ('foo', dict(a=3, b=4)))
+ self.assertEqual(args, ('foo', (), dict(a=3, b=4)))
+ self.assertEqual(args, ((), dict(a=3, b=4)))
+
+
+ def test_named_call_with_kwargs(self):
+ args = _Call(('foo', (), dict(a=3, b=4)))
+
+ self.assertEqual(args, ('foo', dict(a=3, b=4)))
+ self.assertEqual(args, ('foo', (), dict(a=3, b=4)))
+
+ self.assertNotEqual(args, (dict(a=3, b=4),))
+ self.assertNotEqual(args, ((), dict(a=3, b=4)))
+
+
+ def test_call_with_args_call_empty_name(self):
+ args = _Call(((1, 2, 3), {}))
+ self.assertEqual(args, call(1, 2, 3))
+ self.assertEqual(call(1, 2, 3), args)
+ self.assertTrue(call(1, 2, 3) in [args])
+
+
+ def test_call_ne(self):
+ self.assertNotEqual(_Call(((1, 2, 3),)), call(1, 2))
+ self.assertFalse(_Call(((1, 2, 3),)) != call(1, 2, 3))
+ self.assertTrue(_Call(((1, 2), {})) != call(1, 2, 3))
+
+
+ def test_call_non_tuples(self):
+ kall = _Call(((1, 2, 3),))
+ for value in 1, None, self, int:
+ self.assertNotEqual(kall, value)
+ self.assertFalse(kall == value)
+
+
+ def test_repr(self):
+ self.assertEqual(repr(_Call()), 'call()')
+ self.assertEqual(repr(_Call(('foo',))), 'call.foo()')
+
+ self.assertEqual(repr(_Call(((1, 2, 3), {'a': 'b'}))),
+ "call(1, 2, 3, a='b')")
+ self.assertEqual(repr(_Call(('bar', (1, 2, 3), {'a': 'b'}))),
+ "call.bar(1, 2, 3, a='b')")
+
+ self.assertEqual(repr(call), 'call')
+ self.assertEqual(str(call), 'call')
+
+ self.assertEqual(repr(call()), 'call()')
+ self.assertEqual(repr(call(1)), 'call(1)')
+ self.assertEqual(repr(call(zz='thing')), "call(zz='thing')")
+
+ self.assertEqual(repr(call().foo), 'call().foo')
+ self.assertEqual(repr(call(1).foo.bar(a=3).bing),
+ 'call().foo.bar().bing')
+ self.assertEqual(
+ repr(call().foo(1, 2, a=3)),
+ "call().foo(1, 2, a=3)"
+ )
+ self.assertEqual(repr(call()()), "call()()")
+ self.assertEqual(repr(call(1)(2)), "call()(2)")
+ self.assertEqual(
+ repr(call()().bar().baz.beep(1)),
+ "call()().bar().baz.beep(1)"
+ )
+
+
+ def test_call(self):
+ self.assertEqual(call(), ('', (), {}))
+ self.assertEqual(call('foo', 'bar', one=3, two=4),
+ ('', ('foo', 'bar'), {'one': 3, 'two': 4}))
+
+ mock = Mock()
+ mock(1, 2, 3)
+ mock(a=3, b=6)
+ self.assertEqual(mock.call_args_list,
+ [call(1, 2, 3), call(a=3, b=6)])
+
+ def test_attribute_call(self):
+ self.assertEqual(call.foo(1), ('foo', (1,), {}))
+ self.assertEqual(call.bar.baz(fish='eggs'),
+ ('bar.baz', (), {'fish': 'eggs'}))
+
+ mock = Mock()
+ mock.foo(1, 2, 3)
+ mock.bar.baz(a=3, b=6)
+ self.assertEqual(mock.method_calls,
+ [call.foo(1, 2, 3), call.bar.baz(a=3, b=6)])
+
+
+ def test_extended_call(self):
+ result = call(1).foo(2).bar(3, a=4)
+ self.assertEqual(result, ('().foo().bar', (3,), dict(a=4)))
+
+ mock = MagicMock()
+ mock(1, 2, a=3, b=4)
+ self.assertEqual(mock.call_args, call(1, 2, a=3, b=4))
+ self.assertNotEqual(mock.call_args, call(1, 2, 3))
+
+ self.assertEqual(mock.call_args_list, [call(1, 2, a=3, b=4)])
+ self.assertEqual(mock.mock_calls, [call(1, 2, a=3, b=4)])
+
+ mock = MagicMock()
+ mock.foo(1).bar()().baz.beep(a=6)
+
+ last_call = call.foo(1).bar()().baz.beep(a=6)
+ self.assertEqual(mock.mock_calls[-1], last_call)
+ self.assertEqual(mock.mock_calls, last_call.call_list())
+
+
+ def test_call_list(self):
+ mock = MagicMock()
+ mock(1)
+ self.assertEqual(call(1).call_list(), mock.mock_calls)
+
+ mock = MagicMock()
+ mock(1).method(2)
+ self.assertEqual(call(1).method(2).call_list(),
+ mock.mock_calls)
+
+ mock = MagicMock()
+ mock(1).method(2)(3)
+ self.assertEqual(call(1).method(2)(3).call_list(),
+ mock.mock_calls)
+
+ mock = MagicMock()
+ int(mock(1).method(2)(3).foo.bar.baz(4)(5))
+ kall = call(1).method(2)(3).foo.bar.baz(4)(5).__int__()
+ self.assertEqual(kall.call_list(), mock.mock_calls)
+
+
+ def test_call_any(self):
+ self.assertEqual(call, ANY)
+
+ m = MagicMock()
+ int(m)
+ self.assertEqual(m.mock_calls, [ANY])
+ self.assertEqual([ANY], m.mock_calls)
+
+
+ def test_two_args_call(self):
+ args = _Call(((1, 2), {'a': 3}), two=True)
+ self.assertEqual(len(args), 2)
+ self.assertEqual(args[0], (1, 2))
+ self.assertEqual(args[1], {'a': 3})
+
+ other_args = _Call(((1, 2), {'a': 3}))
+ self.assertEqual(args, other_args)
+
+
+class SpecSignatureTest(unittest.TestCase):
+
+ def _check_someclass_mock(self, mock):
+ self.assertRaises(AttributeError, getattr, mock, 'foo')
+ mock.one(1, 2)
+ mock.one.assert_called_with(1, 2)
+ self.assertRaises(AssertionError,
+ mock.one.assert_called_with, 3, 4)
+ self.assertRaises(TypeError, mock.one, 1)
+
+ mock.two()
+ mock.two.assert_called_with()
+ self.assertRaises(AssertionError,
+ mock.two.assert_called_with, 3)
+ self.assertRaises(TypeError, mock.two, 1)
+
+ mock.three()
+ mock.three.assert_called_with()
+ self.assertRaises(AssertionError,
+ mock.three.assert_called_with, 3)
+ self.assertRaises(TypeError, mock.three, 3, 2)
+
+ mock.three(1)
+ mock.three.assert_called_with(1)
+
+ mock.three(a=1)
+ mock.three.assert_called_with(a=1)
+
+
+ def test_basic(self):
+ for spec in (SomeClass, SomeClass()):
+ mock = create_autospec(spec)
+ self._check_someclass_mock(mock)
+
+
+ def test_create_autospec_return_value(self):
+ def f():
+ pass
+ mock = create_autospec(f, return_value='foo')
+ self.assertEqual(mock(), 'foo')
+
+ class Foo(object):
+ pass
+
+ mock = create_autospec(Foo, return_value='foo')
+ self.assertEqual(mock(), 'foo')
+
+
+ def test_mocking_unbound_methods(self):
+ class Foo(object):
+ def foo(self, foo):
+ pass
+ p = patch.object(Foo, 'foo')
+ mock_foo = p.start()
+ Foo().foo(1)
+
+ mock_foo.assert_called_with(1)
+
+
+ def test_create_autospec_unbound_methods(self):
+ # see issue 128
+ # this is expected to fail until the issue is fixed
+ return
+ class Foo(object):
+ def foo(self):
+ pass
+
+ klass = create_autospec(Foo)
+ instance = klass()
+ self.assertRaises(TypeError, instance.foo, 1)
+
+ # Note: no type checking on the "self" parameter
+ klass.foo(1)
+ klass.foo.assert_called_with(1)
+ self.assertRaises(TypeError, klass.foo)
+
+
+ def test_create_autospec_keyword_arguments(self):
+ class Foo(object):
+ a = 3
+ m = create_autospec(Foo, a='3')
+ self.assertEqual(m.a, '3')
+
+
+ def test_function_as_instance_attribute(self):
+ obj = SomeClass()
+ def f(a):
+ pass
+ obj.f = f
+
+ mock = create_autospec(obj)
+ mock.f('bing')
+ mock.f.assert_called_with('bing')
+
+
+ def test_spec_as_list(self):
+ # because spec as a list of strings in the mock constructor means
+ # something very different, we treat a list instance as the type.
+ mock = create_autospec([])
+ mock.append('foo')
+ mock.append.assert_called_with('foo')
+
+ self.assertRaises(AttributeError, getattr, mock, 'foo')
+
+ class Foo(object):
+ foo = []
+
+ mock = create_autospec(Foo)
+ mock.foo.append(3)
+ mock.foo.append.assert_called_with(3)
+ self.assertRaises(AttributeError, getattr, mock.foo, 'foo')
+
+
+ def test_attributes(self):
+ class Sub(SomeClass):
+ attr = SomeClass()
+
+ sub_mock = create_autospec(Sub)
+
+ for mock in (sub_mock, sub_mock.attr):
+ self._check_someclass_mock(mock)
+
+
+ def test_builtin_functions_types(self):
+ # we could replace builtin functions / methods with a function
+ # with *args / **kwargs signature. Using the builtin method type
+ # as a spec seems to work fairly well though.
+ class BuiltinSubclass(list):
+ def bar(self, arg):
+ pass
+ sorted = sorted
+ attr = {}
+
+ mock = create_autospec(BuiltinSubclass)
+ mock.append(3)
+ mock.append.assert_called_with(3)
+ self.assertRaises(AttributeError, getattr, mock.append, 'foo')
+
+ mock.bar('foo')
+ mock.bar.assert_called_with('foo')
+ self.assertRaises(TypeError, mock.bar, 'foo', 'bar')
+ self.assertRaises(AttributeError, getattr, mock.bar, 'foo')
+
+ mock.sorted([1, 2])
+ mock.sorted.assert_called_with([1, 2])
+ self.assertRaises(AttributeError, getattr, mock.sorted, 'foo')
+
+ mock.attr.pop(3)
+ mock.attr.pop.assert_called_with(3)
+ self.assertRaises(AttributeError, getattr, mock.attr, 'foo')
+
+
+ def test_method_calls(self):
+ class Sub(SomeClass):
+ attr = SomeClass()
+
+ mock = create_autospec(Sub)
+ mock.one(1, 2)
+ mock.two()
+ mock.three(3)
+
+ expected = [call.one(1, 2), call.two(), call.three(3)]
+ self.assertEqual(mock.method_calls, expected)
+
+ mock.attr.one(1, 2)
+ mock.attr.two()
+ mock.attr.three(3)
+
+ expected.extend(
+ [call.attr.one(1, 2), call.attr.two(), call.attr.three(3)]
+ )
+ self.assertEqual(mock.method_calls, expected)
+
+
+ def test_magic_methods(self):
+ class BuiltinSubclass(list):
+ attr = {}
+
+ mock = create_autospec(BuiltinSubclass)
+ self.assertEqual(list(mock), [])
+ self.assertRaises(TypeError, int, mock)
+ self.assertRaises(TypeError, int, mock.attr)
+ self.assertEqual(list(mock), [])
+
+ self.assertIsInstance(mock['foo'], MagicMock)
+ self.assertIsInstance(mock.attr['foo'], MagicMock)
+
+
+ def test_spec_set(self):
+ class Sub(SomeClass):
+ attr = SomeClass()
+
+ for spec in (Sub, Sub()):
+ mock = create_autospec(spec, spec_set=True)
+ self._check_someclass_mock(mock)
+
+ self.assertRaises(AttributeError, setattr, mock, 'foo', 'bar')
+ self.assertRaises(AttributeError, setattr, mock.attr, 'foo', 'bar')
+
+
+ def test_descriptors(self):
+ class Foo(object):
+ @classmethod
+ def f(cls, a, b):
+ pass
+ @staticmethod
+ def g(a, b):
+ pass
+
+ class Bar(Foo):
+ pass
+
+ class Baz(SomeClass, Bar):
+ pass
+
+ for spec in (Foo, Foo(), Bar, Bar(), Baz, Baz()):
+ mock = create_autospec(spec)
+ mock.f(1, 2)
+ mock.f.assert_called_once_with(1, 2)
+
+ mock.g(3, 4)
+ mock.g.assert_called_once_with(3, 4)
+
+
+ def test_recursive(self):
+ class A(object):
+ def a(self):
+ pass
+ foo = 'foo bar baz'
+ bar = foo
+
+ A.B = A
+ mock = create_autospec(A)
+
+ mock()
+ self.assertFalse(mock.B.called)
+
+ mock.a()
+ mock.B.a()
+ self.assertEqual(mock.method_calls, [call.a(), call.B.a()])
+
+ self.assertIs(A.foo, A.bar)
+ self.assertIsNot(mock.foo, mock.bar)
+ mock.foo.lower()
+ self.assertRaises(AssertionError, mock.bar.lower.assert_called_with)
+
+
+ def test_spec_inheritance_for_classes(self):
+ class Foo(object):
+ def a(self):
+ pass
+ class Bar(object):
+ def f(self):
+ pass
+
+ class_mock = create_autospec(Foo)
+
+ self.assertIsNot(class_mock, class_mock())
+
+ for this_mock in class_mock, class_mock():
+ this_mock.a()
+ this_mock.a.assert_called_with()
+ self.assertRaises(TypeError, this_mock.a, 'foo')
+ self.assertRaises(AttributeError, getattr, this_mock, 'b')
+
+ instance_mock = create_autospec(Foo())
+ instance_mock.a()
+ instance_mock.a.assert_called_with()
+ self.assertRaises(TypeError, instance_mock.a, 'foo')
+ self.assertRaises(AttributeError, getattr, instance_mock, 'b')
+
+ # The return value isn't callable
+ self.assertRaises(TypeError, instance_mock)
+
+ instance_mock.Bar.f()
+ instance_mock.Bar.f.assert_called_with()
+ self.assertRaises(AttributeError, getattr, instance_mock.Bar, 'g')
+
+ instance_mock.Bar().f()
+ instance_mock.Bar().f.assert_called_with()
+ self.assertRaises(AttributeError, getattr, instance_mock.Bar(), 'g')
+
+
+ def test_inherit(self):
+ class Foo(object):
+ a = 3
+
+ Foo.Foo = Foo
+
+ # class
+ mock = create_autospec(Foo)
+ instance = mock()
+ self.assertRaises(AttributeError, getattr, instance, 'b')
+
+ attr_instance = mock.Foo()
+ self.assertRaises(AttributeError, getattr, attr_instance, 'b')
+
+ # instance
+ mock = create_autospec(Foo())
+ self.assertRaises(AttributeError, getattr, mock, 'b')
+ self.assertRaises(TypeError, mock)
+
+ # attribute instance
+ call_result = mock.Foo()
+ self.assertRaises(AttributeError, getattr, call_result, 'b')
+
+
+ def test_builtins(self):
+ # used to fail with infinite recursion
+ create_autospec(1)
+
+ create_autospec(int)
+ create_autospec('foo')
+ create_autospec(str)
+ create_autospec({})
+ create_autospec(dict)
+ create_autospec([])
+ create_autospec(list)
+ create_autospec(set())
+ create_autospec(set)
+ create_autospec(1.0)
+ create_autospec(float)
+ create_autospec(1j)
+ create_autospec(complex)
+ create_autospec(False)
+ create_autospec(True)
+
+
+ def test_function(self):
+ def f(a, b):
+ pass
+
+ mock = create_autospec(f)
+ self.assertRaises(TypeError, mock)
+ mock(1, 2)
+ mock.assert_called_with(1, 2)
+
+ f.f = f
+ mock = create_autospec(f)
+ self.assertRaises(TypeError, mock.f)
+ mock.f(3, 4)
+ mock.f.assert_called_with(3, 4)
+
+
+ def test_signature_class(self):
+ class Foo(object):
+ def __init__(self, a, b=3):
+ pass
+
+ mock = create_autospec(Foo)
+
+ self.assertRaises(TypeError, mock)
+ mock(1)
+ mock.assert_called_once_with(1)
+
+ mock(4, 5)
+ mock.assert_called_with(4, 5)
+
+
+ def test_class_with_no_init(self):
+ # this used to raise an exception
+ # due to trying to get a signature from object.__init__
+ class Foo(object):
+ pass
+ create_autospec(Foo)
+
+
+ def test_signature_callable(self):
+ class Callable(object):
+ def __init__(self):
+ pass
+ def __call__(self, a):
+ pass
+
+ mock = create_autospec(Callable)
+ mock()
+ mock.assert_called_once_with()
+ self.assertRaises(TypeError, mock, 'a')
+
+ instance = mock()
+ self.assertRaises(TypeError, instance)
+ instance(a='a')
+ instance.assert_called_once_with(a='a')
+ instance('a')
+ instance.assert_called_with('a')
+
+ mock = create_autospec(Callable())
+ mock(a='a')
+ mock.assert_called_once_with(a='a')
+ self.assertRaises(TypeError, mock)
+ mock('a')
+ mock.assert_called_with('a')
+
+
+ def test_signature_noncallable(self):
+ class NonCallable(object):
+ def __init__(self):
+ pass
+
+ mock = create_autospec(NonCallable)
+ instance = mock()
+ mock.assert_called_once_with()
+ self.assertRaises(TypeError, mock, 'a')
+ self.assertRaises(TypeError, instance)
+ self.assertRaises(TypeError, instance, 'a')
+
+ mock = create_autospec(NonCallable())
+ self.assertRaises(TypeError, mock)
+ self.assertRaises(TypeError, mock, 'a')
+
+
+ def test_create_autospec_none(self):
+ class Foo(object):
+ bar = None
+
+ mock = create_autospec(Foo)
+ none = mock.bar
+ self.assertNotIsInstance(none, type(None))
+
+ none.foo()
+ none.foo.assert_called_once_with()
+
+
+ def test_autospec_functions_with_self_in_odd_place(self):
+ class Foo(object):
+ def f(a, self):
+ pass
+
+ a = create_autospec(Foo)
+ a.f(self=10)
+ a.f.assert_called_with(self=10)
+
+
+ def test_autospec_property(self):
+ class Foo(object):
+ @property
+ def foo(self):
+ return 3
+
+ foo = create_autospec(Foo)
+ mock_property = foo.foo
+
+ # no spec on properties
+ self.assertTrue(isinstance(mock_property, MagicMock))
+ mock_property(1, 2, 3)
+ mock_property.abc(4, 5, 6)
+ mock_property.assert_called_once_with(1, 2, 3)
+ mock_property.abc.assert_called_once_with(4, 5, 6)
+
+
+ def test_autospec_slots(self):
+ class Foo(object):
+ __slots__ = ['a']
+
+ foo = create_autospec(Foo)
+ mock_slot = foo.a
+
+ # no spec on slots
+ mock_slot(1, 2, 3)
+ mock_slot.abc(4, 5, 6)
+ mock_slot.assert_called_once_with(1, 2, 3)
+ mock_slot.abc.assert_called_once_with(4, 5, 6)
+
+
+class TestCallList(unittest.TestCase):
+
+ def test_args_list_contains_call_list(self):
+ mock = Mock()
+ self.assertIsInstance(mock.call_args_list, _CallList)
+
+ mock(1, 2)
+ mock(a=3)
+ mock(3, 4)
+ mock(b=6)
+
+ for kall in call(1, 2), call(a=3), call(3, 4), call(b=6):
+ self.assertTrue(kall in mock.call_args_list)
+
+ calls = [call(a=3), call(3, 4)]
+ self.assertTrue(calls in mock.call_args_list)
+ calls = [call(1, 2), call(a=3)]
+ self.assertTrue(calls in mock.call_args_list)
+ calls = [call(3, 4), call(b=6)]
+ self.assertTrue(calls in mock.call_args_list)
+ calls = [call(3, 4)]
+ self.assertTrue(calls in mock.call_args_list)
+
+ self.assertFalse(call('fish') in mock.call_args_list)
+ self.assertFalse([call('fish')] in mock.call_args_list)
+
+
+ def test_call_list_str(self):
+ mock = Mock()
+ mock(1, 2)
+ mock.foo(a=3)
+ mock.foo.bar().baz('fish', cat='dog')
+
+ expected = (
+ "[call(1, 2),\n"
+ " call.foo(a=3),\n"
+ " call.foo.bar(),\n"
+ " call.foo.bar().baz('fish', cat='dog')]"
+ )
+ self.assertEqual(str(mock.mock_calls), expected)
+
+
+ def test_propertymock(self):
+ p = patch('%s.SomeClass.one' % __name__, new_callable=PropertyMock)
+ mock = p.start()
+ try:
+ SomeClass.one
+ mock.assert_called_once_with()
+
+ s = SomeClass()
+ s.one
+ mock.assert_called_with()
+ self.assertEqual(mock.mock_calls, [call(), call()])
+
+ s.one = 3
+ self.assertEqual(mock.mock_calls, [call(), call(), call(3)])
+ finally:
+ p.stop()
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/Lib/unittest/test/testmock/testmagicmethods.py b/Lib/unittest/test/testmock/testmagicmethods.py
new file mode 100644
index 0000000..65d2711
--- /dev/null
+++ b/Lib/unittest/test/testmock/testmagicmethods.py