From 477c8d5e70240744d24631b18341ad892c8a8e1c Mon Sep 17 00:00:00 2001 From: Thomas Wouters Date: Sat, 27 May 2006 19:21:47 +0000 Subject: Much-needed merge (using svnmerge.py this time) of trunk changes into p3yk. Inherits test_gzip/test_tarfile failures on 64-bit platforms from the trunk, but I don't want the merge to hang around too long (even though the regular p3yk-contributors are/have been busy with other things.) Merged revisions 45621-46490 via svnmerge from svn+ssh://pythondev@svn.python.org/python/trunk ........ r45621 | george.yoshida | 2006-04-21 18:34:17 +0200 (Fri, 21 Apr 2006) | 2 lines Correct the grammar ........ r45622 | tim.peters | 2006-04-21 18:34:54 +0200 (Fri, 21 Apr 2006) | 2 lines Whitespace normalization. ........ r45624 | thomas.heller | 2006-04-21 18:48:56 +0200 (Fri, 21 Apr 2006) | 1 line Merge in changes from ctypes 0.9.9.6 upstream version. ........ r45625 | thomas.heller | 2006-04-21 18:51:04 +0200 (Fri, 21 Apr 2006) | 1 line Merge in changes from ctypes 0.9.9.6 upstream version. ........ r45630 | thomas.heller | 2006-04-21 20:29:17 +0200 (Fri, 21 Apr 2006) | 8 lines Documentation for ctypes. I think that 'generic operating system services' is the best category. Note that the Doc/lib/libctypes.latex file is generated from reST sources. You are welcome to make typo fixes, and I'll try to keep the reST sources in sync, but markup changes would be lost - they should be fixed in the tool that creates the latex file. The conversion script is external/ctypes/docs/manual/mkpydoc.py. ........ r45631 | tim.peters | 2006-04-21 23:18:10 +0200 (Fri, 21 Apr 2006) | 24 lines SF bug #1473760 TempFile can hang on Windows. Python 2.4 changed ntpath.abspath to do an import inside the function. As a result, due to Python's import lock, anything calling abspath on Windows (directly, or indirectly like tempfile.TemporaryFile) hung when it was called from a thread spawned as a side effect of importing a module. This is a depressingly frequent problem, and deserves a more general fix. I'm settling for a micro-fix here because this specific one accounts for a report of Zope Corp's ZEO hanging on Windows, and it was an odd way to change abspath to begin with (ntpath needs a different implementation depending on whether we're actually running on Windows, and the _obvious_ way to arrange for that is not to bury a possibly-failing import _inside_ the function). Note that if/when other micro-fixes of this kind get made, the new Lib/test/threaded_import_hangers.py is a convenient place to add tests for them. ........ r45634 | phillip.eby | 2006-04-21 23:53:37 +0200 (Fri, 21 Apr 2006) | 2 lines Guido wrote contextlib, not me, but thanks anyway. ;) ........ r45636 | andrew.kuchling | 2006-04-22 03:51:41 +0200 (Sat, 22 Apr 2006) | 1 line Typo fixes ........ r45638 | andrew.kuchling | 2006-04-22 03:58:40 +0200 (Sat, 22 Apr 2006) | 1 line Fix comment typo ........ r45639 | andrew.kuchling | 2006-04-22 04:06:03 +0200 (Sat, 22 Apr 2006) | 8 lines Make copy of test_mailbox.py. We'll still want to check the backward compatibility classes in the new mailbox.py that I'll be committing in a few minutes. One change has been made: the tests use len(mbox) instead of len(mbox.boxes). The 'boxes' attribute was never documented and contains some internal state that seems unlikely to have been useful. ........ r45640 | andrew.kuchling | 2006-04-22 04:32:43 +0200 (Sat, 22 Apr 2006) | 16 lines Add Gregory K. Johnson's revised version of mailbox.py (funded by the 2005 Summer of Code). 
The revision adds a number of new mailbox classes that support adding and removing messages; these classes also support mailbox locking and default to using email.Message instead of rfc822.Message. The old mailbox classes are largely left alone for backward compatibility. The exception is the Maildir class, which was present in the old module and now inherits from the new classes. The Maildir class's interface is pretty simple, though, so I think it'll be compatible with existing code. (The change to the NEWS file also adds a missing word to a different news item, which unfortunately required rewrapping the line.) ........ r45641 | tim.peters | 2006-04-22 07:52:59 +0200 (Sat, 22 Apr 2006) | 2 lines Whitespace normalization. ........ r45642 | neal.norwitz | 2006-04-22 08:07:46 +0200 (Sat, 22 Apr 2006) | 1 line Add libctypes as a dep ........ r45643 | martin.v.loewis | 2006-04-22 13:15:41 +0200 (Sat, 22 Apr 2006) | 1 line Fix more ssize_t problems. ........ r45644 | martin.v.loewis | 2006-04-22 13:40:03 +0200 (Sat, 22 Apr 2006) | 1 line Fix more ssize_t issues. ........ r45645 | george.yoshida | 2006-04-22 17:10:49 +0200 (Sat, 22 Apr 2006) | 2 lines Typo fixes ........ r45647 | martin.v.loewis | 2006-04-22 17:19:54 +0200 (Sat, 22 Apr 2006) | 1 line Port to Python 2.5. Drop .DEF file. Change output file names to .pyd. ........ r45648 | george.yoshida | 2006-04-22 17:27:14 +0200 (Sat, 22 Apr 2006) | 3 lines - add versionadded tag - make arbitrary arguments come last ........ r45649 | hyeshik.chang | 2006-04-22 17:48:15 +0200 (Sat, 22 Apr 2006) | 3 lines Remove $CJKCodecs$ RCS tags. The CJKCodecs isn't maintained outside anymore. ........ r45654 | greg.ward | 2006-04-23 05:47:58 +0200 (Sun, 23 Apr 2006) | 2 lines Update optparse to Optik 1.5.1. ........ r45658 | george.yoshida | 2006-04-23 11:27:10 +0200 (Sun, 23 Apr 2006) | 2 lines wrap SyntaxError with \exception{} ........ r45660 | ronald.oussoren | 2006-04-23 13:59:25 +0200 (Sun, 23 Apr 2006) | 6 lines Patch 1471925 - Weak linking support for OSX This patch causes several symbols in the socket and posix module to be weakly linked on OSX and disables usage of ftime on OSX. These changes make it possible to use a binary build on OSX 10.4 on a 10.3 system. ........ r45661 | ronald.oussoren | 2006-04-23 14:36:23 +0200 (Sun, 23 Apr 2006) | 5 lines Patch 1471761 - test for broken poll at runtime This patch checks if poll is broken when the select module is loaded instead of doing so at configure-time. This functionality is only active on Mac OS X. ........ r45662 | nick.coghlan | 2006-04-23 17:13:32 +0200 (Sun, 23 Apr 2006) | 1 line Add a Context Types section to parallel the Iterator Types section (uses the same terminology as the 2.5a1 implementation) ........ r45663 | nick.coghlan | 2006-04-23 17:14:37 +0200 (Sun, 23 Apr 2006) | 1 line Update contextlib documentation to use the same terminology as the module implementation ........ r45664 | gerhard.haering | 2006-04-23 17:24:26 +0200 (Sun, 23 Apr 2006) | 2 lines Updated the sqlite3 module to the external pysqlite 2.2.2 version. ........ r45666 | nick.coghlan | 2006-04-23 17:39:16 +0200 (Sun, 23 Apr 2006) | 1 line Update with statement documentation to use same terminology as 2.5a1 implementation ........ r45667 | nick.coghlan | 2006-04-23 18:05:04 +0200 (Sun, 23 Apr 2006) | 1 line Add a (very) brief mention of the with statement to the end of chapter 8 ........ 
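As a reference for the construct the documentation entries above describe, a minimal sketch of the 2.5 with statement (the file name is only a placeholder; in 2.5 the statement still has to be enabled with a __future__ import):

    from __future__ import with_statement   # required to enable 'with' in Python 2.5

    with open("/etc/passwd") as f:           # file objects act as context managers in 2.5
        for line in f:
            pass                             # f.close() is called automatically on exit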
r45668 | nick.coghlan | 2006-04-23 18:35:19 +0200 (Sun, 23 Apr 2006) | 1 line Take 2 on mentioning the with statement, this time without inadvertently killing the Unicode examples ........ r45669 | nick.coghlan | 2006-04-23 19:04:07 +0200 (Sun, 23 Apr 2006) | 1 line Backdated NEWS entry to record the implementation of PEP 338 for alpha 1 ........ r45670 | tim.peters | 2006-04-23 20:13:45 +0200 (Sun, 23 Apr 2006) | 2 lines Whitespace normalization. ........ r45671 | skip.montanaro | 2006-04-23 21:14:27 +0200 (Sun, 23 Apr 2006) | 1 line first cut at trace module doc ........ r45672 | skip.montanaro | 2006-04-23 21:26:33 +0200 (Sun, 23 Apr 2006) | 1 line minor tweak ........ r45673 | skip.montanaro | 2006-04-23 21:30:50 +0200 (Sun, 23 Apr 2006) | 1 line it's always helpful if the example works... ........ r45674 | skip.montanaro | 2006-04-23 21:32:14 +0200 (Sun, 23 Apr 2006) | 1 line correct example ........ r45675 | andrew.kuchling | 2006-04-23 23:01:04 +0200 (Sun, 23 Apr 2006) | 1 line Edits to the PEP 343 section ........ r45676 | andrew.kuchling | 2006-04-23 23:51:10 +0200 (Sun, 23 Apr 2006) | 1 line Add two items ........ r45677 | tim.peters | 2006-04-24 04:03:16 +0200 (Mon, 24 Apr 2006) | 5 lines Bug #1337990: clarified that `doctest` does not support examples requiring both expected output and an exception. I'll backport to 2.4 next. ........ r45679 | nick.coghlan | 2006-04-24 05:04:43 +0200 (Mon, 24 Apr 2006) | 1 line Note changes made to PEP 343 related documentation ........ r45681 | nick.coghlan | 2006-04-24 06:17:02 +0200 (Mon, 24 Apr 2006) | 1 line Change PEP 343 related documentation to use the term context specifier instead of context object ........ r45682 | nick.coghlan | 2006-04-24 06:32:47 +0200 (Mon, 24 Apr 2006) | 1 line Add unit tests for the -m and -c command line switches ........ r45683 | nick.coghlan | 2006-04-24 06:37:15 +0200 (Mon, 24 Apr 2006) | 1 line Fix contextlib.nested to cope with exit methods raising and handling exceptions ........ r45685 | nick.coghlan | 2006-04-24 06:59:28 +0200 (Mon, 24 Apr 2006) | 1 line Fix broken contextlib test from last checkin (I'd've sworn I tested that before checking it in. . .) ........ r45686 | nick.coghlan | 2006-04-24 07:24:26 +0200 (Mon, 24 Apr 2006) | 1 line Back out new command line tests (broke buildbot) ........ r45687 | nick.coghlan | 2006-04-24 07:52:15 +0200 (Mon, 24 Apr 2006) | 1 line More reliable version of new command line tests that just checks the exit codes ........ r45688 | thomas.wouters | 2006-04-24 13:37:13 +0200 (Mon, 24 Apr 2006) | 4 lines Stop test_tcl's testLoadTk from leaking the Tk commands 'loadtk' registers. ........ r45690 | andrew.kuchling | 2006-04-24 16:30:47 +0200 (Mon, 24 Apr 2006) | 2 lines Edits, using the new term 'context specifier' in a few places ........ r45697 | phillip.eby | 2006-04-24 22:53:13 +0200 (Mon, 24 Apr 2006) | 2 lines Revert addition of setuptools ........ r45698 | tim.peters | 2006-04-25 00:45:13 +0200 (Tue, 25 Apr 2006) | 2 lines Whitespace normalization. ........ r45700 | trent.mick | 2006-04-25 02:34:50 +0200 (Tue, 25 Apr 2006) | 4 lines Put break at correct level so *all* root HKEYs acutally get checked for an installed VC6. Otherwise only the first such tree gets checked and this warning doesn't get displayed. ........ r45701 | tim.peters | 2006-04-25 05:31:36 +0200 (Tue, 25 Apr 2006) | 3 lines Patch #1475231: add a new SKIP doctest option, thanks to Edward Loper. ........ 
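A quick sketch of the new SKIP doctest option added by patch #1475231 above (the module name in the skipped example is a made-up placeholder):

    def f():
        """
        >>> import some_missing_module    # doctest: +SKIP
        >>> 1 + 1
        2
        """

    import doctest
    doctest.testmod()    # the first example is skipped, the second still runs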
r45702 | neal.norwitz | 2006-04-25 07:04:35 +0200 (Tue, 25 Apr 2006) | 1 line versionadded for SKIP ........ r45703 | neal.norwitz | 2006-04-25 07:05:03 +0200 (Tue, 25 Apr 2006) | 1 line Restore Walters name ........ r45704 | neal.norwitz | 2006-04-25 07:49:42 +0200 (Tue, 25 Apr 2006) | 1 line Revert previous change, SKIP had a versionadded elsewhere ........ r45706 | nick.coghlan | 2006-04-25 12:56:51 +0200 (Tue, 25 Apr 2006) | 31 lines Move the PEP 343 documentation and implementation closer to the terminology in the alpha 1 documentation. - "context manager" reverts to its alpha 1 definition - the term "context specifier" goes away entirely - contextlib.GeneratorContextManager is renamed GeneratorContext There are still a number of changes relative to alpha 1: - the expression in the with statement is explicitly called the "context expression" in the language reference - the terms 'with statement context', 'context object' or 'with statement context' are used in several places instead of a bare 'context'. The aim of this is to avoid ambiguity in relation to the runtime context set up when the block is executed, and the context objects that already exist in various application domains (such as decimal.Context) - contextlib.contextmanager is renamed to contextfactory This best reflects the nature of the function resulting from the use of that decorator - decimal.ContextManager is renamed to WithStatementContext Simple dropping the 'Manager' part wasn't possible due to the fact that decimal.Context already exists and means something different. WithStatementContext is ugly but workable. A technically unrelated change snuck into this commit: contextlib.closing now avoids the overhead of creating a generator, since it's trivial to implement that particular context manager directly. ........ r45707 | nick.coghlan | 2006-04-25 13:05:56 +0200 (Tue, 25 Apr 2006) | 1 line Fix latex typo ........ r45708 | thomas.wouters | 2006-04-25 14:28:56 +0200 (Tue, 25 Apr 2006) | 4 lines Fix markup glitch in unittest docs. Will backport. ........ r45710 | andrew.kuchling | 2006-04-25 14:31:38 +0200 (Tue, 25 Apr 2006) | 1 line Add two items; easy_install is now off the table, though pkgutil still is ........ r45711 | andrew.kuchling | 2006-04-25 14:47:25 +0200 (Tue, 25 Apr 2006) | 1 line Rework context terminology ........ r45712 | thomas.wouters | 2006-04-25 15:53:23 +0200 (Tue, 25 Apr 2006) | 9 lines SF bug/patch #1433877: string parameter to ioctl not null terminated The new char-array used in ioctl calls wasn't explicitly NUL-terminated; quite probably the cause for the test_pty failures on Solaris that we circumvented earlier. (I wasn't able to reproduce it with this patch, but it has been somewhat elusive to start with.) ........ r45713 | george.yoshida | 2006-04-25 16:09:58 +0200 (Tue, 25 Apr 2006) | 2 lines minor tweak ........ r45714 | thomas.wouters | 2006-04-25 17:08:10 +0200 (Tue, 25 Apr 2006) | 7 lines Fix SF bug #1476111: SystemError in socket sendto. The AF_INET6 and AF_PACKET cases in getsockaddrarg were missing their own checks for tuple-ness of the address argument, which means a confusing SystemError was raised by PyArg_ParseTuple instead. ........ r45715 | thomas.wouters | 2006-04-25 17:29:46 +0200 (Tue, 25 Apr 2006) | 10 lines Define MAXPATHLEN to be at least PATH_MAX, if that's defined. Python uses MAXPATHLEN-sized buffers for various output-buffers (like to realpath()), and that's correct on BSD platforms, but not Linux (which uses PATH_MAX, and does not define MAXPATHLEN.) 
Cursory googling suggests Linux is following a newer standard than BSD, but in cases like this, who knows. Using the greater of PATH_MAX and 1024 as a fallback for MAXPATHLEN seems to be the most portable solution. ........ r45717 | thomas.heller | 2006-04-25 20:26:08 +0200 (Tue, 25 Apr 2006) | 3 lines Fix compiler warnings on Darwin. Patch by Brett Canon, see https://sourceforge.net/tracker/?func=detail&atid=532156&aid=1475959&group_id=71702 ........ r45718 | guido.van.rossum | 2006-04-25 22:12:45 +0200 (Tue, 25 Apr 2006) | 4 lines Implement MvL's improvement on __context__ in Condition; this can just call __context__ on the underlying lock. (The same change for Semaphore does *not* work!) ........ r45721 | tim.peters | 2006-04-26 03:15:53 +0200 (Wed, 26 Apr 2006) | 13 lines Rev 45706 renamed stuff in contextlib.py, but didn't rename uses of it in test_with.py. As a result, test_with has been skipped (due to failing imports) on all buildbot boxes since. Alas, that's not a test failure -- you have to pay attention to the 1 skip unexpected on PLATFORM: test_with kinds of output at the ends of test runs to notice that this got broken. It's likely that more renaming in test_with.py would be desirable. ........ r45722 | fred.drake | 2006-04-26 07:15:41 +0200 (Wed, 26 Apr 2006) | 1 line markup fixes, cleanup ........ r45723 | fred.drake | 2006-04-26 07:19:39 +0200 (Wed, 26 Apr 2006) | 1 line minor adjustment suggested by Peter Gephardt ........ r45724 | neal.norwitz | 2006-04-26 07:34:03 +0200 (Wed, 26 Apr 2006) | 10 lines Patch from Aldo Cortesi (OpenBSD buildbot owner). After the patch (45590) to add extra debug stats to the gc module, Python was crashing on OpenBSD due to: Fatal Python error: Interpreter not initialized (version mismatch?) This seems to occur due to calling collect() when initialized (in pythonrun.c) is set to 0. Now, the import will occur in the init function which shouldn't suffer this problem. ........ r45725 | neal.norwitz | 2006-04-26 08:26:12 +0200 (Wed, 26 Apr 2006) | 3 lines Fix this test on Solaris. There can be embedded \r, so don't just replace the one at the end. ........ r45727 | nick.coghlan | 2006-04-26 13:50:04 +0200 (Wed, 26 Apr 2006) | 1 line Fix an error in the last contextlib.closing example ........ r45728 | andrew.kuchling | 2006-04-26 14:21:06 +0200 (Wed, 26 Apr 2006) | 1 line [Bug #1475080] Fix example ........ r45729 | andrew.kuchling | 2006-04-26 14:23:39 +0200 (Wed, 26 Apr 2006) | 1 line Add labels to all sections ........ r45730 | thomas.wouters | 2006-04-26 17:53:30 +0200 (Wed, 26 Apr 2006) | 7 lines The result of SF patch #1471578: big-memory tests for strings, lists and tuples. Lots to be added, still, but this will give big-memory people something to play with in 2.5 alpha 2, and hopefully get more people to write these tests. ........ r45731 | tim.peters | 2006-04-26 19:11:16 +0200 (Wed, 26 Apr 2006) | 2 lines Whitespace normalization. ........ r45732 | martin.v.loewis | 2006-04-26 19:19:44 +0200 (Wed, 26 Apr 2006) | 1 line Use GS- and bufferoverlowU.lib where appropriate, for AMD64. ........ r45733 | thomas.wouters | 2006-04-26 20:46:01 +0200 (Wed, 26 Apr 2006) | 5 lines Add tests for += and *= on strings, and fix the memory-use estimate for the list.extend tests (they were estimating half the actual use.) ........ r45734 | thomas.wouters | 2006-04-26 21:14:46 +0200 (Wed, 26 Apr 2006) | 5 lines Some more test-size-estimate fixes: test_append and test_insert trigger a list resize, which overallocates. ........ 
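For context on the contextlib.closing example mentioned in r45727 above, the idiom looks roughly like this (a hedged sketch; the URL is only a placeholder):

    from __future__ import with_statement
    from contextlib import closing
    import urllib

    with closing(urllib.urlopen("http://www.python.org/")) as page:
        data = page.read()
    # page.close() has been called here, even if read() raised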
r45735 | hyeshik.chang | 2006-04-26 21:20:26 +0200 (Wed, 26 Apr 2006) | 3 lines Fix build on MIPS for libffi. I haven't tested this yet because I don't have access to MIPS machines. Will be tested by buildbot. :) ........ r45737 | fred.drake | 2006-04-27 01:40:32 +0200 (Thu, 27 Apr 2006) | 1 line one more place to use the current Python version ........ r45738 | fred.drake | 2006-04-27 02:02:24 +0200 (Thu, 27 Apr 2006) | 3 lines - update version numbers in file names again, until we have a better way - elaborate instructions for Cygwin support (closes SF #839709) ........ r45739 | fred.drake | 2006-04-27 02:20:14 +0200 (Thu, 27 Apr 2006) | 1 line add missing word ........ r45740 | anthony.baxter | 2006-04-27 04:11:24 +0200 (Thu, 27 Apr 2006) | 2 lines 2.5a2 ........ r45741 | anthony.baxter | 2006-04-27 04:13:13 +0200 (Thu, 27 Apr 2006) | 1 line 2.5a2 ........ r45749 | andrew.kuchling | 2006-04-27 14:22:37 +0200 (Thu, 27 Apr 2006) | 1 line Now that 2.5a2 is out, revert to the current date ........ r45750 | andrew.kuchling | 2006-04-27 14:23:07 +0200 (Thu, 27 Apr 2006) | 1 line Bump document version ........ r45751 | andrew.kuchling | 2006-04-27 14:34:39 +0200 (Thu, 27 Apr 2006) | 6 lines [Bug #1477102] Add necessary import to example This may be a useful style question for the docs -- should examples show the necessary imports, or should it be assumed that the reader will figure it out? In the What's New, I'm not consistent but usually opt for omitting the imports. ........ r45753 | andrew.kuchling | 2006-04-27 14:38:35 +0200 (Thu, 27 Apr 2006) | 1 line [Bug #1477140] Import Error base class ........ r45754 | andrew.kuchling | 2006-04-27 14:42:54 +0200 (Thu, 27 Apr 2006) | 1 line Mention the xmlrpclib.Error base class, which is used in one of the examples ........ r45756 | george.yoshida | 2006-04-27 15:41:07 +0200 (Thu, 27 Apr 2006) | 2 lines markup fix ........ r45757 | thomas.wouters | 2006-04-27 15:46:59 +0200 (Thu, 27 Apr 2006) | 4 lines Some more size-estimate fixes, for large-list-tests. ........ r45758 | thomas.heller | 2006-04-27 17:50:42 +0200 (Thu, 27 Apr 2006) | 3 lines Rerun the libffi configuration if any of the files used for that are newer than fficonfig.py. ........ r45766 | thomas.wouters | 2006-04-28 00:37:50 +0200 (Fri, 28 Apr 2006) | 6 lines Some style fixes and size-calculation fixes. Also do the small-memory run using a prime number, rather than a convenient power-of-2-and-multiple-of-5, so incorrect testing algorithms fail more easily. ........ r45767 | thomas.wouters | 2006-04-28 00:38:32 +0200 (Fri, 28 Apr 2006) | 6 lines Do the small-memory run of big-memory tests using a prime number, rather than a convenient power-of-2-and-multiple-of-5, so incorrect testing algorithms fail more easily. ........ r45768 | david.goodger | 2006-04-28 00:53:05 +0200 (Fri, 28 Apr 2006) | 1 line Added SVN access for Steven Bethard and Talin, for PEP updating. ........ r45770 | thomas.wouters | 2006-04-28 01:13:20 +0200 (Fri, 28 Apr 2006) | 16 lines - Add new Warning class, ImportWarning - Warn-raise ImportWarning when importing would have picked up a directory as package, if only it'd had an __init__.py. This swaps two tests (for case-ness and __init__-ness), but case-test is not really more expensive, and it's not in a speed-critical section.
- Test for the new warning by importing a common non-package directory on sys.path: site-packages - In regrtest.py, silence warnings generated by the build-environment because Modules/ (which is added to sys.path for Setup-created modules) has 'zlib' and '_ctypes' directories without __init__.py's. ........ r45771 | thomas.wouters | 2006-04-28 01:41:27 +0200 (Fri, 28 Apr 2006) | 6 lines Add more ignores of ImportWarnings; these are all just potential triggers (since they won't trigger if zlib is already sucessfully imported); they were found by grepping .py files, instead of looking at warning output :) ........ r45773 | neal.norwitz | 2006-04-28 06:32:20 +0200 (Fri, 28 Apr 2006) | 1 line Add some whitespace to be more consistent. ........ r45774 | neal.norwitz | 2006-04-28 06:34:43 +0200 (Fri, 28 Apr 2006) | 5 lines Try to really fix the slow buildbots this time. Printing to stdout, doesn't mean the data was actually written. It depends on the buffering, so we need to flush. This will hopefully really fix the buildbots getting killed due to no output on the slow bots. ........ r45775 | neal.norwitz | 2006-04-28 07:28:05 +0200 (Fri, 28 Apr 2006) | 1 line Fix some warnings on Mac OS X 10.4 ........ r45776 | neal.norwitz | 2006-04-28 07:28:30 +0200 (Fri, 28 Apr 2006) | 1 line Fix a warning on alpha ........ r45777 | neal.norwitz | 2006-04-28 07:28:54 +0200 (Fri, 28 Apr 2006) | 1 line Fix a warning on ppc (debian) ........ r45778 | george.yoshida | 2006-04-28 18:09:45 +0200 (Fri, 28 Apr 2006) | 2 lines fix markup glitch ........ r45780 | georg.brandl | 2006-04-28 18:31:17 +0200 (Fri, 28 Apr 2006) | 3 lines Add SeaMonkey to the list of Mozilla browsers. ........ r45781 | georg.brandl | 2006-04-28 18:36:55 +0200 (Fri, 28 Apr 2006) | 2 lines Bug #1475009: clarify ntpath.join behavior with absolute components ........ r45783 | george.yoshida | 2006-04-28 18:40:14 +0200 (Fri, 28 Apr 2006) | 2 lines correct a dead link ........ r45785 | georg.brandl | 2006-04-28 18:54:25 +0200 (Fri, 28 Apr 2006) | 4 lines Bug #1472949: stringify IOErrors in shutil.copytree when appending them to the Error errors list. ........ r45786 | georg.brandl | 2006-04-28 18:58:52 +0200 (Fri, 28 Apr 2006) | 3 lines Bug #1478326: don't allow '/' in distutils.util.get_platform machine names since this value is used to name the build directory. ........ r45788 | thomas.heller | 2006-04-28 19:02:18 +0200 (Fri, 28 Apr 2006) | 1 line Remove a duplicated test (the same test is in test_incomplete.py). ........ r45792 | georg.brandl | 2006-04-28 21:09:24 +0200 (Fri, 28 Apr 2006) | 3 lines Bug #1478429: make datetime.datetime.fromtimestamp accept every float, possibly "rounding up" to the next whole second. ........ r45796 | george.yoshida | 2006-04-29 04:43:30 +0200 (Sat, 29 Apr 2006) | 2 lines grammar fix ........ r45800 | ronald.oussoren | 2006-04-29 13:31:35 +0200 (Sat, 29 Apr 2006) | 2 lines Patch 1471883: --enable-universalsdk on Mac OS X ........ r45801 | andrew.kuchling | 2006-04-29 13:53:15 +0200 (Sat, 29 Apr 2006) | 1 line Add item ........ r45802 | andrew.kuchling | 2006-04-29 14:10:28 +0200 (Sat, 29 Apr 2006) | 1 line Make case of 'ZIP' consistent ........ r45803 | andrew.kuchling | 2006-04-29 14:10:43 +0200 (Sat, 29 Apr 2006) | 1 line Add item ........ r45808 | martin.v.loewis | 2006-04-29 14:37:25 +0200 (Sat, 29 Apr 2006) | 3 lines Further changes for #1471883: Edit Misc/NEWS, and add expat_config.h. ........ 
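Relating to the ImportWarning entries above (r45770/r45771), a minimal sketch of how such warnings can be silenced with the standard warnings machinery:

    import warnings

    # Ignore the new ImportWarning, e.g. in build trees where directories
    # without an __init__.py happen to shadow module names.
    warnings.simplefilter("ignore", ImportWarning)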
r45809 | brett.cannon | 2006-04-29 23:29:50 +0200 (Sat, 29 Apr 2006) | 2 lines Fix docstring for contextfactory; mentioned old contextmanager name. ........ r45810 | gerhard.haering | 2006-04-30 01:12:41 +0200 (Sun, 30 Apr 2006) | 3 lines This is the start of documentation for the sqlite3 module. Please feel free to find a better place for the link to it than alongside bsddb & friends. ........ r45811 | andrew.kuchling | 2006-04-30 03:07:09 +0200 (Sun, 30 Apr 2006) | 1 line Add two items ........ r45814 | george.yoshida | 2006-04-30 05:49:56 +0200 (Sun, 30 Apr 2006) | 2 lines Use \versionchanged instead of \versionadded for new parameter support. ........ r45815 | georg.brandl | 2006-04-30 09:06:11 +0200 (Sun, 30 Apr 2006) | 2 lines Patch #1470846: fix urllib2 ProxyBasicAuthHandler. ........ r45817 | georg.brandl | 2006-04-30 10:57:35 +0200 (Sun, 30 Apr 2006) | 3 lines In stdlib, use hashlib instead of deprecated md5 and sha modules. ........ r45819 | georg.brandl | 2006-04-30 11:23:59 +0200 (Sun, 30 Apr 2006) | 3 lines Patch #1470976: don't NLST files when retrieving over FTP. ........ r45821 | georg.brandl | 2006-04-30 13:13:56 +0200 (Sun, 30 Apr 2006) | 6 lines Bug #1473625: stop cPickle making float dumps locale dependent in protocol 0. On the way, add a decorator to test_support to facilitate running single test functions in different locales with automatic cleanup. ........ r45822 | phillip.eby | 2006-04-30 17:59:26 +0200 (Sun, 30 Apr 2006) | 2 lines Fix infinite regress when inspecting or frames. ........ r45824 | georg.brandl | 2006-04-30 19:42:26 +0200 (Sun, 30 Apr 2006) | 3 lines Fix another problem in inspect: if the module for an object cannot be found, don't try to give its __dict__ to linecache. ........ r45825 | georg.brandl | 2006-04-30 20:14:54 +0200 (Sun, 30 Apr 2006) | 3 lines Patch #1472854: make the rlcompleter.Completer class usable on non- UNIX platforms. ........ r45826 | georg.brandl | 2006-04-30 21:34:19 +0200 (Sun, 30 Apr 2006) | 3 lines Patch #1479438: add \keyword markup for "with". ........ r45827 | andrew.kuchling | 2006-04-30 23:19:31 +0200 (Sun, 30 Apr 2006) | 1 line Add urllib2 HOWTO from Michael Foord ........ r45828 | andrew.kuchling | 2006-04-30 23:19:49 +0200 (Sun, 30 Apr 2006) | 1 line Add item ........ r45830 | barry.warsaw | 2006-05-01 05:03:02 +0200 (Mon, 01 May 2006) | 11 lines Port forward from 2.4 branch: Patch #1464708 from William McVey: fixed handling of nested comments in mail addresses. E.g. "Foo ((Foo Bar)) " Fixes for both rfc822.py and email package. This patch needs to be back ported to Python 2.3 for email 2.5. ........ r45832 | fred.drake | 2006-05-01 08:25:58 +0200 (Mon, 01 May 2006) | 4 lines - minor clarification in section title - markup adjustments (there is clearly much to be done in this section) ........ r45833 | martin.v.loewis | 2006-05-01 08:28:01 +0200 (Mon, 01 May 2006) | 2 lines Work around deadlock risk. Will backport. ........ r45836 | andrew.kuchling | 2006-05-01 14:45:02 +0200 (Mon, 01 May 2006) | 1 line Some ElementTree fixes: import from xml, not xmlcore; fix case of module name; mention list() instead of getchildren() ........ r45837 | gerhard.haering | 2006-05-01 17:14:48 +0200 (Mon, 01 May 2006) | 3 lines Further integration of the documentation for the sqlite3 module. There's still quite some content to move over from the pysqlite manual, but it's a start now. ........ r45838 | martin.v.loewis | 2006-05-01 17:56:03 +0200 (Mon, 01 May 2006) | 2 lines Rename uisample to text, drop all non-text tables. ........ 
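Since the entries above begin integrating the sqlite3 documentation, a minimal usage sketch of the module (table name and values are arbitrary examples):

    import sqlite3

    con = sqlite3.connect(":memory:")        # throwaway in-memory database
    cur = con.cursor()
    cur.execute("create table person (name text, age integer)")
    cur.execute("insert into person values (?, ?)", ("Guido", 50))
    cur.execute("select name, age from person")
    print cur.fetchone()                     # prints the row just inserted
    con.close()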
r45839 | martin.v.loewis | 2006-05-01 18:12:44 +0200 (Mon, 01 May 2006) | 2 lines Add msilib documentation. ........ r45840 | martin.v.loewis | 2006-05-01 18:14:16 +0200 (Mon, 01 May 2006) | 4 lines Rename parameters to match the documentation (which in turn matches Microsoft's documentation). Drop unused parameter in CAB.append. ........ r45841 | fred.drake | 2006-05-01 18:28:54 +0200 (Mon, 01 May 2006) | 1 line add dependency ........ r45842 | andrew.kuchling | 2006-05-01 18:30:25 +0200 (Mon, 01 May 2006) | 1 line Markup fixes; add some XXX comments noting problems ........ r45843 | andrew.kuchling | 2006-05-01 18:32:49 +0200 (Mon, 01 May 2006) | 1 line Add item ........ r45844 | andrew.kuchling | 2006-05-01 19:06:54 +0200 (Mon, 01 May 2006) | 1 line Markup fixes ........ r45850 | neal.norwitz | 2006-05-02 06:43:14 +0200 (Tue, 02 May 2006) | 3 lines SF #1479181: split open() and file() from being aliases for each other. ........ r45852 | neal.norwitz | 2006-05-02 08:23:22 +0200 (Tue, 02 May 2006) | 1 line Try to fix breakage caused by patch #1479181, r45850 ........ r45853 | fred.drake | 2006-05-02 08:53:59 +0200 (Tue, 02 May 2006) | 3 lines SF #1479988: add methods to allow access to weakrefs for the weakref.WeakKeyDictionary and weakref.WeakValueDictionary ........ r45854 | neal.norwitz | 2006-05-02 09:27:47 +0200 (Tue, 02 May 2006) | 5 lines Fix breakage from patch 1471883 (r45800 & r45808) on OSF/1. The problem was that pyconfig.h was being included before some system headers which caused redefinitions and other breakage. This moves system headers after expat_config.h which includes pyconfig.h. ........ r45855 | vinay.sajip | 2006-05-02 10:35:36 +0200 (Tue, 02 May 2006) | 1 line Replaced my dumb way of calculating seconds to midnight with Tim Peters' much more sensible suggestion. What was I thinking ?!? ........ r45856 | andrew.kuchling | 2006-05-02 13:30:03 +0200 (Tue, 02 May 2006) | 1 line Provide encoding as keyword argument; soften warning paragraph about encodings ........ r45858 | guido.van.rossum | 2006-05-02 19:36:09 +0200 (Tue, 02 May 2006) | 2 lines Fix the formatting of KeyboardInterrupt -- a bad issubclass() call. ........ r45862 | guido.van.rossum | 2006-05-02 21:47:52 +0200 (Tue, 02 May 2006) | 7 lines Get rid of __context__, per the latest changes to PEP 343 and python-dev discussion. There are two places of documentation that still mention __context__: Doc/lib/libstdtypes.tex -- I wasn't quite sure how to rewrite that without spending a whole lot of time thinking about it; and whatsnew, which Andrew usually likes to change himself. ........ r45863 | armin.rigo | 2006-05-02 21:52:32 +0200 (Tue, 02 May 2006) | 4 lines Documentation bug: PySet_Pop() returns a new reference (because the caller becomes the owner of that reference). ........ r45864 | guido.van.rossum | 2006-05-02 22:47:36 +0200 (Tue, 02 May 2006) | 4 lines Hopefully this will fix the spurious failures of test_mailbox.py that I'm experiencing. (This code and mailbox.py itself are full of calls to file() that should be calls to open() -- but I'm not fixing those.) ........ r45865 | andrew.kuchling | 2006-05-02 23:44:33 +0200 (Tue, 02 May 2006) | 1 line Use open() instead of file() ........ r45866 | andrew.kuchling | 2006-05-03 00:47:49 +0200 (Wed, 03 May 2006) | 1 line Update context manager section for removal of __context__ ........ r45867 | fred.drake | 2006-05-03 03:46:52 +0200 (Wed, 03 May 2006) | 1 line remove unnecessary assignment ........ 
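With __context__ gone (r45862/r45866 above), the with statement calls __enter__ and __exit__ on the context manager directly; a minimal sketch of the resulting protocol:

    from __future__ import with_statement

    class Managed(object):
        def __enter__(self):
            return self                      # value bound by the optional 'as' clause
        def __exit__(self, exc_type, exc_value, traceback):
            return False                     # False means: don't swallow exceptions

    with Managed() as m:
        pass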
r45868 | fred.drake | 2006-05-03 03:48:24 +0200 (Wed, 03 May 2006) | 4 lines tell LaTeX2HTML to: - use UTF-8 output - not mess with the >>> prompt! ........ r45869 | fred.drake | 2006-05-03 04:04:40 +0200 (Wed, 03 May 2006) | 3 lines avoid ugly markup based on the unfortunate conversions of ">>" and "<<" to guillemets; no need for magic here ........ r45870 | fred.drake | 2006-05-03 04:12:47 +0200 (Wed, 03 May 2006) | 1 line at least comment on why curly-quotes are not enabled ........ r45871 | fred.drake | 2006-05-03 04:27:40 +0200 (Wed, 03 May 2006) | 1 line one more place to avoid extra markup ........ r45872 | fred.drake | 2006-05-03 04:29:09 +0200 (Wed, 03 May 2006) | 1 line one more place to avoid extra markup (how many will there be?) ........ r45873 | fred.drake | 2006-05-03 04:29:39 +0200 (Wed, 03 May 2006) | 1 line fix up whitespace in prompt strings ........ r45876 | tim.peters | 2006-05-03 06:46:14 +0200 (Wed, 03 May 2006) | 2 lines Whitespace normalization. ........ r45877 | martin.v.loewis | 2006-05-03 06:52:04 +0200 (Wed, 03 May 2006) | 2 lines Correct some formulations, fix XXX comments. ........ r45879 | georg.brandl | 2006-05-03 07:05:02 +0200 (Wed, 03 May 2006) | 2 lines Patch #1480067: don't redirect HTTP digest auth in urllib2 ........ r45881 | georg.brandl | 2006-05-03 07:15:10 +0200 (Wed, 03 May 2006) | 3 lines Move network tests from test_urllib2 to test_urllib2net. ........ r45887 | nick.coghlan | 2006-05-03 15:02:47 +0200 (Wed, 03 May 2006) | 1 line Finish bringing SVN into line with latest version of PEP 343 by getting rid of all remaining references to context objects that I could find. Without a __context__() method context objects no longer exist. Also get test_with working again, and adopt a suggestion from Neal for decimal.Context.get_manager() ........ r45888 | nick.coghlan | 2006-05-03 15:17:49 +0200 (Wed, 03 May 2006) | 1 line Get rid of a couple more context object references, fix some markup and clarify what happens when a generator context function swallows an exception. ........ r45889 | georg.brandl | 2006-05-03 19:46:13 +0200 (Wed, 03 May 2006) | 3 lines Add seamonkey to list of Windows browsers too. ........ r45890 | georg.brandl | 2006-05-03 20:03:22 +0200 (Wed, 03 May 2006) | 3 lines RFE #1472176: In httplib, don't encode the netloc and hostname with "idna" if not necessary. ........ r45891 | georg.brandl | 2006-05-03 20:12:33 +0200 (Wed, 03 May 2006) | 2 lines Bug #1472191: convert breakpoint indices to ints before comparing them to ints ........ r45893 | georg.brandl | 2006-05-03 20:18:32 +0200 (Wed, 03 May 2006) | 3 lines Bug #1385040: don't allow "def foo(a=1, b): pass" in the compiler package. ........ r45894 | thomas.heller | 2006-05-03 20:35:39 +0200 (Wed, 03 May 2006) | 1 line Don't fail the tests when libglut.so or libgle.so cannot be loaded. ........ r45895 | georg.brandl | 2006-05-04 07:08:10 +0200 (Thu, 04 May 2006) | 2 lines Bug #1481530: allow "from os.path import ..." with imputil ........ r45897 | martin.v.loewis | 2006-05-04 07:51:03 +0200 (Thu, 04 May 2006) | 2 lines Patch #1475845: Raise IndentationError for unexpected indent. ........ r45898 | martin.v.loewis | 2006-05-04 12:08:42 +0200 (Thu, 04 May 2006) | 1 line Implement os.{chdir,rename,rmdir,remove} using Win32 directly. ........ r45899 | martin.v.loewis | 2006-05-04 14:04:27 +0200 (Thu, 04 May 2006) | 2 lines Drop now-unnecessary arguments to posix_2str. ........ 
r45900 | martin.v.loewis | 2006-05-04 16:27:52 +0200 (Thu, 04 May 2006) | 1 line Update checks to consider Windows error numbers. ........ r45913 | thomas.heller | 2006-05-05 20:42:14 +0200 (Fri, 05 May 2006) | 2 lines Export the 'free' standard C function for use in the test suite. ........ r45914 | thomas.heller | 2006-05-05 20:43:24 +0200 (Fri, 05 May 2006) | 3 lines Fix memory leaks in the ctypes test suite, reported by valgrind, by free()ing the memory we allocate. ........ r45915 | thomas.heller | 2006-05-05 20:46:27 +0200 (Fri, 05 May 2006) | 1 line oops - the function is exported as 'my_free', not 'free'. ........ r45916 | thomas.heller | 2006-05-05 21:14:24 +0200 (Fri, 05 May 2006) | 2 lines Clean up. ........ r45920 | george.yoshida | 2006-05-06 15:09:45 +0200 (Sat, 06 May 2006) | 2 lines describe optional arguments for DocFileSuite ........ r45924 | george.yoshida | 2006-05-06 16:16:51 +0200 (Sat, 06 May 2006) | 2 lines Use \versionchanged for the feature change ........ r45925 | martin.v.loewis | 2006-05-06 18:32:54 +0200 (Sat, 06 May 2006) | 1 line Port access, chmod, parts of getcwdu, mkdir, and utime to direct Win32 API. ........ r45926 | martin.v.loewis | 2006-05-06 22:04:08 +0200 (Sat, 06 May 2006) | 2 lines Handle ERROR_ALREADY_EXISTS. ........ r45931 | andrew.kuchling | 2006-05-07 19:12:12 +0200 (Sun, 07 May 2006) | 1 line [Patch #1479977] Revised version of urllib2 HOWTO, edited by John J. Lee ........ r45932 | andrew.kuchling | 2006-05-07 19:14:53 +0200 (Sun, 07 May 2006) | 1 line Minor language edit ........ r45934 | georg.brandl | 2006-05-07 22:44:34 +0200 (Sun, 07 May 2006) | 3 lines Patch #1483395: add new TLDs to cookielib ........ r45936 | martin.v.loewis | 2006-05-08 07:25:56 +0200 (Mon, 08 May 2006) | 2 lines Add missing PyMem_Free. ........ r45938 | georg.brandl | 2006-05-08 19:28:47 +0200 (Mon, 08 May 2006) | 3 lines Add test for rev. 45934. ........ r45939 | georg.brandl | 2006-05-08 19:36:08 +0200 (Mon, 08 May 2006) | 3 lines Patch #1479302: Make urllib2 digest auth and basic auth play together. ........ r45940 | georg.brandl | 2006-05-08 19:48:01 +0200 (Mon, 08 May 2006) | 3 lines Patch #1478993: take advantage of BaseException/Exception split in cookielib ........ r45941 | neal.norwitz | 2006-05-09 07:38:56 +0200 (Tue, 09 May 2006) | 5 lines Micro optimization. In the first case, we know that frame->f_exc_type is NULL, so there's no reason to do anything with it. In the second case, we know frame->f_exc_type is not NULL, so we can just do an INCREF. ........ r45943 | thomas.heller | 2006-05-09 22:20:15 +0200 (Tue, 09 May 2006) | 2 lines Disable a test that is unreliable. ........ r45944 | tim.peters | 2006-05-10 04:43:01 +0200 (Wed, 10 May 2006) | 4 lines Variant of patch #1478292. doctest.register_optionflag(name) shouldn't create a new flag when `name` is already the name of an option flag. ........ r45947 | neal.norwitz | 2006-05-10 08:57:58 +0200 (Wed, 10 May 2006) | 14 lines Fix problems found by Coverity. longobject.c: also fix an ssize_t problem could have been NULL, so hoist the size calc to not use . _ssl.c: under fail: self is DECREF'd, but it would have been NULL. _elementtree.c: delete self if there was an error. _csv.c: I'm not sure if lineterminator could have been anything other than a string. However, other string method calls are checked, so check this one too. ........ r45948 | thomas.wouters | 2006-05-10 17:04:11 +0200 (Wed, 10 May 2006) | 4 lines Ignore reflog.txt, too. ........ 
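A small sketch of the register_optionflag() behaviour fixed in r45944 above ('MY_FLAG' is an arbitrary example name):

    import doctest

    flag_a = doctest.register_optionflag("MY_FLAG")
    flag_b = doctest.register_optionflag("MY_FLAG")   # same name, second call
    assert flag_a == flag_b   # no new flag is allocated for an already-known name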
r45949 | georg.brandl | 2006-05-10 17:59:06 +0200 (Wed, 10 May 2006) | 3 lines Bug #1482988: indicate more prominently that the Stats class is in the pstats module. ........ r45950 | georg.brandl | 2006-05-10 18:09:03 +0200 (Wed, 10 May 2006) | 2 lines Bug #1485447: subprocess: document that the "cwd" parameter isn't used to find the executable. Misc. other markup fixes. ........ r45952 | georg.brandl | 2006-05-10 18:11:44 +0200 (Wed, 10 May 2006) | 2 lines Bug #1484978: curses.panel: clarify that Panel objects are destroyed on garbage collection. ........ r45954 | georg.brandl | 2006-05-10 18:26:03 +0200 (Wed, 10 May 2006) | 4 lines Patch #1484695: Update the tarfile module to version 0.8. This fixes a couple of issues, notably handling of long file names using the GNU LONGNAME extension. ........ r45955 | georg.brandl | 2006-05-10 19:13:20 +0200 (Wed, 10 May 2006) | 4 lines Patch #721464: pdb.Pdb instances can now be given explicit stdin and stdout arguments, making it possible to redirect input and output for remote debugging. ........ r45956 | andrew.kuchling | 2006-05-10 19:19:04 +0200 (Wed, 10 May 2006) | 1 line Clarify description of exception handling ........ r45957 | georg.brandl | 2006-05-10 22:09:23 +0200 (Wed, 10 May 2006) | 2 lines Fix two small errors in argument lists. ........ r45960 | brett.cannon | 2006-05-11 07:11:33 +0200 (Thu, 11 May 2006) | 5 lines Detect if %zd is supported by printf() during configure and sets PY_FORMAT_SIZE_T appropriately. Removes warnings on OS X under gcc 4.0.1 when PY_FORMAT_SIZE_T is set to "" instead of "z" as is needed. ........ r45963 | neal.norwitz | 2006-05-11 09:51:59 +0200 (Thu, 11 May 2006) | 1 line Don't mask a no memory error with a less meaningful one as discussed on python-checkins ........ r45964 | martin.v.loewis | 2006-05-11 15:28:43 +0200 (Thu, 11 May 2006) | 3 lines Change WindowsError to carry the Win32 error code in winerror, and the DOS error code in errno. Revert changes where WindowsError catch blocks unnecessarily special-case OSError. ........ r45965 | george.yoshida | 2006-05-11 17:53:27 +0200 (Thu, 11 May 2006) | 2 lines Grammar fix ........ r45967 | andrew.kuchling | 2006-05-11 18:32:24 +0200 (Thu, 11 May 2006) | 1 line typo fix ........ r45968 | tim.peters | 2006-05-11 18:37:42 +0200 (Thu, 11 May 2006) | 5 lines BaseThreadedTestCase.setup(): stop special-casing WindowsError. Rev 45964 fiddled with WindowsError, and broke test_bsddb3 on all the Windows buildbot slaves as a result. This should repair it. ........ r45969 | georg.brandl | 2006-05-11 21:57:09 +0200 (Thu, 11 May 2006) | 2 lines Typo fix. ........ r45970 | tim.peters | 2006-05-12 03:57:59 +0200 (Fri, 12 May 2006) | 5 lines SF patch #1473132: Improve docs for tp_clear and tp_traverse, by Collin Winter. Bugfix candidate (but I'm not going to bother). ........ r45974 | martin.v.loewis | 2006-05-12 14:27:28 +0200 (Fri, 12 May 2006) | 4 lines Dynamically allocate path name buffer for Unicode path name in listdir. Fixes #1431582. Stop overallocating MAX_PATH characters for ANSI path names. Stop assigning to errno. ........ r45975 | martin.v.loewis | 2006-05-12 15:57:36 +0200 (Fri, 12 May 2006) | 1 line Move icon files into DLLs dir. Fixes #1477968. ........ r45976 | george.yoshida | 2006-05-12 18:40:11 +0200 (Fri, 12 May 2006) | 2 lines At first there were 6 steps, but one was removed after that. ........ r45977 | martin.v.loewis | 2006-05-12 19:22:04 +0200 (Fri, 12 May 2006) | 1 line Fix alignment error on Itanium. ........ 
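To illustrate the pdb change from patch #721464 above, a hedged sketch; the streams used here are just stand-ins for e.g. socket makefile() objects in a real remote-debugging setup:

    import pdb
    import sys

    debugger = pdb.Pdb(stdin=sys.stdin, stdout=sys.stderr)
    # debugger.set_trace()   # uncommenting this starts the prompt on the given streams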
r45978 | george.yoshida | 2006-05-12 19:25:26 +0200 (Fri, 12 May 2006) | 3 lines A duplicated description of the illegal continue usage can be found in nearly the same place. They are the same, so keep the original one and remove the later-added one. ........ r45980 | thomas.heller | 2006-05-12 20:16:03 +0200 (Fri, 12 May 2006) | 2 lines Add missing svn properties. ........ r45981 | thomas.heller | 2006-05-12 20:47:35 +0200 (Fri, 12 May 2006) | 1 line set svn properties ........ r45982 | thomas.heller | 2006-05-12 21:31:46 +0200 (Fri, 12 May 2006) | 1 line add svn:eol-style native svn:keywords Id ........ r45987 | gerhard.haering | 2006-05-13 01:49:49 +0200 (Sat, 13 May 2006) | 3 lines Integrated the rest of the pysqlite reference manual into the Python documentation. Ready to be reviewed and improved upon. ........ r45988 | george.yoshida | 2006-05-13 08:53:31 +0200 (Sat, 13 May 2006) | 2 lines Add \exception markup ........ r45990 | martin.v.loewis | 2006-05-13 15:34:04 +0200 (Sat, 13 May 2006) | 2 lines Revert 43315: Printing of %zd must be signed. ........ r45992 | tim.peters | 2006-05-14 01:28:20 +0200 (Sun, 14 May 2006) | 11 lines Teach PyString_FromFormat, PyErr_Format, and PyString_FromFormatV about "%u", "%lu" and "%zu" formats. Since PyString_FromFormat and PyErr_Format have exactly the same rules (both inherited from PyString_FromFormatV), it would be good if someone with more LaTeX Fu changed one of them to just point to the other. Their docs were way out of synch before this patch, and I just did a mass copy+paste to repair that. Not a backport candidate (this is a new feature). ........ r45993 | tim.peters | 2006-05-14 01:31:05 +0200 (Sun, 14 May 2006) | 2 lines Typo repair. ........ r45994 | tim.peters | 2006-05-14 01:33:19 +0200 (Sun, 14 May 2006) | 2 lines Remove lie in new comment. ........ r45995 | ronald.oussoren | 2006-05-14 21:56:34 +0200 (Sun, 14 May 2006) | 11 lines Rework the build system for osx applications: * Don't use xcodebuild for building PythonLauncher, but use a normal unix makefile. This makes it a lot easier to use the same build flags as for the rest of python (e.g. make a universal version of python launcher) * Convert the mac makefile-s to makefile.in-s and use configure to set makefile variables instead of forwarding them as command-line arguments * Add a C version of pythonw, so that you can use '#!/usr/local/bin/pythonw' * Build IDLE.app using bundlebuilder instead of BuildApplet, which will allow easier modification of the bundle contents later on. ........ r45996 | ronald.oussoren | 2006-05-14 22:35:41 +0200 (Sun, 14 May 2006) | 6 lines A first cut at replacing the icons on MacOS X. This replaces all icons by icons based on the new python.org logo. These are also the first icons that are "proper" OSX icons. These icons were created by Jacob Rus. ........ r45997 | ronald.oussoren | 2006-05-14 23:07:41 +0200 (Sun, 14 May 2006) | 3 lines I missed one small detail in my rewrite of the osx build files: the path to the Python.app template. ........ r45998 | martin.v.loewis | 2006-05-15 07:51:36 +0200 (Mon, 15 May 2006) | 2 lines Fix memory leak. ........ r45999 | neal.norwitz | 2006-05-15 08:48:14 +0200 (Mon, 15 May 2006) | 1 line Move items implemented after a2 into the new a3 section ........ r46000 | neal.norwitz | 2006-05-15 09:04:36 +0200 (Mon, 15 May 2006) | 5 lines - Bug #1487966: Fix SystemError with conditional expression in assignment Most of the test_syntax changes are just updating the numbers. ........
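The construct covered by bug #1487966 above is the conditional expression new in 2.5, used on the right-hand side of an assignment; a trivial example of the shape involved:

    verbose = True
    mode = "verbose" if verbose else "quiet"   # the kind of assignment named in the bug report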
r46001 | neal.norwitz | 2006-05-15 09:17:23 +0200 (Mon, 15 May 2006) | 1 line Patch #1488312, Fix memory alignment problem on SPARC in unicode. Will backport ........ r46003 | martin.v.loewis | 2006-05-15 11:22:27 +0200 (Mon, 15 May 2006) | 3 lines Remove bogus DECREF of self. Change __str__() functions to METH_O. Change WindowsError__str__ to use PyTuple_Pack. ........ r46005 | georg.brandl | 2006-05-15 21:30:35 +0200 (Mon, 15 May 2006) | 3 lines [ 1488881 ] tarfile.py: support for file-objects and bz2 (cp. #1488634) ........ r46007 | tim.peters | 2006-05-15 22:44:10 +0200 (Mon, 15 May 2006) | 9 lines ReadDetectFileobjTest: repair Windows disasters by opening the file object in binary mode. The Windows buildbot slaves shouldn't swap themselves to death anymore. However, test_tarfile may still fail because of a temp directory left behind from a previous failing run. Windows buildbot owners may need to remove that directory by hand. ........ r46009 | tim.peters | 2006-05-15 23:32:25 +0200 (Mon, 15 May 2006) | 3 lines test_directory(): Remove the leftover temp directory that's making the Windows buildbots fail test_tarfile. ........ r46010 | martin.v.loewis | 2006-05-16 09:05:37 +0200 (Tue, 16 May 2006) | 4 lines - Test for sys/statvfs.h before including it, as statvfs is present on some OSX installation, but its header file is not. Will backport to 2.4 ........ r46012 | georg.brandl | 2006-05-16 09:38:27 +0200 (Tue, 16 May 2006) | 3 lines Patch #1435422: zlib's compress and decompress objects now have a copy() method. ........ r46015 | andrew.kuchling | 2006-05-16 18:11:54 +0200 (Tue, 16 May 2006) | 1 line Add item ........ r46016 | andrew.kuchling | 2006-05-16 18:27:31 +0200 (Tue, 16 May 2006) | 3 lines PEP 243 has been withdrawn, so don't refer to it any more. The PyPI upload material has been moved into the section on PEP314. ........ r46017 | george.yoshida | 2006-05-16 19:42:16 +0200 (Tue, 16 May 2006) | 2 lines Update for 'ImportWarning' ........ r46018 | george.yoshida | 2006-05-16 20:07:00 +0200 (Tue, 16 May 2006) | 4 lines Mention that Exception is now a subclass of BaseException. Remove a sentence that says that BaseException inherits from BaseException. (I guess this is just a copy & paste mistake.) ........ r46019 | george.yoshida | 2006-05-16 20:26:10 +0200 (Tue, 16 May 2006) | 2 lines Document ImportWarning ........ r46020 | tim.peters | 2006-05-17 01:22:20 +0200 (Wed, 17 May 2006) | 2 lines Whitespace normalization. ........ r46021 | tim.peters | 2006-05-17 01:24:08 +0200 (Wed, 17 May 2006) | 2 lines Text files missing the SVN eol-style property. ........ r46022 | tim.peters | 2006-05-17 03:30:11 +0200 (Wed, 17 May 2006) | 2 lines PyZlib_copy(), PyZlib_uncopy(): Repair leaks on the normal-case path. ........ r46023 | georg.brandl | 2006-05-17 16:06:07 +0200 (Wed, 17 May 2006) | 3 lines Remove misleading comment about type-class unification. ........ r46024 | georg.brandl | 2006-05-17 16:11:36 +0200 (Wed, 17 May 2006) | 3 lines Apply patch #1489784 from Michael Foord. ........ r46025 | georg.brandl | 2006-05-17 16:18:20 +0200 (Wed, 17 May 2006) | 3 lines Fix typo in os.utime docstring (patch #1490189) ........ r46026 | georg.brandl | 2006-05-17 16:26:50 +0200 (Wed, 17 May 2006) | 3 lines Patch #1490224: set time.altzone correctly on Cygwin. ........ r46027 | georg.brandl | 2006-05-17 16:45:06 +0200 (Wed, 17 May 2006) | 4 lines Add global debug flag to cookielib to avoid heavy dependency on the logging module. Resolves #1484758. ........ 
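A short sketch of the new copy() method on zlib compression objects (patch #1435422 above); the payload strings are arbitrary:

    import zlib

    comp = zlib.compressobj()
    head = comp.compress("spam " * 200)
    snapshot = comp.copy()               # independent copy of the compressor state
    stream_a = head + comp.compress("eggs") + comp.flush()
    stream_b = head + snapshot.compress("ham") + snapshot.flush()
    # stream_a and stream_b are both valid streams sharing the common prefix state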
r46028 | georg.brandl | 2006-05-17 16:56:04 +0200 (Wed, 17 May 2006) | 3 lines Patch #1486962: Several bugs in the turtle Tk demo module were fixed and several features added, such as speed and geometry control. ........ r46029 | georg.brandl | 2006-05-17 17:17:00 +0200 (Wed, 17 May 2006) | 4 lines Delay-import some large modules to speed up urllib2 import. (fixes #1484793). ........ r46030 | georg.brandl | 2006-05-17 17:51:16 +0200 (Wed, 17 May 2006) | 3 lines Patch #1180296: improve locale string formatting functions ........ r46032 | tim.peters | 2006-05-18 04:06:40 +0200 (Thu, 18 May 2006) | 2 lines Whitespace normalization. ........ r46033 | georg.brandl | 2006-05-18 08:11:19 +0200 (Thu, 18 May 2006) | 3 lines Amendments to patch #1484695. ........ r46034 | georg.brandl | 2006-05-18 08:18:06 +0200 (Thu, 18 May 2006) | 3 lines Remove unused import. ........ r46035 | georg.brandl | 2006-05-18 08:33:27 +0200 (Thu, 18 May 2006) | 3 lines Fix test_locale for platforms without a default thousands separator. ........ r46036 | neal.norwitz | 2006-05-18 08:51:46 +0200 (Thu, 18 May 2006) | 1 line Little cleanup ........ r46037 | georg.brandl | 2006-05-18 09:01:27 +0200 (Thu, 18 May 2006) | 4 lines Bug #1462152: file() now checks more thoroughly for invalid mode strings and removes a possible "U" before passing the mode to the C library function. ........ r46038 | georg.brandl | 2006-05-18 09:20:05 +0200 (Thu, 18 May 2006) | 3 lines Bug #1490688: properly document %e, %f, %g format subtleties. ........ r46039 | vinay.sajip | 2006-05-18 09:28:58 +0200 (Thu, 18 May 2006) | 1 line Changed status from "beta" to "production"; since logging has been part of the stdlib since 2.3, it should be safe to make this assertion ;-) ........ r46040 | ronald.oussoren | 2006-05-18 11:04:15 +0200 (Thu, 18 May 2006) | 2 lines Fix some minor issues with the generated application bundles on MacOSX ........ r46041 | andrew.kuchling | 2006-05-19 02:03:55 +0200 (Fri, 19 May 2006) | 1 line Typo fix; add clarifying word ........ r46044 | neal.norwitz | 2006-05-19 08:31:23 +0200 (Fri, 19 May 2006) | 3 lines Fix #132 from Coverity, retval could have been derefed if a continue inside a try failed. ........ r46045 | neal.norwitz | 2006-05-19 08:43:50 +0200 (Fri, 19 May 2006) | 2 lines Fix #1474677, non-keyword argument following keyword. ........ r46046 | neal.norwitz | 2006-05-19 09:00:58 +0200 (Fri, 19 May 2006) | 4 lines Bug/Patch #1481770: Use .so extension for shared libraries on HP-UX for ia64. I suppose this could be backported if anyone cares. ........ r46047 | neal.norwitz | 2006-05-19 09:05:01 +0200 (Fri, 19 May 2006) | 7 lines Oops, I forgot to include this file in the last commit (46046): Bug/Patch #1481770: Use .so extension for shared libraries on HP-UX for ia64. I suppose this could be backported if anyone cares. ........ r46050 | ronald.oussoren | 2006-05-19 20:17:31 +0200 (Fri, 19 May 2006) | 6 lines * Change working directory to the users home directory, that makes the file open/save dialogs more useable. * Don't use argv emulator, its not needed for idle. ........ r46052 | tim.peters | 2006-05-19 21:16:34 +0200 (Fri, 19 May 2006) | 2 lines Whitespace normalization. ........ r46054 | ronald.oussoren | 2006-05-20 08:17:01 +0200 (Sat, 20 May 2006) | 9 lines Fix bug #1000914 (again). This patches a file that is generated by bgen, however the code is now the same as a current copy of bgen would generate. Without this patch most types in the Carbon.CF module are unusable. 
I haven't managed to coax bgen into generating a complete copy of _CFmodule.c yet :-(, hence the manual patching. ........ r46055 | george.yoshida | 2006-05-20 17:36:19 +0200 (Sat, 20 May 2006) | 3 lines - markup fix - add clarifying words ........ r46057 | george.yoshida | 2006-05-20 18:29:14 +0200 (Sat, 20 May 2006) | 3 lines - Add 'as' and 'with' as new keywords in 2.5. - Regenerate keyword lists with reswords.py. ........ r46058 | george.yoshida | 2006-05-20 20:07:26 +0200 (Sat, 20 May 2006) | 2 lines Apply patch #1492147 from Mike Foord. ........ r46059 | andrew.kuchling | 2006-05-20 21:25:16 +0200 (Sat, 20 May 2006) | 1 line Minor edits ........ r46061 | george.yoshida | 2006-05-21 06:22:59 +0200 (Sun, 21 May 2006) | 2 lines Fix the TeX compile error. ........ r46062 | george.yoshida | 2006-05-21 06:40:32 +0200 (Sun, 21 May 2006) | 2 lines Apply patch #1492255 from Mike Foord. ........ r46063 | martin.v.loewis | 2006-05-22 10:48:14 +0200 (Mon, 22 May 2006) | 1 line Patch 1490384: New Icons for the PC build. ........ r46064 | martin.v.loewis | 2006-05-22 11:15:18 +0200 (Mon, 22 May 2006) | 1 line Patch #1492356: Port to Windows CE (patch set 1). ........ r46065 | tim.peters | 2006-05-22 13:29:41 +0200 (Mon, 22 May 2006) | 4 lines Define SIZEOF_{DOUBLE,FLOAT} on Windows. Else Michael Hudson's nice gimmicks for IEEE special values (infinities, NaNs) don't work. ........ r46070 | bob.ippolito | 2006-05-22 16:31:24 +0200 (Mon, 22 May 2006) | 2 lines GzipFile.readline performance improvement (~30-40%), patch #1281707 ........ r46071 | bob.ippolito | 2006-05-22 17:22:46 +0200 (Mon, 22 May 2006) | 1 line Revert gzip readline performance patch #1281707 until a more generic performance improvement can be found ........ r46073 | fredrik.lundh | 2006-05-22 17:35:12 +0200 (Mon, 22 May 2006) | 4 lines docstring tweaks: count counts non-overlapping substrings, not total number of occurrences ........ r46075 | bob.ippolito | 2006-05-22 17:59:12 +0200 (Mon, 22 May 2006) | 1 line Apply revised patch for GzipFile.readline performance #1281707 ........ r46076 | fredrik.lundh | 2006-05-22 18:29:30 +0200 (Mon, 22 May 2006) | 3 lines needforspeed: speed up unicode repeat, unicode string copy ........ r46079 | fredrik.lundh | 2006-05-22 19:12:58 +0200 (Mon, 22 May 2006) | 4 lines needforspeed: use memcpy for "long" strings; use a better algorithm for long repeats. ........ r46084 | tim.peters | 2006-05-22 21:17:04 +0200 (Mon, 22 May 2006) | 7 lines PyUnicode_Join(): Recent code changes introduced new compiler warnings on Windows (signed vs unsigned mismatch in comparisons). Cleaned that up by switching more locals to Py_ssize_t. Simplified overflow checking (it can _be_ simpler because while these things are declared as Py_ssize_t, they should in fact never be negative). ........ r46085 | tim.peters | 2006-05-23 07:47:16 +0200 (Tue, 23 May 2006) | 3 lines unicode_repeat(): Change type of local to Py_ssize_t, since that's what it should be. ........ r46094 | fredrik.lundh | 2006-05-23 12:10:57 +0200 (Tue, 23 May 2006) | 3 lines needforspeed: check first *and* last character before doing a full memcmp ........ r46095 | fredrik.lundh | 2006-05-23 12:12:21 +0200 (Tue, 23 May 2006) | 4 lines needforspeed: fixed unicode "in" operator to use same implementation approach as find/index ........ r46096 | richard.jones | 2006-05-23 12:37:38 +0200 (Tue, 23 May 2006) | 7 lines Merge from rjones-funccall branch.
Applied patch zombie-frames-2.diff from sf patch 876206 with updates for Python 2.5 and also modified to retain the free_list to avoid the 67% slow-down in pybench recursion test. 5% speed up in function call pybench. ........ r46098 | ronald.oussoren | 2006-05-23 13:04:24 +0200 (Tue, 23 May 2006) | 2 lines Avoid creating a mess when installing a framework for the second time. ........ r46101 | georg.brandl | 2006-05-23 13:17:21 +0200 (Tue, 23 May 2006) | 3 lines PyErr_NewException now accepts a tuple of base classes as its "base" parameter. ........ r46103 | ronald.oussoren | 2006-05-23 13:47:16 +0200 (Tue, 23 May 2006) | 3 lines Disable linking extensions with -lpython2.5 for darwin. This should fix bug #1487105. ........ r46104 | ronald.oussoren | 2006-05-23 14:01:11 +0200 (Tue, 23 May 2006) | 6 lines Patch #1488098. This patch makes it possible to create a universal build on OSX 10.4 and use the result to build extensions on 10.3. It also makes it possible to override the '-arch' and '-isysroot' compiler arguments for specific extensions. ........ r46108 | andrew.kuchling | 2006-05-23 14:44:36 +0200 (Tue, 23 May 2006) | 1 line Add some items; mention the sprint ........ r46109 | andrew.kuchling | 2006-05-23 14:47:01 +0200 (Tue, 23 May 2006) | 1 line Mention string improvements ........ r46110 | andrew.kuchling | 2006-05-23 14:49:35 +0200 (Tue, 23 May 2006) | 4 lines Use 'speed' instead of 'performance', because I agree with the argument at http://zestyping.livejournal.com/193260.html that 'performance' really means something more general. ........ r46113 | ronald.oussoren | 2006-05-23 17:09:57 +0200 (Tue, 23 May 2006) | 2 lines An improved script for building the binary distribution on MacOSX. ........ r46128 | richard.jones | 2006-05-23 20:28:17 +0200 (Tue, 23 May 2006) | 3 lines Applied patch 1337051 by Neal Norwitz, saving 4 ints on frame objects. ........ r46129 | richard.jones | 2006-05-23 20:32:11 +0200 (Tue, 23 May 2006) | 1 line fix broken merge ........ r46130 | bob.ippolito | 2006-05-23 20:41:17 +0200 (Tue, 23 May 2006) | 1 line Update Misc/NEWS for gzip patch #1281707 ........ r46131 | bob.ippolito | 2006-05-23 20:43:47 +0200 (Tue, 23 May 2006) | 1 line Update Misc/NEWS for gzip patch #1281707 ........ r46132 | fredrik.lundh | 2006-05-23 20:44:25 +0200 (Tue, 23 May 2006) | 7 lines needforspeed: use append+reverse for rsplit, use "bloom filters" to speed up splitlines and strip with charsets; etc. rsplit is now as fast as split in all our tests (reverse takes no time at all), and splitlines() is nearly as fast as a plain split("\n") in our tests. and we're not done yet... ;-) ........ r46133 | tim.peters | 2006-05-23 20:45:30 +0200 (Tue, 23 May 2006) | 38 lines Bug #1334662 / patch #1335972: int(string, base) wrong answers. In rare cases of strings specifying true values near sys.maxint, and oddball bases (not decimal or a power of 2), int(string, base) could deliver insane answers. This repairs all such problems, and also speeds string->int significantly. On my box, here are % speedups for decimal strings of various lengths:
length  speedup
------  -------
 1      12.4%
 2      15.7%
 3      20.6%
 4      28.1%
 5      33.2%
 6      37.5%
 7      41.9%
 8      46.3%
 9      51.2%
10      19.5%
11      19.9%
12      23.9%
13      23.7%
14      23.3%
15      24.9%
16      25.3%
17      28.3%
18      27.9%
19      35.7%
Note that the difference between 9 and 10 is the difference between short and long Python ints on a 32-bit box. The patch doesn't actually do anything to speed conversion to long: the speedup is due to detecting "unsigned long" overflow more quickly.
This is a bugfix candidate, but it's a non-trivial patch and it would be painful to separate the "bug fix" from the "speed up" parts. ........ r46134 | bob.ippolito | 2006-05-23 20:46:41 +0200 (Tue, 23 May 2006) | 1 line Patch #1493701: performance enhancements for struct module. ........ r46136 | andrew.kuchling | 2006-05-23 21:00:45 +0200 (Tue, 23 May 2006) | 1 line Remove duplicate item ........ r46141 | bob.ippolito | 2006-05-23 21:09:51 +0200 (Tue, 23 May 2006) | 1 line revert #1493701 ........ r46142 | bob.ippolito | 2006-05-23 21:11:34 +0200 (Tue, 23 May 2006) | 1 line patch #1493701: performance enhancements for struct module ........ r46144 | bob.ippolito | 2006-05-23 21:12:41 +0200 (Tue, 23 May 2006) | 1 line patch #1493701: performance enhancements for struct module ........ r46148 | bob.ippolito | 2006-05-23 21:25:52 +0200 (Tue, 23 May 2006) | 1 line fix linking issue, warnings, in struct ........ r46149 | andrew.kuchling | 2006-05-23 21:29:38 +0200 (Tue, 23 May 2006) | 1 line Add two items ........ r46150 | bob.ippolito | 2006-05-23 21:31:23 +0200 (Tue, 23 May 2006) | 1 line forward declaration for PyStructType ........ r46151 | bob.ippolito | 2006-05-23 21:32:25 +0200 (Tue, 23 May 2006) | 1 line fix typo in _struct ........ r46152 | andrew.kuchling | 2006-05-23 21:32:35 +0200 (Tue, 23 May 2006) | 1 line Add item ........ r46153 | tim.peters | 2006-05-23 21:34:37 +0200 (Tue, 23 May 2006) | 3 lines Get the Windows build working again (recover from `struct` module changes). ........ r46155 | fredrik.lundh | 2006-05-23 21:47:35 +0200 (Tue, 23 May 2006) | 3 lines return 0 on misses, not -1. ........ r46156 | tim.peters | 2006-05-23 23:51:35 +0200 (Tue, 23 May 2006) | 4 lines test_struct grew weird behavior under regrtest.py -R, due to a module-level cache. Clearing the cache should make it stop showing up in refleak reports. ........ r46157 | tim.peters | 2006-05-23 23:54:23 +0200 (Tue, 23 May 2006) | 2 lines Whitespace normalization. ........ r46158 | tim.peters | 2006-05-23 23:55:53 +0200 (Tue, 23 May 2006) | 2 lines Add missing svn:eol-style property to text files. ........ r46161 | fredrik.lundh | 2006-05-24 12:20:36 +0200 (Wed, 24 May 2006) | 3 lines use Py_ssize_t for string indexes (thanks, neal!) ........ r46173 | fredrik.lundh | 2006-05-24 16:28:11 +0200 (Wed, 24 May 2006) | 14 lines needforspeed: use "fastsearch" for count and findstring helpers. this results in a 2.5x speedup on the stringbench count tests, and a 20x (!) speedup on the stringbench search/find/contains test, compared to 2.5a2. for more on the algorithm, see: http://effbot.org/zone/stringlib.htm if you get weird results, you can disable the new algoritm by undefining USE_FAST in Objects/unicodeobject.c. enjoy /F ........ r46182 | fredrik.lundh | 2006-05-24 17:11:01 +0200 (Wed, 24 May 2006) | 3 lines needforspeedindeed: use fastsearch also for __contains__ ........ r46184 | bob.ippolito | 2006-05-24 17:32:06 +0200 (Wed, 24 May 2006) | 1 line refactor unpack, add unpack_from ........ r46189 | fredrik.lundh | 2006-05-24 18:35:18 +0200 (Wed, 24 May 2006) | 4 lines needforspeed: refactored the replace code slightly; special-case constant-length changes; use fastsearch to locate the first match. ........ r46198 | andrew.dalke | 2006-05-24 20:55:37 +0200 (Wed, 24 May 2006) | 10 lines Added a slew of test for string replace, based various corner cases from the Need For Speed sprint coding. Includes commented out overflow tests which will be uncommented once the code is fixed. 
This test will break the 8-bit string tests because "".replace("", "A") == "" when it should == "A" We have a fix for it, which should be added tomorrow. ........ r46200 | tim.peters | 2006-05-24 22:27:18 +0200 (Wed, 24 May 2006) | 2 lines We can't leave the checked-in tests broken. ........ r46201 | tim.peters | 2006-05-24 22:29:44 +0200 (Wed, 24 May 2006) | 2 lines Whitespace normalization. ........ r46202 | tim.peters | 2006-05-24 23:00:45 +0200 (Wed, 24 May 2006) | 4 lines Disable the damn empty-string replace test -- it can't be make to pass now for unicode if it passes for str, or vice versa. ........ r46203 | tim.peters | 2006-05-24 23:10:40 +0200 (Wed, 24 May 2006) | 58 lines Heavily fiddled variant of patch #1442927: PyLong_FromString optimization. ``long(str, base)`` is now up to 6x faster for non-power-of-2 bases. The largest speedup is for inputs with about 1000 decimal digits. Conversion from non-power-of-2 bases remains quadratic-time in the number of input digits (it was and remains linear-time for bases 2, 4, 8, 16 and 32). Speedups at various lengths for decimal inputs, comparing 2.4.3 with current trunk. Note that it's actually a bit slower for 1-digit strings: len speedup ---- ------- 1 -4.5% 2 4.6% 3 8.3% 4 12.7% 5 16.9% 6 28.6% 7 35.5% 8 44.3% 9 46.6% 10 55.3% 11 65.7% 12 77.7% 13 73.4% 14 75.3% 15 85.2% 16 103.0% 17 95.1% 18 112.8% 19 117.9% 20 128.3% 30 174.5% 40 209.3% 50 236.3% 60 254.3% 70 262.9% 80 295.8% 90 297.3% 100 324.5% 200 374.6% 300 403.1% 400 391.1% 500 388.7% 600 440.6% 700 468.7% 800 498.0% 900 507.2% 1000 501.2% 2000 450.2% 3000 463.2% 4000 452.5% 5000 440.6% 6000 439.6% 7000 424.8% 8000 418.1% 9000 417.7% ........ r46204 | andrew.kuchling | 2006-05-25 02:23:03 +0200 (Thu, 25 May 2006) | 1 line Minor edits; add an item ........ r46205 | fred.drake | 2006-05-25 04:42:25 +0200 (Thu, 25 May 2006) | 3 lines fix broken links in PDF (SF patch #1281291, contributed by Rory Yorke) ........ r46208 | walter.doerwald | 2006-05-25 10:53:28 +0200 (Thu, 25 May 2006) | 2 lines Replace tab inside comment with space. ........ r46209 | thomas.wouters | 2006-05-25 13:25:51 +0200 (Thu, 25 May 2006) | 4 lines Fix #1488915, Multiple dots in relative import statement raise SyntaxError. ........ r46210 | thomas.wouters | 2006-05-25 13:26:25 +0200 (Thu, 25 May 2006) | 5 lines Update graminit.c for the fix for #1488915, Multiple dots in relative import statement raise SyntaxError, and add testcase. ........ r46211 | andrew.kuchling | 2006-05-25 14:27:59 +0200 (Thu, 25 May 2006) | 1 line Add entry; and fix a typo ........ r46214 | fredrik.lundh | 2006-05-25 17:22:03 +0200 (Thu, 25 May 2006) | 7 lines needforspeed: speed up upper and lower for 8-bit string objects. (the unicode versions of these are still 2x faster on windows, though...) based on work by Andrew Dalke, with tweaks by yours truly. ........ r46216 | fredrik.lundh | 2006-05-25 17:49:45 +0200 (Thu, 25 May 2006) | 5 lines needforspeed: make new upper/lower work properly for single-character strings too... (thanks to georg brandl for spotting the exact problem faster than anyone else) ........ r46217 | kristjan.jonsson | 2006-05-25 17:53:30 +0200 (Thu, 25 May 2006) | 1 line Added a new macro, Py_IS_FINITE(X). On windows there is an intrinsic for this and it is more efficient than to use !Py_IS_INFINITE(X) && !Py_IS_NAN(X). No change on other platforms ........ 
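As a rough, illustrative sketch of the conversions r46203 speeds up (Python 2.x syntax; the digit strings below are made up, and the timings above are the patch author's, not reproduced by this example):

    # Non-power-of-2 bases (e.g. 10) stay quadratic-time in the digit count,
    # just with a much smaller constant; bases 2, 4, 8, 16 and 32 stay linear.
    decimal_digits = "123456789" * 112        # roughly a 1000-digit decimal string
    hex_digits = "0123456789abcdef" * 63      # roughly a 1000-digit hex string
    as_long = long(decimal_digits, 10)        # the quadratic (but now faster) path
    as_long_hex = long(hex_digits, 16)        # the linear-time path
    assert long(str(as_long), 10) == as_long  # conversion round-trips exactly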
r46219 | fredrik.lundh | 2006-05-25 18:10:12 +0200 (Thu, 25 May 2006) | 4 lines needforspeed: _toupper/_tolower is a SUSv2 thing; fall back on ISO C versions if they're not defined. ........ r46220 | andrew.kuchling | 2006-05-25 18:23:15 +0200 (Thu, 25 May 2006) | 1 line Fix comment typos ........ r46221 | andrew.dalke | 2006-05-25 18:30:52 +0200 (Thu, 25 May 2006) | 2 lines Added tests for implementation error we came up with in the need for speed sprint. ........ r46222 | andrew.kuchling | 2006-05-25 18:34:54 +0200 (Thu, 25 May 2006) | 1 line Fix another typo ........ r46223 | kristjan.jonsson | 2006-05-25 18:39:27 +0200 (Thu, 25 May 2006) | 1 line Fix incorrect documentation for the Py_IS_FINITE(X) macro. ........ r46224 | fredrik.lundh | 2006-05-25 18:46:54 +0200 (Thu, 25 May 2006) | 3 lines needforspeed: check for overflow in replace (from Andrew Dalke) ........ r46226 | fredrik.lundh | 2006-05-25 19:08:14 +0200 (Thu, 25 May 2006) | 5 lines needforspeed: new replace implementation by Andrew Dalke. replace is now about 3x faster on my machine, for the replace tests from stringbench. ........ r46227 | tim.peters | 2006-05-25 19:34:03 +0200 (Thu, 25 May 2006) | 5 lines A new table to help string->integer conversion was added yesterday to both mystrtoul.c and longobject.c. Share the table instead. Also cut its size by 64 entries (they had been used for an inscrutable trick originally, but the code no longer tries to use that trick). ........ r46229 | andrew.dalke | 2006-05-25 19:53:00 +0200 (Thu, 25 May 2006) | 11 lines Fixed problem identified by Georg. The special-case in-place code for replace made a copy of the string using PyString_FromStringAndSize(s, n) and modified the copied string in-place. However, 1 (and 0) character strings are shared from a cache. This caused "A".replace("A", "a") to change the cached version of "A" -- used by everyone. Now we make the copy with NULL as the string and do the memcpy manually. I've added regression tests to check if this happens in the future. Perhaps there should be a PyString_Copy for this case? ........ r46230 | fredrik.lundh | 2006-05-25 19:55:31 +0200 (Thu, 25 May 2006) | 4 lines needforspeed: use "fastsearch" for count. this results in a 3x speedup for the related stringbench tests. ........ r46231 | andrew.dalke | 2006-05-25 20:03:25 +0200 (Thu, 25 May 2006) | 4 lines Code had returned an ssize_t, upcast to long, then converted with PyInt_FromLong. Now using PyInt_FromSsize_t. ........ r46233 | andrew.kuchling | 2006-05-25 20:11:16 +0200 (Thu, 25 May 2006) | 1 line Comment typo ........ r46234 | andrew.dalke | 2006-05-25 20:18:39 +0200 (Thu, 25 May 2006) | 4 lines Added overflow test for adding two (very) large strings where the new string is over max Py_ssize_t. I have no way to test it on my box or any box I have access to. At least it doesn't break anything. ........ r46235 | bob.ippolito | 2006-05-25 20:20:23 +0200 (Thu, 25 May 2006) | 1 line Faster path for PyLong_FromLongLong, using PyLong_FromLong algorithm ........ r46238 | georg.brandl | 2006-05-25 20:44:09 +0200 (Thu, 25 May 2006) | 3 lines Guard the _active.remove() call to avoid errors when there is no _active list. ........ r46239 | fredrik.lundh | 2006-05-25 20:44:29 +0200 (Thu, 25 May 2006) | 4 lines needforspeed: use fastsearch also for find/index and contains. the related tests are now about 10x faster. ........ 
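A tiny Python-level sketch (not the actual regression test from r46229) of the invariant that test guards: replacing the only character of a cached one-character string must not disturb the cache.

    # Before the fix, the in-place fast path copied the source with
    # PyString_FromStringAndSize(s, n), which for 1-character strings handed
    # back the shared cached object, so mutating the "copy" corrupted "A"
    # for every user of that literal.
    kept = "A"
    assert "A".replace("A", "a") == "a"
    assert kept == "A" and kept == chr(65)   # the cached "A" is untouched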
r46240 | bob.ippolito | 2006-05-25 20:44:50 +0200 (Thu, 25 May 2006) | 1 line Struct now unpacks to PY_LONG_LONG directly when possible, also include #ifdef'ed out code that will return int instead of long when in bounds (not active since it's an API and doc change) ........ r46241 | jack.diederich | 2006-05-25 20:47:15 +0200 (Thu, 25 May 2006) | 1 line * eliminate warning by reverting tmp_s type to 'const char*' ........ r46242 | bob.ippolito | 2006-05-25 21:03:19 +0200 (Thu, 25 May 2006) | 1 line Fix Cygwin compiler issue ........ r46243 | bob.ippolito | 2006-05-25 21:15:27 +0200 (Thu, 25 May 2006) | 1 line fix a struct regression where long would be returned for short unsigned integers ........ r46244 | georg.brandl | 2006-05-25 21:15:31 +0200 (Thu, 25 May 2006) | 4 lines Replace PyObject_CallFunction calls with only object args with PyObject_CallFunctionObjArgs, which is 30% faster. ........ r46245 | fredrik.lundh | 2006-05-25 21:19:05 +0200 (Thu, 25 May 2006) | 3 lines needforspeed: use insert+reverse instead of append ........ r46246 | bob.ippolito | 2006-05-25 21:33:38 +0200 (Thu, 25 May 2006) | 1 line Use LONG_MIN and LONG_MAX to check Python integer bounds instead of the incorrect INT_MIN and INT_MAX ........ r46248 | bob.ippolito | 2006-05-25 21:56:56 +0200 (Thu, 25 May 2006) | 1 line Use faster struct pack/unpack functions for the endian table that matches the host's ........ r46249 | bob.ippolito | 2006-05-25 21:59:56 +0200 (Thu, 25 May 2006) | 1 line enable darwin/x86 support for libffi and hence ctypes (doesn't yet support --enable-universalsdk) ........ r46252 | georg.brandl | 2006-05-25 22:28:10 +0200 (Thu, 25 May 2006) | 4 lines Someone seems to just have copy-pasted the docs of tp_compare to tp_richcompare ;) ........ r46253 | brett.cannon | 2006-05-25 22:44:08 +0200 (Thu, 25 May 2006) | 2 lines Swap out bare malloc()/free() use for PyMem_MALLOC()/PyMem_FREE() . ........ r46254 | bob.ippolito | 2006-05-25 22:52:38 +0200 (Thu, 25 May 2006) | 1 line squelch gcc4 darwin/x86 compiler warnings ........ r46255 | bob.ippolito | 2006-05-25 23:09:45 +0200 (Thu, 25 May 2006) | 1 line fix test_float regression and 64-bit size mismatch issue ........ r46256 | georg.brandl | 2006-05-25 23:11:56 +0200 (Thu, 25 May 2006) | 3 lines Add a x-ref to newer calling APIs. ........ r46257 | ronald.oussoren | 2006-05-25 23:30:54 +0200 (Thu, 25 May 2006) | 2 lines Fix minor typo in prep_cif.c ........ r46259 | brett.cannon | 2006-05-25 23:33:11 +0200 (Thu, 25 May 2006) | 4 lines Change test_values so that it compares the lowercasing of group names since getgrall() can return all lowercase names while getgrgid() returns proper casing. Discovered on Ubuntu 5.04 (custom). ........ r46261 | tim.peters | 2006-05-25 23:50:17 +0200 (Thu, 25 May 2006) | 7 lines Some Win64 pre-release in 2000 didn't support QueryPerformanceCounter(), but we believe Win64 does support it now. So use in time.clock(). It would be peachy if someone with a Win64 box tried this ;-) ........ r46262 | tim.peters | 2006-05-25 23:52:19 +0200 (Thu, 25 May 2006) | 2 lines Whitespace normalization. ........ r46263 | bob.ippolito | 2006-05-25 23:58:05 +0200 (Thu, 25 May 2006) | 1 line Add missing files from x86 darwin ctypes patch ........ r46264 | brett.cannon | 2006-05-26 00:00:14 +0200 (Fri, 26 May 2006) | 2 lines Move over to use of METH_O and METH_NOARGS. ........ 
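For illustration only (not code from the patch), the user-visible reason the struct bounds check in r46246 has to use LONG_MIN/LONG_MAX rather than INT_MIN/INT_MAX: with native formats, 'l' is the platform's C long, which on LP64 systems is wider than a C int.

    import struct

    # On an LP64 platform the native 'l' format is 8 bytes, so values well
    # beyond the 32-bit INT_MAX must still pack and unpack cleanly.
    if struct.calcsize("l") == 8:
        packed = struct.pack("l", 2 ** 40)
        assert struct.unpack("l", packed) == (2 ** 40,)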
r46265 | tim.peters | 2006-05-26 00:25:25 +0200 (Fri, 26 May 2006) | 3 lines Repair idiot typo, and complete the job of trying to use the Windows time.clock() implementation on Win64. ........ r46266 | tim.peters | 2006-05-26 00:28:46 +0200 (Fri, 26 May 2006) | 9 lines Patch #1494387: SVN longobject.c compiler warnings The SIGCHECK macro defined here has always been bizarre, but it apparently causes compiler warnings on "Sun Studio 11". I believe the warnings are bogus, but it doesn't hurt to make the macro definition saner. Bugfix candidate (but I'm not going to bother). ........ r46268 | fredrik.lundh | 2006-05-26 01:27:53 +0200 (Fri, 26 May 2006) | 8 lines needforspeed: partition for 8-bit strings. for some simple tests, this is on par with a corresponding find, and nearly twice as fast as split(sep, 1). full tests, a unicode version, and documentation will follow tomorrow. ........ r46271 | andrew.kuchling | 2006-05-26 03:46:22 +0200 (Fri, 26 May 2006) | 1 line Add Soc student ........ r46272 | ronald.oussoren | 2006-05-26 10:41:25 +0200 (Fri, 26 May 2006) | 3 lines Without this patch OSX users couldn't add new help sources because the code tried to update one item in a tuple. ........ r46273 | fredrik.lundh | 2006-05-26 10:54:28 +0200 (Fri, 26 May 2006) | 5 lines needforspeed: partition implementation, part two. feel free to improve the documentation and the docstrings. ........ r46274 | georg.brandl | 2006-05-26 11:05:54 +0200 (Fri, 26 May 2006) | 3 lines Clarify docs for str.partition(). ........ r46278 | fredrik.lundh | 2006-05-26 11:46:59 +0200 (Fri, 26 May 2006) | 5 lines needforspeed: use METH_O for argument handling, which made partition some ~15% faster for the current tests (which is noticeably faster than a corresponding find call). thanks to neal-who-never-sleeps for the tip. ........ r46280 | fredrik.lundh | 2006-05-26 12:27:17 +0200 (Fri, 26 May 2006) | 5 lines needforspeed: use Py_ssize_t for the fastsearch counter and skip length (thanks, neal!). and yes, I've verified that this doesn't slow things down ;-) ........ r46285 | andrew.dalke | 2006-05-26 13:11:38 +0200 (Fri, 26 May 2006) | 2 lines Added a few more test cases for whitespace split. These strings have leading whitespace. ........ r46286 | jack.diederich | 2006-05-26 13:15:17 +0200 (Fri, 26 May 2006) | 1 line use Py_ssize_t in places that may need it ........ r46287 | andrew.dalke | 2006-05-26 13:15:22 +0200 (Fri, 26 May 2006) | 2 lines Added split whitespace checks for characters other than space. ........ r46288 | ronald.oussoren | 2006-05-26 13:17:55 +0200 (Fri, 26 May 2006) | 2 lines Fix buglet in postinstall script, it would generate an invalid .cshrc file. ........ r46290 | georg.brandl | 2006-05-26 13:26:11 +0200 (Fri, 26 May 2006) | 3 lines Add "partition" to UserString. ........ r46291 | fredrik.lundh | 2006-05-26 13:29:39 +0200 (Fri, 26 May 2006) | 5 lines needforspeed: added Py_LOCAL macro, based on the LOCAL macro used for SRE and others. applied Py_LOCAL to relevant portion of ceval, which gives a 1-2% speedup on my machine. ymmv. ........ r46292 | jack.diederich | 2006-05-26 13:37:20 +0200 (Fri, 26 May 2006) | 1 line when generating python code prefer to generate valid python code ........ r46293 | fredrik.lundh | 2006-05-26 13:38:15 +0200 (Fri, 26 May 2006) | 3 lines use Py_LOCAL also for string and unicode objects ........ 
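A short usage sketch (not from the patch) of the semantics the new method is documented with in r46274:

    # partition(sep) always returns a 3-tuple: the text before the first
    # occurrence of sep, sep itself, and everything after it.
    assert "key=value=more".partition("=") == ("key", "=", "value=more")
    # When the separator is missing, the original string comes back first,
    # followed by two empty strings -- handy for unconditional unpacking.
    head, sep, tail = "no separator here".partition("=")
    assert (head, sep, tail) == ("no separator here", "", "")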
r46294 | ronald.oussoren | 2006-05-26 13:38:39 +0200 (Fri, 26 May 2006) | 12 lines - Search the sqlite specific search directories after the normal include directories when looking for the version of sqlite to use. - On OSX: * Extract additional include and link directories from the CFLAGS and LDFLAGS, if the user has bothered to specify them we might as wel use them. * Add '-Wl,-search_paths_first' to the extra_link_args for readline and sqlite. This makes it possible to use a static library to override the system provided dynamic library. ........ r46295 | ronald.oussoren | 2006-05-26 13:43:26 +0200 (Fri, 26 May 2006) | 6 lines Integrate installing a framework in the 'make install' target. Until now users had to use 'make frameworkinstall' to install python when it is configured with '--enable-framework'. This tends to confuse users that don't hunt for readme files hidden in platform specific directories :-) ........ r46297 | fredrik.lundh | 2006-05-26 13:54:04 +0200 (Fri, 26 May 2006) | 4 lines needforspeed: added PY_LOCAL_AGGRESSIVE macro to enable "aggressive" LOCAL inlining; also added some missing whitespace ........ r46298 | andrew.kuchling | 2006-05-26 14:01:44 +0200 (Fri, 26 May 2006) | 1 line Typo fixes ........ r46299 | fredrik.lundh | 2006-05-26 14:01:49 +0200 (Fri, 26 May 2006) | 4 lines Py_LOCAL shouldn't be used for data; it works for some .NET 2003 compilers, but Trent's copy thinks that it's an anachronism... ........ r46300 | martin.blais | 2006-05-26 14:03:27 +0200 (Fri, 26 May 2006) | 12 lines Support for buffer protocol for socket and struct. * Added socket.recv_buf() and socket.recvfrom_buf() methods, that use the buffer protocol (send and sendto already did). * Added struct.pack_to(), that is the corresponding buffer compatible method to unpack_from(). * Fixed minor typos in arraymodule. ........ r46302 | ronald.oussoren | 2006-05-26 14:23:20 +0200 (Fri, 26 May 2006) | 6 lines - Remove previous version of the binary distribution script for OSX - Some small bugfixes for the IDLE.app wrapper - Tweaks to build-installer to ensure that python gets build in the right way, including sqlite3. - Updated readme files ........ r46305 | tim.peters | 2006-05-26 14:26:21 +0200 (Fri, 26 May 2006) | 2 lines Whitespace normalization. ........ r46307 | andrew.dalke | 2006-05-26 14:28:15 +0200 (Fri, 26 May 2006) | 7 lines I like tests. The new split functions use a preallocated list. Added tests which exceed the preallocation size, to exercise list appends/resizes. Also added more edge case tests. ........ r46308 | andrew.dalke | 2006-05-26 14:31:00 +0200 (Fri, 26 May 2006) | 2 lines Test cases for off-by-one errors in string split with multicharacter pattern. ........ r46309 | tim.peters | 2006-05-26 14:31:20 +0200 (Fri, 26 May 2006) | 2 lines Whitespace normalization. ........ r46313 | andrew.kuchling | 2006-05-26 14:39:48 +0200 (Fri, 26 May 2006) | 1 line Add str.partition() ........ r46314 | bob.ippolito | 2006-05-26 14:52:53 +0200 (Fri, 26 May 2006) | 1 line quick hack to fix busted binhex test ........ r46316 | andrew.dalke | 2006-05-26 15:05:55 +0200 (Fri, 26 May 2006) | 2 lines Added more rstrip tests, including for prealloc'ed arrays ........ r46320 | bob.ippolito | 2006-05-26 15:15:44 +0200 (Fri, 26 May 2006) | 1 line fix #1229380 No struct.pack exception for some out of range integers ........ r46325 | tim.peters | 2006-05-26 15:39:17 +0200 (Fri, 26 May 2006) | 2 lines Use open() to open files (was using file()). ........ 
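A small sketch (illustrative, not from the patch) of the buffer-oriented unpack_from() that r46300's pack_to() is described as mirroring; the format strings and values here are made up.

    import struct

    # unpack_from(fmt, buf, offset=0) decodes directly at an offset inside a
    # buffer-protocol object, without first slicing out a temporary string.
    record = struct.pack(">HHI", 1, 2, 300)
    assert struct.unpack_from(">HH", record) == (1, 2)    # offset defaults to 0
    assert struct.unpack_from(">I", record, 4) == (300,)  # skip the two shorts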
r46327 | andrew.dalke | 2006-05-26 16:00:45 +0200 (Fri, 26 May 2006) | 37 lines Changes to string.split/rsplit on whitespace to preallocate space in the results list. Originally it allocated 0 items and used the list growth during append. Now it preallocates 12 items so the first few appends don't need list reallocs. ("Here are some words ."*2).split(None, 1) is 7% faster; ("Here are some words ."*2).split() is 15% faster. (Your mileage may vary, see dealership for details.) File parsing like this -- for line in f: count += len(line.split()) -- is also about 15% faster. There is a slowdown of about 3% for large strings because of the additional overhead of checking if the append is to a preallocated region of the list or not. This will be the rare case. It could be improved with special case code but we decided it was not useful enough. There is a cost of 12*sizeof(PyObject *) bytes per list. For the normal case of file parsing this is not a problem because the lists have a short lifetime. We have not come up with cases where this is a problem in real life. I chose 12 because human text averages about 11 words per line in books, one of my data sets averages 6.2 words with a final peak at 11 words per line, and I work with a tab delimited data set with 8 tabs per line (or 9 words per line). 12 encompasses all of these. Also changed the last rstrip code to append then reverse, rather than doing insert(0). The strip() and rstrip() times are now comparable. ........ r46328 | tim.peters | 2006-05-26 16:02:05 +0200 (Fri, 26 May 2006) | 5 lines Explicitly close files. I'm trying to stop the frequent spurious test_tarfile failures on Windows buildbots, but it's hard to know how since the regrtest failure output is useless here, and it never fails when a buildbot slave runs test_tarfile the second time in verbose mode. ........ r46329 | andrew.kuchling | 2006-05-26 16:03:41 +0200 (Fri, 26 May 2006) | 1 line Add buffer support for struct, socket ........ r46330 | andrew.kuchling | 2006-05-26 16:04:19 +0200 (Fri, 26 May 2006) | 1 line Typo fix ........ r46331 | bob.ippolito | 2006-05-26 16:07:23 +0200 (Fri, 26 May 2006) | 1 line Fix distutils so that libffi will cross-compile between darwin/x86 and darwin/ppc ........ r46333 | bob.ippolito | 2006-05-26 16:23:21 +0200 (Fri, 26 May 2006) | 1 line Fix _struct typo that broke some 64-bit platforms ........ r46335 | bob.ippolito | 2006-05-26 16:29:35 +0200 (Fri, 26 May 2006) | 1 line Enable PY_USE_INT_WHEN_POSSIBLE in struct ........ r46343 | andrew.dalke | 2006-05-26 17:21:01 +0200 (Fri, 26 May 2006) | 2 lines Eked out another 3% or so performance in split whitespace by cleaning up the algorithm. ........ r46352 | andrew.dalke | 2006-05-26 18:22:52 +0200 (Fri, 26 May 2006) | 3 lines Test for more edge strip cases; leading and trailing separator gets removed even with strip(..., 0) ........ r46354 | bob.ippolito | 2006-05-26 18:23:28 +0200 (Fri, 26 May 2006) | 1 line fix signed/unsigned mismatch in struct ........ r46355 | steve.holden | 2006-05-26 18:27:59 +0200 (Fri, 26 May 2006) | 5 lines Add -t option to allow easy test selection. Action verbose option correctly. Tweak operation counts. Add empty and new instances tests. Enable comparisons across different warp factors. Change version. ........ r46356 | fredrik.lundh | 2006-05-26 18:32:42 +0200 (Fri, 26 May 2006) | 3 lines needforspeed: use Py_LOCAL on a few more locals in stringobject.c ........ 
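The two calls quoted in r46327, spelled out as a runnable sketch (illustrative only; note the repetition runs the final "." into the next "Here", so the whitespace split sees 9 fields):

    line = "Here are some words ." * 2
    head, rest = line.split(None, 1)       # the 7%-faster call
    assert head == "Here"
    assert len(line.split()) == 9          # the 15%-faster call
    # The file-parsing pattern from the message, with a hypothetical filename:
    # for text_line in open("words.txt"):
    #     count += len(text_line.split())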
r46357 | thomas.heller | 2006-05-26 18:42:44 +0200 (Fri, 26 May 2006) | 4 lines For now, I gave up with automatic conversion of reST to Python-latex, so I'm writing this in latex now. Skeleton for the ctypes reference. ........ r46358 | tim.peters | 2006-05-26 18:49:28 +0200 (Fri, 26 May 2006) | 3 lines Repair Windows compiler warnings about mixing signed and unsigned integral types in comparisons. ........ r46359 | tim.peters | 2006-05-26 18:52:04 +0200 (Fri, 26 May 2006) | 2 lines Whitespace normalization. ........ r46360 | tim.peters | 2006-05-26 18:53:04 +0200 (Fri, 26 May 2006) | 2 lines Add missing svn:eol-style property to text files. ........ r46362 | fredrik.lundh | 2006-05-26 19:04:58 +0200 (Fri, 26 May 2006) | 3 lines needforspeed: stringlib refactoring (in progress) ........ r46363 | thomas.heller | 2006-05-26 19:18:33 +0200 (Fri, 26 May 2006) | 1 line Write some docs. ........ r46364 | fredrik.lundh | 2006-05-26 19:22:38 +0200 (Fri, 26 May 2006) | 3 lines needforspeed: stringlib refactoring (in progress) ........ r46366 | fredrik.lundh | 2006-05-26 19:26:39 +0200 (Fri, 26 May 2006) | 3 lines needforspeed: cleanup ........ r46367 | fredrik.lundh | 2006-05-26 19:31:41 +0200 (Fri, 26 May 2006) | 4 lines needforspeed: remove remaining USE_FAST macros; if fastsearch was broken, someone would have noticed by now ;-) ........ r46368 | steve.holden | 2006-05-26 19:41:32 +0200 (Fri, 26 May 2006) | 5 lines Use minimum calibration time rather than avergae to avoid the illusion of negative run times. Halt with an error if run times go below 10 ms, indicating that results will be unreliable. ........ r46370 | thomas.heller | 2006-05-26 19:47:40 +0200 (Fri, 26 May 2006) | 2 lines Reordered, and wrote more docs. ........ r46372 | georg.brandl | 2006-05-26 20:03:31 +0200 (Fri, 26 May 2006) | 9 lines Need for speed: Patch #921466 : sys.path_importer_cache is now used to cache valid and invalid file paths for the built-in import machinery which leads to fewer open calls on startup. Also fix issue with PEP 302 style import hooks which lead to more open() calls than necessary. ........ r46373 | fredrik.lundh | 2006-05-26 20:05:34 +0200 (Fri, 26 May 2006) | 3 lines removed unnecessary include ........ r46377 | fredrik.lundh | 2006-05-26 20:15:38 +0200 (Fri, 26 May 2006) | 3 lines needforspeed: added rpartition implementation ........ r46380 | fredrik.lundh | 2006-05-26 20:24:15 +0200 (Fri, 26 May 2006) | 5 lines needspeed: rpartition documentation, tests, and a bug fixes. feel free to add more tests and improve the documentation. ........ r46381 | steve.holden | 2006-05-26 20:26:21 +0200 (Fri, 26 May 2006) | 4 lines Revert tests to MAL's original round sizes to retiain comparability from long ago and far away. Stop calling this pybench 1.4 because it isn't. Remove the empty test, which was a bad idea. ........ r46387 | andrew.kuchling | 2006-05-26 20:41:18 +0200 (Fri, 26 May 2006) | 1 line Add rpartition() and path caching ........ r46388 | andrew.dalke | 2006-05-26 21:02:09 +0200 (Fri, 26 May 2006) | 10 lines substring split now uses /F's fast string matching algorithm. (If compiled without FAST search support, changed the pre-memcmp test to check the last character as well as the first. This gave a 25% speedup for my test case.) Rewrote the split algorithms so they stop when maxsplit gets to 0. Previously they did a string match first then checked if the maxsplit was reached. The new way prevents a needless string search. ........ 
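A usage sketch (not from the patch) of rpartition() as added in r46377/r46380, the mirror image of partition():

    # rpartition(sep) splits at the *last* occurrence of the separator.
    assert "usr/local/bin/python".rpartition("/") == ("usr/local/bin", "/", "python")
    # With no separator present, the two empty strings come first and the
    # original string is returned as the final element.
    assert "python".rpartition("/") == ("", "", "python")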
r46391 | brett.cannon | 2006-05-26 21:04:47 +0200 (Fri, 26 May 2006) | 2 lines Change C spacing to 4 spaces by default to match PEP 7 for new C files. ........ r46392 | georg.brandl | 2006-05-26 21:04:47 +0200 (Fri, 26 May 2006) | 3 lines Exception isn't the root of all exception classes anymore. ........ r46397 | fredrik.lundh | 2006-05-26 21:23:21 +0200 (Fri, 26 May 2006) | 3 lines added rpartition method to UserString class ........ r46398 | fredrik.lundh | 2006-05-26 21:24:53 +0200 (Fri, 26 May 2006) | 4 lines needforspeed: stringlib refactoring, continued. added count and find helpers; updated unicodeobject to use stringlib_count ........ r46400 | fredrik.lundh | 2006-05-26 21:29:05 +0200 (Fri, 26 May 2006) | 4 lines needforspeed: stringlib refactoring: use stringlib/find for unicode find ........ r46403 | fredrik.lundh | 2006-05-26 21:33:03 +0200 (Fri, 26 May 2006) | 3 lines needforspeed: use a macro to fix slice indexes ........ r46404 | thomas.heller | 2006-05-26 21:43:45 +0200 (Fri, 26 May 2006) | 1 line Write more docs. ........ r46406 | fredrik.lundh | 2006-05-26 21:48:07 +0200 (Fri, 26 May 2006) | 3 lines needforspeed: stringlib refactoring: use stringlib/find for string find ........ r46407 | andrew.kuchling | 2006-05-26 21:51:10 +0200 (Fri, 26 May 2006) | 1 line Comment typo ........ r46409 | georg.brandl | 2006-05-26 22:04:44 +0200 (Fri, 26 May 2006) | 3 lines Replace Py_BuildValue("OO") by PyTuple_Pack. ........ r46411 | georg.brandl | 2006-05-26 22:14:47 +0200 (Fri, 26 May 2006) | 2 lines Patch #1492218: document None being a constant. ........ r46415 | georg.brandl | 2006-05-26 22:22:50 +0200 (Fri, 26 May 2006) | 3 lines Simplify calling. ........ r46416 | andrew.dalke | 2006-05-26 22:25:22 +0200 (Fri, 26 May 2006) | 4 lines Added limits to the replace code so it does not count all of the matching patterns in a string, only the number needed by the max limit. ........ r46417 | bob.ippolito | 2006-05-26 22:25:23 +0200 (Fri, 26 May 2006) | 1 line enable all of the struct tests, use ssize_t, fix some whitespace ........ r46418 | tim.peters | 2006-05-26 22:56:56 +0200 (Fri, 26 May 2006) | 2 lines Record Iceland sprint attendees. ........ r46421 | tim.peters | 2006-05-26 23:51:13 +0200 (Fri, 26 May 2006) | 2 lines Whitespace normalization. ........ r46422 | steve.holden | 2006-05-27 00:17:54 +0200 (Sat, 27 May 2006) | 2 lines Add Richard Tew to developers ........ r46423 | steve.holden | 2006-05-27 00:33:20 +0200 (Sat, 27 May 2006) | 2 lines Update help text and documentaition. ........ r46424 | steve.holden | 2006-05-27 00:39:27 +0200 (Sat, 27 May 2006) | 2 lines Blasted typos ... ........ r46425 | andrew.dalke | 2006-05-27 00:49:03 +0200 (Sat, 27 May 2006) | 2 lines Added description of why splitlines doesn't use the prealloc strategy ........ r46426 | tim.peters | 2006-05-27 01:14:37 +0200 (Sat, 27 May 2006) | 19 lines Patch 1145039. set_exc_info(), reset_exc_info(): By exploiting the likely (who knows?) invariant that when an exception's `type` is NULL, its `value` and `traceback` are also NULL, save some cycles in heavily-executed code. This is a "a kronar saved is a kronar earned" patch: the speedup isn't reliably measurable, but it obviously does reduce the operation count in the normal (no exception raised) path through PyEval_EvalFrameEx(). The tim-exc_sanity branch tries to push this harder, but is still blowing up (at least in part due to pre-existing subtle bugs that appear to have no other visible consequences!). Not a bugfix candidate. ........ 
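A minimal sketch (Python 2.5 behaviour as described by PEP 352, not code from this merge) of what "Exception isn't the root any more" means in practice:

    # BaseException is the new root, and the exception types are new-style
    # classes implemented in C.
    assert issubclass(Exception, BaseException)
    assert isinstance(Exception, type)
    # "except Exception" no longer swallows the interpreter-control exceptions:
    assert issubclass(KeyboardInterrupt, BaseException)
    assert not issubclass(KeyboardInterrupt, Exception)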
r46429 | steve.holden | 2006-05-27 02:51:52 +0200 (Sat, 27 May 2006) | 2 lines Reinstate new-style object tests. ........ r46430 | neal.norwitz | 2006-05-27 07:18:57 +0200 (Sat, 27 May 2006) | 1 line Fix compiler warning (and whitespace) on Mac OS 10.4. (A lot of this code looked duplicated, I wonder if a utility function could help reduce the duplication here.) ........ r46431 | neal.norwitz | 2006-05-27 07:21:30 +0200 (Sat, 27 May 2006) | 4 lines Fix Coverity warnings. - Check the correct variable (str_obj, not str) for NULL - sep_len was already verified it wasn't 0 ........ r46432 | martin.v.loewis | 2006-05-27 10:36:52 +0200 (Sat, 27 May 2006) | 2 lines Patch 1494554: Update numeric properties to Unicode 4.1. ........ r46433 | martin.v.loewis | 2006-05-27 10:54:29 +0200 (Sat, 27 May 2006) | 2 lines Explain why 'consumed' is initialized. ........ r46436 | fredrik.lundh | 2006-05-27 12:05:10 +0200 (Sat, 27 May 2006) | 3 lines needforspeed: more stringlib refactoring ........ r46438 | fredrik.lundh | 2006-05-27 12:39:48 +0200 (Sat, 27 May 2006) | 5 lines needforspeed: backed out the Py_LOCAL-isation of ceval; the massive in- lining killed performance on certain Intel boxes, and the "aggressive" macro itself gives most of the benefits on others. ........ r46439 | andrew.dalke | 2006-05-27 13:04:36 +0200 (Sat, 27 May 2006) | 2 lines fixed typo ........ r46440 | martin.v.loewis | 2006-05-27 13:07:49 +0200 (Sat, 27 May 2006) | 2 lines Revert bogus change committed in 46432 to this file. ........ r46444 | andrew.kuchling | 2006-05-27 13:26:33 +0200 (Sat, 27 May 2006) | 1 line Add Py_LOCAL macros ........ r46450 | bob.ippolito | 2006-05-27 13:47:12 +0200 (Sat, 27 May 2006) | 1 line Remove the range checking and int usage #defines from _struct and strip out the now-dead code ........ r46454 | bob.ippolito | 2006-05-27 14:11:36 +0200 (Sat, 27 May 2006) | 1 line Fix up struct docstrings, add struct.pack_to function for symmetry ........ r46456 | richard.jones | 2006-05-27 14:29:24 +0200 (Sat, 27 May 2006) | 2 lines Conversion of exceptions over from faked-up classes to new-style C types. ........ r46457 | georg.brandl | 2006-05-27 14:30:25 +0200 (Sat, 27 May 2006) | 3 lines Add news item for new-style exception class branch merge. ........ r46458 | tim.peters | 2006-05-27 14:36:53 +0200 (Sat, 27 May 2006) | 3 lines More random thrashing trying to understand spurious Windows failures. Who's keeping a bz2 file open? ........ r46460 | andrew.kuchling | 2006-05-27 15:44:37 +0200 (Sat, 27 May 2006) | 1 line Mention new-style exceptions ........ r46461 | richard.jones | 2006-05-27 15:50:42 +0200 (Sat, 27 May 2006) | 1 line credit where credit is due ........ r46462 | georg.brandl | 2006-05-27 16:02:03 +0200 (Sat, 27 May 2006) | 3 lines Always close BZ2Proxy object. Remove unnecessary struct usage. ........ r46463 | tim.peters | 2006-05-27 16:13:13 +0200 (Sat, 27 May 2006) | 2 lines The cheery optimism of old age. ........ r46464 | andrew.dalke | 2006-05-27 16:16:40 +0200 (Sat, 27 May 2006) | 2 lines cleanup - removed trailing whitespace ........ r46465 | georg.brandl | 2006-05-27 16:41:55 +0200 (Sat, 27 May 2006) | 3 lines Remove spurious semicolons after macro invocations. ........ r46468 | fredrik.lundh | 2006-05-27 16:58:20 +0200 (Sat, 27 May 2006) | 4 lines needforspeed: replace improvements, changed to Py_LOCAL_INLINE where appropriate ........ 
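For a sense of the data r46432 refreshes (an illustrative sketch, not part of the patch), the unicodedata module's numeric lookups in Python 2.x:

    import unicodedata

    # numeric() reports the numeric value the Unicode database assigns to a
    # character; the r46432 update moves that data to Unicode 4.1.
    assert unicodedata.numeric(u"\u00bd") == 0.5   # VULGAR FRACTION ONE HALF
    assert unicodedata.digit(u"7") == 7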
r46469 | fredrik.lundh | 2006-05-27 17:20:22 +0200 (Sat, 27 May 2006) | 4 lines needforspeed: stringlib refactoring: changed find_obj to find_slice, to enable use from stringobject ........ r46470 | fredrik.lundh | 2006-05-27 17:26:19 +0200 (Sat, 27 May 2006) | 3 lines needforspeed: stringlib refactoring: use find_slice for stringobject ........ r46472 | kristjan.jonsson | 2006-05-27 17:41:31 +0200 (Sat, 27 May 2006) | 1 line Add a PCBuild8 build directory for building with Visual Studio .NET 2005. Contains a special project to perform profile guided optimizations on the pythoncore.dll, by instrumenting and running pybench.py ........ r46473 | jack.diederich | 2006-05-27 17:44:34 +0200 (Sat, 27 May 2006) | 3 lines needforspeed: use PyObject_MALLOC instead of system malloc for small allocations. Use PyMem_MALLOC for larger (1k+) chunks. 1%-2% speedup. ........ r46474 | bob.ippolito | 2006-05-27 17:53:49 +0200 (Sat, 27 May 2006) | 1 line fix struct regression on 64-bit platforms ........ r46475 | richard.jones | 2006-05-27 18:07:28 +0200 (Sat, 27 May 2006) | 1 line doc string additions and tweaks ........ r46477 | richard.jones | 2006-05-27 18:15:11 +0200 (Sat, 27 May 2006) | 1 line move semicolons ........ r46478 | george.yoshida | 2006-05-27 18:32:44 +0200 (Sat, 27 May 2006) | 2 lines minor markup nits ........ r46488 | george.yoshida | 2006-05-27 18:51:43 +0200 (Sat, 27 May 2006) | 3 lines End of Ch.3 is now about "with statement". Avoid obsolescence by directly referring to the section. ........ r46489 | george.yoshida | 2006-05-27 19:09:17 +0200 (Sat, 27 May 2006) | 2 lines fix typo ........ --- Doc/ACKS | 1 + Doc/Makefile.deps | 6 +- Doc/api/abstract.tex | 6 +- Doc/api/concrete.tex | 44 +- Doc/api/exceptions.tex | 47 +- Doc/api/newtypes.tex | 96 +- Doc/api/refcounts.dat | 2 +- Doc/commontex/boilerplate.tex | 2 +- Doc/dist/dist.tex | 8 +- Doc/ext/windows.tex | 2 +- Doc/howto/unicode.rst | 4 +- Doc/howto/urllib2.rst | 598 ++ Doc/inst/inst.tex | 22 +- Doc/lib/lib.tex | 6 +- Doc/lib/libcodecs.tex | 6 +- Doc/lib/libcodeop.tex | 2 +- Doc/lib/libcollections.tex | 4 +- Doc/lib/libcontextlib.tex | 84 +- Doc/lib/libctypes.tex | 1226 +++ Doc/lib/libctypesref.tex | 457 + Doc/lib/libcursespanel.tex | 3 + Doc/lib/libdecimal.tex | 6 +- Doc/lib/libdis.tex | 2 +- Doc/lib/libdoctest.tex | 42 +- Doc/lib/libexcs.tex | 13 +- Doc/lib/libfuncs.tex | 20 +- Doc/lib/libgetpass.tex | 2 +- Doc/lib/libhtmlparser.tex | 2 +- Doc/lib/liblocale.tex | 61 +- Doc/lib/libmailbox.tex | 1353 ++- Doc/lib/libmsilib.tex | 485 ++ Doc/lib/liboperator.tex | 4 +- Doc/lib/liboptparse.tex | 403 +- Doc/lib/libpdb.tex | 4 +- Doc/lib/libposixpath.tex | 5 +- Doc/lib/libprofile.tex | 9 +- Doc/lib/librlcompleter.tex | 16 +- Doc/lib/librunpy.tex | 2 +- Doc/lib/libsqlite3.tex | 503 ++ Doc/lib/libstdtypes.tex | 153 +- Doc/lib/libsubprocess.tex | 15 +- Doc/lib/libsys.tex | 2 +- Doc/lib/libtarfile.tex | 8 +- Doc/lib/libthread.tex | 4 +- Doc/lib/libtokenize.tex | 2 +- Doc/lib/libtrace.tex | 125 + Doc/lib/libunittest.tex | 2 +- Doc/lib/liburllib2.tex | 16 +- Doc/lib/libweakref.tex | 33 + Doc/lib/libxmlrpclib.tex | 9 +- Doc/lib/libzlib.tex | 13 + Doc/lib/sqlite3/adapter_datetime.py | 14 + Doc/lib/sqlite3/adapter_point_1.py | 16 + Doc/lib/sqlite3/adapter_point_2.py | 17 + Doc/lib/sqlite3/collation_reverse.py | 15 + Doc/lib/sqlite3/complete_statement.py | 30 + Doc/lib/sqlite3/connect_db_1.py | 3 + Doc/lib/sqlite3/connect_db_2.py | 3 + Doc/lib/sqlite3/converter_point.py | 47 + Doc/lib/sqlite3/countcursors.py | 15 + Doc/lib/sqlite3/createdb.py | 28 + 
Doc/lib/sqlite3/execsql_fetchonerow.py | 17 + Doc/lib/sqlite3/execsql_printall_1.py | 13 + Doc/lib/sqlite3/execute_1.py | 11 + Doc/lib/sqlite3/execute_2.py | 12 + Doc/lib/sqlite3/execute_3.py | 12 + Doc/lib/sqlite3/executemany_1.py | 24 + Doc/lib/sqlite3/executemany_2.py | 15 + Doc/lib/sqlite3/executescript.py | 24 + Doc/lib/sqlite3/insert_more_people.py | 16 + Doc/lib/sqlite3/md5func.py | 11 + Doc/lib/sqlite3/mysumaggr.py | 20 + Doc/lib/sqlite3/parse_colnames.py | 8 + Doc/lib/sqlite3/pysqlite_datetime.py | 20 + Doc/lib/sqlite3/row_factory.py | 13 + Doc/lib/sqlite3/rowclass.py | 12 + Doc/lib/sqlite3/shared_cache.py | 6 + Doc/lib/sqlite3/shortcut_methods.py | 21 + Doc/lib/sqlite3/simple_tableprinter.py | 26 + Doc/lib/sqlite3/text_factory.py | 42 + Doc/mac/scripting.tex | 4 +- Doc/perl/l2hinit.perl | 19 +- Doc/perl/python.perl | 1 - Doc/ref/ref2.tex | 30 +- Doc/ref/ref3.tex | 78 +- Doc/ref/ref4.tex | 2 +- Doc/ref/ref5.tex | 11 +- Doc/ref/ref6.tex | 11 +- Doc/ref/ref7.tex | 27 +- Doc/texinputs/python.sty | 24 +- Doc/tut/glossary.tex | 2 +- Doc/tut/tut.tex | 42 +- Doc/whatsnew/whatsnew20.tex | 6 +- Doc/whatsnew/whatsnew23.tex | 2 +- Doc/whatsnew/whatsnew25.tex | 560 +- Grammar/Grammar | 2 +- Include/Python.h | 2 + Include/code.h | 1 + Include/frameobject.h | 13 +- Include/longobject.h | 1 + Include/osdefs.h | 4 + Include/pyerrors.h | 69 +- Include/pyport.h | 58 + Include/unicodeobject.h | 48 +- Lib/UserString.py | 4 + Lib/_LWPCookieJar.py | 22 +- Lib/_MozillaCookieJar.py | 16 +- Lib/bdb.py | 16 +- Lib/binhex.py | 6 +- Lib/bsddb/test/test_thread.py | 6 + Lib/calendar.py | 3 - Lib/codeop.py | 10 +- Lib/compiler/pycodegen.py | 2 - Lib/compiler/transformer.py | 12 +- Lib/contextlib.py | 28 +- Lib/cookielib.py | 176 +- Lib/ctypes/__init__.py | 36 +- Lib/ctypes/_loader.py | 262 - Lib/ctypes/test/test_bitfields.py | 2 +- Lib/ctypes/test/test_byteswap.py | 2 +- Lib/ctypes/test/test_callbacks.py | 2 +- Lib/ctypes/test/test_cast.py | 27 +- Lib/ctypes/test/test_cfuncs.py | 2 +- Lib/ctypes/test/test_checkretval.py | 2 +- Lib/ctypes/test/test_find.py | 104 + Lib/ctypes/test/test_funcptr.py | 2 +- Lib/ctypes/test/test_functions.py | 4 +- Lib/ctypes/test/test_libc.py | 2 +- Lib/ctypes/test/test_loading.py | 38 +- Lib/ctypes/test/test_pointers.py | 29 +- Lib/ctypes/test/test_posix.py | 40 - Lib/ctypes/test/test_prototypes.py | 2 +- Lib/ctypes/test/test_python_api.py | 27 +- Lib/ctypes/test/test_refcounts.py | 2 +- Lib/ctypes/test/test_returnfuncptrs.py | 4 +- Lib/ctypes/test/test_slicing.py | 12 +- Lib/ctypes/test/test_stringptr.py | 2 +- Lib/ctypes/test/test_structures.py | 8 +- Lib/ctypes/test/test_unicode.py | 4 +- Lib/ctypes/test/test_values.py | 4 +- Lib/ctypes/test/test_win32.py | 2 +- Lib/ctypes/util.py | 122 + Lib/decimal.py | 4 +- Lib/distutils/ccompiler.py | 6 +- Lib/distutils/command/bdist_msi.py | 16 +- Lib/distutils/command/build_ext.py | 5 + Lib/distutils/command/upload.py | 2 +- Lib/distutils/msvccompiler.py | 2 +- Lib/distutils/sysconfig.py | 19 +- Lib/distutils/unixccompiler.py | 64 +- Lib/distutils/util.py | 49 + Lib/doctest.py | 16 +- Lib/dummy_thread.py | 3 - Lib/easy_install.py | 5 - Lib/email/_parseaddr.py | 1 + Lib/email/test/test_email.py | 6 + Lib/email/test/test_email_renamed.py | 6 + Lib/gzip.py | 37 +- Lib/httplib.py | 17 +- Lib/idlelib/NEWS.txt | 5 + Lib/idlelib/configHelpSourceEdit.py | 1 + Lib/idlelib/idlever.py | 2 +- Lib/imputil.py | 7 +- Lib/inspect.py | 14 +- Lib/lib-tk/turtle.py | 583 +- Lib/locale.py | 170 +- Lib/logging/__init__.py | 2 +- Lib/logging/handlers.py | 10 +- 
Lib/mailbox.py | 2035 ++++- Lib/msilib/__init__.py | 15 +- Lib/msilib/text.py | 129 + Lib/msilib/uisample.py | 1399 ---- Lib/ntpath.py | 43 +- Lib/optparse.py | 204 +- Lib/pdb.py | 199 +- Lib/pkg_resources.py | 2377 ------ Lib/pkgutil.py | 30 +- Lib/plat-mac/bundlebuilder.py | 23 +- Lib/plat-mac/pimp.py | 4 +- Lib/popen2.py | 5 +- Lib/poplib.py | 4 +- Lib/rfc822.py | 1 + Lib/rlcompleter.py | 8 +- Lib/setuptools.egg-info/PKG-INFO | 89 - Lib/setuptools.egg-info/entry_points.txt | 51 - Lib/setuptools.egg-info/top_level.txt | 3 - Lib/setuptools.egg-info/zip-safe | 0 Lib/setuptools/__init__.py | 64 - Lib/setuptools/archive_util.py | 200 - Lib/setuptools/cli.exe | Bin 6144 -> 0 bytes Lib/setuptools/command/__init__.py | 19 - Lib/setuptools/command/alias.py | 79 - Lib/setuptools/command/bdist_egg.py | 449 - Lib/setuptools/command/bdist_rpm.py | 37 - Lib/setuptools/command/build_ext.py | 285 - Lib/setuptools/command/build_py.py | 192 - Lib/setuptools/command/develop.py | 116 - Lib/setuptools/command/easy_install.py | 1555 ---- Lib/setuptools/command/egg_info.py | 365 - Lib/setuptools/command/install.py | 101 - Lib/setuptools/command/install_egg_info.py | 81 - Lib/setuptools/command/install_lib.py | 76 - Lib/setuptools/command/install_scripts.py | 56 - Lib/setuptools/command/rotate.py | 57 - Lib/setuptools/command/saveopts.py | 24 - Lib/setuptools/command/sdist.py | 163 - Lib/setuptools/command/setopt.py | 158 - Lib/setuptools/command/test.py | 119 - Lib/setuptools/command/upload.py | 178 - Lib/setuptools/depends.py | 239 - Lib/setuptools/dist.py | 798 -- Lib/setuptools/extension.py | 35 - Lib/setuptools/gui.exe | Bin 6144 -> 0 bytes Lib/setuptools/package_index.py | 674 -- Lib/setuptools/sandbox.py | 203 - Lib/setuptools/site-patch.py | 74 - Lib/setuptools/tests/__init__.py | 364 - Lib/setuptools/tests/api_tests.txt | 330 - Lib/setuptools/tests/test_resources.py | 483 -- Lib/shutil.py | 2 +- Lib/socket.py | 6 +- Lib/sqlite3/dbapi2.py | 68 +- Lib/sqlite3/test/hooks.py | 4 +- Lib/sqlite3/test/regression.py | 27 +- Lib/sqlite3/test/userfunctions.py | 14 + Lib/struct.py | 99 + Lib/subprocess.py | 4 +- Lib/tarfile.py | 539 +- Lib/test/exception_hierarchy.txt | 3 +- Lib/test/output/test_logging | 6 +- Lib/test/pickletester.py | 8 +- Lib/test/regrtest.py | 30 +- Lib/test/string_tests.py | 307 + Lib/test/test_bigmem.py | 964 +++ Lib/test/test_builtin.py | 189 +- Lib/test/test_cmd_line.py | 39 +- Lib/test/test_codeccallbacks.py | 73 - Lib/test/test_codecencodings_cn.py | 1 - Lib/test/test_codecencodings_hk.py | 1 - Lib/test/test_codecencodings_jp.py | 1 - Lib/test/test_codecencodings_kr.py | 1 - Lib/test/test_codecencodings_tw.py | 1 - Lib/test/test_codecmaps_cn.py | 1 - Lib/test/test_codecmaps_hk.py | 1 - Lib/test/test_codecmaps_jp.py | 1 - Lib/test/test_codecmaps_kr.py | 1 - Lib/test/test_codecmaps_tw.py | 1 - Lib/test/test_compiler.py | 4 + Lib/test/test_contextlib.py | 31 +- Lib/test/test_cookielib.py | 16 + Lib/test/test_datetime.py | 6 + Lib/test/test_doctest.py | 39 + Lib/test/test_exceptions.py | 93 +- Lib/test/test_file.py | 2 +- Lib/test/test_grp.py | 5 +- Lib/test/test_import.py | 17 + Lib/test/test_importhooks.py | 11 +- Lib/test/test_locale.py | 28 +- Lib/test/test_logging.py | 19 +- Lib/test/test_mailbox.py | 1676 +++- Lib/test/test_multibytecodec.py | 1 - Lib/test/test_old_mailbox.py | 120 + Lib/test/test_optparse.py | 200 +- Lib/test/test_os.py | 30 +- Lib/test/test_pty.py | 13 +- Lib/test/test_rfc822.py | 10 + Lib/test/test_setuptools.py | 16 - Lib/test/test_shutil.py | 4 +- 
Lib/test/test_socket.py | 33 +- Lib/test/test_sqlite.py | 5 +- Lib/test/test_stringprep.py | 6 +- Lib/test/test_struct.py | 103 +- Lib/test/test_subprocess.py | 4 +- Lib/test/test_sundry.py | 6 +- Lib/test/test_support.py | 106 +- Lib/test/test_syntax.py | 58 +- Lib/test/test_tarfile.py | 118 +- Lib/test/test_tcl.py | 1 + Lib/test/test_threaded_import.py | 21 +- Lib/test/test_traceback.py | 6 + Lib/test/test_unicode.py | 15 +- Lib/test/test_unicodedata.py | 10 +- Lib/test/test_urllib2.py | 372 +- Lib/test/test_urllib2net.py | 179 +- Lib/test/test_weakref.py | 44 + Lib/test/test_with.py | 84 +- Lib/test/test_zlib.py | 57 + Lib/test/testtar.tar | Bin 112640 -> 133120 bytes Lib/test/threaded_import_hangers.py | 42 + Lib/threading.py | 15 +- Lib/trace.py | 20 +- Lib/traceback.py | 2 +- Lib/urllib.py | 7 - Lib/urllib2.py | 79 +- Lib/warnings.py | 2 - Lib/weakref.py | 48 + Lib/webbrowser.py | 5 +- Mac/Modules/cf/_CFmodule.c | 18 +- Mac/Modules/file/_Filemodule.c | 9 +- Mac/Modules/mlte/_Mltemodule.c | 17 - Mac/OSX/BuildScript/README.txt | 35 + Mac/OSX/BuildScript/build-installer.py | 1014 +++ Mac/OSX/BuildScript/ncurses-5.5.patch | 36 + Mac/OSX/BuildScript/resources/ReadMe.txt | 31 + Mac/OSX/BuildScript/resources/Welcome.rtf | 15 + Mac/OSX/BuildScript/resources/background.jpg | Bin 0 -> 45421 bytes .../BuildScript/scripts/postflight.documentation | 12 + Mac/OSX/BuildScript/scripts/postflight.framework | 33 + .../BuildScript/scripts/postflight.patch-profile | 71 + Mac/OSX/Dist/README.txt | 50 - Mac/OSX/Dist/build | 164 - Mac/OSX/Dist/example-pimp-database.plist | 51 - Mac/OSX/Dist/makedmg | 95 - Mac/OSX/Dist/resources/ReadMe.txt | 31 - Mac/OSX/Dist/resources/Welcome.rtf | 15 - Mac/OSX/Dist/resources/postflight | 92 - Mac/OSX/Extras.ReadMe.txt | 9 - Mac/OSX/Extras.install.py | 1 + Mac/OSX/IDLE/Info.plist | 55 + Mac/OSX/IDLE/Makefile.in | 52 + Mac/OSX/IDLE/idlemain.py | 27 + Mac/OSX/Icons/Disk Image.icns | Bin 0 -> 50703 bytes Mac/OSX/Icons/IDLE.icns | Bin 0 -> 53456 bytes Mac/OSX/Icons/Python Folder.icns | Bin 0 -> 133608 bytes Mac/OSX/Icons/PythonCompiled.icns | Bin 0 -> 60777 bytes Mac/OSX/Icons/PythonLauncher.icns | Bin 0 -> 42658 bytes Mac/OSX/Icons/PythonSource.icns | Bin 0 -> 54522 bytes Mac/OSX/Icons/ReadMe.txt | 3 + Mac/OSX/Makefile | 273 - Mac/OSX/Makefile.in | 241 + .../PythonLauncher/English.lproj/InfoPlist.strings | Bin 618 -> 0 bytes .../English.lproj/PreferenceWindow.nib/classes.nib | 26 + .../English.lproj/PreferenceWindow.nib/info.nib | 16 + .../English.lproj/PreferenceWindow.nib/objects.nib | Bin 0 -> 5882 bytes Mac/OSX/PythonLauncher/Info.plist | 65 + Mac/OSX/PythonLauncher/Makefile.in | 78 + Mac/OSX/PythonLauncher/PythonCompiled.icns | Bin 57125 -> 0 bytes Mac/OSX/PythonLauncher/PythonInterpreter.icns | Bin 45913 -> 0 bytes .../PythonLauncher.pbproj/project.pbxproj | 681 -- Mac/OSX/PythonLauncher/PythonSource.icns | Bin 50112 -> 0 bytes Mac/OSX/PythonLauncher/PythonWSource.icns | Bin 49891 -> 0 bytes Mac/OSX/README | 156 +- Mac/OSX/Tools/pythonw.c | 17 + Mac/OSX/fixversions.py | 69 - Mac/OSX/sample_sitecustomize.py | 6 - Mac/OSXResources/app/Resources/PythonApplet.icns | Bin 36565 -> 63136 bytes .../app/Resources/PythonInterpreter.icns | Bin 45913 -> 42658 bytes Mac/OSXResources/framework/Info.plist | 4 +- Mac/scripts/BuildApplet.icns | Bin 35833 -> 120107 bytes Makefile.pre.in | 61 +- Misc/ACKS | 8 + Misc/NEWS | 59 - Misc/RPM/python-2.5.spec | 2 +- Misc/Vim/python.vim | 6 +- Misc/Vim/syntax_test.py | 2 - Misc/Vim/vimrc | 5 +- Misc/developers.txt | 33 +- Modules/Setup.dist | 2 
+- Modules/_codecsmodule.c | 18 +- Modules/_csv.c | 2 + Modules/_ctypes/_ctypes.c | 19 +- Modules/_ctypes/_ctypes_test.c | 10 +- Modules/_ctypes/libffi/configure | 22 +- Modules/_ctypes/libffi/configure.ac | 36 +- Modules/_ctypes/libffi/fficonfig.h.in | 15 +- Modules/_ctypes/libffi/fficonfig.py.in | 15 +- Modules/_ctypes/libffi/src/darwin/ffitarget.h | 25 + Modules/_ctypes/libffi/src/powerpc/darwin.S | 2 + .../_ctypes/libffi/src/powerpc/darwin_closure.S | 2 + Modules/_ctypes/libffi/src/powerpc/ffi_darwin.c | 12 +- Modules/_ctypes/libffi/src/prep_cif.c | 46 +- Modules/_ctypes/libffi/src/x86/darwin.S | 195 + Modules/_ctypes/libffi/src/x86/ffi_darwin.c | 610 ++ Modules/_ctypes/libffi/src/x86/ffitarget.h | 2 +- Modules/_ctypes/libffi_msvc/ffi.c | 85 +- Modules/_ctypes/libffi_msvc/ffi.h | 4 - Modules/_ctypes/libffi_msvc/ffi_common.h | 19 +- Modules/_ctypes/libffi_msvc/ffitarget.h | 20 +- Modules/_ctypes/libffi_msvc/mingwin32.S | 228 + Modules/_ctypes/libffi_msvc/prep_cif.c | 2 +- Modules/_ctypes/libffi_msvc/win32.S | 20 +- Modules/_elementtree.c | 4 +- Modules/_hotshot.c | 2 +- Modules/_localemodule.c | 5 +- Modules/_sqlite/adapters.c | 40 - Modules/_sqlite/adapters.h | 33 - Modules/_sqlite/cache.c | 23 +- Modules/_sqlite/cache.h | 14 +- Modules/_sqlite/connection.c | 197 +- Modules/_sqlite/connection.h | 25 +- Modules/_sqlite/converters.c | 40 - Modules/_sqlite/converters.h | 33 - Modules/_sqlite/cursor.c | 62 +- Modules/_sqlite/cursor.h | 2 +- Modules/_sqlite/microprotocols.h | 4 +- Modules/_sqlite/module.c | 8 +- Modules/_sqlite/module.h | 4 +- Modules/_sqlite/statement.c | 9 +- Modules/_sqlite/statement.h | 1 + Modules/_sre.c | 8 +- Modules/_ssl.c | 6 +- Modules/_struct.c | 1660 ++++ Modules/_testcapimodule.c | 45 +- Modules/_tkinter.c | 18 +- Modules/arraymodule.c | 6 +- Modules/cPickle.c | 10 +- Modules/cjkcodecs/multibytecodec.c | 2 +- Modules/datetimemodule.c | 7 + Modules/expat/expat_config.h | 19 + Modules/expat/xmlparse.c | 8 +- Modules/expat/xmlrole.c | 4 +- Modules/expat/xmltok.c | 4 +- Modules/fcntlmodule.c | 15 +- Modules/gcmodule.c | 23 +- Modules/grpmodule.c | 36 +- Modules/itertoolsmodule.c | 58 +- Modules/main.c | 2 + Modules/operator.c | 10 +- Modules/parsermodule.c | 4 +- Modules/posixmodule.c | 653 +- Modules/rotatingtree.h | 2 +- Modules/selectmodule.c | 83 +- Modules/sha512module.c | 198 +- Modules/socketmodule.c | 504 +- Modules/structmodule.c | 1293 --- Modules/threadmodule.c | 9 - Modules/timemodule.c | 30 +- Modules/unicodedata.c | 4 +- Modules/zlibmodule.c | 106 + Objects/classobject.c | 27 +- Objects/codeobject.c | 3 + Objects/exceptions.c | 2141 +++++ Objects/fileobject.c | 92 +- Objects/floatobject.c | 7 +- Objects/frameobject.c | 220 +- Objects/longobject.c | 269 +- Objects/stringlib/README.txt | 34 + Objects/stringlib/count.h | 34 + Objects/stringlib/fastsearch.h | 104 + Objects/stringlib/find.h | 112 + Objects/stringlib/partition.h | 111 + Objects/stringobject.c | 1408 +++- Objects/typeobject.c | 10 +- Objects/unicodectype.c | 333 +- Objects/unicodeobject.c | 772 +- Objects/weakrefobject.c | 2 +- PC/config.c | 4 +- PC/errmap.h | 78 + PC/errmap.mak | 5 + PC/example_nt/example.c | 2 +- PC/example_nt/example.def | 2 - PC/example_nt/example.vcproj | 15 +- PC/generrmap.c | 20 + PC/py.ico | Bin 766 -> 23558 bytes PC/pyc.ico | Bin 766 -> 23558 bytes PC/pycon.ico | Bin 766 -> 23558 bytes PC/pyconfig.h | 51 +- PCbuild/_bsddb.vcproj | 2 +- PCbuild/_elementtree.vcproj | 2 +- PCbuild/_msi.vcproj | 4 +- PCbuild/_socket.vcproj | 2 +- PCbuild/_sqlite3.vcproj | 6 - 
PCbuild/_testcapi.vcproj | 2 +- PCbuild/_tkinter.vcproj | 2 +- PCbuild/bz2.vcproj | 2 +- PCbuild/installer.bmp | Bin 50324 -> 58806 bytes PCbuild/pythoncore.vcproj | 10 +- PCbuild/unicodedata.vcproj | 2 +- PCbuild/winsound.vcproj | 2 +- PCbuild8/Uninstal.wse | 514 ++ PCbuild8/_bsddb.vcproj | 385 + PCbuild8/_ctypes.vcproj | 408 + PCbuild8/_ctypes_test.vcproj | 367 + PCbuild8/_elementtree.vcproj | 390 + PCbuild8/_msi.vcproj | 375 + PCbuild8/_socket.vcproj | 381 + PCbuild8/_sqlite3.vcproj | 414 + PCbuild8/_ssl.mak | 21 + PCbuild8/_ssl.vcproj | 121 + PCbuild8/_testcapi.vcproj | 374 + PCbuild8/_tkinter.vcproj | 389 + PCbuild8/build_ssl.py | 163 + PCbuild8/bz2.vcproj | 390 + PCbuild8/db.build | 10 + PCbuild8/field3.py | 35 + PCbuild8/installer.bmp | Bin 0 -> 58806 bytes PCbuild8/make_buildinfo.c | 92 + PCbuild8/make_buildinfo.vcproj | 188 + PCbuild8/make_versioninfo.vcproj | 207 + PCbuild8/pcbuild.sln | 185 + PCbuild8/pyexpat.vcproj | 393 + PCbuild8/python.build | 21 + PCbuild8/python.iss | 346 + PCbuild8/python.vcproj | 400 + PCbuild8/python20.wse | 3135 +++++++ PCbuild8/pythoncore.vcproj | 1103 +++ PCbuild8/pythoncore_link.txt | 311 + PCbuild8/pythoncore_pgo.vcproj | 781 ++ PCbuild8/pythoncore_pgo_link.txt | 311 + PCbuild8/pythonw.vcproj | 386 + PCbuild8/readme.txt | 423 + PCbuild8/rmpyc.py | 25 + PCbuild8/rt.bat | 52 + PCbuild8/select.vcproj | 382 + PCbuild8/unicodedata.vcproj | 371 + PCbuild8/w9xpopen.vcproj | 185 + PCbuild8/winsound.vcproj | 375 + Parser/parsetok.c | 4 +- Python/ast.c | 12 +- Python/bltinmodule.c | 18 +- Python/ceval.c | 172 +- Python/codecs.c | 4 +- Python/compile.c | 19 +- Python/dynload_hpux.c | 4 +- Python/errors.c | 51 +- Python/exceptions.c | 1943 ----- Python/getcwd.c | 4 + Python/graminit.c | 35 +- Python/import.c | 75 +- Python/mystrtoul.c | 238 +- Python/pystrtod.c | 4 +- Python/pythonrun.c | 12 +- Python/structmember.c | 11 +- Python/thread_nt.h | 4 +- README | 7 +- Tools/msi/msi.py | 10 +- Tools/pybench/Arithmetic.py | 1 - Tools/pybench/Calls.py | 1 - Tools/pybench/CommandLine.py | 32 +- Tools/pybench/Constructs.py | 1 - Tools/pybench/Dict.py | 71 +- Tools/pybench/Exceptions.py | 4 +- Tools/pybench/Imports.py | 5 +- Tools/pybench/Instances.py | 2 - Tools/pybench/Lists.py | 31 +- Tools/pybench/Lookups.py | 1 - Tools/pybench/NewInstances.py | 66 + Tools/pybench/Numbers.py | 216 +- Tools/pybench/README | 11 + Tools/pybench/Setup.py | 4 + Tools/pybench/Strings.py | 88 +- Tools/pybench/Tuples.py | 3 +- Tools/pybench/Unicode.py | 16 +- Tools/pybench/pybench.py | 217 +- Tools/unicode/gencodec.py | 6 +- configure | 8722 +++++++++++--------- configure.in | 116 +- pyconfig.h.in | 26 +- setup.py | 78 +- 549 files changed, 47591 insertions(+), 25481 deletions(-) create mode 100644 Doc/howto/urllib2.rst create mode 100755 Doc/lib/libctypes.tex create mode 100644 Doc/lib/libctypesref.tex create mode 100644 Doc/lib/libmsilib.tex create mode 100644 Doc/lib/libsqlite3.tex create mode 100644 Doc/lib/libtrace.tex create mode 100644 Doc/lib/sqlite3/adapter_datetime.py create mode 100644 Doc/lib/sqlite3/adapter_point_1.py create mode 100644 Doc/lib/sqlite3/adapter_point_2.py create mode 100644 Doc/lib/sqlite3/collation_reverse.py create mode 100644 Doc/lib/sqlite3/complete_statement.py create mode 100644 Doc/lib/sqlite3/connect_db_1.py create mode 100644 Doc/lib/sqlite3/connect_db_2.py create mode 100644 Doc/lib/sqlite3/converter_point.py create mode 100644 Doc/lib/sqlite3/countcursors.py create mode 100644 Doc/lib/sqlite3/createdb.py create mode 100644 
Doc/lib/sqlite3/execsql_fetchonerow.py create mode 100644 Doc/lib/sqlite3/execsql_printall_1.py create mode 100644 Doc/lib/sqlite3/execute_1.py create mode 100644 Doc/lib/sqlite3/execute_2.py create mode 100644 Doc/lib/sqlite3/execute_3.py create mode 100644 Doc/lib/sqlite3/executemany_1.py create mode 100644 Doc/lib/sqlite3/executemany_2.py create mode 100644 Doc/lib/sqlite3/executescript.py create mode 100644 Doc/lib/sqlite3/insert_more_people.py create mode 100644 Doc/lib/sqlite3/md5func.py create mode 100644 Doc/lib/sqlite3/mysumaggr.py create mode 100644 Doc/lib/sqlite3/parse_colnames.py create mode 100644 Doc/lib/sqlite3/pysqlite_datetime.py create mode 100644 Doc/lib/sqlite3/row_factory.py create mode 100644 Doc/lib/sqlite3/rowclass.py create mode 100644 Doc/lib/sqlite3/shared_cache.py create mode 100644 Doc/lib/sqlite3/shortcut_methods.py create mode 100644 Doc/lib/sqlite3/simple_tableprinter.py create mode 100644 Doc/lib/sqlite3/text_factory.py delete mode 100644 Lib/ctypes/_loader.py create mode 100644 Lib/ctypes/test/test_find.py delete mode 100644 Lib/ctypes/test/test_posix.py create mode 100644 Lib/ctypes/util.py delete mode 100644 Lib/easy_install.py create mode 100644 Lib/msilib/text.py delete mode 100644 Lib/msilib/uisample.py delete mode 100644 Lib/pkg_resources.py delete mode 100644 Lib/setuptools.egg-info/PKG-INFO delete mode 100755 Lib/setuptools.egg-info/entry_points.txt delete mode 100644 Lib/setuptools.egg-info/top_level.txt delete mode 100644 Lib/setuptools.egg-info/zip-safe delete mode 100644 Lib/setuptools/__init__.py delete mode 100755 Lib/setuptools/archive_util.py delete mode 100755 Lib/setuptools/cli.exe delete mode 100644 Lib/setuptools/command/__init__.py delete mode 100755 Lib/setuptools/command/alias.py delete mode 100644 Lib/setuptools/command/bdist_egg.py delete mode 100755 Lib/setuptools/command/bdist_rpm.py delete mode 100644 Lib/setuptools/command/build_ext.py delete mode 100644 Lib/setuptools/command/build_py.py delete mode 100755 Lib/setuptools/command/develop.py delete mode 100755 Lib/setuptools/command/easy_install.py delete mode 100755 Lib/setuptools/command/egg_info.py delete mode 100644 Lib/setuptools/command/install.py delete mode 100755 Lib/setuptools/command/install_egg_info.py delete mode 100644 Lib/setuptools/command/install_lib.py delete mode 100755 Lib/setuptools/command/install_scripts.py delete mode 100755 Lib/setuptools/command/rotate.py delete mode 100755 Lib/setuptools/command/saveopts.py delete mode 100755 Lib/setuptools/command/sdist.py delete mode 100755 Lib/setuptools/command/setopt.py delete mode 100644 Lib/setuptools/command/test.py delete mode 100755 Lib/setuptools/command/upload.py delete mode 100644 Lib/setuptools/depends.py delete mode 100644 Lib/setuptools/dist.py delete mode 100644 Lib/setuptools/extension.py delete mode 100755 Lib/setuptools/gui.exe delete mode 100755 Lib/setuptools/package_index.py delete mode 100755 Lib/setuptools/sandbox.py delete mode 100755 Lib/setuptools/site-patch.py delete mode 100644 Lib/setuptools/tests/__init__.py delete mode 100755 Lib/setuptools/tests/api_tests.txt delete mode 100644 Lib/setuptools/tests/test_resources.py create mode 100644 Lib/struct.py create mode 100644 Lib/test/test_bigmem.py create mode 100644 Lib/test/test_old_mailbox.py delete mode 100644 Lib/test/test_setuptools.py create mode 100644 Lib/test/threaded_import_hangers.py create mode 100644 Mac/OSX/BuildScript/README.txt create mode 100755 Mac/OSX/BuildScript/build-installer.py create mode 100644 
Mac/OSX/BuildScript/ncurses-5.5.patch create mode 100644 Mac/OSX/BuildScript/resources/ReadMe.txt create mode 100644 Mac/OSX/BuildScript/resources/Welcome.rtf create mode 100644 Mac/OSX/BuildScript/resources/background.jpg create mode 100755 Mac/OSX/BuildScript/scripts/postflight.documentation create mode 100755 Mac/OSX/BuildScript/scripts/postflight.framework create mode 100755 Mac/OSX/BuildScript/scripts/postflight.patch-profile delete mode 100644 Mac/OSX/Dist/README.txt delete mode 100755 Mac/OSX/Dist/build delete mode 100644 Mac/OSX/Dist/example-pimp-database.plist delete mode 100755 Mac/OSX/Dist/makedmg delete mode 100644 Mac/OSX/Dist/resources/ReadMe.txt delete mode 100644 Mac/OSX/Dist/resources/Welcome.rtf delete mode 100755 Mac/OSX/Dist/resources/postflight create mode 100644 Mac/OSX/IDLE/Info.plist create mode 100644 Mac/OSX/IDLE/Makefile.in create mode 100644 Mac/OSX/IDLE/idlemain.py create mode 100644 Mac/OSX/Icons/Disk Image.icns create mode 100644 Mac/OSX/Icons/IDLE.icns create mode 100644 Mac/OSX/Icons/Python Folder.icns create mode 100644 Mac/OSX/Icons/PythonCompiled.icns create mode 100644 Mac/OSX/Icons/PythonLauncher.icns create mode 100644 Mac/OSX/Icons/PythonSource.icns create mode 100644 Mac/OSX/Icons/ReadMe.txt delete mode 100644 Mac/OSX/Makefile create mode 100644 Mac/OSX/Makefile.in delete mode 100644 Mac/OSX/PythonLauncher/English.lproj/InfoPlist.strings create mode 100644 Mac/OSX/PythonLauncher/English.lproj/PreferenceWindow.nib/classes.nib create mode 100644 Mac/OSX/PythonLauncher/English.lproj/PreferenceWindow.nib/info.nib create mode 100644 Mac/OSX/PythonLauncher/English.lproj/PreferenceWindow.nib/objects.nib create mode 100644 Mac/OSX/PythonLauncher/Info.plist create mode 100644 Mac/OSX/PythonLauncher/Makefile.in delete mode 100644 Mac/OSX/PythonLauncher/PythonCompiled.icns delete mode 100644 Mac/OSX/PythonLauncher/PythonInterpreter.icns delete mode 100755 Mac/OSX/PythonLauncher/PythonLauncher.pbproj/project.pbxproj delete mode 100644 Mac/OSX/PythonLauncher/PythonSource.icns delete mode 100644 Mac/OSX/PythonLauncher/PythonWSource.icns create mode 100644 Mac/OSX/Tools/pythonw.c delete mode 100644 Mac/OSX/fixversions.py delete mode 100644 Mac/OSX/sample_sitecustomize.py create mode 100644 Modules/_ctypes/libffi/src/darwin/ffitarget.h create mode 100644 Modules/_ctypes/libffi/src/x86/darwin.S create mode 100644 Modules/_ctypes/libffi/src/x86/ffi_darwin.c create mode 100644 Modules/_ctypes/libffi_msvc/mingwin32.S delete mode 100644 Modules/_sqlite/adapters.c delete mode 100644 Modules/_sqlite/adapters.h delete mode 100644 Modules/_sqlite/converters.c delete mode 100644 Modules/_sqlite/converters.h create mode 100644 Modules/_struct.c create mode 100644 Modules/expat/expat_config.h delete mode 100644 Modules/structmodule.c create mode 100644 Objects/exceptions.c create mode 100644 Objects/stringlib/README.txt create mode 100644 Objects/stringlib/count.h create mode 100644 Objects/stringlib/fastsearch.h create mode 100644 Objects/stringlib/find.h create mode 100644 Objects/stringlib/partition.h create mode 100644 PC/errmap.h create mode 100644 PC/errmap.mak delete mode 100644 PC/example_nt/example.def create mode 100644 PC/generrmap.c create mode 100644 PCbuild8/Uninstal.wse create mode 100644 PCbuild8/_bsddb.vcproj create mode 100644 PCbuild8/_ctypes.vcproj create mode 100644 PCbuild8/_ctypes_test.vcproj create mode 100644 PCbuild8/_elementtree.vcproj create mode 100644 PCbuild8/_msi.vcproj create mode 100644 PCbuild8/_socket.vcproj create mode 100644 
PCbuild8/_sqlite3.vcproj create mode 100644 PCbuild8/_ssl.mak create mode 100644 PCbuild8/_ssl.vcproj create mode 100644 PCbuild8/_testcapi.vcproj create mode 100644 PCbuild8/_tkinter.vcproj create mode 100644 PCbuild8/build_ssl.py create mode 100644 PCbuild8/bz2.vcproj create mode 100644 PCbuild8/db.build create mode 100644 PCbuild8/field3.py create mode 100644 PCbuild8/installer.bmp create mode 100644 PCbuild8/make_buildinfo.c create mode 100644 PCbuild8/make_buildinfo.vcproj create mode 100644 PCbuild8/make_versioninfo.vcproj create mode 100644 PCbuild8/pcbuild.sln create mode 100644 PCbuild8/pyexpat.vcproj create mode 100644 PCbuild8/python.build create mode 100644 PCbuild8/python.iss create mode 100644 PCbuild8/python.vcproj create mode 100644 PCbuild8/python20.wse create mode 100644 PCbuild8/pythoncore.vcproj create mode 100644 PCbuild8/pythoncore_link.txt create mode 100644 PCbuild8/pythoncore_pgo.vcproj create mode 100644 PCbuild8/pythoncore_pgo_link.txt create mode 100644 PCbuild8/pythonw.vcproj create mode 100644 PCbuild8/readme.txt create mode 100644 PCbuild8/rmpyc.py create mode 100644 PCbuild8/rt.bat create mode 100644 PCbuild8/select.vcproj create mode 100644 PCbuild8/unicodedata.vcproj create mode 100644 PCbuild8/w9xpopen.vcproj create mode 100644 PCbuild8/winsound.vcproj delete mode 100644 Python/exceptions.c create mode 100755 Tools/pybench/NewInstances.py diff --git a/Doc/ACKS b/Doc/ACKS index 14a5ca8..bbb3241 100644 --- a/Doc/ACKS +++ b/Doc/ACKS @@ -195,6 +195,7 @@ Dan Wolfe Steven Work Thomas Wouters Ka-Ping Yee +Rory Yorke Moshe Zadka Milan Zamazal Cheng Zhang diff --git a/Doc/Makefile.deps b/Doc/Makefile.deps index 20c0688..11c6de0 100644 --- a/Doc/Makefile.deps +++ b/Doc/Makefile.deps @@ -155,6 +155,7 @@ LIBFILES= $(MANSTYLES) $(INDEXSTYLES) $(COMMONTEX) \ lib/required_2.py \ lib/libtempfile.tex \ lib/liberrno.tex \ + lib/libctypes.tex \ lib/libsomeos.tex \ lib/libsignal.tex \ lib/libsocket.tex \ @@ -179,6 +180,7 @@ LIBFILES= $(MANSTYLES) $(INDEXSTYLES) $(COMMONTEX) \ lib/libprofile.tex \ lib/libhotshot.tex \ lib/libtimeit.tex \ + lib/libtrace.tex \ lib/libcgi.tex \ lib/libcgitb.tex \ lib/liburllib.tex \ @@ -306,6 +308,7 @@ LIBFILES= $(MANSTYLES) $(INDEXSTYLES) $(COMMONTEX) \ lib/libgetpass.tex \ lib/libshutil.tex \ lib/librepr.tex \ + lib/libmsilib.tex \ lib/libmsvcrt.tex \ lib/libwinreg.tex \ lib/libwinsound.tex \ @@ -348,7 +351,8 @@ LIBFILES= $(MANSTYLES) $(INDEXSTYLES) $(COMMONTEX) \ lib/libturtle.tex \ lib/libtarfile.tex \ lib/libcsv.tex \ - lib/libcfgparser.tex + lib/libcfgparser.tex \ + lib/libsqlite3.tex # LaTeX source files for Macintosh Library Modules. MACFILES= $(HOWTOSTYLES) $(INDEXSTYLES) $(COMMONTEX) \ diff --git a/Doc/api/abstract.tex b/Doc/api/abstract.tex index f50ebc4..7c742a0 100644 --- a/Doc/api/abstract.tex +++ b/Doc/api/abstract.tex @@ -255,6 +255,8 @@ determination. \NULL, indicating that no arguments are provided. Returns the result of the call on success, or \NULL{} on failure. This is the equivalent of the Python expression \samp{\var{callable}(*\var{args})}. + Note that if you only pass \ctype{PyObject *} args, + \cfunction{PyObject_CallFunctionObjArgs} is a faster alternative. \end{cfuncdesc} @@ -268,6 +270,8 @@ determination. indicating that no arguments are provided. Returns the result of the call on success, or \NULL{} on failure. This is the equivalent of the Python expression \samp{\var{o}.\var{method}(\var{args})}. + Note that if you only pass \ctype{PyObject *} args, + \cfunction{PyObject_CallMethodObjArgs} is a faster alternative. 
\end{cfuncdesc} @@ -624,7 +628,7 @@ determination. Returns the result of right shifting \var{o1} by \var{o2} on success, or \NULL{} on failure. The operation is done \emph{in-place} when \var{o1} supports it. This is the equivalent - of the Python statement \samp{\var{o1} >\code{>=} \var{o2}}. + of the Python statement \samp{\var{o1} >>= \var{o2}}. \end{cfuncdesc} diff --git a/Doc/api/concrete.tex b/Doc/api/concrete.tex index 1982bae..10247ab 100644 --- a/Doc/api/concrete.tex +++ b/Doc/api/concrete.tex @@ -245,7 +245,7 @@ booleans. The following macros are available, however. \end{csimplemacrodesc} \begin{cfuncdesc}{PyObject*}{PyBool_FromLong}{long v} - Return a new reference to \constant{Py_True} or \constant{Py_False} + Return a new reference to \constant{Py_True} or \constant{Py_False} depending on the truth value of \var{v}. \versionadded{2.3} \end{cfuncdesc} @@ -618,12 +618,24 @@ parameter and are called with a non-string parameter. exactly to the format characters in the \var{format} string. The following format characters are allowed: + % This should be exactly the same as the table in PyErr_Format. + % One should just refer to the other. + + % The descriptions for %zd and %zu are wrong, but the truth is complicated + % because not all compilers support the %z width modifier -- we fake it + % when necessary via interpolating PY_FORMAT_SIZE_T. + + % %u, %lu, %zu should have "new in Python 2.5" blurbs. + \begin{tableiii}{l|l|l}{member}{Format Characters}{Type}{Comment} \lineiii{\%\%}{\emph{n/a}}{The literal \% character.} \lineiii{\%c}{int}{A single character, represented as an C int.} \lineiii{\%d}{int}{Exactly equivalent to \code{printf("\%d")}.} + \lineiii{\%u}{unsigned int}{Exactly equivalent to \code{printf("\%u")}.} \lineiii{\%ld}{long}{Exactly equivalent to \code{printf("\%ld")}.} - \lineiii{\%zd}{long}{Exactly equivalent to \code{printf("\%zd")}.} + \lineiii{\%lu}{unsigned long}{Exactly equivalent to \code{printf("\%lu")}.} + \lineiii{\%zd}{Py_ssize_t}{Exactly equivalent to \code{printf("\%zd")}.} + \lineiii{\%zu}{size_t}{Exactly equivalent to \code{printf("\%zu")}.} \lineiii{\%i}{int}{Exactly equivalent to \code{printf("\%i")}.} \lineiii{\%x}{int}{Exactly equivalent to \code{printf("\%x")}.} \lineiii{\%s}{char*}{A null-terminated C character array.} @@ -632,6 +644,10 @@ parameter and are called with a non-string parameter. guaranteed to start with the literal \code{0x} regardless of what the platform's \code{printf} yields.} \end{tableiii} + + An unrecognized format character causes all the rest of the format + string to be copied as-is to the result string, and any extra + arguments discarded. \end{cfuncdesc} \begin{cfuncdesc}{PyObject*}{PyString_FromFormatV}{const char *format, @@ -687,7 +703,7 @@ parameter and are called with a non-string parameter. \var{size})}. It must not be deallocated. If \var{string} is a Unicode object, this function computes the default encoding of \var{string} and operates on that. If \var{string} is not a string - object at all, \cfunction{PyString_AsStringAndSize()} returns + object at all, \cfunction{PyString_AsStringAndSize()} returns \code{-1} and raises \exception{TypeError}. \end{cfuncdesc} @@ -949,7 +965,7 @@ These APIs can be used for fast direct character conversions: \end{cfuncdesc} \begin{cfuncdesc}{double}{Py_UNICODE_TONUMERIC}{Py_UNICODE ch} - Return the character \var{ch} converted to a (positive) double. + Return the character \var{ch} converted to a double. Return \code{-1.0} if this is not possible. 
This macro does not raise exceptions. \end{cfuncdesc} @@ -1393,7 +1409,7 @@ The following codec API is special in that maps Unicode to Unicode. The \var{mapping} table must map Unicode ordinal integers to Unicode ordinal integers or None (causing deletion of the character). - Mapping tables need only provide the method{__getitem__()} + Mapping tables need only provide the \method{__getitem__()} interface; dictionaries and sequences work well. Unmapped character ordinals (ones which cause a \exception{LookupError}) are left untouched and are copied as-is. @@ -1494,7 +1510,7 @@ They all return \NULL{} or \code{-1} if an exception occurs. Return 1 if \var{substr} matches \var{str}[\var{start}:\var{end}] at the given tail end (\var{direction} == -1 means to do a prefix match, \var{direction} == 1 a suffix match), 0 otherwise. - Return \code{-1} if an error occurred. + Return \code{-1} if an error occurred. \end{cfuncdesc} \begin{cfuncdesc}{Py_ssize_t}{PyUnicode_Find}{PyObject *str, @@ -3013,7 +3029,7 @@ Macros for the convenience of modules implementing the DB API: \subsection{Set Objects \label{setObjects}} -\sectionauthor{Raymond D. Hettinger}{python@rcn.com} +\sectionauthor{Raymond D. Hettinger}{python@rcn.com} \obindex{set} \obindex{frozenset} @@ -3022,8 +3038,8 @@ Macros for the convenience of modules implementing the DB API: This section details the public API for \class{set} and \class{frozenset} objects. Any functionality not listed below is best accessed using the either the abstract object protocol (including -\cfunction{PyObject_CallMethod()}, \cfunction{PyObject_RichCompareBool()}, -\cfunction{PyObject_Hash()}, \cfunction{PyObject_Repr()}, +\cfunction{PyObject_CallMethod()}, \cfunction{PyObject_RichCompareBool()}, +\cfunction{PyObject_Hash()}, \cfunction{PyObject_Repr()}, \cfunction{PyObject_IsTrue()}, \cfunction{PyObject_Print()}, and \cfunction{PyObject_GetIter()}) or the abstract number protocol (including @@ -3040,7 +3056,7 @@ or the abstract number protocol (including block of memory for medium and large sized sets (much like list storage). None of the fields of this structure should be considered public and are subject to change. All access should be done through the - documented API rather than by manipulating the values in the structure. + documented API rather than by manipulating the values in the structure. \end{ctypedesc} @@ -3059,7 +3075,7 @@ The following type check macros work on pointers to any Python object. Likewise, the constructor functions work with any iterable Python object. \begin{cfuncdesc}{int}{PyAnySet_Check}{PyObject *p} - Return true if \var{p} is a \class{set} object, a \class{frozenset} + Return true if \var{p} is a \class{set} object, a \class{frozenset} object, or an instance of a subtype. \end{cfuncdesc} @@ -3112,7 +3128,7 @@ The following functions and macros are available for instances of function does not automatically convert unhashable sets into temporary frozensets. Raise a \exception{TypeError} if the \var{key} is unhashable. Raise \exception{PyExc_SystemError} if \var{anyset} is not a \class{set}, - \class{frozenset}, or an instance of a subtype. + \class{frozenset}, or an instance of a subtype. \end{cfuncdesc} The following functions are available for instances of \class{set} or @@ -3134,7 +3150,7 @@ its subtypes but not for instances of \class{frozenset} or its subtypes. unhashable. Unlike the Python \method{discard()} method, this function does not automatically convert unhashable sets into temporary frozensets. 
Raise \exception{PyExc_SystemError} if \var{set} is an not an instance - of \class{set} or its subtype. + of \class{set} or its subtype. \end{cfuncdesc} \begin{cfuncdesc}{PyObject*}{PySet_Pop}{PyObject *set} @@ -3142,7 +3158,7 @@ its subtypes but not for instances of \class{frozenset} or its subtypes. and removes the object from the \var{set}. Return \NULL{} on failure. Raise \exception{KeyError} if the set is empty. Raise a \exception{SystemError} if \var{set} is an not an instance - of \class{set} or its subtype. + of \class{set} or its subtype. \end{cfuncdesc} \begin{cfuncdesc}{int}{PySet_Clear}{PyObject *set} diff --git a/Doc/api/exceptions.tex b/Doc/api/exceptions.tex index 62f713b..6dbe818 100644 --- a/Doc/api/exceptions.tex +++ b/Doc/api/exceptions.tex @@ -132,13 +132,32 @@ error indicator for each thread. codes, similar to \cfunction{printf()}. The \code{width.precision} before a format code is parsed, but the width part is ignored. - \begin{tableii}{c|l}{character}{Character}{Meaning} - \lineii{c}{Character, as an \ctype{int} parameter} - \lineii{d}{Number in decimal, as an \ctype{int} parameter} - \lineii{x}{Number in hexadecimal, as an \ctype{int} parameter} - \lineii{s}{A string, as a \ctype{char *} parameter} - \lineii{p}{A hex pointer, as a \ctype{void *} parameter} - \end{tableii} + % This should be exactly the same as the table in PyString_FromFormat. + % One should just refer to the other. + + % The descriptions for %zd and %zu are wrong, but the truth is complicated + % because not all compilers support the %z width modifier -- we fake it + % when necessary via interpolating PY_FORMAT_SIZE_T. + + % %u, %lu, %zu should have "new in Python 2.5" blurbs. + + \begin{tableiii}{l|l|l}{member}{Format Characters}{Type}{Comment} + \lineiii{\%\%}{\emph{n/a}}{The literal \% character.} + \lineiii{\%c}{int}{A single character, represented as an C int.} + \lineiii{\%d}{int}{Exactly equivalent to \code{printf("\%d")}.} + \lineiii{\%u}{unsigned int}{Exactly equivalent to \code{printf("\%u")}.} + \lineiii{\%ld}{long}{Exactly equivalent to \code{printf("\%ld")}.} + \lineiii{\%lu}{unsigned long}{Exactly equivalent to \code{printf("\%lu")}.} + \lineiii{\%zd}{Py_ssize_t}{Exactly equivalent to \code{printf("\%zd")}.} + \lineiii{\%zu}{size_t}{Exactly equivalent to \code{printf("\%zu")}.} + \lineiii{\%i}{int}{Exactly equivalent to \code{printf("\%i")}.} + \lineiii{\%x}{int}{Exactly equivalent to \code{printf("\%x")}.} + \lineiii{\%s}{char*}{A null-terminated C character array.} + \lineiii{\%p}{void*}{The hex representation of a C pointer. + Mostly equivalent to \code{printf("\%p")} except that it is + guaranteed to start with the literal \code{0x} regardless of + what the platform's \code{printf} yields.} + \end{tableiii} An unrecognized format character causes all the rest of the format string to be copied as-is to the result string, and any extra @@ -272,8 +291,8 @@ error indicator for each thread. command line documentation. There is no C API for warning control. \end{cfuncdesc} -\begin{cfuncdesc}{int}{PyErr_WarnExplicit}{PyObject *category, - const char *message, const char *filename, int lineno, +\begin{cfuncdesc}{int}{PyErr_WarnExplicit}{PyObject *category, + const char *message, const char *filename, int lineno, const char *module, PyObject *registry} Issue a warning message with explicit control over all warning attributes. This is a straightforward wrapper around the Python @@ -314,12 +333,14 @@ error indicator for each thread. 
The \var{name} argument must be the name of the new exception, a C string of the form \code{module.class}. The \var{base} and \var{dict} arguments are normally \NULL. This creates a class - object derived from the root for all exceptions, the built-in name - \exception{Exception} (accessible in C as \cdata{PyExc_Exception}). + object derived from \exception{Exception} (accessible in C as + \cdata{PyExc_Exception}). + The \member{__module__} attribute of the new class is set to the first part (up to the last dot) of the \var{name} argument, and the class name is set to the last part (after the last dot). The - \var{base} argument can be used to specify an alternate base class. + \var{base} argument can be used to specify alternate base classes; + it can either be only one class or a tuple of classes. The \var{dict} argument can be used to specify a dictionary of class variables and methods. \end{cfuncdesc} @@ -399,5 +420,5 @@ are derived from \exception{BaseException}. \withsubitem{(built-in exception)}{\ttindex{BaseException}} String exceptions are still supported in the interpreter to allow -existing code to run unmodified, but this will also change in a future +existing code to run unmodified, but this will also change in a future release. diff --git a/Doc/api/newtypes.tex b/Doc/api/newtypes.tex index 2d758b0..28f77f7 100644 --- a/Doc/api/newtypes.tex +++ b/Doc/api/newtypes.tex @@ -883,8 +883,39 @@ The following three fields only exist if the \begin{cmemberdesc}{PyTypeObject}{traverseproc}{tp_traverse} An optional pointer to a traversal function for the garbage collector. This is only used if the \constant{Py_TPFLAGS_HAVE_GC} - flag bit is set. More information in section - \ref{supporting-cycle-detection} about garbage collection. + flag bit is set. More information about Python's garbage collection + scheme can be found in section \ref{supporting-cycle-detection}. + + The \member{tp_traverse} pointer is used by the garbage collector + to detect reference cycles. A typical implementation of a + \member{tp_traverse} function simply calls \cfunction{Py_VISIT()} on + each of the instance's members that are Python objects. For exampe, this + is function \cfunction{local_traverse} from the \module{thread} extension + module: + + \begin{verbatim} + static int + local_traverse(localobject *self, visitproc visit, void *arg) + { + Py_VISIT(self->args); + Py_VISIT(self->kw); + Py_VISIT(self->dict); + return 0; + } + \end{verbatim} + + Note that \cfunction{Py_VISIT()} is called only on those members that can + participate in reference cycles. Although there is also a + \samp{self->key} member, it can only be \NULL{} or a Python string and + therefore cannot be part of a reference cycle. + + On the other hand, even if you know a member can never be part of a cycle, + as a debugging aid you may want to visit it anyway just so the + \module{gc} module's \function{get_referents()} function will include it. + + Note that \cfunction{Py_VISIT()} requires the \var{visit} and \var{arg} + parameters to \cfunction{local_traverse} to have these specific names; + don't name them just anything. This field is inherited by subtypes together with \member{tp_clear} and the \constant{Py_TPFLAGS_HAVE_GC} flag bit: the flag bit, @@ -896,8 +927,57 @@ The following three fields only exist if the \begin{cmemberdesc}{PyTypeObject}{inquiry}{tp_clear} An optional pointer to a clear function for the garbage collector. This is only used if the \constant{Py_TPFLAGS_HAVE_GC} flag bit is - set. 
More information in section - \ref{supporting-cycle-detection} about garbage collection. + set. + + The \member{tp_clear} member function is used to break reference + cycles in cyclic garbage detected by the garbage collector. Taken + together, all \member{tp_clear} functions in the system must combine to + break all reference cycles. This is subtle, and if in any doubt supply a + \member{tp_clear} function. For example, the tuple type does not + implement a \member{tp_clear} function, because it's possible to prove + that no reference cycle can be composed entirely of tuples. Therefore + the \member{tp_clear} functions of other types must be sufficient to + break any cycle containing a tuple. This isn't immediately obvious, and + there's rarely a good reason to avoid implementing \member{tp_clear}. + + Implementations of \member{tp_clear} should drop the instance's + references to those of its members that may be Python objects, and set + its pointers to those members to \NULL{}, as in the following example: + + \begin{verbatim} + static int + local_clear(localobject *self) + { + Py_CLEAR(self->key); + Py_CLEAR(self->args); + Py_CLEAR(self->kw); + Py_CLEAR(self->dict); + return 0; + } + \end{verbatim} + + The \cfunction{Py_CLEAR()} macro should be used, because clearing + references is delicate: the reference to the contained object must not be + decremented until after the pointer to the contained object is set to + \NULL{}. This is because decrementing the reference count may cause + the contained object to become trash, triggering a chain of reclamation + activity that may include invoking arbitrary Python code (due to + finalizers, or weakref callbacks, associated with the contained object). + If it's possible for such code to reference \var{self} again, it's + important that the pointer to the contained object be \NULL{} at that + time, so that \var{self} knows the contained object can no longer be + used. The \cfunction{Py_CLEAR()} macro performs the operations in a + safe order. + + Because the goal of \member{tp_clear} functions is to break reference + cycles, it's not necessary to clear contained objects like Python strings + or Python integers, which can't participate in reference cycles. + On the other hand, it may be convenient to clear all contained Python + objects, and write the type's \member{tp_dealloc} function to + invoke \member{tp_clear}. + + More information about Python's garbage collection + scheme can be found in section \ref{supporting-cycle-detection}. This field is inherited by subtypes together with \member{tp_clear} and the \constant{Py_TPFLAGS_HAVE_GC} flag bit: the flag bit, @@ -910,10 +990,10 @@ The following three fields only exist if the An optional pointer to the rich comparison function. The signature is the same as for \cfunction{PyObject_RichCompare()}. - The function should return \code{1} if the requested comparison - returns true, \code{0} if it returns false. It should return - \code{-1} and set an exception condition when an error occurred - during the comparison. + The function should return the result of the comparison (usually + \code{Py_True} or \code{Py_False}). If the comparison is undefined, + it must return \code{Py_NotImplemented}, if another error occurred + it must return \code{NULL} and set an exception condition. 
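As an editorial aside (not part of the patch): the same convention is visible from Python code, where a rich comparison method returns the result of the comparison, or \code{NotImplemented} when the comparison is undefined for the operand types. A minimal sketch, using a hypothetical \class{Account} class:

\begin{verbatim}
class Account(object):
    def __init__(self, number):
        self.number = number

    def __eq__(self, other):
        # Mirror the C-level rule: report "comparison undefined" rather
        # than guessing or raising when the other operand is unsupported.
        if not isinstance(other, Account):
            return NotImplemented
        return self.number == other.number
\end{verbatim}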
This field is inherited by subtypes together with \member{tp_compare} and \member{tp_hash}: a subtype inherits all diff --git a/Doc/api/refcounts.dat b/Doc/api/refcounts.dat index 7bba011..ab6d865 100644 --- a/Doc/api/refcounts.dat +++ b/Doc/api/refcounts.dat @@ -1147,7 +1147,7 @@ PySet_Discard:PyObject*:key:-1:no effect if key not found PySet_New:PyObject*::+1: PySet_New:PyObject*:iterable:0: -PySet_Pop:PyObject*::0:or returns NULL and raises KeyError if set is empty +PySet_Pop:PyObject*::+1:or returns NULL and raises KeyError if set is empty PySet_Pop:PyObject*:set:0: PySet_Size:int::: diff --git a/Doc/commontex/boilerplate.tex b/Doc/commontex/boilerplate.tex index 55a4184..b4c9f48 100644 --- a/Doc/commontex/boilerplate.tex +++ b/Doc/commontex/boilerplate.tex @@ -5,5 +5,5 @@ Email: \email{docs@python.org} } -\date{5th April 2006} % XXX update before final release! +\date{\today} % XXX update before final release! \input{patchlevel} % include Python version information diff --git a/Doc/dist/dist.tex b/Doc/dist/dist.tex index 3ba51d0..9970ec2 100644 --- a/Doc/dist/dist.tex +++ b/Doc/dist/dist.tex @@ -1760,16 +1760,16 @@ The \command{upload} command uses the username, password, and repository URL from the \file{\$HOME/.pypirc} file (see section~\ref{pypirc} for more on this file). -You can use the \programopt{--sign} option to tell \command{upload} to +You can use the \longprogramopt{sign} option to tell \command{upload} to sign each uploaded file using GPG (GNU Privacy Guard). The \program{gpg} program must be available for execution on the system \envvar{PATH}. You can also specify which key to use for signing -using the \programopt{--identity=\var{name}} option. +using the \longprogramopt{identity=\var{name}} option. Other \command{upload} options include -\programopt{--repository=\var{url}} (which lets you override the +\longprogramopt{repository=\var{url}} (which lets you override the repository setting from \file{\$HOME/.pypirc}), and -\programopt{--show-response} (which displays the full response text +\longprogramopt{show-response} (which displays the full response text from the PyPI server for help in debugging upload problems). \chapter{Examples} diff --git a/Doc/ext/windows.tex b/Doc/ext/windows.tex index a821094..ca18a1e 100644 --- a/Doc/ext/windows.tex +++ b/Doc/ext/windows.tex @@ -88,7 +88,7 @@ described here are distributed with the Python sources in the Once the Debug build has succeeded, bring up a DOS box, and change to the \file{example_nt\textbackslash Debug} directory. You should now be able to repeat the following session (\code{C>} is - the DOS prompt, \code{>\code{>}>} is the Python prompt; note that + the DOS prompt, \code{>>>} is the Python prompt; note that build information and various debug output from Python may not match this screen dump exactly): diff --git a/Doc/howto/unicode.rst b/Doc/howto/unicode.rst index 0946bdc..f92471a 100644 --- a/Doc/howto/unicode.rst +++ b/Doc/howto/unicode.rst @@ -158,7 +158,7 @@ that are more efficient and convenient. Encodings don't have to handle every possible Unicode character, and most encodings don't. For example, Python's default encoding is the 'ascii' encoding. The rules for converting a Unicode string into the -ASCII encoding are are simple; for each code point: +ASCII encoding are simple; for each code point: 1. If the code point is <128, each byte is the same as the value of the code point. 
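As an editorial aside (not part of the patch), the rule above is easy to check from a Python 2.x interpreter; a minimal sketch: ::

    >>> u'abc'.encode('ascii')        # every code point is < 128
    'abc'
    >>> u'caf\xe9'.encode('ascii')    # U+00E9 is >= 128, so encoding fails
    Traceback (most recent call last):
      ...
    UnicodeEncodeError: 'ascii' codec can't encode character u'\xe9' in position 3: ordinal not in range(128)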
@@ -721,7 +721,7 @@ Revision History and Acknowledgements Thanks to the following people who have noted errors or offered suggestions on this article: Nicholas Bastin, Marius Gedminas, Kent Johnson, Ken Krugler, -Marc-André Lemburg, Martin von Löwis. +Marc-André Lemburg, Martin von Löwis, Chad Whitacre. Version 1.0: posted August 5 2005. diff --git a/Doc/howto/urllib2.rst b/Doc/howto/urllib2.rst new file mode 100644 index 0000000..69ce508 --- /dev/null +++ b/Doc/howto/urllib2.rst @@ -0,0 +1,598 @@ +============================================== + HOWTO Fetch Internet Resources Using urllib2 +============================================== +---------------------------- + Fetching URLs With Python +---------------------------- + + +.. note:: + + There is an French translation of an earlier revision of this + HOWTO, available at `urllib2 - Le Manuel manquant + `_. + +.. contents:: urllib2 Tutorial + + +Introduction +============ + +.. sidebar:: Related Articles + + You may also find useful the following article on fetching web + resources with Python : + + * `Basic Authentication `_ + + A tutorial on *Basic Authentication*, with examples in Python. + + This HOWTO is written by `Michael Foord + `_. + +**urllib2** is a `Python `_ module for fetching URLs +(Uniform Resource Locators). It offers a very simple interface, in the form of +the *urlopen* function. This is capable of fetching URLs using a variety +of different protocols. It also offers a slightly more complex +interface for handling common situations - like basic authentication, +cookies, proxies and so on. These are provided by objects called +handlers and openers. + +urllib2 supports fetching URLs for many "URL schemes" (identified by the string +before the ":" in URL - for example "ftp" is the URL scheme of +"ftp://python.org/") using their associated network protocols (e.g. FTP, HTTP). +This tutorial focuses on the most common case, HTTP. + +For straightforward situations *urlopen* is very easy to use. But as +soon as you encounter errors or non-trivial cases when opening HTTP +URLs, you will need some understanding of the HyperText Transfer +Protocol. The most comprehensive and authoritative reference to HTTP +is :RFC:`2616`. This is a technical document and not intended to be +easy to read. This HOWTO aims to illustrate using *urllib2*, with +enough detail about HTTP to help you through. It is not intended to +replace the `urllib2 docs `_ , +but is supplementary to them. + + +Fetching URLs +============= + +The simplest way to use urllib2 is as follows : :: + + import urllib2 + response = urllib2.urlopen('http://python.org/') + html = response.read() + +Many uses of urllib2 will be that simple (note that instead of an +'http:' URL we could have used an URL starting with 'ftp:', 'file:', +etc.). However, it's the purpose of this tutorial to explain the more +complicated cases, concentrating on HTTP. + +HTTP is based on requests and responses - the client makes requests +and servers send responses. urllib2 mirrors this with a ``Request`` +object which represents the HTTP request you are making. In its +simplest form you create a Request object that specifies the URL you +want to fetch. Calling ``urlopen`` with this Request object returns a +response object for the URL requested. 
This response is a file-like +object, which means you can for example call .read() on the response : +:: + + import urllib2 + + req = urllib2.Request('http://www.voidspace.org.uk') + response = urllib2.urlopen(req) + the_page = response.read() + +Note that urllib2 makes use of the same Request interface to handle +all URL schemes. For example, you can make an FTP request like so: :: + + req = urllib2.Request('ftp://example.com/') + +In the case of HTTP, there are two extra things that Request objects +allow you to do: First, you can pass data to be sent to the server. +Second, you can pass extra information ("metadata") *about* the data +or about the request itself, to the server - this information is sent +as HTTP "headers". Let's look at each of these in turn. + +Data +---- + +Sometimes you want to send data to a URL (often the URL will refer to +a CGI (Common Gateway Interface) script [#]_ or other web +application). With HTTP, this is often done using what's known as a +**POST** request. This is often what your browser does when you submit +an HTML form that you filled in on the web. Not all POSTs have to come +from forms: you can use a POST to transmit arbitrary data to your own +application. In the common case of HTML forms, the data needs to be +encoded in a standard way, and then passed to the Request object as +the ``data`` argument. The encoding is done using a function from the +``urllib`` library *not* from ``urllib2``. :: + + import urllib + import urllib2 + + url = 'http://www.someserver.com/cgi-bin/register.cgi' + values = {'name' : 'Michael Foord', + 'location' : 'Northampton', + 'language' : 'Python' } + + data = urllib.urlencode(values) + req = urllib2.Request(url, data) + response = urllib2.urlopen(req) + the_page = response.read() + +Note that other encodings are sometimes required (e.g. for file upload +from HTML forms - see +`HTML Specification, Form Submission `_ +for more details). + +If you do not pass the ``data`` argument, urllib2 uses a **GET** +request. One way in which GET and POST requests differ is that POST +requests often have "side-effects": they change the state of the +system in some way (for example by placing an order with the website +for a hundredweight of tinned spam to be delivered to your door). +Though the HTTP standard makes it clear that POSTs are intended to +*always* cause side-effects, and GET requests *never* to cause +side-effects, nothing prevents a GET request from having side-effects, +nor a POST request from having no side-effects. Data can also be +passed in an HTTP GET request by encoding it in the URL itself. + +This is done as follows:: + + >>> import urllib2 + >>> import urllib + >>> data = {} + >>> data['name'] = 'Somebody Here' + >>> data['location'] = 'Northampton' + >>> data['language'] = 'Python' + >>> url_values = urllib.urlencode(data) + >>> print url_values + name=Somebody+Here&language=Python&location=Northampton + >>> url = 'http://www.example.com/example.cgi' + >>> full_url = url + '?' + url_values + >>> data = urllib2.urlopen(full_url) + +Notice that the full URL is created by adding a ``?`` to the URL, followed by +the encoded values. + +Headers +------- + +We'll discuss here one particular HTTP header, to illustrate how to +add headers to your HTTP request. + +Some websites [#]_ dislike being browsed by programs, or send +different versions to different browsers [#]_ .
By default urllib2 +identifies itself as ``Python-urllib/x.y`` (where ``x`` and ``y`` are +the major and minor version numbers of the Python release, +e.g. ``Python-urllib/2.5``), which may confuse the site, or just plain +not work. The way a browser identifies itself is through the +``User-Agent`` header [#]_. When you create a Request object you can +pass a dictionary of headers in. The following example makes the same +request as above, but identifies itself as a version of Internet +Explorer [#]_. :: + + import urllib + import urllib2 + + url = 'http://www.someserver.com/cgi-bin/register.cgi' + user_agent = 'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT)' + values = {'name' : 'Michael Foord', + 'location' : 'Northampton', + 'language' : 'Python' } + headers = { 'User-Agent' : user_agent } + + data = urllib.urlencode(values) + req = urllib2.Request(url, data, headers) + response = urllib2.urlopen(req) + the_page = response.read() + +The response also has two useful methods. See the section on `info and +geturl`_ which comes after we have a look at what happens when things +go wrong. + + +Handling Exceptions +=================== + +*urlopen* raises ``URLError`` when it cannot handle a response (though +as usual with Python APIs, builtin exceptions such as ValueError, +TypeError etc. may also be raised). + +``HTTPError`` is the subclass of ``URLError`` raised in the specific +case of HTTP URLs. + +URLError +-------- + +Often, URLError is raised because there is no network connection (no +route to the specified server), or the specified server doesn't exist. +In this case, the exception raised will have a 'reason' attribute, +which is a tuple containing an error code and a text error message. + +e.g. :: + + >>> req = urllib2.Request('http://www.pretend_server.org') + >>> try: urllib2.urlopen(req) + >>> except URLError, e: + >>> print e.reason + >>> + (4, 'getaddrinfo failed') + + +HTTPError +--------- + +Every HTTP response from the server contains a numeric "status +code". Sometimes the status code indicates that the server is unable +to fulfil the request. The default handlers will handle some of these +responses for you (for example, if the response is a "redirection" +that requests the client fetch the document from a different URL, +urllib2 will handle that for you). For those it can't handle, urlopen +will raise an ``HTTPError``. Typical errors include '404' (page not +found), '403' (request forbidden), and '401' (authentication +required). + +See section 10 of RFC 2616 for a reference on all the HTTP error +codes. + +The ``HTTPError`` instance raised will have an integer 'code' +attribute, which corresponds to the error sent by the server. + +Error Codes +~~~~~~~~~~~ + +Because the default handlers handle redirects (codes in the 300 +range), and codes in the 100-299 range indicate success, you will +usually only see error codes in the 400-599 range. + +``BaseHTTPServer.BaseHTTPRequestHandler.responses`` is a useful +dictionary of response codes in that shows all the response codes used +by RFC 2616. The dictionary is reproduced here for convenience :: + + # Table mapping response codes to messages; entries have the + # form {code: (shortmessage, longmessage)}. 
+ responses = { + 100: ('Continue', 'Request received, please continue'), + 101: ('Switching Protocols', + 'Switching to new protocol; obey Upgrade header'), + + 200: ('OK', 'Request fulfilled, document follows'), + 201: ('Created', 'Document created, URL follows'), + 202: ('Accepted', + 'Request accepted, processing continues off-line'), + 203: ('Non-Authoritative Information', 'Request fulfilled from cache'), + 204: ('No Content', 'Request fulfilled, nothing follows'), + 205: ('Reset Content', 'Clear input form for further input.'), + 206: ('Partial Content', 'Partial content follows.'), + + 300: ('Multiple Choices', + 'Object has several resources -- see URI list'), + 301: ('Moved Permanently', 'Object moved permanently -- see URI list'), + 302: ('Found', 'Object moved temporarily -- see URI list'), + 303: ('See Other', 'Object moved -- see Method and URL list'), + 304: ('Not Modified', + 'Document has not changed since given time'), + 305: ('Use Proxy', + 'You must use proxy specified in Location to access this ' + 'resource.'), + 307: ('Temporary Redirect', + 'Object moved temporarily -- see URI list'), + + 400: ('Bad Request', + 'Bad request syntax or unsupported method'), + 401: ('Unauthorized', + 'No permission -- see authorization schemes'), + 402: ('Payment Required', + 'No payment -- see charging schemes'), + 403: ('Forbidden', + 'Request forbidden -- authorization will not help'), + 404: ('Not Found', 'Nothing matches the given URI'), + 405: ('Method Not Allowed', + 'Specified method is invalid for this server.'), + 406: ('Not Acceptable', 'URI not available in preferred format.'), + 407: ('Proxy Authentication Required', 'You must authenticate with ' + 'this proxy before proceeding.'), + 408: ('Request Timeout', 'Request timed out; try again later.'), + 409: ('Conflict', 'Request conflict.'), + 410: ('Gone', + 'URI no longer exists and has been permanently removed.'), + 411: ('Length Required', 'Client must specify Content-Length.'), + 412: ('Precondition Failed', 'Precondition in headers is false.'), + 413: ('Request Entity Too Large', 'Entity is too large.'), + 414: ('Request-URI Too Long', 'URI is too long.'), + 415: ('Unsupported Media Type', 'Entity body in unsupported format.'), + 416: ('Requested Range Not Satisfiable', + 'Cannot satisfy request range.'), + 417: ('Expectation Failed', + 'Expect condition could not be satisfied.'), + + 500: ('Internal Server Error', 'Server got itself in trouble'), + 501: ('Not Implemented', + 'Server does not support this operation'), + 502: ('Bad Gateway', 'Invalid responses from another server/proxy.'), + 503: ('Service Unavailable', + 'The server cannot process the request due to a high load'), + 504: ('Gateway Timeout', + 'The gateway server did not receive a timely response'), + 505: ('HTTP Version Not Supported', 'Cannot fulfill request.'), + } + +When an error is raised the server responds by returning an HTTP error +code *and* an error page. You can use the ``HTTPError`` instance as a +response on the page returned. This means that as well as the code +attribute, it also has read, geturl, and info, methods. :: + + >>> req = urllib2.Request('http://www.python.org/fish.html') + >>> try: + >>> urllib2.urlopen(req) + >>> except URLError, e: + >>> print e.code + >>> print e.read() + >>> + 404 + + + Error 404: File Not Found + ...... etc... + +Wrapping it Up +-------------- + +So if you want to be prepared for ``HTTPError`` *or* ``URLError`` +there are two basic approaches. I prefer the second approach. 
+ +Number 1 +~~~~~~~~ + +:: + + + from urllib2 import Request, urlopen, URLError, HTTPError + req = Request(someurl) + try: + response = urlopen(req) + except HTTPError, e: + print 'The server couldn\'t fulfill the request.' + print 'Error code: ', e.code + except URLError, e: + print 'We failed to reach a server.' + print 'Reason: ', e.reason + else: + # everything is fine + + +.. note:: + + The ``except HTTPError`` *must* come first, otherwise ``except URLError`` + will *also* catch an ``HTTPError``. + +Number 2 +~~~~~~~~ + +:: + + from urllib2 import Request, urlopen, URLError + req = Request(someurl) + try: + response = urlopen(req) + except URLError, e: + if hasattr(e, 'reason'): + print 'We failed to reach a server.' + print 'Reason: ', e.reason + elif hasattr(e, 'code'): + print 'The server couldn\'t fulfill the request.' + print 'Error code: ', e.code + else: + # everything is fine + + +info and geturl +=============== + +The response returned by urlopen (or the ``HTTPError`` instance) has +two useful methods ``info`` and ``geturl``. + +**geturl** - this returns the real URL of the page fetched. This is +useful because ``urlopen`` (or the opener object used) may have +followed a redirect. The URL of the page fetched may not be the same +as the URL requested. + +**info** - this returns a dictionary-like object that describes the +page fetched, particularly the headers sent by the server. It is +currently an ``httplib.HTTPMessage`` instance. + +Typical headers include 'Content-length', 'Content-type', and so +on. See the +`Quick Reference to HTTP Headers `_ +for a useful listing of HTTP headers with brief explanations of their meaning +and use. + + +Openers and Handlers +==================== + +When you fetch a URL you use an opener (an instance of the perhaps +confusingly-named ``urllib2.OpenerDirector``). Normally we have been using +the default opener - via ``urlopen`` - but you can create custom +openers. Openers use handlers. All the "heavy lifting" is done by the +handlers. Each handler knows how to open URLs for a particular URL +scheme (http, ftp, etc.), or how to handle an aspect of URL opening, +for example HTTP redirections or HTTP cookies. + +You will want to create openers if you want to fetch URLs with +specific handlers installed, for example to get an opener that handles +cookies, or to get an opener that does not handle redirections. + +To create an opener, instantiate an OpenerDirector, and then call +.add_handler(some_handler_instance) repeatedly. + +Alternatively, you can use ``build_opener``, which is a convenience +function for creating opener objects with a single function call. +``build_opener`` adds several handlers by default, but provides a +quick way to add more and/or override the default handlers. + +Other sorts of handlers you might want to can handle proxies, +authentication, and other common but slightly specialised +situations. + +``install_opener`` can be used to make an ``opener`` object the +(global) default opener. This means that calls to ``urlopen`` will use +the opener you have installed. + +Opener objects have an ``open`` method, which can be called directly +to fetch urls in the same way as the ``urlopen`` function: there's no +need to call ``install_opener``, except as a convenience. + + +Basic Authentication +==================== + +To illustrate creating and installing a handler we will use the +``HTTPBasicAuthHandler``. 
For a more detailed discussion of this +subject - including an explanation of how Basic Authentication works - +see the `Basic Authentication Tutorial `_. + +When authentication is required, the server sends a header (as well as +the 401 error code) requesting authentication. This specifies the +authentication scheme and a 'realm'. The header looks like : +``Www-authenticate: SCHEME realm="REALM"``. + +e.g. :: + + Www-authenticate: Basic realm="cPanel Users" + + +The client should then retry the request with the appropriate name and +password for the realm included as a header in the request. This is +'basic authentication'. In order to simplify this process we can +create an instance of ``HTTPBasicAuthHandler`` and an opener to use +this handler. + +The ``HTTPBasicAuthHandler`` uses an object called a password manager +to handle the mapping of URLs and realms to passwords and +usernames. If you know what the realm is (from the authentication +header sent by the server), then you can use a +``HTTPPasswordMgr``. Frequently one doesn't care what the realm is. In +that case, it is convenient to use +``HTTPPasswordMgrWithDefaultRealm``. This allows you to specify a +default username and password for a URL. This will be supplied in the +absence of you providing an alternative combination for a specific +realm. We indicate this by providing ``None`` as the realm argument to +the ``add_password`` method. + +The top-level URL is the first URL that requires authentication. URLs +"deeper" than the URL you pass to .add_password() will also match. :: + + # create a password manager + password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm() + + # Add the username and password. + # If we knew the realm, we could use it instead of ``None``. + top_level_url = "http://example.com/foo/" + password_mgr.add_password(None, top_level_url, username, password) + + handler = urllib2.HTTPBasicAuthHandler(password_mgr) + + # create "opener" (OpenerDirector instance) + opener = urllib2.build_opener(handler) + + # use the opener to fetch a URL + opener.open(a_url) + + # Install the opener. + # Now all calls to urllib2.urlopen use our opener. + urllib2.install_opener(opener) + +.. note:: + + In the above example we only supplied our ``HTTPBasicAuthHandler`` + to ``build_opener``. By default openers have the handlers for + normal situations - ``ProxyHandler``, ``UnknownHandler``, + ``HTTPHandler``, ``HTTPDefaultErrorHandler``, + ``HTTPRedirectHandler``, ``FTPHandler``, ``FileHandler``, + ``HTTPErrorProcessor``. + +top_level_url is in fact *either* a full URL (including the 'http:' +scheme component and the hostname and optionally the port number) +e.g. "http://example.com/" *or* an "authority" (i.e. the hostname, +optionally including the port number) e.g. "example.com" or +"example.com:8080" (the latter example includes a port number). The +authority, if present, must NOT contain the "userinfo" component - for +example "joe@password:example.com" is not correct. + + +Proxies +======= + +**urllib2** will auto-detect your proxy settings and use those. This +is through the ``ProxyHandler`` which is part of the normal handler +chain. Normally that's a good thing, but there are occasions when it +may not be helpful [#]_. One way to do this is to set up our own +``ProxyHandler``, with no proxies defined.
This is done using similar +steps to setting up a `Basic Authentication`_ handler : :: + + >>> proxy_support = urllib2.ProxyHandler({}) + >>> opener = urllib2.build_opener(proxy_support) + >>> urllib2.install_opener(opener) + +.. note:: + + Currently ``urllib2`` *does not* support fetching of ``https`` + locations through a proxy. This can be a problem. + +Sockets and Layers +================== + +The Python support for fetching resources from the web is +layered. urllib2 uses the httplib library, which in turn uses the +socket library. + +As of Python 2.3 you can specify how long a socket should wait for a +response before timing out. This can be useful in applications which +have to fetch web pages. By default the socket module has *no timeout* +and can hang. Currently, the socket timeout is not exposed at the +httplib or urllib2 levels. However, you can set the default timeout +globally for all sockets using : :: + + import socket + import urllib2 + + # timeout in seconds + timeout = 10 + socket.setdefaulttimeout(timeout) + + # this call to urllib2.urlopen now uses the default timeout + # we have set in the socket module + req = urllib2.Request('http://www.voidspace.org.uk') + response = urllib2.urlopen(req) + + +------- + + +Footnotes +========= + +This document was reviewed and revised by John Lee. + +.. [#] For an introduction to the CGI protocol see + `Writing Web Applications in Python `_. +.. [#] Like Google for example. The *proper* way to use google from a program + is to use `PyGoogle `_ of course. See + `Voidspace Google `_ + for some examples of using the Google API. +.. [#] Browser sniffing is a very bad practise for website design - building + sites using web standards is much more sensible. Unfortunately a lot of + sites still send different versions to different browsers. +.. [#] The user agent for MSIE 6 is + *'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; .NET CLR 1.1.4322)'* +.. [#] For details of more HTTP request headers, see + `Quick Reference to HTTP Headers`_. +.. [#] In my case I have to use a proxy to access the internet at work. If you + attempt to fetch *localhost* URLs through this proxy it blocks them. IE + is set to use the proxy, which urllib2 picks up on. In order to test + scripts with a localhost server, I have to prevent urllib2 from using + the proxy. diff --git a/Doc/inst/inst.tex b/Doc/inst/inst.tex index 4961a1a..676f8ae 100644 --- a/Doc/inst/inst.tex +++ b/Doc/inst/inst.tex @@ -726,8 +726,8 @@ There are two environment variables that can modify \code{sys.path}. \envvar{PYTHONHOME} sets an alternate value for the prefix of the Python installation. For example, if \envvar{PYTHONHOME} is set to \samp{/www/python}, the search path will be set to \code{['', -'/www/python/lib/python2.2/', '/www/python/lib/python2.3/plat-linux2', -...]}. +'/www/python/lib/python\shortversion/', +'/www/python/lib/python\shortversion/plat-linux2', ...]}. The \envvar{PYTHONPATH} variable can be set to a list of paths that will be added to the beginning of \code{sys.path}. For example, if @@ -981,15 +981,15 @@ different from the format used by the Python version you can download from the Python or ActiveState Web site. (Python is built with Microsoft Visual \Cpp, which uses COFF as the object file format.) For this reason you have to convert Python's library -\file{python24.lib} into the Borland format. You can do this as +\file{python25.lib} into the Borland format. 
You can do this as follows: \begin{verbatim} -coff2omf python24.lib python24_bcpp.lib +coff2omf python25.lib python25_bcpp.lib \end{verbatim} The \file{coff2omf} program comes with the Borland compiler. The file -\file{python24.lib} is in the \file{Libs} directory of your Python +\file{python25.lib} is in the \file{Libs} directory of your Python installation. If your extension uses other libraries (zlib,...) you have to convert them too. @@ -1053,17 +1053,23 @@ First you have to create a list of symbols which the Python DLL exports. PExports 0.42h there.) \begin{verbatim} -pexports python24.dll >python24.def +pexports python25.dll >python25.def \end{verbatim} +The location of an installed \file{python25.dll} will depend on the +installation options and the version and language of Windows. In a +``just for me'' installation, it will appear in the root of the +installation directory. In a shared installation, it will be located +in the system directory. + Then you can create from these information an import library for gcc. \begin{verbatim} -dlltool --dllname python24.dll --def python24.def --output-lib libpython24.a +/cygwin/bin/dlltool --dllname python25.dll --def python25.def --output-lib libpython25.a \end{verbatim} The resulting library has to be placed in the same directory as -\file{python24.lib}. (Should be the \file{libs} directory under your +\file{python25.lib}. (Should be the \file{libs} directory under your Python installation directory.) If your extension uses other libraries (zlib,...) you might diff --git a/Doc/lib/lib.tex b/Doc/lib/lib.tex index eac35de..cf657c3 100644 --- a/Doc/lib/lib.tex +++ b/Doc/lib/lib.tex @@ -224,6 +224,7 @@ and how to embed it in other applications. \input{libdbhash} \input{libbsddb} \input{libdumbdbm} +\input{libsqlite3} % ============= @@ -243,6 +244,8 @@ and how to embed it in other applications. \input{libcursespanel} \input{libplatform} \input{liberrno} +\input{libctypes} +\input{libctypesref} \input{libsomeos} % Optional Operating System Services \input{libselect} @@ -359,7 +362,7 @@ and how to embed it in other applications. \input{libprofile} % The Python Profiler \input{libhotshot} % unmaintained C profiler \input{libtimeit} - +\input{libtrace} % ============= % PYTHON ENGINE @@ -444,6 +447,7 @@ and how to embed it in other applications. \input{libsunaudio} \input{windows} % MS Windows ONLY +\input{libmsilib} \input{libmsvcrt} \input{libwinreg} \input{libwinsound} diff --git a/Doc/lib/libcodecs.tex b/Doc/lib/libcodecs.tex index 6e0bc8d..05c0375 100644 --- a/Doc/lib/libcodecs.tex +++ b/Doc/lib/libcodecs.tex @@ -161,7 +161,7 @@ directly. \end{funcdesc} \begin{funcdesc}{lookup_error}{name} -Return the error handler previously register under the name \var{name}. +Return the error handler previously registered under the name \var{name}. Raises a \exception{LookupError} in case the handler cannot be found. \end{funcdesc} @@ -366,7 +366,7 @@ steps. It defines the following methods which every incremental encoder must define in order to be compatible with the Python codec registry. \begin{classdesc}{IncrementalEncoder}{\optional{errors}} - Constructor for a \class{IncrementalEncoder} instance. + Constructor for an \class{IncrementalEncoder} instance. All incremental encoders must provide this constructor interface. They are free to add additional keyword arguments, but only the ones defined @@ -413,7 +413,7 @@ steps. It defines the following methods which every incremental decoder must define in order to be compatible with the Python codec registry. 
\begin{classdesc}{IncrementalDecoder}{\optional{errors}} - Constructor for a \class{IncrementalDecoder} instance. + Constructor for an \class{IncrementalDecoder} instance. All incremental decoders must provide this constructor interface. They are free to add additional keyword arguments, but only the ones defined diff --git a/Doc/lib/libcodeop.tex b/Doc/lib/libcodeop.tex index 7d6153e..6972b6f 100644 --- a/Doc/lib/libcodeop.tex +++ b/Doc/lib/libcodeop.tex @@ -19,7 +19,7 @@ There are two parts to this job: \begin{enumerate} \item Being able to tell if a line of input completes a Python statement: in short, telling whether to print - `\code{>\code{>}>~}' or `\code{...~}' next. + `\code{>>>~}' or `\code{...~}' next. \item Remembering which future statements the user has entered, so subsequent input can be compiled with these in effect. \end{enumerate} diff --git a/Doc/lib/libcollections.tex b/Doc/lib/libcollections.tex index d9bfa39..3e56a3e 100644 --- a/Doc/lib/libcollections.tex +++ b/Doc/lib/libcollections.tex @@ -59,12 +59,12 @@ Deque objects support the following methods: \begin{methoddesc}{pop}{} Remove and return an element from the right side of the deque. - If no elements are present, raises a \exception{IndexError}. + If no elements are present, raises an \exception{IndexError}. \end{methoddesc} \begin{methoddesc}{popleft}{} Remove and return an element from the left side of the deque. - If no elements are present, raises a \exception{IndexError}. + If no elements are present, raises an \exception{IndexError}. \end{methoddesc} \begin{methoddesc}{remove}{value} diff --git a/Doc/lib/libcontextlib.tex b/Doc/lib/libcontextlib.tex index 46f9cdd..72bf537 100644 --- a/Doc/lib/libcontextlib.tex +++ b/Doc/lib/libcontextlib.tex @@ -12,11 +12,13 @@ This module provides utilities for common tasks involving the Functions provided: \begin{funcdesc}{contextmanager}{func} -This function is a decorator that can be used to define context managers -for use with the \keyword{with} statement, without needing to create a -class or separate \method{__enter__()} and \method{__exit__()} methods. +This function is a decorator that can be used to define a factory +function for \keyword{with} statement context managers, without +needing to create a class or separate \method{__enter__()} and +\method{__exit__()} methods. -A simple example: +A simple example (this is not recommended as a real way of +generating HTML!): \begin{verbatim} from __future__ import with_statement @@ -36,9 +38,10 @@ foo \end{verbatim} -When called, the decorated function must return a generator-iterator. -This iterator must yield exactly one value, which will be bound to the -targets in the \keyword{with} statement's \keyword{as} clause, if any. +The function being decorated must return a generator-iterator when +called. This iterator must yield exactly one value, which will be +bound to the targets in the \keyword{with} statement's \keyword{as} +clause, if any. At the point where the generator yields, the block nested in the \keyword{with} statement is executed. The generator is then resumed @@ -46,37 +49,16 @@ after the block is exited. If an unhandled exception occurs in the block, it is reraised inside the generator at the point where the yield occurred. Thus, you can use a \keyword{try}...\keyword{except}...\keyword{finally} statement to trap -the error (if any), or ensure that some cleanup takes place. - -Note that you can use \code{@contextmanager} to define a context -manager's \method{__context__} method. 
This is usually more convenient -than creating another class just to serve as a context. For example: - -\begin{verbatim} -from __future__ import with_statement -from contextlib import contextmanager - -class Tag: - def __init__(self, name): - self.name = name - - @contextmanager - def __context__(self): - print "<%s>" % self.name - yield self - print "" % self.name - -h1 = Tag("h1") - ->>> with h1 as me: -... print "hello from", me -
-<h1>
-hello from <__main__.Tag instance at 0x402ce8ec>
-</h1>
-\end{verbatim} +the error (if any), or ensure that some cleanup takes place. If an +exception is trapped merely in order to log it or to perform some +action (rather than to suppress it entirely), the generator must +reraise that exception. Otherwise the generator context manager will +indicate to the \keyword{with} statement that the exception has been +handled, and execution will resume with the statement immediately +following the \keyword{with} statement. \end{funcdesc} -\begin{funcdesc}{nested}{ctx1\optional{, ctx2\optional{, ...}}} +\begin{funcdesc}{nested}{mgr1\optional{, mgr2\optional{, ...}}} Combine multiple context managers into a single nested context manager. Code like this: @@ -97,18 +79,22 @@ with A as X: do_something() \end{verbatim} -Note that if one of the nested contexts' \method{__exit__()} method -raises an exception, any previous exception state will be lost; the new -exception will be passed to the outer contexts' \method{__exit__()} -method(s), if any. In general, \method{__exit__()} methods should avoid -raising exceptions, and in particular they should not re-raise a +Note that if the \method{__exit__()} method of one of the nested +context managers indicates an exception should be suppressed, no +exception information will be passed to any remaining outer context +managers. Similarly, if the \method{__exit__()} method of one of the +nested managers raises an exception, any previous exception state will +be lost; the new exception will be passed to the +\method{__exit__()} methods of any remaining outer context managers. +In general, \method{__exit__()} methods should avoid raising +exceptions, and in particular they should not re-raise a passed-in exception. \end{funcdesc} \label{context-closing} \begin{funcdesc}{closing}{thing} -Return a context manager that closes \var{thing} upon completion of the -block. This is basically equivalent to: +Return a context manager that closes \var{thing} upon completion of +the block. This is basically equivalent to: \begin{verbatim} from contextlib import contextmanager @@ -127,14 +113,14 @@ from __future__ import with_statement from contextlib import closing import codecs -with closing(codecs.open("foo", encoding="utf8")) as f: - for line in f: - print line.encode("latin1") +with closing(urllib.urlopen('http://www.python.org')) as page: + for line in page: + print line \end{verbatim} -without needing to explicitly close \code{f}. Even if an error occurs, -\code{f.close()} will be called when the \keyword{with} block is exited. - +without needing to explicitly close \code{page}. Even if an error +occurs, \code{page.close()} will be called when the \keyword{with} +block is exited. \end{funcdesc} \begin{seealso} diff --git a/Doc/lib/libctypes.tex b/Doc/lib/libctypes.tex new file mode 100755 index 0000000..dc37749 --- /dev/null +++ b/Doc/lib/libctypes.tex @@ -0,0 +1,1226 @@ +\newlength{\locallinewidth} +\setlength{\locallinewidth}{\linewidth} +\section{\module{ctypes} --- A foreign function library for Python.} +\declaremodule{standard}{ctypes} +\moduleauthor{Thomas Heller}{theller@python.net} +\modulesynopsis{A foreign function library for Python.} +\versionadded{2.5} + +\code{ctypes} is a foreign function library for Python. + + +\subsection{ctypes tutorial\label{ctypes-ctypes-tutorial}} + +This tutorial describes version 0.9.9 of \code{ctypes}. + +Note: The code samples in this tutorial uses \code{doctest} to make sure +that they actually work. 
Since some code samples behave differently +under Linux, Windows, or Mac OS X, they contain doctest directives in +comments. + +Note: Quite some code samples references the ctypes \class{c{\_}int} type. +This type is an alias to the \class{c{\_}long} type on 32-bit systems. So, +you should not be confused if \class{c{\_}long} is printed if you would +expect \class{c{\_}int} - they are actually the same type. + + +\subsubsection{Loading dynamic link libraries\label{ctypes-loading-dynamic-link-libraries}} + +\code{ctypes} exports the \var{cdll}, and on Windows also \var{windll} and +\var{oledll} objects to load dynamic link libraries. + +You load libraries by accessing them as attributes of these objects. +\var{cdll} loads libraries which export functions using the standard +\code{cdecl} calling convention, while \var{windll} libraries call +functions using the \code{stdcall} calling convention. \var{oledll} also +uses the \code{stdcall} calling convention, and assumes the functions +return a Windows \class{HRESULT} error code. The error code is used to +automatically raise \class{WindowsError} Python exceptions when the +function call fails. + +Here are some examples for Windows, note that \code{msvcrt} is the MS +standard C library containing most standard C functions, and uses the +cdecl calling convention: +\begin{verbatim} +>>> from ctypes import * +>>> print windll.kernel32 # doctest: +WINDOWS + +>>> print cdll.msvcrt # doctest: +WINDOWS + +>>> libc = cdll.msvcrt # doctest: +WINDOWS +>>> +\end{verbatim} + +Windows appends the usual '.dll' file suffix automatically. + +On Linux, it is required to specify the filename \emph{including} the +extension to load a library, so attribute access does not work. +Either the \method{LoadLibrary} method of the dll loaders should be used, +or you should load the library by creating an instance of CDLL by +calling the constructor: +\begin{verbatim} +>>> cdll.LoadLibrary("libc.so.6") # doctest: +LINUX + +>>> libc = CDLL("libc.so.6") # doctest: +LINUX +>>> libc # doctest: +LINUX + +>>> +\end{verbatim} + +XXX Add section for Mac OS X. + + +\subsubsection{Accessing functions from loaded dlls\label{ctypes-accessing-functions-from-loaded-dlls}} + +Functions are accessed as attributes of dll objects: +\begin{verbatim} +>>> from ctypes import * +>>> libc.printf +<_FuncPtr object at 0x...> +>>> print windll.kernel32.GetModuleHandleA # doctest: +WINDOWS +<_FuncPtr object at 0x...> +>>> print windll.kernel32.MyOwnFunction # doctest: +WINDOWS +Traceback (most recent call last): + File "", line 1, in ? + File "ctypes.py", line 239, in __getattr__ + func = _StdcallFuncPtr(name, self) +AttributeError: function 'MyOwnFunction' not found +>>> +\end{verbatim} + +Note that win32 system dlls like \code{kernel32} and \code{user32} often +export ANSI as well as UNICODE versions of a function. The UNICODE +version is exported with an \code{W} appended to the name, while the ANSI +version is exported with an \code{A} appended to the name. 
The win32 +\code{GetModuleHandle} function, which returns a \emph{module handle} for a +given module name, has the following C prototype, and a macro is used +to expose one of them as \code{GetModuleHandle} depending on whether +UNICODE is defined or not: +\begin{verbatim} +/* ANSI version */ +HMODULE GetModuleHandleA(LPCSTR lpModuleName); +/* UNICODE version */ +HMODULE GetModuleHandleW(LPCWSTR lpModuleName); +\end{verbatim} + +\var{windll} does not try to select one of them by magic, you must +access the version you need by specifying \code{GetModuleHandleA} or +\code{GetModuleHandleW} explicitely, and then call it with normal strings +or unicode strings respectively. + +Sometimes, dlls export functions with names which aren't valid Python +identifiers, like \code{"??2@YAPAXI@Z"}. In this case you have to use +\code{getattr} to retrieve the function: +\begin{verbatim} +>>> getattr(cdll.msvcrt, "??2@YAPAXI@Z") # doctest: +WINDOWS +<_FuncPtr object at 0x...> +>>> +\end{verbatim} + +On Windows, some dlls export functions not by name but by ordinal. +These functions can be accessed by indexing the dll object with the +odinal number: +\begin{verbatim} +>>> cdll.kernel32[1] # doctest: +WINDOWS +<_FuncPtr object at 0x...> +>>> cdll.kernel32[0] # doctest: +WINDOWS +Traceback (most recent call last): + File "", line 1, in ? + File "ctypes.py", line 310, in __getitem__ + func = _StdcallFuncPtr(name, self) +AttributeError: function ordinal 0 not found +>>> +\end{verbatim} + + +\subsubsection{Calling functions\label{ctypes-calling-functions}} + +You can call these functions like any other Python callable. This +example uses the \code{time()} function, which returns system time in +seconds since the \UNIX{} epoch, and the \code{GetModuleHandleA()} function, +which returns a win32 module handle. + +This example calls both functions with a NULL pointer (\code{None} should +be used as the NULL pointer): +\begin{verbatim} +>>> print libc.time(None) +114... +>>> print hex(windll.kernel32.GetModuleHandleA(None)) # doctest: +WINDOWS +0x1d000000 +>>> +\end{verbatim} + +\code{ctypes} tries to protect you from calling functions with the wrong +number of arguments. Unfortunately this only works on Windows. It +does this by examining the stack after the function returns: +\begin{verbatim} +>>> windll.kernel32.GetModuleHandleA() # doctest: +WINDOWS +Traceback (most recent call last): + File "", line 1, in ? +ValueError: Procedure probably called with not enough arguments (4 bytes missing) +>>> windll.kernel32.GetModuleHandleA(0, 0) # doctest: +WINDOWS +Traceback (most recent call last): + File "", line 1, in ? +ValueError: Procedure probably called with too many arguments (4 bytes in excess) +>>> +\end{verbatim} + +On Windows, \code{ctypes} uses win32 structured exception handling to +prevent crashes from general protection faults when functions are +called with invalid argument values: +\begin{verbatim} +>>> windll.kernel32.GetModuleHandleA(32) # doctest: +WINDOWS +Traceback (most recent call last): + File "", line 1, in ? +WindowsError: exception: access violation reading 0x00000020 +>>> +\end{verbatim} + +There are, however, enough ways to crash Python with \code{ctypes}, so +you should be careful anyway. + +Python integers, strings and unicode strings are the only objects that +can directly be used as parameters in these function calls. + +Before we move on calling functions with other parameter types, we +have to learn more about \code{ctypes} data types. 
+ + +\subsubsection{Simple data types\label{ctypes-simple-data-types}} + +\code{ctypes} defines a number of primitive C compatible data types : +\begin{quote} + +\begin{longtable}[c]{|p{0.19\locallinewidth}|p{0.28\locallinewidth}|p{0.14\locallinewidth}|} +\hline +\textbf{ +ctypes type +} & \textbf{ +C type +} & \textbf{ +Python type +} \\ +\hline +\endhead + +\class{c{\_}char} + & +\code{char} + & +character + \\ +\hline + +\class{c{\_}byte} + & +\code{char} + & +integer + \\ +\hline + +\class{c{\_}ubyte} + & +\code{unsigned char} + & +integer + \\ +\hline + +\class{c{\_}short} + & +\code{short} + & +integer + \\ +\hline + +\class{c{\_}ushort} + & +\code{unsigned short} + & +integer + \\ +\hline + +\class{c{\_}int} + & +\code{int} + & +integer + \\ +\hline + +\class{c{\_}uint} + & +\code{unsigned int} + & +integer + \\ +\hline + +\class{c{\_}long} + & +\code{long} + & +integer + \\ +\hline + +\class{c{\_}ulong} + & +\code{unsigned long} + & +long + \\ +\hline + +\class{c{\_}longlong} + & +\code{{\_}{\_}int64} or +\code{long long} + & +long + \\ +\hline + +\class{c{\_}ulonglong} + & +\code{unsigned {\_}{\_}int64} or +\code{unsigned long long} + & +long + \\ +\hline + +\class{c{\_}float} + & +\code{float} + & +float + \\ +\hline + +\class{c{\_}double} + & +\code{double} + & +float + \\ +\hline + +\class{c{\_}char{\_}p} + & +\code{char *} +(NUL terminated) + & +string or +\code{None} + \\ +\hline + +\class{c{\_}wchar{\_}p} + & +\code{wchar{\_}t *} +(NUL terminated) + & +unicode or +\code{None} + \\ +\hline + +\class{c{\_}void{\_}p} + & +\code{void *} + & +integer or +\code{None} + \\ +\hline +\end{longtable} +\end{quote} + +All these types can be created by calling them with an optional +initializer of the correct type and value: +\begin{verbatim} +>>> c_int() +c_long(0) +>>> c_char_p("Hello, World") +c_char_p('Hello, World') +>>> c_ushort(-3) +c_ushort(65533) +>>> +\end{verbatim} + +Since these types are mutable, their value can also be changed +afterwards: +\begin{verbatim} +>>> i = c_int(42) +>>> print i +c_long(42) +>>> print i.value +42 +>>> i.value = -99 +>>> print i.value +-99 +>>> +\end{verbatim} + +Assigning a new value to instances of the pointer types \class{c{\_}char{\_}p}, +\class{c{\_}wchar{\_}p}, and \class{c{\_}void{\_}p} changes the \emph{memory location} they +point to, \emph{not the contents} of the memory block (of course not, +because Python strings are immutable): +\begin{verbatim} +>>> s = "Hello, World" +>>> c_s = c_char_p(s) +>>> print c_s +c_char_p('Hello, World') +>>> c_s.value = "Hi, there" +>>> print c_s +c_char_p('Hi, there') +>>> print s # first string is unchanged +Hello, World +\end{verbatim} + +You should be careful, however, not to pass them to functions +expecting pointers to mutable memory. If you need mutable memory +blocks, ctypes has a \code{create{\_}string{\_}buffer} function which creates +these in various ways. 
The current memory block contents can be +accessed (or changed) with the \code{raw} property, if you want to access +it as NUL terminated string, use the \code{string} property: +\begin{verbatim} +>>> from ctypes import * +>>> p = create_string_buffer(3) # create a 3 byte buffer, initialized to NUL bytes +>>> print sizeof(p), repr(p.raw) +3 '\x00\x00\x00' +>>> p = create_string_buffer("Hello") # create a buffer containing a NUL terminated string +>>> print sizeof(p), repr(p.raw) +6 'Hello\x00' +>>> print repr(p.value) +'Hello' +>>> p = create_string_buffer("Hello", 10) # create a 10 byte buffer +>>> print sizeof(p), repr(p.raw) +10 'Hello\x00\x00\x00\x00\x00' +>>> p.value = "Hi" +>>> print sizeof(p), repr(p.raw) +10 'Hi\x00lo\x00\x00\x00\x00\x00' +>>> +\end{verbatim} + +The \code{create{\_}string{\_}buffer} function replaces the \code{c{\_}buffer} +function (which is still available as an alias), as well as the +\code{c{\_}string} function from earlier ctypes releases. To create a +mutable memory block containing unicode characters of the C type +\code{wchar{\_}t} use the \code{create{\_}unicode{\_}buffer} function. + + +\subsubsection{Calling functions, continued\label{ctypes-calling-functions-continued}} + +Note that printf prints to the real standard output channel, \emph{not} to +\code{sys.stdout}, so these examples will only work at the console +prompt, not from within \emph{IDLE} or \emph{PythonWin}: +\begin{verbatim} +>>> printf = libc.printf +>>> printf("Hello, %s\n", "World!") +Hello, World! +14 +>>> printf("Hello, %S", u"World!") +Hello, World! +13 +>>> printf("%d bottles of beer\n", 42) +42 bottles of beer +19 +>>> printf("%f bottles of beer\n", 42.5) +Traceback (most recent call last): + File "", line 1, in ? +ArgumentError: argument 2: exceptions.TypeError: Don't know how to convert parameter 2 +>>> +\end{verbatim} + +As has been mentioned before, all Python types except integers, +strings, and unicode strings have to be wrapped in their corresponding +\code{ctypes} type, so that they can be converted to the required C data +type: +\begin{verbatim} +>>> printf("An int %d, a double %f\n", 1234, c_double(3.14)) +Integer 1234, double 3.1400001049 +31 +>>> +\end{verbatim} + + +\subsubsection{Calling functions with your own custom data types\label{ctypes-calling-functions-with-own-custom-data-types}} + +You can also customize \code{ctypes} argument conversion to allow +instances of your own classes be used as function arguments. +\code{ctypes} looks for an \member{{\_}as{\_}parameter{\_}} attribute and uses this as +the function argument. Of course, it must be one of integer, string, +or unicode: +\begin{verbatim} +>>> class Bottles(object): +... def __init__(self, number): +... self._as_parameter_ = number +... +>>> bottles = Bottles(42) +>>> printf("%d bottles of beer\n", bottles) +42 bottles of beer +19 +>>> +\end{verbatim} + +If you don't want to store the instance's data in the +\member{{\_}as{\_}parameter{\_}} instance variable, you could define a \code{property} +which makes the data avaiblable. + + +\subsubsection{Specifying the required argument types (function prototypes)\label{ctypes-specifying-required-argument-types}} + +It is possible to specify the required argument types of functions +exported from DLLs by setting the \member{argtypes} attribute. 
+ +\member{argtypes} must be a sequence of C data types (the \code{printf} +function is probably not a good example here, because it takes a +variable number and different types of parameters depending on the +format string, on the other hand this is quite handy to experiment +with this feature): +\begin{verbatim} +>>> printf.argtypes = [c_char_p, c_char_p, c_int, c_double] +>>> printf("String '%s', Int %d, Double %f\n", "Hi", 10, 2.2) +String 'Hi', Int 10, Double 2.200000 +37 +>>> +\end{verbatim} + +Specifying a format protects against incompatible argument types (just +as a prototype for a C function), and tries to convert the arguments +to valid types: +\begin{verbatim} +>>> printf("%d %d %d", 1, 2, 3) +Traceback (most recent call last): + File "", line 1, in ? +ArgumentError: argument 2: exceptions.TypeError: wrong type +>>> printf("%s %d %f", "X", 2, 3) +X 2 3.00000012 +12 +>>> +\end{verbatim} + +If you have defined your own classes which you pass to function calls, +you have to implement a \method{from{\_}param} class method for them to be +able to use them in the \member{argtypes} sequence. The \method{from{\_}param} +class method receives the Python object passed to the function call, +it should do a typecheck or whatever is needed to make sure this +object is acceptable, and then return the object itself, it's +\member{{\_}as{\_}parameter{\_}} attribute, or whatever you want to pass as the C +function argument in this case. Again, the result should be an +integer, string, unicode, a \code{ctypes} instance, or something having +the \member{{\_}as{\_}parameter{\_}} attribute. + + +\subsubsection{Return types\label{ctypes-return-types}} + +By default functions are assumed to return integers. Other return +types can be specified by setting the \member{restype} attribute of the +function object. + +Here is a more advanced example, it uses the strchr function, which +expects a string pointer and a char, and returns a pointer to a +string: +\begin{verbatim} +>>> strchr = libc.strchr +>>> strchr("abcdef", ord("d")) # doctest: +SKIP +8059983 +>>> strchr.restype = c_char_p # c_char_p is a pointer to a string +>>> strchr("abcdef", ord("d")) +'def' +>>> print strchr("abcdef", ord("x")) +None +>>> +\end{verbatim} + +If you want to avoid the \code{ord("x")} calls above, you can set the +\member{argtypes} attribute, and the second argument will be converted from +a single character Python string into a C char: +\begin{verbatim} +>>> strchr.restype = c_char_p +>>> strchr.argtypes = [c_char_p, c_char] +>>> strchr("abcdef", "d") +'def' +>>> strchr("abcdef", "def") +Traceback (most recent call last): + File "", line 1, in ? +ArgumentError: argument 2: exceptions.TypeError: one character string expected +>>> print strchr("abcdef", "x") +None +>>> strchr("abcdef", "d") +'def' +>>> +\end{verbatim} + +XXX Mention the \member{errcheck} protocol... + +You can also use a callable Python object (a function or a class for +example) as the \member{restype} attribute. It will be called with the +\code{integer} the C function returns, and the result of this call will +be used as the result of your function call. This is useful to check +for error return values and automatically raise an exception: +\begin{verbatim} +>>> GetModuleHandle = windll.kernel32.GetModuleHandleA # doctest: +WINDOWS +>>> def ValidHandle(value): +... if value == 0: +... raise WinError() +... return value +... 
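+>>> # ValidHandle will be called with the integer that GetModuleHandleA
+>>> # returns; it passes the value through, or raises WinError() if it is 0.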
+>>> +>>> GetModuleHandle.restype = ValidHandle # doctest: +WINDOWS +>>> GetModuleHandle(None) # doctest: +WINDOWS +486539264 +>>> GetModuleHandle("something silly") # doctest: +WINDOWS +IGNORE_EXCEPTION_DETAIL +Traceback (most recent call last): + File "", line 1, in ? + File "", line 3, in ValidHandle +WindowsError: [Errno 126] The specified module could not be found. +>>> +\end{verbatim} + +\code{WinError} is a function which will call Windows \code{FormatMessage()} +api to get the string representation of an error code, and \emph{returns} +an exception. \code{WinError} takes an optional error code parameter, if +no one is used, it calls \function{GetLastError()} to retrieve it. + + +\subsubsection{Passing pointers (or: passing parameters by reference)\label{ctypes-passing-pointers}} + +Sometimes a C api function expects a \emph{pointer} to a data type as +parameter, probably to write into the corresponding location, or if +the data is too large to be passed by value. This is also known as +\emph{passing parameters by reference}. + +\code{ctypes} exports the \function{byref} function which is used to pass +parameters by reference. The same effect can be achieved with the +\code{pointer} function, although \code{pointer} does a lot more work since +it constructs a real pointer object, so it is faster to use \function{byref} +if you don't need the pointer object in Python itself: +\begin{verbatim} +>>> i = c_int() +>>> f = c_float() +>>> s = create_string_buffer('\000' * 32) +>>> print i.value, f.value, repr(s.value) +0 0.0 '' +>>> libc.sscanf("1 3.14 Hello", "%d %f %s", +... byref(i), byref(f), s) +3 +>>> print i.value, f.value, repr(s.value) +1 3.1400001049 'Hello' +>>> +\end{verbatim} + + +\subsubsection{Structures and unions\label{ctypes-structures-unions}} + +Structures and unions must derive from the \class{Structure} and \class{Union} +base classes which are defined in the \code{ctypes} module. Each subclass +must define a \member{{\_}fields{\_}} attribute. \member{{\_}fields{\_}} must be a list of +\emph{2-tuples}, containing a \emph{field name} and a \emph{field type}. + +The field type must be a \code{ctypes} type like \class{c{\_}int}, or any other +derived \code{ctypes} type: structure, union, array, pointer. + +Here is a simple example of a POINT structure, which contains two +integers named \code{x} and \code{y}, and also shows how to initialize a +structure in the constructor: +\begin{verbatim} +>>> from ctypes import * +>>> class POINT(Structure): +... _fields_ = [("x", c_int), +... ("y", c_int)] +... +>>> point = POINT(10, 20) +>>> print point.x, point.y +10 20 +>>> point = POINT(y=5) +>>> print point.x, point.y +0 5 +>>> POINT(1, 2, 3) +Traceback (most recent call last): + File "", line 1, in ? +ValueError: too many initializers +>>> +\end{verbatim} + +You can, however, build much more complicated structures. Structures +can itself contain other structures by using a structure as a field +type. + +Here is a RECT structure which contains two POINTs named \code{upperleft} +and \code{lowerright} +\begin{verbatim} +>>> class RECT(Structure): +... _fields_ = [("upperleft", POINT), +... ("lowerright", POINT)] +... 
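+>>> # upperleft is copied from the POINT created above (x=0, y=5);
+>>> # lowerright is left zero-initialized.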
+>>> rc = RECT(point) +>>> print rc.upperleft.x, rc.upperleft.y +0 5 +>>> print rc.lowerright.x, rc.lowerright.y +0 0 +>>> +\end{verbatim} + +Nested structures can also be initialized in the constructor in +several ways: +\begin{verbatim} +>>> r = RECT(POINT(1, 2), POINT(3, 4)) +>>> r = RECT((1, 2), (3, 4)) +\end{verbatim} + +Fields descriptors can be retrieved from the \emph{class}, they are useful +for debugging because they can provide useful information: +\begin{verbatim} +>>> print POINT.x + +>>> print POINT.y + +>>> +\end{verbatim} + + +\subsubsection{Structure/union alignment and byte order\label{ctypes-structureunion-alignment-byte-order}} + +By default, Structure and Union fields are aligned in the same way the +C compiler does it. It is possible to override this behaviour be +specifying a \member{{\_}pack{\_}} class attribute in the subclass +definition. This must be set to a positive integer and specifies the +maximum alignment for the fields. This is what \code{{\#}pragma pack(n)} +also does in MSVC. + +\code{ctypes} uses the native byte order for Structures and Unions. To +build structures with non-native byte order, you can use one of the +BigEndianStructure, LittleEndianStructure, BigEndianUnion, and +LittleEndianUnion base classes. These classes cannot contain pointer +fields. + + +\subsubsection{Bit fields in structures and unions\label{ctypes-bit-fields-in-structures-unions}} + +It is possible to create structures and unions containing bit fields. +Bit fields are only possible for integer fields, the bit width is +specified as the third item in the \member{{\_}fields{\_}} tuples: +\begin{verbatim} +>>> class Int(Structure): +... _fields_ = [("first_16", c_int, 16), +... ("second_16", c_int, 16)] +... +>>> print Int.first_16 + +>>> print Int.second_16 + +>>> +\end{verbatim} + + +\subsubsection{Arrays\label{ctypes-arrays}} + +Arrays are sequences, containing a fixed number of instances of the +same type. + +The recommended way to create array types is by multiplying a data +type with a positive integer: +\begin{verbatim} +TenPointsArrayType = POINT * 10 +\end{verbatim} + +Here is an example of an somewhat artifical data type, a structure +containing 4 POINTs among other stuff: +\begin{verbatim} +>>> from ctypes import * +>>> class POINT(Structure): +... _fields_ = ("x", c_int), ("y", c_int) +... +>>> class MyStruct(Structure): +... _fields_ = [("a", c_int), +... ("b", c_float), +... ("point_array", POINT * 4)] +>>> +>>> print len(MyStruct().point_array) +4 +\end{verbatim} + +Instances are created in the usual way, by calling the class: +\begin{verbatim} +arr = TenPointsArrayType() +for pt in arr: + print pt.x, pt.y +\end{verbatim} + +The above code print a series of \code{0 0} lines, because the array +contents is initialized to zeros. + +Initializers of the correct type can also be specified: +\begin{verbatim} +>>> from ctypes import * +>>> TenIntegers = c_int * 10 +>>> ii = TenIntegers(1, 2, 3, 4, 5, 6, 7, 8, 9, 10) +>>> print ii + +>>> for i in ii: print i, +... 
+1 2 3 4 5 6 7 8 9 10 +>>> +\end{verbatim} + + +\subsubsection{Pointers\label{ctypes-pointers}} + +Pointer instances are created by calling the \code{pointer} function on a +\code{ctypes} type: +\begin{verbatim} +>>> from ctypes import * +>>> i = c_int(42) +>>> pi = pointer(i) +>>> +\end{verbatim} + +XXX XXX Not correct: use indexing, not the contents atribute + +Pointer instances have a \code{contents} attribute which returns the +ctypes' type pointed to, the \code{c{\_}int(42)} in the above case: +\begin{verbatim} +>>> pi.contents +c_long(42) +>>> +\end{verbatim} + +Assigning another \class{c{\_}int} instance to the pointer's contents +attribute would cause the pointer to point to the memory location +where this is stored: +\begin{verbatim} +>>> pi.contents = c_int(99) +>>> pi.contents +c_long(99) +>>> +\end{verbatim} + +Pointer instances can also be indexed with integers: +\begin{verbatim} +>>> pi[0] +99 +>>> +\end{verbatim} + +XXX What is this??? +Assigning to an integer index changes the pointed to value: +\begin{verbatim} +>>> i2 = pi[0] +>>> i2 +99 +>>> pi[0] = 22 +>>> i2 +99 +>>> +\end{verbatim} + +It is also possible to use indexes different from 0, but you must know +what you're doing when you use this: You access or change arbitrary +memory locations when you do this. Generally you only use this feature +if you receive a pointer from a C function, and you \emph{know} that the +pointer actually points to an array instead of a single item. + + +\subsubsection{Pointer classes/types\label{ctypes-pointer-classestypes}} + +Behind the scenes, the \code{pointer} function does more than simply +create pointer instances, it has to create pointer \emph{types} first. +This is done with the \code{POINTER} function, which accepts any +\code{ctypes} type, and returns a new type: +\begin{verbatim} +>>> PI = POINTER(c_int) +>>> PI + +>>> PI(42) # doctest: +IGNORE_EXCEPTION_DETAIL +Traceback (most recent call last): + File "", line 1, in ? +TypeError: expected c_long instead of int +>>> PI(c_int(42)) + +>>> +\end{verbatim} + + +\subsubsection{Incomplete Types\label{ctypes-incomplete-types}} + +\emph{Incomplete Types} are structures, unions or arrays whose members are +not yet specified. In C, they are specified by forward declarations, which +are defined later: +\begin{verbatim} +struct cell; /* forward declaration */ + +struct { + char *name; + struct cell *next; +} cell; +\end{verbatim} + +The straightforward translation into ctypes code would be this, but it +does not work: +\begin{verbatim} +>>> class cell(Structure): +... _fields_ = [("name", c_char_p), +... ("next", POINTER(cell))] +... +Traceback (most recent call last): + File "", line 1, in ? + File "", line 2, in cell +NameError: name 'cell' is not defined +>>> +\end{verbatim} + +because the new \code{class cell} is not available in the class statement +itself. In \code{ctypes}, we can define the \code{cell} class and set the +\member{{\_}fields{\_}} attribute later, after the class statement: +\begin{verbatim} +>>> from ctypes import * +>>> class cell(Structure): +... pass +... +>>> cell._fields_ = [("name", c_char_p), +... ("next", POINTER(cell))] +>>> +\end{verbatim} + +Lets try it. We create two instances of \code{cell}, and let them point +to each other, and finally follow the pointer chain a few times: +\begin{verbatim} +>>> c1 = cell() +>>> c1.name = "foo" +>>> c2 = cell() +>>> c2.name = "bar" +>>> c1.next = pointer(c2) +>>> c2.next = pointer(c1) +>>> p = c1 +>>> for i in range(8): +... print p.name, +... p = p.next[0] +... 
+foo bar foo bar foo bar foo bar +>>> +\end{verbatim} + + +\subsubsection{Callback functions\label{ctypes-callback-functions}} + +\code{ctypes} allows to create C callable function pointers from Python +callables. These are sometimes called \emph{callback functions}. + +First, you must create a class for the callback function, the class +knows the calling convention, the return type, and the number and +types of arguments this function will receive. + +The CFUNCTYPE factory function creates types for callback functions +using the normal cdecl calling convention, and, on Windows, the +WINFUNCTYPE factory function creates types for callback functions +using the stdcall calling convention. + +Both of these factory functions are called with the result type as +first argument, and the callback functions expected argument types as +the remaining arguments. + +I will present an example here which uses the standard C library's +\function{qsort} function, this is used to sort items with the help of a +callback function. \function{qsort} will be used to sort an array of +integers: +\begin{verbatim} +>>> IntArray5 = c_int * 5 +>>> ia = IntArray5(5, 1, 7, 33, 99) +>>> qsort = libc.qsort +>>> qsort.restype = None +>>> +\end{verbatim} + +\function{qsort} must be called with a pointer to the data to sort, the +number of items in the data array, the size of one item, and a pointer +to the comparison function, the callback. The callback will then be +called with two pointers to items, and it must return a negative +integer if the first item is smaller than the second, a zero if they +are equal, and a positive integer else. + +So our callback function receives pointers to integers, and must +return an integer. First we create the \code{type} for the callback +function: +\begin{verbatim} +>>> CMPFUNC = CFUNCTYPE(c_int, POINTER(c_int), POINTER(c_int)) +>>> +\end{verbatim} + +For the first implementation of the callback function, we simply print +the arguments we get, and return 0 (incremental development ;-): +\begin{verbatim} +>>> def py_cmp_func(a, b): +... print "py_cmp_func", a, b +... return 0 +... +>>> +\end{verbatim} + +Create the C callable callback: +\begin{verbatim} +>>> cmp_func = CMPFUNC(py_cmp_func) +>>> +\end{verbatim} + +And we're ready to go: +\begin{verbatim} +>>> qsort(ia, len(ia), sizeof(c_int), cmp_func) # doctest: +WINDOWS +py_cmp_func +py_cmp_func +py_cmp_func +py_cmp_func +py_cmp_func +py_cmp_func +py_cmp_func +py_cmp_func +py_cmp_func +py_cmp_func +>>> +\end{verbatim} + +We know how to access the contents of a pointer, so lets redefine our callback: +\begin{verbatim} +>>> def py_cmp_func(a, b): +... print "py_cmp_func", a[0], b[0] +... return 0 +... +>>> cmp_func = CMPFUNC(py_cmp_func) +>>> +\end{verbatim} + +Here is what we get on Windows: +\begin{verbatim} +>>> qsort(ia, len(ia), sizeof(c_int), cmp_func) # doctest: +WINDOWS +py_cmp_func 7 1 +py_cmp_func 33 1 +py_cmp_func 99 1 +py_cmp_func 5 1 +py_cmp_func 7 5 +py_cmp_func 33 5 +py_cmp_func 99 5 +py_cmp_func 7 99 +py_cmp_func 33 99 +py_cmp_func 7 33 +>>> +\end{verbatim} + +It is funny to see that on linux the sort function seems to work much +more efficient, it is doing less comparisons: +\begin{verbatim} +>>> qsort(ia, len(ia), sizeof(c_int), cmp_func) # doctest: +LINUX +py_cmp_func 5 1 +py_cmp_func 33 99 +py_cmp_func 7 33 +py_cmp_func 5 7 +py_cmp_func 1 7 +>>> +\end{verbatim} + +Ah, we're nearly done! The last step is to actually compare the two +items and return a useful result: +\begin{verbatim} +>>> def py_cmp_func(a, b): +... 
print "py_cmp_func", a[0], b[0] +... return a[0] - b[0] +... +>>> +\end{verbatim} + +Final run on Windows: +\begin{verbatim} +>>> qsort(ia, len(ia), sizeof(c_int), CMPFUNC(py_cmp_func)) # doctest: +WINDOWS +py_cmp_func 33 7 +py_cmp_func 99 33 +py_cmp_func 5 99 +py_cmp_func 1 99 +py_cmp_func 33 7 +py_cmp_func 1 33 +py_cmp_func 5 33 +py_cmp_func 5 7 +py_cmp_func 1 7 +py_cmp_func 5 1 +>>> +\end{verbatim} + +and on Linux: +\begin{verbatim} +>>> qsort(ia, len(ia), sizeof(c_int), CMPFUNC(py_cmp_func)) # doctest: +LINUX +py_cmp_func 5 1 +py_cmp_func 33 99 +py_cmp_func 7 33 +py_cmp_func 1 7 +py_cmp_func 5 7 +>>> +\end{verbatim} + +So, our array sorted now: +\begin{verbatim} +>>> for i in ia: print i, +... +1 5 7 33 99 +>>> +\end{verbatim} + +\textbf{Important note for callback functions:} + +Make sure you keep references to CFUNCTYPE objects as long as they are +used from C code. ctypes doesn't, and if you don't, they may be +garbage collected, crashing your program when a callback is made. + + +\subsubsection{Accessing values exported from dlls\label{ctypes-accessing-values-exported-from-dlls}} + +Sometimes, a dll not only exports functions, it also exports +values. An example in the Python library itself is the +\code{Py{\_}OptimizeFlag}, an integer set to 0, 1, or 2, depending on the +\programopt{-O} or \programopt{-OO} flag given on startup. + +\code{ctypes} can access values like this with the \method{in{\_}dll} class +methods of the type. \var{pythonapi} ìs a predefined symbol giving +access to the Python C api: +\begin{verbatim} +>>> opt_flag = c_int.in_dll(pythonapi, "Py_OptimizeFlag") +>>> print opt_flag +c_long(0) +>>> +\end{verbatim} + +If the interpreter would have been started with \programopt{-O}, the sample +would have printed \code{c{\_}long(1)}, or \code{c{\_}long(2)} if \programopt{-OO} would have +been specified. + +An extended example which also demonstrates the use of pointers +accesses the \code{PyImport{\_}FrozenModules} pointer exported by Python. + +Quoting the Python docs: \emph{This pointer is initialized to point to an +array of ``struct {\_}frozen`` records, terminated by one whose members +are all NULL or zero. When a frozen module is imported, it is searched +in this table. Third-party code could play tricks with this to provide +a dynamically created collection of frozen modules.} + +So manipulating this pointer could even prove useful. To restrict the +example size, we show only how this table can be read with +\code{ctypes}: +\begin{verbatim} +>>> from ctypes import * +>>> +>>> class struct_frozen(Structure): +... _fields_ = [("name", c_char_p), +... ("code", POINTER(c_ubyte)), +... ("size", c_int)] +... +>>> +\end{verbatim} + +We have defined the \code{struct {\_}frozen} data type, so we can get the +pointer to the table: +\begin{verbatim} +>>> FrozenTable = POINTER(struct_frozen) +>>> table = FrozenTable.in_dll(pythonapi, "PyImport_FrozenModules") +>>> +\end{verbatim} + +Since \code{table} is a \code{pointer} to the array of \code{struct{\_}frozen} +records, we can iterate over it, but we just have to make sure that +our loop terminates, because pointers have no size. Sooner or later it +would probably crash with an access violation or whatever, so it's +better to break out of the loop when we hit the NULL entry: +\begin{verbatim} +>>> for item in table: +... print item.name, item.size +... if item.name is None: +... break +... 
+__hello__ 104
+__phello__ -104
+__phello__.spam 104
+None 0
+>>>
+\end{verbatim}
+
+The fact that standard Python has a frozen module and a frozen package
+(indicated by the negative size member) is not well known; it is only
+used for testing. Try it out with \code{import {\_}{\_}hello{\_}{\_}} for example.
+
+XXX Describe how to access the \var{code} member fields, which contain
+the byte code for the modules.
+
+
+\subsubsection{Surprises\label{ctypes-surprises}}
+
+There are some edges in \code{ctypes} where you may expect something
+other than what actually happens.
+
+Consider the following example:
+\begin{verbatim}
+>>> from ctypes import *
+>>> class POINT(Structure):
+...     _fields_ = ("x", c_int), ("y", c_int)
+...
+>>> class RECT(Structure):
+...     _fields_ = ("a", POINT), ("b", POINT)
+...
+>>> p1 = POINT(1, 2)
+>>> p2 = POINT(3, 4)
+>>> rc = RECT(p1, p2)
+>>> print rc.a.x, rc.a.y, rc.b.x, rc.b.y
+1 2 3 4
+>>> # now swap the two points
+>>> rc.a, rc.b = rc.b, rc.a
+>>> print rc.a.x, rc.a.y, rc.b.x, rc.b.y
+3 4 3 4
+\end{verbatim}
+
+Hm. We certainly expected the last statement to print \code{3 4 1 2}.
+What happened? Here are the steps of the \code{rc.a, rc.b = rc.b, rc.a}
+line above:
+\begin{verbatim}
+>>> temp0, temp1 = rc.b, rc.a
+>>> rc.a = temp0
+>>> rc.b = temp1
+\end{verbatim}
+
+Note that \code{temp0} and \code{temp1} are objects still using the internal
+buffer of the \code{rc} object above. So executing \code{rc.a = temp0}
+copies the buffer contents of \code{temp0} into \code{rc}'s buffer. This,
+in turn, changes the contents of \code{temp1}. So the last assignment,
+\code{rc.b = temp1}, doesn't have the expected effect.
+
+Keep in mind that retrieving subobjects from Structures, Unions, and
+Arrays doesn't \emph{copy} the subobject; instead it retrieves a wrapper
+object accessing the root-object's underlying buffer.
+
+Another example that may behave differently from what one would expect is this:
+\begin{verbatim}
+>>> s = c_char_p()
+>>> s.value = "abc def ghi"
+>>> s.value
+'abc def ghi'
+>>> s.value is s.value
+False
+>>>
+\end{verbatim}
+
+Why is it printing \code{False}? ctypes instances are objects containing
+a memory block plus some descriptors accessing the contents of the
+memory. Storing a Python object in the memory block does not store
+the object itself; instead the \code{contents} of the object are stored.
+Accessing the contents again constructs a new Python object each time!
+
+
+\subsubsection{Bugs, ToDo and non-implemented things\label{ctypes-bugs-todo-non-implemented-things}}
+
+Enumeration types are not implemented. You can easily implement them
+yourself, using \class{c{\_}int} as the base class.
+
+\code{long double} is not implemented.
+% Local Variables:
+% compile-command: "make.bat"
+% End:
+
diff --git a/Doc/lib/libctypesref.tex b/Doc/lib/libctypesref.tex
new file mode 100644
index 0000000..6d950f4
--- /dev/null
+++ b/Doc/lib/libctypesref.tex
@@ -0,0 +1,457 @@
+\subsection{ctypes reference\label{ctypes-reference}}
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+% functions
+\subsubsection{ctypes functions}
+
+\begin{funcdesc}{addressof}{obj}
+Returns the address of the memory buffer as an integer. \var{obj} must
+be an instance of a ctypes type.
+\end{funcdesc}
+
+\begin{funcdesc}{alignment}{obj_or_type}
+Returns the alignment requirements of a ctypes type.
+\var{obj_or_type} must be a ctypes type or an instance.
+\end{funcdesc} + +\begin{excclassdesc}{ArgumentError}{} +This exception is raised when a foreign function call cannot convert +one of the passed arguments. +\end{excclassdesc} + +\begin{funcdesc}{byref}{obj} +Returns a light-weight pointer to \var{obj}, which must be an instance +of a ctypes type. The returned object can only be used as a foreign +function call parameter. It behaves similar to \code{pointer(obj)}, +but the construction is a lot faster. +\end{funcdesc} + +\begin{funcdesc}{cast}{obj, type} +This function is similar to the cast operator in C. It returns a new +instance of \var{type} which points to the same memory block as +\code{obj}. \code{type} must be a pointer type, and \code{obj} + must be an object that can be interpreted as a pointer. +\end{funcdesc} + +% XXX separate section for CFUNCTYPE, WINFUNCTYPE, PYFUNCTYPE? + +\begin{funcdesc}{CFUNCTYPE}{restype, *argtypes} +This is a factory function that returns a function prototype. The +function prototype describes a function that has a result type of +\code{restype}, and accepts arguments as specified by \code{argtypes}. +The function prototype can be used to construct several kinds of +functions, depending on how the prototype is called. + +The prototypes returned by \code{CFUNCTYPE} or \code{PYFUNCTYPE} +create functions that use the standard C calling convention, +prototypes returned from \code{WINFUNCTYPE} (on Windows) use the +\code{__stdcall} calling convention. + +Functions created by calling the \code{CFUNCTYPE} and +\code{WINFUNCTYPE} prototypes release the Python GIL +before entering the foreign function, and acquire it back after +leaving the function code. + +% XXX differences between CFUNCTYPE / WINFUNCTYPE / PYFUNCTYPE + +\end{funcdesc} + +\begin{funcdesc}{create_string_buffer}{init_or_size\optional{, size}} +This function creates a mutable character buffer. The returned object +is a ctypes array of \code{c_char}. + +\var{init_or_size} must be an integer which specifies the size of the +array, or a string which will be used to initialize the array items. + +If a string is specified as first argument, the buffer is made one +item larger than the length of the string so that the last element in +the array is a NUL termination character. An integer can be passed as +second argument which allows to specify the size of the array if the +length of the string should not be used. + +If the first parameter is a unicode string, it is converted into an +8-bit string according to ctypes conversion rules. +\end{funcdesc} + +\begin{funcdesc}{create_unicode_buffer}{init_or_size\optional{, size}} +This function creates a mutable unicode character buffer. The +returned object is a ctypes array of \code{c_wchar}. + +\var{init_or_size} must be an integer which specifies the size of the +array, or a unicode string which will be used to initialize the array +items. + +If a unicode string is specified as first argument, the buffer is made +one item larger than the length of the string so that the last element +in the array is a NUL termination character. An integer can be passed +as second argument which allows to specify the size of the array if +the length of the string should not be used. + +If the first parameter is a 8-bit string, it is converted into an +unicode string according to ctypes conversion rules. +\end{funcdesc} + +\begin{funcdesc}{DllCanUnloadNow}{} +Windows only: This function is a hook which allows to implement +inprocess COM servers with ctypes. 
It is called from the
+\code{DllCanUnloadNow} function that the \code{_ctypes}
+extension dll exports.
+\end{funcdesc}
+
+\begin{funcdesc}{DllGetClassObject}{}
+Windows only: This function is a hook which allows one to implement
+in-process COM servers with ctypes. It is called from the
+\code{DllGetClassObject} function that the \code{_ctypes}
+extension dll exports.
+\end{funcdesc}
+
+\begin{funcdesc}{FormatError}{\optional{code}}
+Windows only: Returns a textual description of the error code. If no
+error code is specified, the last error code is used by calling the
+Windows api function \code{GetLastError}.
+\end{funcdesc}
+
+\begin{funcdesc}{GetLastError}{}
+Windows only: Returns the last error code set by Windows in the
+calling thread.
+\end{funcdesc}
+
+\begin{funcdesc}{memmove}{dst, src, count}
+Same as the standard C \code{memmove} library function: copies
+\var{count} bytes from \code{src} to \code{dst}. \code{dst} and
+\code{src} must be integers or ctypes instances that can be converted to pointers.
+\end{funcdesc}
+
+\begin{funcdesc}{memset}{dst, c, count}
+Same as the standard C \code{memset} library function: fills the
+memory block at address \code{dst} with \var{count} bytes of value
+\var{c}. \var{dst} must be an integer specifying an address, or a ctypes instance.
+\end{funcdesc}
+
+\begin{funcdesc}{POINTER}{type}
+This factory function creates and returns a new ctypes pointer type.
+Pointer types are cached and reused internally, so calling this
+function repeatedly is cheap. \var{type} must be a ctypes type.
+\end{funcdesc}
+
+\begin{funcdesc}{pointer}{obj}
+This function creates a new pointer instance, pointing to \var{obj}.
+The returned object is of the type \code{POINTER(type(obj))}.
+
+Note: If you just want to pass a pointer to an object to a foreign
+function call, you should use \code{byref(obj)} which is much faster.
+\end{funcdesc}
+
+\begin{funcdesc}{PYFUNCTYPE}{restype, *argtypes}
+\end{funcdesc}
+
+\begin{funcdesc}{pythonapi}{}
+\end{funcdesc}
+
+\begin{funcdesc}{resize}{obj, size}
+This function resizes the internal memory buffer of \var{obj}, which
+must be an instance of a ctypes type. It is not possible to make the
+buffer smaller than the native size of the object's type, as given by
+\code{sizeof(type(obj))}, but it is possible to enlarge the buffer.
+\end{funcdesc}
+
+\begin{funcdesc}{set_conversion_mode}{encoding, errors}
+This function sets the rules that ctypes objects use when converting
+between 8-bit strings and unicode strings. \var{encoding} must be a
+string specifying an encoding, like 'utf-8' or 'mbcs'; \var{errors}
+must be a string specifying the error handling on encoding/decoding
+errors. Examples of possible values are ``strict'', ``replace'', or
+``ignore''.
+
+\code{set_conversion_mode} returns a 2-tuple containing the previous
+conversion rules. On Windows, the initial conversion rules are
+\code{('mbcs', 'ignore')}, on other systems \code{('ascii', 'strict')}.
+\end{funcdesc}
+
+\begin{funcdesc}{sizeof}{obj_or_type}
+Returns the size in bytes of a ctypes type or instance memory buffer.
+Does the same as the C \code{sizeof} operator.
+\end{funcdesc}
+
+\begin{funcdesc}{string_at}{address\optional{, size}}
+This function returns the string starting at memory address
+\var{address}. If \var{size} is specified, it is used as the size;
+otherwise the string is assumed to be zero-terminated.
+\end{funcdesc}
+
+\begin{funcdesc}{WinError}{code=None, descr=None}
+Windows only: this function is probably the worst-named thing in
+ctypes.
It creates an instance of \code{WindowsError}. If \var{code}
+is not specified, \code{GetLastError} is called to determine the error
+code. If \var{descr} is not specified, \function{FormatError} is called to
+get a textual description of the error.
+\end{funcdesc}
+
+\begin{funcdesc}{WINFUNCTYPE}{restype, *argtypes}
+\end{funcdesc}
+
+\begin{funcdesc}{wstring_at}{address\optional{, size}}
+This function returns the wide character string starting at memory
+address \var{address} as a unicode string. If \var{size} is specified,
+it is used as the size; otherwise the string is assumed to be
+zero-terminated.
+\end{funcdesc}
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+% data types
+\subsubsection{data types}
+
+ctypes defines a number of C compatible datatypes, and also allows you
+to define your own types. Among other things, a ctypes type instance
+holds a memory block that contains C compatible data.
+
+\begin{classdesc}{_ctypes._CData}{}
+This non-public class is the base class of all ctypes data types. It
+is mentioned here because it contains the common methods of the ctypes
+data types.
+\end{classdesc}
+
+Common methods of ctypes data types; these are all class methods (to
+be exact, they are methods of the metaclass):
+
+\begin{methoddesc}{from_address}{address}
+This method returns a ctypes type instance using the memory specified
+by \code{address}.
+\end{methoddesc}
+
+\begin{methoddesc}{from_param}{obj}
+This method adapts \code{obj} to a ctypes type.
+\end{methoddesc}
+
+\begin{methoddesc}{in_dll}{library, name}
+This method returns a ctypes type instance exported by a shared
+library. \var{library} is the loaded shared library, and \var{name}
+is the name of the symbol that exports the data.
+\end{methoddesc}
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+% simple data types
+\subsubsection{simple data types}
+
+\begin{classdesc}{_ctypes._SimpleCData}{}
+This non-public class is the base class of all simple ctypes data
+types. It is mentioned here because it contains the common attributes
+of the ctypes data types.
+\end{classdesc}
+
+\begin{memberdesc}{value}
+This attribute contains the actual value of the instance. For integer
+types, it is an integer.
+\end{memberdesc}
+
+Here are the simple ctypes data types:
+
+\begin{classdesc}{c_byte}{\optional{value}}
+Represents a C \code{signed char} datatype, and interprets the value
+as a small integer. The constructor accepts an optional integer
+initializer; no overflow checking is done.
+\end{classdesc}
+
+\begin{classdesc}{c_char}{\optional{value}}
+Represents a C \code{char} datatype, and interprets the value as a
+single character. The constructor accepts an optional string
+initializer; the length of the string must be exactly one character.
+\end{classdesc}
+
+\begin{classdesc}{c_char_p}{\optional{value}}
+Represents a C \code{char *} datatype, which must be a pointer to a
+zero-terminated string. The constructor accepts an integer address,
+or a string.
+% XXX Explain the difference to POINTER(c_char)
+\end{classdesc}
+
+\begin{classdesc}{c_double}{\optional{value}}
+Represents a C \code{double} datatype. The constructor accepts an
+optional float initializer.
+\end{classdesc}
+
+\begin{classdesc}{c_float}{\optional{value}}
+Represents a C \code{float} datatype. The constructor accepts an
+optional float initializer.
+\end{classdesc}
+
+\begin{classdesc}{c_int}{\optional{value}}
+Represents a C \code{signed int} datatype. The constructor accepts an
+optional integer initializer; no overflow checking is done.
On
+platforms where \code{sizeof(int) == sizeof(long)} \code{c_int} is an
+alias for \code{c_long}.
+\end{classdesc}
+
+\begin{classdesc}{c_int16}{\optional{value}}
+Represents a C 16-bit \code{signed int} datatype. Usually an alias
+for \code{c_short}.
+\end{classdesc}
+
+\begin{classdesc}{c_int32}{\optional{value}}
+Represents a C 32-bit \code{signed int} datatype. Usually an alias
+for \code{c_int}.
+\end{classdesc}
+
+\begin{classdesc}{c_int64}{\optional{value}}
+Represents a C 64-bit \code{signed int} datatype. Usually an alias
+for \code{c_longlong}.
+\end{classdesc}
+
+\begin{classdesc}{c_int8}{\optional{value}}
+Represents a C 8-bit \code{signed int} datatype. Usually an alias for \code{c_byte}.
+\end{classdesc}
+
+\begin{classdesc}{c_long}{\optional{value}}
+Represents a C \code{signed long} datatype. The constructor accepts
+an optional integer initializer; no overflow checking is done.
+\end{classdesc}
+
+\begin{classdesc}{c_longlong}{\optional{value}}
+Represents a C \code{signed long long} datatype. The constructor
+accepts an optional integer initializer; no overflow checking is done.
+\end{classdesc}
+
+\begin{classdesc}{c_short}{\optional{value}}
+Represents a C \code{signed short} datatype. The constructor accepts
+an optional integer initializer; no overflow checking is done.
+\end{classdesc}
+
+\begin{classdesc}{c_size_t}{\optional{value}}
+Represents a C \code{size_t} datatype.
+\end{classdesc}
+
+\begin{classdesc}{c_ubyte}{\optional{value}}
+Represents a C \code{unsigned char} datatype, and interprets the value
+as a small integer. The constructor accepts an optional integer
+initializer; no overflow checking is done.
+\end{classdesc}
+
+\begin{classdesc}{c_uint}{\optional{value}}
+Represents a C \code{unsigned int} datatype. The constructor accepts
+an optional integer initializer; no overflow checking is done. On
+platforms where \code{sizeof(int) == sizeof(long)} \code{c_uint} is an
+alias for \code{c_ulong}.
+\end{classdesc}
+
+\begin{classdesc}{c_uint16}{\optional{value}}
+Represents a C 16-bit \code{unsigned int} datatype. Usually an alias
+for \code{c_ushort}.
+\end{classdesc}
+
+\begin{classdesc}{c_uint32}{\optional{value}}
+Represents a C 32-bit \code{unsigned int} datatype. Usually an alias
+for \code{c_uint}.
+\end{classdesc}
+
+\begin{classdesc}{c_uint64}{\optional{value}}
+Represents a C 64-bit \code{unsigned int} datatype. Usually an alias
+for \code{c_ulonglong}.
+\end{classdesc}
+
+\begin{classdesc}{c_uint8}{\optional{value}}
+Represents a C 8-bit \code{unsigned int} datatype. Usually an alias
+for \code{c_ubyte}.
+\end{classdesc}
+
+\begin{classdesc}{c_ulong}{\optional{value}}
+Represents a C \code{unsigned long} datatype. The constructor accepts
+an optional integer initializer; no overflow checking is done.
+\end{classdesc}
+
+\begin{classdesc}{c_ulonglong}{\optional{value}}
+Represents a C \code{unsigned long long} datatype. The constructor
+accepts an optional integer initializer; no overflow checking is done.
+\end{classdesc}
+
+\begin{classdesc}{c_ushort}{\optional{value}}
+Represents a C \code{unsigned short} datatype. The constructor accepts
+an optional integer initializer; no overflow checking is done.
+\end{classdesc}
+
+\begin{classdesc}{c_void_p}{\optional{value}}
+Represents a C \code{void *} type. The value is represented as an
+integer. The constructor accepts an optional integer initializer.
+\end{classdesc} + +\begin{classdesc}{c_wchar}{\optional{value}} +Represents a C \code{wchar_t} datatype, and interprets the value as a +single character unicode string. The constructor accepts an optional +string initializer, the length of the string must be exactly one +character. +\end{classdesc} + +\begin{classdesc}{c_wchar_p}{\optional{value}} +Represents a C \code{wchar_t *} datatype, which must be a pointer to a +zero-terminated wide character string. The constructor accepts an +integer address, or a string. +% XXX Explain the difference to POINTER(c_wchar) +\end{classdesc} + +\begin{classdesc}{HRESULT}{} +Windows only: Represents a \code{HRESULT} value, which contains +success or error information for a function or method call. +\end{classdesc} + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +% structured data types +\subsubsection{structured data types} + +\begin{classdesc}{BigEndianStructure}{} +\end{classdesc} + +\begin{classdesc}{LittleEndianStructure}{} +\end{classdesc} + +\begin{classdesc}{Structure}{} +Base class for Structure data types. + +\end{classdesc} + +\begin{classdesc}{Union}{} +\end{classdesc} + + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +% libraries +\subsubsection{libraries} + +\begin{classdesc}{CDLL}{name, mode=RTLD_LOCAL, handle=None} +\end{classdesc} + +\begin{datadesc}{cdll} +\end{datadesc} + +\begin{classdesc}{LibraryLoader}{dlltype} + +\begin{memberdesc}{LoadLibrary}{name, mode=RTLD_LOCAL, handle=None} +\end{memberdesc} + +\end{classdesc} + +\begin{classdesc}{OleDLL}{name, mode=RTLD_LOCAL, handle=None} +\end{classdesc} + +\begin{datadesc}{oledll} +\end{datadesc} + +\begin{classdesc}{py_object}{} +\end{classdesc} + +\begin{classdesc}{PyDLL}{name, mode=RTLD_LOCAL, handle=None} +\end{classdesc} + +\begin{datadesc}{pydll}{} +\end{datadesc} + +\begin{datadesc}{RTLD_GLOBAL} +\end{datadesc} + +\begin{datadesc}{RTLD_LOCAL} +\end{datadesc} + +\begin{classdesc}{WinDLL}{name, mode=RTLD_LOCAL, handle=None} +\end{classdesc} + +\begin{datadesc}{windll} +\end{datadesc} + diff --git a/Doc/lib/libcursespanel.tex b/Doc/lib/libcursespanel.tex index 519091e..1f96717 100644 --- a/Doc/lib/libcursespanel.tex +++ b/Doc/lib/libcursespanel.tex @@ -22,6 +22,9 @@ Returns the bottom panel in the panel stack. \begin{funcdesc}{new_panel}{win} Returns a panel object, associating it with the given window \var{win}. +Be aware that you need to keep the returned panel object referenced +explicitly. If you don't, the panel object is garbage collected and +removed from the panel stack. \end{funcdesc} \begin{funcdesc}{top_panel}{} diff --git a/Doc/lib/libdecimal.tex b/Doc/lib/libdecimal.tex index ffc3363..a0c7bde 100644 --- a/Doc/lib/libdecimal.tex +++ b/Doc/lib/libdecimal.tex @@ -713,8 +713,8 @@ here. \constant{NaN}. \end{methoddesc} -\begin{methoddesc}{sqrt}{} - Return the square root to full precision. +\begin{methoddesc}{sqrt}{x} + Return the square root of \var{x} to full precision. \end{methoddesc} \begin{methoddesc}{subtract}{x, y} @@ -734,7 +734,7 @@ here. or \constant{Rounded}. \end{methoddesc} -\begin{methoddesc}{to_sci_string}{} +\begin{methoddesc}{to_sci_string}{x} Converts a number to a string using scientific notation. \end{methoddesc} diff --git a/Doc/lib/libdis.tex b/Doc/lib/libdis.tex index 19fda5b..27b8a5c 100644 --- a/Doc/lib/libdis.tex +++ b/Doc/lib/libdis.tex @@ -55,7 +55,7 @@ was provided. 
The output is divided in the following columns: \begin{enumerate} \item the line number, for the first instruction of each line \item the current instruction, indicated as \samp{-->}, -\item a labelled instruction, indicated with \samp{>\code{>}}, +\item a labelled instruction, indicated with \samp{>>}, \item the address of the instruction, \item the operation code name, \item operation parameters, and diff --git a/Doc/lib/libdoctest.tex b/Doc/lib/libdoctest.tex index 0e3a017..73b29ad 100644 --- a/Doc/lib/libdoctest.tex +++ b/Doc/lib/libdoctest.tex @@ -333,8 +333,8 @@ NO!!! \end{verbatim} Any expected output must immediately follow the final -\code{'>\code{>}>~'} or \code{'...~'} line containing the code, and -the expected output (if any) extends to the next \code{'>\code{>}>~'} +\code{'>>>~'} or \code{'...~'} line containing the code, and +the expected output (if any) extends to the next \code{'>>>~'} or all-whitespace line. The fine print: @@ -386,7 +386,7 @@ Backslashes in a raw docstring: m\n \end{verbatim} and as many leading whitespace characters are stripped from the -expected output as appeared in the initial \code{'>\code{>}>~'} line +expected output as appeared in the initial \code{'>>>~'} line that started the example. \end{itemize} @@ -407,10 +407,13 @@ You can force use of your own dict as the execution context by passing \subsubsection{What About Exceptions?\label{doctest-exceptions}} No problem, provided that the traceback is the only output produced by -the example: just paste in the traceback. Since tracebacks contain -details that are likely to change rapidly (for example, exact file paths -and line numbers), this is one case where doctest works hard to be -flexible in what it accepts. +the example: just paste in the traceback.\footnote{Examples containing + both expected output and an exception are not supported. Trying + to guess where one ends and the other begins is too error-prone, + and that also makes for a confusing test.} +Since tracebacks contain details that are likely to change rapidly (for +example, exact file paths and line numbers), this is one case where doctest +works hard to be flexible in what it accepts. Simple example: @@ -613,6 +616,20 @@ TypeError: object doesn't support item assignment \end{datadesc} +\begin{datadesc}{SKIP} + + When specified, do not run the example at all. This can be useful + in contexts where doctest examples serve as both documentation and + test cases, and an example should be included for documentation + purposes, but should not be checked. E.g., the example's output + might be random; or the example might depend on resources which + would be unavailable to the test driver. + + The SKIP flag can also be used for temporarily "commenting out" + examples. + +\end{datadesc} + \begin{datadesc}{COMPARISON_FLAGS} A bitmask or'ing together all the comparison flags above. \end{datadesc} @@ -741,6 +758,7 @@ can be useful. 
were added; by default \code{} in expected output matches an empty line in actual output; and doctest directives were added]{2.4} +\versionchanged[Constant \constant{SKIP} was added]{2.5} There's also a way to register new option flag names, although this isn't useful unless you intend to extend \refmodule{doctest} internals @@ -1040,7 +1058,11 @@ runner.run(suite) There are two main functions for creating \class{\refmodule{unittest}.TestSuite} instances from text files and modules with doctests: -\begin{funcdesc}{DocFileSuite}{*paths, **kw} +\begin{funcdesc}{DocFileSuite}{\optional{module_relative}\optional{, + package}\optional{, setUp}\optional{, + tearDown}\optional{, globs}\optional{, + optionflags}\optional{, parser}} + Convert doctest tests from one or more text files to a \class{\refmodule{unittest}.TestSuite}. @@ -1108,9 +1130,9 @@ instances from text files and modules with doctests: \versionadded{2.4} - Starting in Python 2.5, the global \code{__file__} was added to the + \versionchanged[The global \code{__file__} was added to the globals provided to doctests loaded from a text file using - \function{DocFileSuite()}. + \function{DocFileSuite()}]{2.5} \end{funcdesc} \begin{funcdesc}{DocTestSuite}{\optional{module}\optional{, diff --git a/Doc/lib/libexcs.tex b/Doc/lib/libexcs.tex index 85058a4..30fe831 100644 --- a/Doc/lib/libexcs.tex +++ b/Doc/lib/libexcs.tex @@ -80,7 +80,6 @@ text message explaining why the exception had been raised. If more data needs to be attached to the exception, attach it through arbitrary attributes on the instance. All arguments are also stored in \member{args} as a tuple, but it will eventually be deprecated and thus its use is discouraged. -\versionchanged[Changed to inherit from \exception{BaseException}]{2.5} \versionadded{2.5} \end{excdesc} @@ -88,6 +87,7 @@ eventually be deprecated and thus its use is discouraged. All built-in, non-system-exiting exceptions are derived from this class. All user-defined exceptions should also be derived from this class. +\versionchanged[Changed to inherit from \exception{BaseException}]{2.5} \end{excdesc} \begin{excdesc}{StandardError} @@ -394,11 +394,15 @@ Raised when an \keyword{assert} statement fails. \begin{excdesc}{WindowsError} Raised when a Windows-specific error occurs or when the error number does not correspond to an \cdata{errno} value. The - \member{errno} and \member{strerror} values are created from the + \member{winerror} and \member{strerror} values are created from the return values of the \cfunction{GetLastError()} and \cfunction{FormatMessage()} functions from the Windows Platform API. + The \member{errno} value maps the \member{winerror} value to + corresponding \code{errno.h} values. This is a subclass of \exception{OSError}. \versionadded{2.0} +\versionchanged[Previous versions put the \cfunction{GetLastError()} +codes into \member{errno}]{2.5} \end{excdesc} \begin{excdesc}{ZeroDivisionError} @@ -442,6 +446,11 @@ Base class for warnings about constructs that will change semantically in the future. \end{excdesc} +\begin{excdesc}{ImportWarning} +Base class for warnings about probable mistakes in module imports. +\versionadded{2.5} +\end{excdesc} + The class hierarchy for built-in exceptions is: \verbatiminput{../../Lib/test/exception_hierarchy.txt} diff --git a/Doc/lib/libfuncs.tex b/Doc/lib/libfuncs.tex index c0352d3..8904d5f 100644 --- a/Doc/lib/libfuncs.tex +++ b/Doc/lib/libfuncs.tex @@ -418,7 +418,7 @@ class C: that differentiate between binary and text files (else it is ignored). 
If the file cannot be opened, \exception{IOError} is raised. - + In addition to the standard \cfunction{fopen()} values \var{mode} may be \code{'U'} or \code{'rU'}. If Python is built with universal newline support (the default) the file is opened as a text file, but @@ -434,6 +434,9 @@ class C: have yet been seen), \code{'\e n'}, \code{'\e r'}, \code{'\e r\e n'}, or a tuple containing all the newline types seen. + Python enforces that the mode, after stripping \code{'U'}, begins with + \code{'r'}, \code{'w'} or \code{'a'}. + If \var{mode} is omitted, it defaults to \code{'r'}. When opening a binary file, you should append \code{'b'} to the \var{mode} value for improved portability. (It's useful even on systems which don't @@ -455,12 +458,10 @@ class C: after any I/O has been performed, and there's no reliable way to determine whether this is the case.} - The \function{file()} constructor is new in Python 2.2 and is an - alias for \function{open()}. Both spellings are equivalent. The - intent is for \function{open()} to continue to be preferred for use - as a factory function which returns a new \class{file} object. The - spelling, \class{file} is more suited to type testing (for example, - writing \samp{isinstance(f, file)}). + \versionadded{2.2} + + \versionchanged[Restriction on first letter of mode string + introduced]{2.5} \end{funcdesc} \begin{funcdesc}{filter}{function, list} @@ -708,7 +709,10 @@ class C: \end{funcdesc} \begin{funcdesc}{open}{filename\optional{, mode\optional{, bufsize}}} - An alias for the \function{file()} function above. + A wrapper for the \function{file()} function above. The intent is + for \function{open()} to be preferred for use as a factory function + returning a new \class{file} object. \class{file} is more suited to + type testing (for example, writing \samp{isinstance(f, file)}). \end{funcdesc} \begin{funcdesc}{ord}{c} diff --git a/Doc/lib/libgetpass.tex b/Doc/lib/libgetpass.tex index 1d177d3..a742439 100644 --- a/Doc/lib/libgetpass.tex +++ b/Doc/lib/libgetpass.tex @@ -19,7 +19,7 @@ The \module{getpass} module provides two functions: \code{sys.stdout} (this argument is ignored on Windows). Availability: Macintosh, \UNIX, Windows. - \versionadded[The \var{stream} parameter]{2.5} + \versionchanged[The \var{stream} parameter was added]{2.5} \end{funcdesc} diff --git a/Doc/lib/libhtmlparser.tex b/Doc/lib/libhtmlparser.tex index b85ba56..52f8409 100644 --- a/Doc/lib/libhtmlparser.tex +++ b/Doc/lib/libhtmlparser.tex @@ -132,7 +132,7 @@ implementation does nothing. \begin{methoddesc}{handle_decl}{decl} Method called when an SGML declaration is read by the parser. The \var{decl} parameter will be the entire contents of the declaration -inside the \code{} markup.It is intended to be overridden +inside the \code{} markup. It is intended to be overridden by a derived class; the base class implementation does nothing. \end{methoddesc} diff --git a/Doc/lib/liblocale.tex b/Doc/lib/liblocale.tex index e6ba2c1..688ccb0 100644 --- a/Doc/lib/liblocale.tex +++ b/Doc/lib/liblocale.tex @@ -61,7 +61,7 @@ locale.setlocale(locale.LC_ALL, '') Returns the database of the local conventions as a dictionary. 
This dictionary has the following strings as keys: - \begin{tableiii}{l|l|p{3in}}{constant}{Key}{Category}{Meaning} + \begin{tableiii}{l|l|p{3in}}{constant}{Category}{Key}{Meaning} \lineiii{LC_NUMERIC}{\code{'decimal_point'}} {Decimal point character.} \lineiii{}{\code{'grouping'}} @@ -76,8 +76,20 @@ locale.setlocale(locale.LC_ALL, '') {International currency symbol.} \lineiii{}{\code{'currency_symbol'}} {Local currency symbol.} + \lineiii{}{\code{'p_cs_precedes/n_cs_precedes'}} + {Whether the currency symbol precedes the value (for positive resp. + negative values).} + \lineiii{}{\code{'p_sep_by_space/n_sep_by_space'}} + {Whether the currency symbol is separated from the value + by a space (for positive resp. negative values).} \lineiii{}{\code{'mon_decimal_point'}} {Decimal point used for monetary values.} + \lineiii{}{\code{'frac_digits'}} + {Number of fractional digits used in local formatting + of monetary values.} + \lineiii{}{\code{'int_frac_digits'}} + {Number of fractional digits used in international + formatting of monetary values.} \lineiii{}{\code{'mon_thousands_sep'}} {Group separator used for monetary values.} \lineiii{}{\code{'mon_grouping'}} @@ -87,13 +99,12 @@ locale.setlocale(locale.LC_ALL, '') {Symbol used to annotate a positive monetary value.} \lineiii{}{\code{'negative_sign'}} {Symbol used to annotate a negative monetary value.} - \lineiii{}{\code{'frac_digits'}} - {Number of fractional digits used in local formatting - of monetary values.} - \lineiii{}{\code{'int_frac_digits'}} - {Number of fractional digits used in international - formatting of monetary values.} + \lineiii{}{\code{'p_sign_posn/n_sign_posn'}} + {The position of the sign (for positive resp. negative values), see below.} \end{tableiii} + + All numeric values can be set to \constant{CHAR_MAX} to indicate that + there is no value specified in this locale. The possible values for \code{'p_sign_posn'} and \code{'n_sign_posn'} are given below. @@ -104,7 +115,7 @@ locale.setlocale(locale.LC_ALL, '') \lineii{2}{The sign should follow the value and currency symbol.} \lineii{3}{The sign should immediately precede the value.} \lineii{4}{The sign should immediately follow the value.} - \lineii{\constant{LC_MAX}}{Nothing is specified in this locale.} + \lineii{\constant{CHAR_MAX}}{Nothing is specified in this locale.} \end{tableii} \end{funcdesc} @@ -206,12 +217,44 @@ for which symbolic constants are available in the locale module. strings. \end{funcdesc} -\begin{funcdesc}{format}{format, val\optional{, grouping}} +\begin{funcdesc}{format}{format, val\optional{, grouping\optional{, monetary}}} Formats a number \var{val} according to the current \constant{LC_NUMERIC} setting. The format follows the conventions of the \code{\%} operator. For floating point values, the decimal point is modified if appropriate. If \var{grouping} is true, also takes the grouping into account. + + If \var{monetary} is true, the conversion uses monetary thousands + separator and grouping strings. + + Please note that this function will only work for exactly one \%char + specifier. For whole format strings, use \function{format_string()}. + + \versionchanged[Added the \var{monetary} parameter]{2.5} +\end{funcdesc} + +\begin{funcdesc}{format_string}{format, val\optional{, grouping}} + Processes formatting specifiers as in \code{format \% val}, + but takes the current locale settings into account. 
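A short sketch of the difference between \function{format()} and \function{format_string()}; the locale name used here is only an example and may not be available on every system:
\begin{verbatim}
import locale

locale.setlocale(locale.LC_ALL, 'en_US')   # assumed to exist on this system
# format() handles a single %-specifier ...
print locale.format("%.2f", 1234567.89, grouping=True)
# ... while format_string() accepts a whole format string
print locale.format_string("total: %.2f (%d items)", (1234567.89, 42),
                           grouping=True)
\end{verbatim}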
+ + \versionadded{2.5} +\end{funcdesc} + +\begin{funcdesc}{currency}{val\optional{, symbol\optional{, grouping\optional{, international}}}} + Formats a number \var{val} according to the current \constant{LC_MONETARY} + settings. + + The returned string includes the currency symbol if \var{symbol} is true, + which is the default. + If \var{grouping} is true (which is not the default), grouping is done with + the value. + If \var{international} is true (which is not the default), the international + currency symbol is used. + + Note that this function will not work with the `C' locale, so you have to set + a locale via \function{setlocale()} first. + + \versionadded{2.5} \end{funcdesc} \begin{funcdesc}{str}{float} diff --git a/Doc/lib/libmailbox.tex b/Doc/lib/libmailbox.tex index dd18562..0a1f792 100644 --- a/Doc/lib/libmailbox.tex +++ b/Doc/lib/libmailbox.tex @@ -1,12 +1,1253 @@ \section{\module{mailbox} --- - Read various mailbox formats} + Manipulate mailboxes in various formats} -\declaremodule{standard}{mailbox} -\modulesynopsis{Read various mailbox formats.} +\declaremodule{}{mailbox} +\moduleauthor{Gregory K.~Johnson}{gkj@gregorykjohnson.com} +\sectionauthor{Gregory K.~Johnson}{gkj@gregorykjohnson.com} +\modulesynopsis{Manipulate mailboxes in various formats} -This module defines a number of classes that allow easy and uniform -access to mail messages in a (\UNIX) mailbox. +This module defines two classes, \class{Mailbox} and \class{Message}, for +accessing and manipulating on-disk mailboxes and the messages they contain. +\class{Mailbox} offers a dictionary-like mapping from keys to messages. +\class{Message} extends the \module{email.Message} module's \class{Message} +class with format-specific state and behavior. Supported mailbox formats are +Maildir, mbox, MH, Babyl, and MMDF. + +\begin{seealso} + \seemodule{email}{Represent and manipulate messages.} +\end{seealso} + +\subsection{\class{Mailbox} objects} +\label{mailbox-objects} + +\begin{classdesc*}{Mailbox} +A mailbox, which may be inspected and modified. +\end{classdesc*} + +The \class{Mailbox} interface is dictionary-like, with small keys +corresponding to messages. Keys are issued by the \class{Mailbox} instance +with which they will be used and are only meaningful to that \class{Mailbox} +instance. A key continues to identify a message even if the corresponding +message is modified, such as by replacing it with another message. Messages may +be added to a \class{Mailbox} instance using the set-like method +\method{add()} and removed using a \code{del} statement or the set-like methods +\method{remove()} and \method{discard()}. + +\class{Mailbox} interface semantics differ from dictionary semantics in some +noteworthy ways. Each time a message is requested, a new representation +(typically a \class{Message} instance) is generated, based upon the current +state of the mailbox. Similarly, when a message is added to a \class{Mailbox} +instance, the provided message representation's contents are copied. In neither +case is a reference to the message representation kept by the \class{Mailbox} +instance. + +The default \class{Mailbox} iterator iterates over message representations, not +keys as the default dictionary iterator does. Moreover, modification of a +mailbox during iteration is safe and well-defined. Messages added to the +mailbox after an iterator is created will not be seen by the iterator. 
Messages +removed from the mailbox before the iterator yields them will be silently +skipped, though using a key from an iterator may result in a +\exception{KeyError} exception if the corresponding message is subsequently +removed. + +\class{Mailbox} itself is intended to define an interface and to be inherited +from by format-specific subclasses but is not intended to be instantiated. +Instead, you should instantiate a subclass. + +\class{Mailbox} instances have the following methods: + +\begin{methoddesc}{add}{message} +Add \var{message} to the mailbox and return the key that has been assigned to +it. + +Parameter \var{message} may be a \class{Message} instance, an +\class{email.Message.Message} instance, a string, or a file-like object (which +should be open in text mode). If \var{message} is an instance of the +appropriate format-specific \class{Message} subclass (e.g., if it's an +\class{mboxMessage} instance and this is an \class{mbox} instance), its +format-specific information is used. Otherwise, reasonable defaults for +format-specific information are used. +\end{methoddesc} + +\begin{methoddesc}{remove}{key} +\methodline{__delitem__}{key} +\methodline{discard}{key} +Delete the message corresponding to \var{key} from the mailbox. + +If no such message exists, a \exception{KeyError} exception is raised if the +method was called as \method{remove()} or \method{__delitem__()} but no +exception is raised if the method was called as \method{discard()}. The +behavior of \method{discard()} may be preferred if the underlying mailbox +format supports concurrent modification by other processes. +\end{methoddesc} + +\begin{methoddesc}{__setitem__}{key, message} +Replace the message corresponding to \var{key} with \var{message}. Raise a +\exception{KeyError} exception if no message already corresponds to \var{key}. + +As with \method{add()}, parameter \var{message} may be a \class{Message} +instance, an \class{email.Message.Message} instance, a string, or a file-like +object (which should be open in text mode). If \var{message} is an instance of +the appropriate format-specific \class{Message} subclass (e.g., if it's an +\class{mboxMessage} instance and this is an \class{mbox} instance), its +format-specific information is used. Otherwise, the format-specific information +of the message that currently corresponds to \var{key} is left unchanged. +\end{methoddesc} + +\begin{methoddesc}{iterkeys}{} +\methodline{keys}{} +Return an iterator over all keys if called as \method{iterkeys()} or return a +list of keys if called as \method{keys()}. +\end{methoddesc} + +\begin{methoddesc}{itervalues}{} +\methodline{__iter__}{} +\methodline{values}{} +Return an iterator over representations of all messages if called as +\method{itervalues()} or \method{__iter__()} or return a list of such +representations if called as \method{values()}. The messages are represented as +instances of the appropriate format-specific \class{Message} subclass unless a +custom message factory was specified when the \class{Mailbox} instance was +initialized. \note{The behavior of \method{__iter__()} is unlike that of +dictionaries, which iterate over keys.} +\end{methoddesc} + +\begin{methoddesc}{iteritems}{} +\methodline{items}{} +Return an iterator over (\var{key}, \var{message}) pairs, where \var{key} is a +key and \var{message} is a message representation, if called as +\method{iteritems()} or return a list of such pairs if called as +\method{items()}. 
The messages are represented as instances of the appropriate +format-specific \class{Message} subclass unless a custom message factory was +specified when the \class{Mailbox} instance was initialized. +\end{methoddesc} + +\begin{methoddesc}{get}{key\optional{, default=None}} +\methodline{__getitem__}{key} +Return a representation of the message corresponding to \var{key}. If no such +message exists, \var{default} is returned if the method was called as +\method{get()} and a \exception{KeyError} exception is raised if the method was +called as \method{__getitem__()}. The message is represented as an instance of +the appropriate format-specific \class{Message} subclass unless a custom +message factory was specified when the \class{Mailbox} instance was +initialized. +\end{methoddesc} + +\begin{methoddesc}{get_message}{key} +Return a representation of the message corresponding to \var{key} as an +instance of the appropriate format-specific \class{Message} subclass, or raise +a \exception{KeyError} exception if no such message exists. +\end{methoddesc} + +\begin{methoddesc}{get_string}{key} +Return a string representation of the message corresponding to \var{key}, or +raise a \exception{KeyError} exception if no such message exists. +\end{methoddesc} + +\begin{methoddesc}{get_file}{key} +Return a file-like representation of the message corresponding to \var{key}, +or raise a \exception{KeyError} exception if no such message exists. The +file-like object behaves as if open in binary mode. This file should be closed +once it is no longer needed. + +\note{Unlike other representations of messages, file-like representations are +not necessarily independent of the \class{Mailbox} instance that created them +or of the underlying mailbox. More specific documentation is provided by each +subclass.} +\end{methoddesc} + +\begin{methoddesc}{has_key}{key} +\methodline{__contains__}{key} +Return \code{True} if \var{key} corresponds to a message, \code{False} +otherwise. +\end{methoddesc} + +\begin{methoddesc}{__len__}{} +Return a count of messages in the mailbox. +\end{methoddesc} + +\begin{methoddesc}{clear}{} +Delete all messages from the mailbox. +\end{methoddesc} + +\begin{methoddesc}{pop}{key\optional{, default}} +Return a representation of the message corresponding to \var{key} and delete +the message. If no such message exists, return \var{default} if it was supplied +or else raise a \exception{KeyError} exception. The message is represented as +an instance of the appropriate format-specific \class{Message} subclass unless +a custom message factory was specified when the \class{Mailbox} instance was +initialized. +\end{methoddesc} + +\begin{methoddesc}{popitem}{} +Return an arbitrary (\var{key}, \var{message}) pair, where \var{key} is a key +and \var{message} is a message representation, and delete the corresponding +message. If the mailbox is empty, raise a \exception{KeyError} exception. The +message is represented as an instance of the appropriate format-specific +\class{Message} subclass unless a custom message factory was specified when the +\class{Mailbox} instance was initialized. +\end{methoddesc} + +\begin{methoddesc}{update}{arg} +Parameter \var{arg} should be a \var{key}-to-\var{message} mapping or an +iterable of (\var{key}, \var{message}) pairs. Updates the mailbox so that, for +each given \var{key} and \var{message}, the message corresponding to \var{key} +is set to \var{message} as if by using \method{__setitem__()}. 
As with +\method{__setitem__()}, each \var{key} must already correspond to a message in +the mailbox or else a \exception{KeyError} exception will be raised, so in +general it is incorrect for \var{arg} to be a \class{Mailbox} instance. +\note{Unlike with dictionaries, keyword arguments are not supported.} +\end{methoddesc} + +\begin{methoddesc}{flush}{} +Write any pending changes to the filesystem. For some \class{Mailbox} +subclasses, changes are always written immediately and this method does +nothing. +\end{methoddesc} + +\begin{methoddesc}{lock}{} +Acquire an exclusive advisory lock on the mailbox so that other processes know +not to modify it. An \exception{ExternalClashError} is raised if the lock is +not available. The particular locking mechanisms used depend upon the mailbox +format. +\end{methoddesc} + +\begin{methoddesc}{unlock}{} +Release the lock on the mailbox, if any. +\end{methoddesc} + +\begin{methoddesc}{close}{} +Flush the mailbox, unlock it if necessary, and close any open files. For some +\class{Mailbox} subclasses, this method does nothing. +\end{methoddesc} + + +\subsubsection{\class{Maildir}} +\label{mailbox-maildir} + +\begin{classdesc}{Maildir}{dirname\optional{, factory=rfc822.Message\optional{, +create=True}}} +A subclass of \class{Mailbox} for mailboxes in Maildir format. Parameter +\var{factory} is a callable object that accepts a file-like message +representation (which behaves as if opened in binary mode) and returns a custom +representation. If \var{factory} is \code{None}, \class{MaildirMessage} is used +as the default message representation. If \var{create} is \code{True}, the +mailbox is created if it does not exist. + +It is for historical reasons that \var{factory} defaults to +\class{rfc822.Message} and that \var{dirname} is named as such rather than +\var{path}. For a \class{Maildir} instance that behaves like instances of other +\class{Mailbox} subclasses, set \var{factory} to \code{None}. +\end{classdesc} + +Maildir is a directory-based mailbox format invented for the qmail mail +transfer agent and now widely supported by other programs. Messages in a +Maildir mailbox are stored in separate files within a common directory +structure. This design allows Maildir mailboxes to be accessed and modified by +multiple unrelated programs without data corruption, so file locking is +unnecessary. + +Maildir mailboxes contain three subdirectories, namely: \file{tmp}, \file{new}, +and \file{cur}. Messages are created momentarily in the \file{tmp} subdirectory +and then moved to the \file{new} subdirectory to finalize delivery. A mail user +agent may subsequently move the message to the \file{cur} subdirectory and +store information about the state of the message in a special "info" section +appended to its file name. + +Folders of the style introduced by the Courier mail transfer agent are also +supported. Any subdirectory of the main mailbox is considered a folder if +\character{.} is the first character in its name. Folder names are represented +by \class{Maildir} without the leading \character{.}. Each folder is itself a +Maildir mailbox but should not contain other folders. Instead, a logical +nesting is indicated using \character{.} to delimit levels, e.g., +"Archived.2005.07". + +\begin{notice} +The Maildir specification requires the use of a colon (\character{:}) in +certain message file names. 
However, some operating systems do not permit this +character in file names. If you wish to use a Maildir-like format on such an +operating system, you should specify another character to use instead. The +exclamation point (\character{!}) is a popular choice. For example: +\begin{verbatim} +import mailbox +mailbox.Maildir.colon = '!' +\end{verbatim} +The \member{colon} attribute may also be set on a per-instance basis. +\end{notice} + +\class{Maildir} instances have all of the methods of \class{Mailbox} in +addition to the following: + +\begin{methoddesc}{list_folders}{} +Return a list of the names of all folders. +\end{methoddesc} + +\begin{methoddesc}{get_folder}{folder} +Return a \class{Maildir} instance representing the folder whose name is +\var{folder}. A \exception{NoSuchMailboxError} exception is raised if the +folder does not exist. +\end{methoddesc} + +\begin{methoddesc}{add_folder}{folder} +Create a folder whose name is \var{folder} and return a \class{Maildir} +instance representing it. +\end{methoddesc} + +\begin{methoddesc}{remove_folder}{folder} +Delete the folder whose name is \var{folder}. If the folder contains any +messages, a \exception{NotEmptyError} exception will be raised and the folder +will not be deleted. +\end{methoddesc} + +\begin{methoddesc}{clean}{} +Delete temporary files from the mailbox that have not been accessed in the +last 36 hours. The Maildir specification says that mail-reading programs +should do this occasionally. +\end{methoddesc} + +Some \class{Mailbox} methods implemented by \class{Maildir} deserve special +remarks: + +\begin{methoddesc}{add}{message} +\methodline[Maildir]{__setitem__}{key, message} +\methodline[Maildir]{update}{arg} +\warning{These methods generate unique file names based upon the current +process ID. When using multiple threads, undetected name clashes may occur and +cause corruption of the mailbox unless threads are coordinated to avoid using +these methods to manipulate the same mailbox simultaneously.} +\end{methoddesc} + +\begin{methoddesc}{flush}{} +All changes to Maildir mailboxes are immediately applied, so this method does +nothing. +\end{methoddesc} + +\begin{methoddesc}{lock}{} +\methodline{unlock}{} +Maildir mailboxes do not support (or require) locking, so these methods do +nothing. +\end{methoddesc} + +\begin{methoddesc}{close}{} +\class{Maildir} instances do not keep any open files and the underlying +mailboxes do not support locking, so this method does nothing. +\end{methoddesc} + +\begin{methoddesc}{get_file}{key} +Depending upon the host platform, it may not be possible to modify or remove +the underlying message while the returned file remains open. +\end{methoddesc} + +\begin{seealso} + \seelink{http://www.qmail.org/man/man5/maildir.html}{maildir man page from + qmail}{The original specification of the format.} + \seelink{http://cr.yp.to/proto/maildir.html}{Using maildir format}{Notes + on Maildir by its inventor. Includes an updated name-creation scheme and + details on "info" semantics.} + \seelink{http://www.courier-mta.org/?maildir.html}{maildir man page from + Courier}{Another specification of the format. Describes a common extension + for supporting folders.} +\end{seealso} + +\subsubsection{\class{mbox}} +\label{mailbox-mbox} + +\begin{classdesc}{mbox}{path\optional{, factory=None\optional{, create=True}}} +A subclass of \class{Mailbox} for mailboxes in mbox format.
Parameter +\var{factory} is a callable object that accepts a file-like message +representation (which behaves as if opened in binary mode) and returns a custom +representation. If \var{factory} is \code{None}, \class{mboxMessage} is used as +the default message representation. If \var{create} is \code{True}, the mailbox +is created if it does not exist. +\end{classdesc} + +The mbox format is the classic format for storing mail on \UNIX{} systems. All +messages in an mbox mailbox are stored in a single file with the beginning of +each message indicated by a line whose first five characters are "From~". + +Several variations of the mbox format exist to address perceived shortcomings +in the original. In the interest of compatibility, \class{mbox} implements the +original format, which is sometimes referred to as \dfn{mboxo}. This means that +the \mailheader{Content-Length} header, if present, is ignored and that any +occurrences of "From~" at the beginning of a line in a message body are +transformed to ">From~" when storing the message, although occurrences of +">From~" are not transformed to "From~" when reading the message. + +Some \class{Mailbox} methods implemented by \class{mbox} deserve special +remarks: + +\begin{methoddesc}{get_file}{key} +Using the file after calling \method{flush()} or \method{close()} on the +\class{mbox} instance may yield unpredictable results or raise an exception. +\end{methoddesc} + +\begin{methoddesc}{lock}{} +\methodline{unlock}{} +Three locking mechanisms are used---dot locking and, if available, the +\cfunction{flock()} and \cfunction{lockf()} system calls. +\end{methoddesc} + +\begin{seealso} + \seelink{http://www.qmail.org/man/man5/mbox.html}{mbox man page from + qmail}{A specification of the format and its variations.} + \seelink{http://www.tin.org/bin/man.cgi?section=5\&topic=mbox}{mbox man + page from tin}{Another specification of the format, with details on + locking.} + \seelink{http://home.netscape.com/eng/mozilla/2.0/relnotes/demo/content-length.html} + {Configuring Netscape Mail on \UNIX{}: Why The Content-Length Format is + Bad}{An argument for using the original mbox format rather than a + variation.} + \seelink{http://homepages.tesco.net./\tilde{}J.deBoynePollard/FGA/mail-mbox-formats.html} + {"mbox" is a family of several mutually incompatible mailbox formats}{A + history of mbox variations.} +\end{seealso} + +\subsubsection{\class{MH}} +\label{mailbox-mh} + +\begin{classdesc}{MH}{path\optional{, factory=None\optional{, create=True}}} +A subclass of \class{Mailbox} for mailboxes in MH format. Parameter +\var{factory} is a callable object that accepts a file-like message +representation (which behaves as if opened in binary mode) and returns a custom +representation. If \var{factory} is \code{None}, \class{MHMessage} is used as +the default message representation. If \var{create} is \code{True}, the mailbox +is created if it does not exist. +\end{classdesc} + +MH is a directory-based mailbox format invented for the MH Message Handling +System, a mail user agent. Each message in an MH mailbox resides in its own +file. An MH mailbox may contain other MH mailboxes (called \dfn{folders}) in +addition to messages. Folders may be nested indefinitely. MH mailboxes also +support \dfn{sequences}, which are named lists used to logically group messages +without moving them to sub-folders. Sequences are defined in a file called +\file{.mh_sequences} in each folder.
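A brief sketch of how such a mailbox might be read with the \class{MH} class; the path \file{~/Mail/inbox} is hypothetical, and the methods used here are documented below:
\begin{verbatim}
import mailbox

inbox = mailbox.MH('~/Mail/inbox')   # hypothetical MH directory
inbox.lock()
try:
    for key, message in inbox.iteritems():
        if 'flagged' in message.get_sequences():
            print key, message['subject']
finally:
    inbox.unlock()
\end{verbatim}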
+ +The \class{MH} class manipulates MH mailboxes, but it does not attempt to +emulate all of \program{mh}'s behaviors. In particular, it does not modify and +is not affected by the \file{context} or \file{.mh_profile} files that are used +by \program{mh} to store its state and configuration. + +\class{MH} instances have all of the methods of \class{Mailbox} in addition to +the following: + +\begin{methoddesc}{list_folders}{} +Return a list of the names of all folders. +\end{methoddesc} + +\begin{methoddesc}{get_folder}{folder} +Return an \class{MH} instance representing the folder whose name is +\var{folder}. A \exception{NoSuchMailboxError} exception is raised if the +folder does not exist. +\end{methoddesc} + +\begin{methoddesc}{add_folder}{folder} +Create a folder whose name is \var{folder} and return an \class{MH} instance +representing it. +\end{methoddesc} + +\begin{methoddesc}{remove_folder}{folder} +Delete the folder whose name is \var{folder}. If the folder contains any +messages, a \exception{NotEmptyError} exception will be raised and the folder +will not be deleted. +\end{methoddesc} + +\begin{methoddesc}{get_sequences}{} +Return a dictionary of sequence names mapped to key lists. If there are no +sequences, the empty dictionary is returned. +\end{methoddesc} + +\begin{methoddesc}{set_sequences}{sequences} +Re-define the sequences that exist in the mailbox based upon \var{sequences}, a +dictionary of names mapped to key lists, as returned by +\method{get_sequences()}. +\end{methoddesc} + +\begin{methoddesc}{pack}{} +Rename messages in the mailbox as necessary to eliminate gaps in numbering. +Entries in the sequences list are updated correspondingly. \note{Already-issued +keys are invalidated by this operation and should not be subsequently used.} +\end{methoddesc} + +Some \class{Mailbox} methods implemented by \class{MH} deserve special remarks: + +\begin{methoddesc}{remove}{key} +\methodline{__delitem__}{key} +\methodline{discard}{key} +These methods immediately delete the message. The MH convention of marking a +message for deletion by prepending a comma to its name is not used. +\end{methoddesc} + +\begin{methoddesc}{lock}{} +\methodline{unlock}{} +Three locking mechanisms are used---dot locking and, if available, the +\cfunction{flock()} and \cfunction{lockf()} system calls. For MH mailboxes, +locking the mailbox means locking the \file{.mh_sequences} file and, only for +the duration of any operations that affect them, locking individual message +files. +\end{methoddesc} + +\begin{methoddesc}{get_file}{key} +Depending upon the host platform, it may not be possible to remove the +underlying message while the returned file remains open. +\end{methoddesc} + +\begin{methoddesc}{flush}{} +All changes to MH mailboxes are immediately applied, so this method does +nothing. +\end{methoddesc} + +\begin{methoddesc}{close}{} +\class{MH} instances do not keep any open files, so this method is equivalent +to \method{unlock()}.
+\end{methoddesc} + +\begin{seealso} +\seelink{http://www.nongnu.org/nmh/}{nmh - Message Handling System}{Home page +of \program{nmh}, an updated version of the original \program{mh}.} +\seelink{http://www.ics.uci.edu/\tilde{}mh/book/}{MH \& nmh: Email for Users \& +Programmers}{A GPL-licensed book on \program{mh} and \program{nmh}, with some +information on the mailbox format.} +\end{seealso} + +\subsubsection{\class{Babyl}} +\label{mailbox-babyl} + +\begin{classdesc}{Babyl}{path\optional{, factory=None\optional{, create=True}}} +A subclass of \class{Mailbox} for mailboxes in Babyl format. Parameter +\var{factory} is a callable object that accepts a file-like message +representation (which behaves as if opened in binary mode) and returns a custom +representation. If \var{factory} is \code{None}, \class{BabylMessage} is used +as the default message representation. If \var{create} is \code{True}, the +mailbox is created if it does not exist. +\end{classdesc} + +Babyl is a single-file mailbox format used by the Rmail mail user agent +included with Emacs. The beginning of a message is indicated by a line +containing the two characters Control-Underscore +(\character{\textbackslash037}) and Control-L (\character{\textbackslash014}). +The end of a message is indicated by the start of the next message or, in the +case of the last message, a line containing a Control-Underscore +(\character{\textbackslash037}) character. + +Messages in a Babyl mailbox have two sets of headers, original headers and +so-called visible headers. Visible headers are typically a subset of the +original headers that have been reformatted or abridged to be more attractive. +Each message in a Babyl mailbox also has an accompanying list of \dfn{labels}, +or short strings that record extra information about the message, and a list of +all user-defined labels found in the mailbox is kept in the Babyl options +section. + +\class{Babyl} instances have all of the methods of \class{Mailbox} in addition +to the following: + +\begin{methoddesc}{get_labels}{} +Return a list of the names of all user-defined labels used in the mailbox. +\note{The actual messages are inspected to determine which labels exist in the +mailbox rather than consulting the list of labels in the Babyl options section, +but the Babyl section is updated whenever the mailbox is modified.} +\end{methoddesc} + +Some \class{Mailbox} methods implemented by \class{Babyl} deserve special +remarks: + +\begin{methoddesc}{get_file}{key} +In Babyl mailboxes, the headers of a message are not stored contiguously with +the body of the message. To generate a file-like representation, the headers +and body are copied together into a \class{StringIO} instance (from the +\module{StringIO} module), which has an API identical to that of a file. As a +result, the file-like object is truly independent of the underlying mailbox but +does not save memory compared to a string representation. +\end{methoddesc} + +\begin{methoddesc}{lock}{} +\methodline{unlock}{} +Three locking mechanisms are used---dot locking and, if available, the +\cfunction{flock()} and \cfunction{lockf()} system calls. 
+\end{methoddesc} + +\begin{seealso} +\seelink{http://quimby.gnus.org/notes/BABYL}{Format of Version 5 Babyl Files}{A +specification of the Babyl format.} +\seelink{http://www.gnu.org/software/emacs/manual/html_node/Rmail.html}{Reading +Mail with Rmail}{The Rmail manual, with some information on Babyl semantics.} +\end{seealso} + +\subsubsection{\class{MMDF}} +\label{mailbox-mmdf} + +\begin{classdesc}{MMDF}{path\optional{, factory=None\optional{, create=True}}} +A subclass of \class{Mailbox} for mailboxes in MMDF format. Parameter +\var{factory} is a callable object that accepts a file-like message +representation (which behaves as if opened in binary mode) and returns a custom +representation. If \var{factory} is \code{None}, \class{MMDFMessage} is used as +the default message representation. If \var{create} is \code{True}, the mailbox +is created if it does not exist. +\end{classdesc} + +MMDF is a single-file mailbox format invented for the Multichannel Memorandum +Distribution Facility, a mail transfer agent. Each message is in the same form +as an mbox message but is bracketed before and after by lines containing four +Control-A (\character{\textbackslash001}) characters. As with the mbox format, +the beginning of each message is indicated by a line whose first five +characters are "From~", but additional occurrences of "From~" are not +transformed to ">From~" when storing messages because the extra message +separator lines prevent mistaking such occurrences for the starts of subsequent +messages. + +Some \class{Mailbox} methods implemented by \class{MMDF} deserve special +remarks: + +\begin{methoddesc}{get_file}{key} +Using the file after calling \method{flush()} or \method{close()} on the +\class{MMDF} instance may yield unpredictable results or raise an exception. +\end{methoddesc} + +\begin{methoddesc}{lock}{} +\methodline{unlock}{} +Three locking mechanisms are used---dot locking and, if available, the +\cfunction{flock()} and \cfunction{lockf()} system calls. +\end{methoddesc} + +\begin{seealso} +\seelink{http://www.tin.org/bin/man.cgi?section=5\&topic=mmdf}{mmdf man page +from tin}{A specification of MMDF format from the documentation of tin, a +newsreader.} +\seelink{http://en.wikipedia.org/wiki/MMDF}{MMDF}{A Wikipedia article +describing the Multichannel Memorandum Distribution Facility.} +\end{seealso} + +\subsection{\class{Message} objects} +\label{mailbox-message-objects} + +\begin{classdesc}{Message}{\optional{message}} +A subclass of the \module{email.Message} module's \class{Message}. Subclasses +of \class{mailbox.Message} add mailbox-format-specific state and behavior. + +If \var{message} is omitted, the new instance is created in a default, empty +state. If \var{message} is an \class{email.Message.Message} instance, its +contents are copied; furthermore, any format-specific information is converted +insofar as possible if \var{message} is a \class{Message} instance. If +\var{message} is a string or a file, it should contain an \rfc{2822}-compliant +message, which is read and parsed. +\end{classdesc} + +The format-specific state and behaviors offered by subclasses vary, but in +general it is only the properties that are not specific to a particular mailbox +that are supported (although presumably the properties are specific to a +particular mailbox format). For example, file offsets for single-file mailbox +formats and file names for directory-based mailbox formats are not retained, +because they are only applicable to the original mailbox. 
But state such as +whether a message has been read by the user or marked as important is retained, +because it applies to the message itself. + +There is no requirement that \class{Message} instances be used to represent +messages retrieved using \class{Mailbox} instances. In some situations, the +time and memory required to generate \class{Message} representations might not +be acceptable. For such situations, \class{Mailbox} instances also offer +string and file-like representations, and a custom message factory may be +specified when a \class{Mailbox} instance is initialized. + +\subsubsection{\class{MaildirMessage}} +\label{mailbox-maildirmessage} + +\begin{classdesc}{MaildirMessage}{\optional{message}} +A message with Maildir-specific behaviors. Parameter \var{message} +has the same meaning as with the \class{Message} constructor. +\end{classdesc} + +Typically, a mail user agent application moves all of the messages in the +\file{new} subdirectory to the \file{cur} subdirectory after the first time the +user opens and closes the mailbox, recording that the messages are old whether +or not they've actually been read. Each message in \file{cur} has an "info" +section added to its file name to store information about its state. (Some mail +readers may also add an "info" section to messages in \file{new}.) The "info" +section may take one of two forms: it may contain "2," followed by a list of +standardized flags (e.g., "2,FR") or it may contain "1," followed by so-called +experimental information. Standard flags for Maildir messages are as follows: + +\begin{tableiii}{l|l|l}{textrm}{Flag}{Meaning}{Explanation} +\lineiii{D}{Draft}{Under composition} +\lineiii{F}{Flagged}{Marked as important} +\lineiii{P}{Passed}{Forwarded, resent, or bounced} +\lineiii{R}{Replied}{Replied to} +\lineiii{S}{Seen}{Read} +\lineiii{T}{Trashed}{Marked for subsequent deletion} +\end{tableiii} + +\class{MaildirMessage} instances offer the following methods: + +\begin{methoddesc}{get_subdir}{} +Return either "new" (if the message should be stored in the \file{new} +subdirectory) or "cur" (if the message should be stored in the \file{cur} +subdirectory). \note{A message is typically moved from \file{new} to \file{cur} +after its mailbox has been accessed, whether or not the message has been +read. A message \code{msg} has been read if \code{"S" in msg.get_flags()} +is \code{True}.} +\end{methoddesc} + +\begin{methoddesc}{set_subdir}{subdir} +Set the subdirectory the message should be stored in. Parameter \var{subdir} +must be either "new" or "cur". +\end{methoddesc} + +\begin{methoddesc}{get_flags}{} +Return a string specifying the flags that are currently set. If the message +complies with the standard Maildir format, the result is the concatenation in +alphabetical order of zero or one occurrence of each of \character{D}, +\character{F}, \character{P}, \character{R}, \character{S}, and \character{T}. +The empty string is returned if no flags are set or if "info" contains +experimental semantics. +\end{methoddesc} + +\begin{methoddesc}{set_flags}{flags} +Set the flags specified by \var{flags} and unset all others. +\end{methoddesc} + +\begin{methoddesc}{add_flag}{flag} +Set the flag(s) specified by \var{flag} without changing other flags. To add +more than one flag at a time, \var{flag} may be a string of more than one +character. The current "info" is overwritten whether or not it contains +experimental information rather than +flags.
+\end{methoddesc} + +\begin{methoddesc}{remove_flag}{flag} +Unset the flag(s) specified by \var{flag} without changing other flags. To +remove more than one flag at a time, \var{flag} maybe a string of more than one +character. If "info" contains experimental information rather than flags, the +current "info" is not modified. +\end{methoddesc} + +\begin{methoddesc}{get_date}{} +Return the delivery date of the message as a floating-point number representing +seconds since the epoch. +\end{methoddesc} + +\begin{methoddesc}{set_date}{date} +Set the delivery date of the message to \var{date}, a floating-point number +representing seconds since the epoch. +\end{methoddesc} + +\begin{methoddesc}{get_info}{} +Return a string containing the "info" for a message. This is useful for +accessing and modifying "info" that is experimental (i.e., not a list of +flags). +\end{methoddesc} + +\begin{methoddesc}{set_info}{info} +Set "info" to \var{info}, which should be a string. +\end{methoddesc} + +When a \class{MaildirMessage} instance is created based upon an +\class{mboxMessage} or \class{MMDFMessage} instance, the \mailheader{Status} +and \mailheader{X-Status} headers are omitted and the following conversions +take place: + +\begin{tableii}{l|l}{textrm} + {Resulting state}{\class{mboxMessage} or \class{MMDFMessage} state} +\lineii{"cur" subdirectory}{O flag} +\lineii{F flag}{F flag} +\lineii{R flag}{A flag} +\lineii{S flag}{R flag} +\lineii{T flag}{D flag} +\end{tableii} + +When a \class{MaildirMessage} instance is created based upon an +\class{MHMessage} instance, the following conversions take place: + +\begin{tableii}{l|l}{textrm} + {Resulting state}{\class{MHMessage} state} +\lineii{"cur" subdirectory}{"unseen" sequence} +\lineii{"cur" subdirectory and S flag}{no "unseen" sequence} +\lineii{F flag}{"flagged" sequence} +\lineii{R flag}{"replied" sequence} +\end{tableii} + +When a \class{MaildirMessage} instance is created based upon a +\class{BabylMessage} instance, the following conversions take place: + +\begin{tableii}{l|l}{textrm} + {Resulting state}{\class{BabylMessage} state} +\lineii{"cur" subdirectory}{"unseen" label} +\lineii{"cur" subdirectory and S flag}{no "unseen" label} +\lineii{P flag}{"forwarded" or "resent" label} +\lineii{R flag}{"answered" label} +\lineii{T flag}{"deleted" label} +\end{tableii} + +\subsubsection{\class{mboxMessage}} +\label{mailbox-mboxmessage} + +\begin{classdesc}{mboxMessage}{\optional{message}} +A message with mbox-specific behaviors. Parameter \var{message} has the same +meaning as with the \class{Message} constructor. +\end{classdesc} + +Messages in an mbox mailbox are stored together in a single file. The sender's +envelope address and the time of delivery are typically stored in a line +beginning with "From~" that is used to indicate the start of a message, though +there is considerable variation in the exact format of this data among mbox +implementations. Flags that indicate the state of the message, such as whether +it has been read or marked as important, are typically stored in +\mailheader{Status} and \mailheader{X-Status} headers. 
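As a small illustration (the message text here is invented for the example), the envelope "From~" line is available through the accessor methods described below:
\begin{verbatim}
import mailbox, time

msg = mailbox.mboxMessage('From: author@example.com\n'
                          'Subject: sample\n'
                          '\n'
                          'Body text.\n')
msg.set_from('author@example.com', time.gmtime())  # build the "From " line
print msg.get_from()   # envelope address followed by the delivery time
\end{verbatim}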
+ +Conventional flags for mbox messages are as follows: + +\begin{tableiii}{l|l|l}{textrm}{Flag}{Meaning}{Explanation} +\lineiii{R}{Read}{Read} +\lineiii{O}{Old}{Previously detected by MUA} +\lineiii{D}{Deleted}{Marked for subsequent deletion} +\lineiii{F}{Flagged}{Marked as important} +\lineiii{A}{Answered}{Replied to} +\end{tableiii} + +The "R" and "O" flags are stored in the \mailheader{Status} header, and the +"D", "F", and "A" flags are stored in the \mailheader{X-Status} header. The +flags and headers typically appear in the order mentioned. + +\class{mboxMessage} instances offer the following methods: + +\begin{methoddesc}{get_from}{} +Return a string representing the "From~" line that marks the start of the +message in an mbox mailbox. The leading "From~" and the trailing newline are +excluded. +\end{methoddesc} + +\begin{methoddesc}{set_from}{from_\optional{, time_=None}} +Set the "From~" line to \var{from_}, which should be specified without a +leading "From~" or trailing newline. For convenience, \var{time_} may be +specified and will be formatted appropriately and appended to \var{from_}. If +\var{time_} is specified, it should be a \class{struct_time} instance, a tuple +suitable for passing to \method{time.strftime()}, or \code{True} (to use +\method{time.gmtime()}). +\end{methoddesc} + +\begin{methoddesc}{get_flags}{} +Return a string specifying the flags that are currently set. If the message +complies with the conventional format, the result is the concatenation in the +following order of zero or one occurrence of each of \character{R}, +\character{O}, \character{D}, \character{F}, and \character{A}. +\end{methoddesc} + +\begin{methoddesc}{set_flags}{flags} +Set the flags specified by \var{flags} and unset all others. Parameter +\var{flags} should be the concatenation in any order of zero or more +occurrences of each of \character{R}, \character{O}, \character{D}, +\character{F}, and \character{A}. +\end{methoddesc} + +\begin{methoddesc}{add_flag}{flag} +Set the flag(s) specified by \var{flag} without changing other flags. To add +more than one flag at a time, \var{flag} may be a string of more than one +character. +\end{methoddesc} + +\begin{methoddesc}{remove_flag}{flag} +Unset the flag(s) specified by \var{flag} without changing other flags. To +remove more than one flag at a time, \var{flag} maybe a string of more than one +character. 
+\end{methoddesc} + +When an \class{mboxMessage} instance is created based upon a +\class{MaildirMessage} instance, a "From~" line is generated based upon the +\class{MaildirMessage} instance's delivery date, and the following conversions +take place: + +\begin{tableii}{l|l}{textrm} + {Resulting state}{\class{MaildirMessage} state} +\lineii{R flag}{S flag} +\lineii{O flag}{"cur" subdirectory} +\lineii{D flag}{T flag} +\lineii{F flag}{F flag} +\lineii{A flag}{R flag} +\end{tableii} + +When an \class{mboxMessage} instance is created based upon an \class{MHMessage} +instance, the following conversions take place: + +\begin{tableii}{l|l}{textrm} + {Resulting state}{\class{MHMessage} state} +\lineii{R flag and O flag}{no "unseen" sequence} +\lineii{O flag}{"unseen" sequence} +\lineii{F flag}{"flagged" sequence} +\lineii{A flag}{"replied" sequence} +\end{tableii} + +When an \class{mboxMessage} instance is created based upon a +\class{BabylMessage} instance, the following conversions take place: + +\begin{tableii}{l|l}{textrm} + {Resulting state}{\class{BabylMessage} state} +\lineii{R flag and O flag}{no "unseen" label} +\lineii{O flag}{"unseen" label} +\lineii{D flag}{"deleted" label} +\lineii{A flag}{"answered" label} +\end{tableii} + +When an \class{mboxMessage} instance is created based upon an \class{MMDFMessage} +instance, the "From~" line is copied and all flags directly correspond: + +\begin{tableii}{l|l}{textrm} + {Resulting state}{\class{MMDFMessage} state} +\lineii{R flag}{R flag} +\lineii{O flag}{O flag} +\lineii{D flag}{D flag} +\lineii{F flag}{F flag} +\lineii{A flag}{A flag} +\end{tableii} + +\subsubsection{\class{MHMessage}} +\label{mailbox-mhmessage} + +\begin{classdesc}{MHMessage}{\optional{message}} +A message with MH-specific behaviors. Parameter \var{message} has the same +meaning as with the \class{Message} constructor. +\end{classdesc} + +MH messages do not support marks or flags in the traditional sense, but they do +support sequences, which are logical groupings of arbitrary messages. Some mail +reading programs (although not the standard \program{mh} and \program{nmh}) use +sequences in much the same way flags are used with other formats, as follows: + +\begin{tableii}{l|l}{textrm}{Sequence}{Explanation} +\lineii{unseen}{Not read, but previously detected by MUA} +\lineii{replied}{Replied to} +\lineii{flagged}{Marked as important} +\end{tableii} + +\class{MHMessage} instances offer the following methods: + +\begin{methoddesc}{get_sequences}{} +Return a list of the names of sequences that include this message. +\end{methoddesc} + +\begin{methoddesc}{set_sequences}{sequences} +Set the list of sequences that include this message. +\end{methoddesc} + +\begin{methoddesc}{add_sequence}{sequence} +Add \var{sequence} to the list of sequences that include this message. +\end{methoddesc} + +\begin{methoddesc}{remove_sequence}{sequence} +Remove \var{sequence} from the list of sequences that include this message.
+\end{methoddesc} + +When an \class{MHMessage} instance is created based upon a +\class{MaildirMessage} instance, the following conversions take place: + +\begin{tableii}{l|l}{textrm} + {Resulting state}{\class{MaildirMessage} state} +\lineii{"unseen" sequence}{no S flag} +\lineii{"replied" sequence}{R flag} +\lineii{"flagged" sequence}{F flag} +\end{tableii} + +When an \class{MHMessage} instance is created based upon an \class{mboxMessage} +or \class{MMDFMessage} instance, the \mailheader{Status} and +\mailheader{X-Status} headers are omitted and the following conversions take +place: + +\begin{tableii}{l|l}{textrm} + {Resulting state}{\class{mboxMessage} or \class{MMDFMessage} state} +\lineii{"unseen" sequence}{no R flag} +\lineii{"replied" sequence}{A flag} +\lineii{"flagged" sequence}{F flag} +\end{tableii} + +When an \class{MHMessage} instance is created based upon a \class{BabylMessage} +instance, the following conversions take place: + +\begin{tableii}{l|l}{textrm} + {Resulting state}{\class{BabylMessage} state} +\lineii{"unseen" sequence}{"unseen" label} +\lineii{"replied" sequence}{"answered" label} +\end{tableii} + +\subsubsection{\class{BabylMessage}} +\label{mailbox-babylmessage} + +\begin{classdesc}{BabylMessage}{\optional{message}} +A message with Babyl-specific behaviors. Parameter \var{message} has the same +meaning as with the \class{Message} constructor. +\end{classdesc} + +Certain message labels, called \dfn{attributes}, are defined by convention to +have special meanings. The attributes are as follows: + +\begin{tableii}{l|l}{textrm}{Label}{Explanation} +\lineii{unseen}{Not read, but previously detected by MUA} +\lineii{deleted}{Marked for subsequent deletion} +\lineii{filed}{Copied to another file or mailbox} +\lineii{answered}{Replied to} +\lineii{forwarded}{Forwarded} +\lineii{edited}{Modified by the user} +\lineii{resent}{Resent} +\end{tableii} + +By default, Rmail displays only +visible headers. The \class{BabylMessage} class, though, uses the original +headers because they are more complete. Visible headers may be accessed +explicitly if desired. + +\class{BabylMessage} instances offer the following methods: + +\begin{methoddesc}{get_labels}{} +Return a list of labels on the message. +\end{methoddesc} + +\begin{methoddesc}{set_labels}{labels} +Set the list of labels on the message to \var{labels}. +\end{methoddesc} + +\begin{methoddesc}{add_label}{label} +Add \var{label} to the list of labels on the message. +\end{methoddesc} + +\begin{methoddesc}{remove_label}{label} +Remove \var{label} from the list of labels on the message. +\end{methoddesc} + +\begin{methoddesc}{get_visible}{} +Return a \class{Message} instance whose headers are the message's visible +headers and whose body is empty. +\end{methoddesc} + +\begin{methoddesc}{set_visible}{visible} +Set the message's visible headers to be the same as the headers in +\var{visible}. Parameter \var{visible} should be a \class{Message} instance, an +\class{email.Message.Message} instance, a string, or a file-like object (which +should be open in text mode). +\end{methoddesc} + +\begin{methoddesc}{update_visible}{} +When a \class{BabylMessage} instance's original headers are modified, the +visible headers are not automatically modified to correspond.
+updates the visible headers as follows: each visible header with a
+corresponding original header is set to the value of the original header, each
+visible header without a corresponding original header is removed, and any of
+\mailheader{Date}, \mailheader{From}, \mailheader{Reply-To}, \mailheader{To},
+\mailheader{CC}, and \mailheader{Subject} that are present in the original
+headers but not the visible headers are added to the visible headers.
+\end{methoddesc}
+
+When a \class{BabylMessage} instance is created based upon a
+\class{MaildirMessage} instance, the following conversions take place:
+
+\begin{tableii}{l|l}{textrm}
+    {Resulting state}{\class{MaildirMessage} state}
+\lineii{"unseen" label}{no S flag}
+\lineii{"deleted" label}{T flag}
+\lineii{"answered" label}{R flag}
+\lineii{"forwarded" label}{P flag}
+\end{tableii}
+
+When a \class{BabylMessage} instance is created based upon an
+\class{mboxMessage} or \class{MMDFMessage} instance, the \mailheader{Status}
+and \mailheader{X-Status} headers are omitted and the following conversions
+take place:
+
+\begin{tableii}{l|l}{textrm}
+    {Resulting state}{\class{mboxMessage} or \class{MMDFMessage} state}
+\lineii{"unseen" label}{no R flag}
+\lineii{"deleted" label}{D flag}
+\lineii{"answered" label}{A flag}
+\end{tableii}
+
+When a \class{BabylMessage} instance is created based upon an \class{MHMessage}
+instance, the following conversions take place:
+
+\begin{tableii}{l|l}{textrm}
+    {Resulting state}{\class{MHMessage} state}
+\lineii{"unseen" label}{"unseen" sequence}
+\lineii{"answered" label}{"replied" sequence}
+\end{tableii}
+
+\subsubsection{\class{MMDFMessage}}
+\label{mailbox-mmdfmessage}
+
+\begin{classdesc}{MMDFMessage}{\optional{message}}
+A message with MMDF-specific behaviors. Parameter \var{message} has the same
+meaning as with the \class{Message} constructor.
+\end{classdesc}
+
+As with messages in an mbox mailbox, MMDF messages are stored with the sender's
+address and the delivery date in an initial line beginning with "From ".
+Likewise, flags that indicate the state of the message are typically stored in
+\mailheader{Status} and \mailheader{X-Status} headers.
+
+Conventional flags for MMDF messages are identical to those of mbox messages and
+are as follows:
+
+\begin{tableiii}{l|l|l}{textrm}{Flag}{Meaning}{Explanation}
+\lineiii{R}{Read}{Read}
+\lineiii{O}{Old}{Previously detected by MUA}
+\lineiii{D}{Deleted}{Marked for subsequent deletion}
+\lineiii{F}{Flagged}{Marked as important}
+\lineiii{A}{Answered}{Replied to}
+\end{tableiii}
+
+The "R" and "O" flags are stored in the \mailheader{Status} header, and the
+"D", "F", and "A" flags are stored in the \mailheader{X-Status} header. The
+flags and headers typically appear in the order mentioned.
+
+\class{MMDFMessage} instances offer the following methods, which are identical
+to those offered by \class{mboxMessage}:
+
+\begin{methoddesc}{get_from}{}
+Return a string representing the "From~" line that marks the start of the
+message in an MMDF mailbox. The leading "From~" and the trailing newline are
+excluded.
+\end{methoddesc}
+
+\begin{methoddesc}{set_from}{from_\optional{, time_=None}}
+Set the "From~" line to \var{from_}, which should be specified without a
+leading "From~" or trailing newline. For convenience, \var{time_} may be
+specified and will be formatted appropriately and appended to \var{from_}. If
+\var{time_} is specified, it should be a \class{struct_time} instance, a tuple
+suitable for passing to \method{time.strftime()}, or \code{True} (to use
+\method{time.gmtime()}).
+\end{methoddesc}
+
+\begin{methoddesc}{get_flags}{}
+Return a string specifying the flags that are currently set. If the message
+complies with the conventional format, the result is the concatenation in the
+following order of zero or one occurrence of each of \character{R},
+\character{O}, \character{D}, \character{F}, and \character{A}.
+\end{methoddesc}
+
+\begin{methoddesc}{set_flags}{flags}
+Set the flags specified by \var{flags} and unset all others. Parameter
+\var{flags} should be the concatenation in any order of zero or more
+occurrences of each of \character{R}, \character{O}, \character{D},
+\character{F}, and \character{A}.
+\end{methoddesc}
+
+\begin{methoddesc}{add_flag}{flag}
+Set the flag(s) specified by \var{flag} without changing other flags. To add
+more than one flag at a time, \var{flag} may be a string of more than one
+character.
+\end{methoddesc}
+
+\begin{methoddesc}{remove_flag}{flag}
+Unset the flag(s) specified by \var{flag} without changing other flags. To
+remove more than one flag at a time, \var{flag} may be a string of more than one
+character.
+\end{methoddesc}
+
+When an \class{MMDFMessage} instance is created based upon a
+\class{MaildirMessage} instance, a "From~" line is generated based upon the
+\class{MaildirMessage} instance's delivery date, and the following conversions
+take place:
+
+\begin{tableii}{l|l}{textrm}
+    {Resulting state}{\class{MaildirMessage} state}
+\lineii{R flag}{S flag}
+\lineii{O flag}{"cur" subdirectory}
+\lineii{D flag}{T flag}
+\lineii{F flag}{F flag}
+\lineii{A flag}{R flag}
+\end{tableii}
+
+When an \class{MMDFMessage} instance is created based upon an \class{MHMessage}
+instance, the following conversions take place:
+
+\begin{tableii}{l|l}{textrm}
+    {Resulting state}{\class{MHMessage} state}
+\lineii{R flag and O flag}{no "unseen" sequence}
+\lineii{O flag}{"unseen" sequence}
+\lineii{F flag}{"flagged" sequence}
+\lineii{A flag}{"replied" sequence}
+\end{tableii}
+
+When an \class{MMDFMessage} instance is created based upon a
+\class{BabylMessage} instance, the following conversions take place:
+
+\begin{tableii}{l|l}{textrm}
+    {Resulting state}{\class{BabylMessage} state}
+\lineii{R flag and O flag}{no "unseen" label}
+\lineii{O flag}{"unseen" label}
+\lineii{D flag}{"deleted" label}
+\lineii{A flag}{"answered" label}
+\end{tableii}
+
+When an \class{MMDFMessage} instance is created based upon an
+\class{mboxMessage} instance, the "From~" line is copied and all flags directly
+correspond:
+
+\begin{tableii}{l|l}{textrm}
+    {Resulting state}{\class{mboxMessage} state}
+\lineii{R flag}{R flag}
+\lineii{O flag}{O flag}
+\lineii{D flag}{D flag}
+\lineii{F flag}{F flag}
+\lineii{A flag}{A flag}
+\end{tableii}
+
+\subsection{Exceptions}
+\label{mailbox-exceptions}
+
+The following exception classes are defined in the \module{mailbox} module:
+
+\begin{classdesc}{Error}{}
+The base class for all other module-specific exceptions.
+\end{classdesc}
+
+\begin{classdesc}{NoSuchMailboxError}{}
+Raised when a mailbox is expected but is not found, such as when instantiating
+a \class{Mailbox} subclass with a path that does not exist (and with the
+\var{create} parameter set to \code{False}), or when opening a folder that does
+not exist.
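+
+For example, a minimal sketch of handling this exception when opening a
+mailbox that may not exist (the path used here is purely illustrative):
+
+\begin{verbatim}
+import mailbox
+try:
+    # '~/Mail/inbox' is an illustrative path; create=False forbids creation.
+    inbox = mailbox.MH('~/Mail/inbox', create=False)
+except mailbox.NoSuchMailboxError:
+    print 'No such mailbox -- nothing to do.'
+\end{verbatim}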
+\end{classdesc}
+
+\begin{classdesc}{NotEmptyError}{}
+Raised when a mailbox is not empty but is expected to be, such as when deleting
+a folder that contains messages.
+\end{classdesc}
+
+\begin{classdesc}{ExternalClashError}{}
+Raised when some mailbox-related condition beyond the control of the program
+causes it to be unable to proceed, such as when failing to acquire a lock that
+another program already holds, or when a uniquely generated file name
+already exists.
+\end{classdesc}
+
+\begin{classdesc}{FormatError}{}
+Raised when the data in a file cannot be parsed, such as when an \class{MH}
+instance attempts to read a corrupted \file{.mh_sequences} file.
+\end{classdesc}
+
+\subsection{Deprecated classes and methods}
+\label{mailbox-deprecated}
+
+Older versions of the \module{mailbox} module do not support modification of
+mailboxes, such as adding or removing messages, and do not provide classes to
+represent format-specific message properties. For backward compatibility, the
+older mailbox classes are still available, but the newer classes should be used
+in preference to them.
+
+Older mailbox objects support only iteration and provide a single public
+method:
+
+\begin{methoddesc}{next}{}
+Return the next message in the mailbox, created with the optional \var{factory}
+argument passed into the mailbox object's constructor. By default this is an
+\class{rfc822.Message} object (see the \refmodule{rfc822} module). Depending
+on the mailbox implementation the \var{fp} attribute of this object may be a
+true file object or a class instance simulating a file object, taking care of
+things like message boundaries if multiple mail messages are contained in a
+single file, etc. If no more messages are available, this method returns
+\code{None}.
+\end{methoddesc}
+
+Most of the older mailbox classes have names that differ from the current
+mailbox class names, except for \class{Maildir}. For this reason, the new
+\class{Maildir} class defines a \method{next()} method, and its constructor
+differs slightly from those of the other new mailbox classes.
+
+The older mailbox classes whose names are not the same as their newer
+counterparts are as follows:
 \begin{classdesc}{UnixMailbox}{fp\optional{, factory}} Access to a classic \UNIX-style mailbox, where all messages are
@@ -68,12 +1309,6 @@ The name of the mailbox directory is passed in \var{dirname}. \var{factory} is as with the \class{UnixMailbox} class. \end{classdesc}
-\begin{classdesc}{Maildir}{dirname\optional{, factory}}
-Access a Qmail mail directory. All new and current mail for the
-mailbox specified by \var{dirname} is made available.
-\var{factory} is as with the \class{UnixMailbox} class.
-\end{classdesc}
-
 \begin{classdesc}{BabylMailbox}{fp\optional{, factory}} Access a Babyl mailbox, which is similar to an MMDF mailbox. In Babyl format, each message has two sets of headers, the
@@ -89,11 +1324,8 @@ messages start with the EOOH line and end with a line containing only \class{UnixMailbox} class. \end{classdesc}
-Note that because the \refmodule{rfc822} module is deprecated, it is
-recommended that you use the \refmodule{email} package to create
-message objects from a mailbox. (The default can't be changed for
-backwards compatibility reasons.)
The safest way to do this is with -bit of code: +If you wish to use the older mailbox classes with the \module{email} module +rather than the deprecated \module{rfc822} module, you can do so as follows: \begin{verbatim} import email @@ -105,17 +1337,14 @@ def msgfactory(fp): return email.message_from_file(fp) except email.Errors.MessageParseError: # Don't return None since that will - # stop the mailbox iterator - return '' + # stop the mailbox iterator + return '' mbox = mailbox.UnixMailbox(fp, msgfactory) \end{verbatim} -The above wrapper is defensive against ill-formed MIME messages in the -mailbox, but you have to be prepared to receive the empty string from -the mailbox's \function{next()} method. On the other hand, if you -know your mailbox contains only well-formed MIME messages, you can -simplify this to: +Alternatively, if you know your mailbox contains only well-formed MIME +messages, you can simplify this to: \begin{verbatim} import email @@ -124,35 +1353,57 @@ import mailbox mbox = mailbox.UnixMailbox(fp, email.message_from_file) \end{verbatim} -\begin{seealso} - \seetitle[http://www.qmail.org/man/man5/mbox.html]{mbox - - file containing mail messages}{Description of the - traditional ``mbox'' mailbox format.} - \seetitle[http://www.qmail.org/man/man5/maildir.html]{maildir - - directory for incoming mail messages}{Description of the - ``maildir'' mailbox format.} - \seetitle[http://home.netscape.com/eng/mozilla/2.0/relnotes/demo/content-length.html]{Configuring - Netscape Mail on \UNIX: Why the Content-Length Format is - Bad}{A description of problems with relying on the - \mailheader{Content-Length} header for messages stored in - mailbox files.} -\end{seealso} +\subsection{Examples} +\label{mailbox-examples} +A simple example of printing the subjects of all messages in a mailbox that +seem interesting: -\subsection{Mailbox Objects \label{mailbox-objects}} +\begin{verbatim} +import mailbox +for message in mailbox.mbox('~/mbox'): + subject = message['subject'] # Could possibly be None. + if subject and 'python' in subject.lower(): + print subject +\end{verbatim} -All implementations of mailbox objects are iterable objects, and -have one externally visible method. This method is used by iterators -created from mailbox objects and may also be used directly. +A (surprisingly) simple example of copying all mail from a Babyl mailbox to an +MH mailbox, converting all of the format-specific information that can be +converted: -\begin{methoddesc}[mailbox]{next}{} -Return the next message in the mailbox, created with the optional -\var{factory} argument passed into the mailbox object's constructor. -By default this is an \class{rfc822.Message} -object (see the \refmodule{rfc822} module). Depending on the mailbox -implementation the \var{fp} attribute of this object may be a true -file object or a class instance simulating a file object, taking care -of things like message boundaries if multiple mail messages are -contained in a single file, etc. If no more messages are available, -this method returns \code{None}. 
-\end{methoddesc} +\begin{verbatim} +import mailbox +destination = mailbox.MH('~/Mail') +for message in mailbox.Babyl('~/RMAIL'): + destination.add(MHMessage(message)) +\end{verbatim} + +An example of sorting mail from numerous mailing lists, being careful to avoid +mail corruption due to concurrent modification by other programs, mail loss due +to interruption of the program, or premature termination due to malformed +messages in the mailbox: + +\begin{verbatim} +import mailbox +import email.Errors +list_names = ('python-list', 'python-dev', 'python-bugs') +boxes = dict((name, mailbox.mbox('~/email/%s' % name)) for name in list_names) +inbox = mailbox.Maildir('~/Maildir', None) +for key in inbox.iterkeys(): + try: + message = inbox[key] + except email.Errors.MessageParseError: + continue # The message is malformed. Just leave it. + for name in list_names: + list_id = message['list-id'] + if list_id and name in list_id: + box = boxes[name] + box.lock() + box.add(message) + box.flush() # Write copy to disk before removing original. + box.unlock() + inbox.discard(key) + break # Found destination, so stop looking. +for box in boxes.itervalues(): + box.close() +\end{verbatim} diff --git a/Doc/lib/libmsilib.tex b/Doc/lib/libmsilib.tex new file mode 100644 index 0000000..1e044f4 --- /dev/null +++ b/Doc/lib/libmsilib.tex @@ -0,0 +1,485 @@ +\section{\module{msilib} --- + Read and write Microsoft Installer files} + +\declaremodule{standard}{msilib} + \platform{Windows} +\modulesynopsis{Creation of Microsoft Installer files, and CAB files.} +\moduleauthor{Martin v. L\"owis}{martin@v.loewis.de} +\sectionauthor{Martin v. L\"owis}{martin@v.loewis.de} + +\index{msi} + +\versionadded{2.5} + +The \module{msilib} supports the creation of Microsoft Installer +(\code{.msi}) files. Because these files often contain an embedded +``cabinet'' file (\code{.cab}), it also exposes an API to create +CAB files. Support for reading \code{.cab} files is currently not +implemented; read support for the \code{.msi} database is possible. + +This package aims to provide complete access to all tables in an +\code{.msi} file, therefore, it is a fairly low-level API. Two +primary applications of this package are the \module{distutils} +command \code{bdist_msi}, and the creation of Python installer +package itself (although that currently uses a different version +of \code{msilib}). + +The package contents can be roughly split into four parts: +low-level CAB routines, low-level MSI routines, higher-level +MSI routines, and standard table structures. + +\begin{funcdesc}{FCICreate}{cabname, files} + Create a new CAB file named \var{cabname}. \var{files} must + be a list of tuples, each containing the name of the file on + disk, and the name of the file inside the CAB file. + + The files are added to the CAB file in the order they appear + in the list. All files are added into a single CAB file, + using the MSZIP compression algorithm. + + Callbacks to Python for the various steps of MSI creation + are currently not exposed. +\end{funcdesc} + +\begin{funcdesc}{UUIDCreate}{} + Return the string representation of a new unique identifier. + This wraps the Windows API functions \cfunction{UuidCreate} and + \cfunction{UuidToString}. +\end{funcdesc} + +\begin{funcdesc}{OpenDatabase}{path, persist} + Return a new database object by calling MsiOpenDatabase. 
+ \var{path} is the file name of the + MSI file; \var{persist} can be one of the constants + \code{MSIDBOPEN_CREATEDIRECT}, \code{MSIDBOPEN_CREATE}, + \code{MSIDBOPEN_DIRECT}, \code{MSIDBOPEN_READONLY}, or + \code{MSIDBOPEN_TRANSACT}, and may include the flag + \code{MSIDBOPEN_PATCHFILE}. See the Microsoft documentation for + the meaning of these flags; depending on the flags, + an existing database is opened, or a new one created. +\end{funcdesc} + +\begin{funcdesc}{CreateRecord}{count} + Return a new record object by calling \cfunction{MSICreateRecord}. + \var{count} is the number of fields of the record. +\end{funcdesc} + +\begin{funcdesc}{init_database}{name, schema, ProductName, ProductCode, ProductVersion, Manufacturer} + Create and return a new database \var{name}, initialize it + with \var{schema}, and set the properties \var{ProductName}, + \var{ProductCode}, \var{ProductVersion}, and \var{Manufacturer}. + + \var{schema} must be a module object containing \code{tables} and + \code{_Validation_records} attributes; typically, + \module{msilib.schema} should be used. + + The database will contain just the schema and the validation + records when this function returns. +\end{funcdesc} + +\begin{funcdesc}{add_data}{database, records} + Add all \var{records} to \var{database}. \var{records} should + be a list of tuples, each one containing all fields of a record + according to the schema of the table. For optional fields, + \code{None} can be passed. + + Field values can be int or long numbers, strings, or instances + of the Binary class. +\end{funcdesc} + +\begin{classdesc}{Binary}{filename} + Represents entries in the Binary table; inserting such + an object using \function{add_data} reads the file named + \var{filename} into the table. +\end{classdesc} + +\begin{funcdesc}{add_tables}{database, module} + Add all table content from \var{module} to \var{database}. + \var{module} must contain an attribute \var{tables} + listing all tables for which content should be added, + and one attribute per table that has the actual content. + + This is typically used to install the sequence tables. +\end{funcdesc} + +\begin{funcdesc}{add_stream}{database, name, path} + Add the file \var{path} into the \code{_Stream} table + of \var{database}, with the stream name \var{name}. +\end{funcdesc} + +\begin{funcdesc}{gen_uuid}{} + Return a new UUID, in the format that MSI typically + requires (i.e. in curly braces, and with all hexdigits + in upper-case). +\end{funcdesc} + +\begin{seealso} + \seetitle[http://msdn.microsoft.com/library/default.asp?url=/library/en-us/devnotes/winprog/fcicreate.asp]{FCICreateFile}{} + \seetitle[http://msdn.microsoft.com/library/default.asp?url=/library/en-us/rpc/rpc/uuidcreate.asp]{UuidCreate}{} + \seetitle[http://msdn.microsoft.com/library/default.asp?url=/library/en-us/rpc/rpc/uuidtostring.asp]{UuidToString}{} +\end{seealso} + +\subsection{Database Objects\label{database-objects}} + +\begin{methoddesc}{OpenView}{sql} + Return a view object, by calling \cfunction{MSIDatabaseOpenView}. + \var{sql} is the SQL statement to execute. +\end{methoddesc} + +\begin{methoddesc}{Commit}{} + Commit the changes pending in the current transaction, + by calling \cfunction{MSIDatabaseCommit}. +\end{methoddesc} + +\begin{methoddesc}{GetSummaryInformation}{count} + Return a new summary information object, by calling + \cfunction{MsiGetSummaryInformation}. \var{count} is the maximum number of + updated values. 
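+
+  As a minimal sketch, assuming an existing installer file
+  \file{example.msi} (the file name is illustrative):
+
+\begin{verbatim}
+import msilib
+# 'example.msi' is an illustrative name for an existing MSI database.
+db = msilib.OpenDatabase('example.msi', msilib.MSIDBOPEN_READONLY)
+info = db.GetSummaryInformation(0)       # no properties will be updated
+print info.GetProperty(msilib.PID_AUTHOR)
+\end{verbatim}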
+\end{methoddesc} + +\begin{seealso} + \seetitle[http://msdn.microsoft.com/library/default.asp?url=/library/en-us/msi/setup/msiopenview.asp]{MSIOpenView}{} + \seetitle[http://msdn.microsoft.com/library/default.asp?url=/library/en-us/msi/setup/msidatabasecommit.asp]{MSIDatabaseCommit}{} + \seetitle[http://msdn.microsoft.com/library/default.asp?url=/library/en-us/msi/setup/msigetsummaryinformation.asp]{MSIGetSummaryInformation}{} +\end{seealso} + +\subsection{View Objects\label{view-objects}} + +\begin{methoddesc}{Execute}{\optional{params=None}} + Execute the SQL query of the view, through \cfunction{MSIViewExecute}. + \var{params} is an optional record describing actual values + of the parameter tokens in the query. +\end{methoddesc} + +\begin{methoddesc}{GetColumnInfo}{kind} + Return a record describing the columns of the view, through + calling \cfunction{MsiViewGetColumnInfo}. \var{kind} can be either + \code{MSICOLINFO_NAMES} or \code{MSICOLINFO_TYPES}. +\end{methoddesc} + +\begin{methoddesc}{Fetch}{} + Return a result record of the query, through calling + \cfunction{MsiViewFetch}. +\end{methoddesc} + +\begin{methoddesc}{Modify}{kind, data} + Modify the view, by calling \cfunction{MsiViewModify}. \var{kind} + can be one of \code{MSIMODIFY_SEEK}, \code{MSIMODIFY_REFRESH}, + \code{MSIMODIFY_INSERT}, \code{MSIMODIFY_UPDATE}, \code{MSIMODIFY_ASSIGN}, + \code{MSIMODIFY_REPLACE}, \code{MSIMODIFY_MERGE}, \code{MSIMODIFY_DELETE}, + \code{MSIMODIFY_INSERT_TEMPORARY}, \code{MSIMODIFY_VALIDATE}, + \code{MSIMODIFY_VALIDATE_NEW}, \code{MSIMODIFY_VALIDATE_FIELD}, or + \code{MSIMODIFY_VALIDATE_DELETE}. + + \var{data} must be a record describing the new data. +\end{methoddesc} + +\begin{methoddesc}{Close}{} + Close the view, through \cfunction{MsiViewClose}. +\end{methoddesc} + +\begin{seealso} + \seetitle[http://msdn.microsoft.com/library/default.asp?url=/library/en-us/msi/setup/msiviewexecute.asp]{MsiViewExecute}{} + \seetitle[http://msdn.microsoft.com/library/default.asp?url=/library/en-us/msi/setup/msiviewgetcolumninfo.asp]{MSIViewGetColumnInfo}{} + \seetitle[http://msdn.microsoft.com/library/default.asp?url=/library/en-us/msi/setup/msiviewfetch.asp]{MsiViewFetch}{} + \seetitle[http://msdn.microsoft.com/library/default.asp?url=/library/en-us/msi/setup/msiviewmodify.asp]{MsiViewModify}{} + \seetitle[http://msdn.microsoft.com/library/default.asp?url=/library/en-us/msi/setup/msiviewclose.asp]{MsiViewClose}{} +\end{seealso} + +\subsection{Summary Information Objects\label{summary-objects}} + +\begin{methoddesc}{GetProperty}{field} + Return a property of the summary, through \cfunction{MsiSummaryInfoGetProperty}. + \var{field} is the name of the property, and can be one of the + constants + \code{PID_CODEPAGE}, \code{PID_TITLE}, \code{PID_SUBJECT}, + \code{PID_AUTHOR}, \code{PID_KEYWORDS}, \code{PID_COMMENTS}, + \code{PID_TEMPLATE}, \code{PID_LASTAUTHOR}, \code{PID_REVNUMBER}, + \code{PID_LASTPRINTED}, \code{PID_CREATE_DTM}, \code{PID_LASTSAVE_DTM}, + \code{PID_PAGECOUNT}, \code{PID_WORDCOUNT}, \code{PID_CHARCOUNT}, + \code{PID_APPNAME}, or \code{PID_SECURITY}. +\end{methoddesc} + +\begin{methoddesc}{GetPropertyCount}{} + Return the number of summary properties, through + \cfunction{MsiSummaryInfoGetPropertyCount}. +\end{methoddesc} + +\begin{methoddesc}{SetProperty}{field, value} + Set a property through \cfunction{MsiSummaryInfoSetProperty}. \var{field} + can have the same values as in \method{GetProperty}, \var{value} + is the new value of the property. 
Possible value types are integer + and string. +\end{methoddesc} + +\begin{methoddesc}{Persist}{} + Write the modified properties to the summary information stream, + using \cfunction{MsiSummaryInfoPersist}. +\end{methoddesc} + +\begin{seealso} + \seetitle[http://msdn.microsoft.com/library/default.asp?url=/library/en-us/msi/setup/msisummaryinfogetproperty.asp]{MsiSummaryInfoGetProperty}{} + \seetitle[http://msdn.microsoft.com/library/default.asp?url=/library/en-us/msi/setup/msisummaryinfogetpropertycount.asp]{MsiSummaryInfoGetPropertyCount}{} + \seetitle[http://msdn.microsoft.com/library/default.asp?url=/library/en-us/msi/setup/msisummaryinfosetproperty.asp]{MsiSummaryInfoSetProperty}{} + \seetitle[http://msdn.microsoft.com/library/default.asp?url=/library/en-us/msi/setup/msisummaryinfopersist.asp]{MsiSummaryInfoPersist}{} +\end{seealso} + +\subsection{Record Objects\label{record-objects}} + +\begin{methoddesc}{GetFieldCount}{} + Return the number of fields of the record, through \cfunction{MsiRecordGetFieldCount}. +\end{methoddesc} + +\begin{methoddesc}{SetString}{field, value} + Set \var{field} to \var{value} through \cfunction{MsiRecordSetString}. + \var{field} must be an integer; \var{value} a string. +\end{methoddesc} + +\begin{methoddesc}{SetStream}{field, value} + Set \var{field} to the contents of the file named \var{value}, + through \cfunction{MsiRecordSetStream}. + \var{field} must be an integer; \var{value} a string. +\end{methoddesc} + +\begin{methoddesc}{SetInteger}{field, value} + Set \var{field} to \var{value} through \cfunction{MsiRecordSetInteger}. + Both \var{field} and \var{value} must be an integer. +\end{methoddesc} + +\begin{methoddesc}{ClearData}{} + Set all fields of the record to 0, through \cfunction{MsiRecordClearData}. +\end{methoddesc} + +\begin{seealso} + \seetitle[http://msdn.microsoft.com/library/default.asp?url=/library/en-us/msi/setup/msirecordgetfieldcount.asp]{MsiRecordGetFieldCount}{} + \seetitle[http://msdn.microsoft.com/library/default.asp?url=/library/en-us/msi/setup/msirecordsetstring.asp]{MsiRecordSetString}{} + \seetitle[http://msdn.microsoft.com/library/default.asp?url=/library/en-us/msi/setup/msirecordsetstream.asp]{MsiRecordSetStream}{} + \seetitle[http://msdn.microsoft.com/library/default.asp?url=/library/en-us/msi/setup/msirecordsetinteger.asp]{MsiRecordSetInteger}{} + \seetitle[http://msdn.microsoft.com/library/default.asp?url=/library/en-us/msi/setup/msirecordclear.asp]{MsiRecordClear}{} +\end{seealso} + +\subsection{Errors\label{msi-errors}} + +All wrappers around MSI functions raise \exception{MsiError}; +the string inside the exception will contain more detail. + +\subsection{CAB Objects\label{cab}} + +\begin{classdesc}{CAB}{name} + The class \class{CAB} represents a CAB file. During MSI construction, + files will be added simultaneously to the \code{Files} table, and + to a CAB file. Then, when all files have been added, the CAB file + can be written, then added to the MSI file. + + \var{name} is the name of the CAB file in the MSI file. +\end{classdesc} + +\begin{methoddesc}[CAB]{append}{full, logical} + Add the file with the pathname \var{full} to the CAB file, + under the name \var{logical}. If there is already a file + named \var{logical}, a new file name is created. + + Return the index of the file in the CAB file, and the + new name of the file inside the CAB file. 
+\end{methoddesc}
+
+\begin{methoddesc}[CAB]{commit}{database}
+  Generate a CAB file, add it as a stream to the MSI file,
+  put it into the \code{Media} table, and remove the generated
+  file from the disk.
+\end{methoddesc}
+
+\subsection{Directory Objects\label{msi-directory}}
+
+\begin{classdesc}{Directory}{database, cab, basedir, physical,
+  logical, default, component, \optional{componentflags}}
+  Create a new directory in the Directory table. There is a current
+  component at each point in time for the directory, which is either
+  explicitly created through \method{start_component}, or implicitly when files
+  are added for the first time. Files are added into the current
+  component, and into the cab file. To create a directory, a base
+  directory object needs to be specified (can be \code{None}), the path to
+  the physical directory, and a logical directory name. \var{default}
+  specifies the DefaultDir slot in the directory table. \var{componentflags}
+  specifies the default flags that new components get.
+\end{classdesc}
+
+\begin{methoddesc}[Directory]{start_component}{\optional{component\optional{,
+  feature\optional{, flags\optional{, keyfile\optional{, uuid}}}}}}
+  Add an entry to the Component table, and make this component the
+  current component for this directory. If no component name is given, the
+  directory name is used. If no \var{feature} is given, the current feature
+  is used. If no \var{flags} are given, the directory's default flags are
+  used. If no \var{keyfile} is given, the KeyPath is left null in the
+  Component table.
+\end{methoddesc}
+
+\begin{methoddesc}[Directory]{add_file}{file\optional{, src\optional{,
+  version\optional{, language}}}}
+  Add a file to the current component of the directory, starting a new
+  one if there is no current component. By default, the file name
+  in the source and the file table will be identical. If the \var{src} file
+  is specified, it is interpreted relative to the current
+  directory. Optionally, a \var{version} and a \var{language} can be specified for
+  the entry in the File table.
+\end{methoddesc}
+
+\begin{methoddesc}[Directory]{glob}{pattern\optional{, exclude}}
+  Add a list of files to the current component as specified in the glob
+  pattern. Individual files can be excluded in the \var{exclude} list.
+\end{methoddesc}
+
+\begin{methoddesc}[Directory]{remove_pyc}{}
+  Remove \code{.pyc}/\code{.pyo} files on uninstall.
+\end{methoddesc}
+
+\begin{seealso}
+  \seetitle[http://msdn.microsoft.com/library/en-us/msi/setup/directory_table.asp]{Directory Table}{}
+  \seetitle[http://msdn.microsoft.com/library/en-us/msi/setup/file_table.asp]{File Table}{}
+  \seetitle[http://msdn.microsoft.com/library/en-us/msi/setup/component_table.asp]{Component Table}{}
+  \seetitle[http://msdn.microsoft.com/library/en-us/msi/setup/featurecomponents_table.asp]{FeatureComponents Table}{}
+\end{seealso}
+
+
+\subsection{Features\label{features}}
+
+\begin{classdesc}{Feature}{database, id, title, desc, display\optional{,
+    level=1\optional{, parent\optional{, directory\optional{,
+    attributes=0}}}}}
+
+  Add a new record to the \code{Feature} table, using the values
+  \var{id}, \var{parent.id}, \var{title}, \var{desc}, \var{display},
+  \var{level}, \var{directory}, and \var{attributes}. The resulting
+  feature object can be passed to the \method{start_component} method
+  of \class{Directory}.
+\end{classdesc}
+
+\begin{methoddesc}[Feature]{set_current}{}
+  Make this feature the current feature of \module{msilib}.
+ New components are automatically added to the default feature, + unless a feature is explicitly specified. +\end{methoddesc} + +\begin{seealso} + \seetitle[http://msdn.microsoft.com/library/en-us/msi/setup/feature_table.asp]{Feature Table}{} +\end{seealso} + +\subsection{GUI classes\label{msi-gui}} + +\module{msilib} provides several classes that wrap the GUI tables in +an MSI database. However, no standard user interface is provided; use +\module{bdist_msi} to create MSI files with a user-interface for +installing Python packages. + +\begin{classdesc}{Control}{dlg, name} + Base class of the dialog controls. \var{dlg} is the dialog object + the control belongs to, and \var{name} is the control's name. +\end{classdesc} + +\begin{methoddesc}[Control]{event}{event, argument\optional{, + condition = ``1''\optional{, ordering}}} + + Make an entry into the \code{ControlEvent} table for this control. +\end{methoddesc} + +\begin{methoddesc}[Control]{mapping}{event, attribute} + Make an entry into the \code{EventMapping} table for this control. +\end{methoddesc} + +\begin{methoddesc}[Control]{condition}{action, condition} + Make an entry into the \code{ControlCondition} table for this control. +\end{methoddesc} + + +\begin{classdesc}{RadioButtonGroup}{dlg, name, property} + Create a radio button control named \var{name}. \var{property} + is the installer property that gets set when a radio button + is selected. +\end{classdesc} + +\begin{methoddesc}[RadioButtonGroup]{add}{name, x, y, width, height, text + \optional{, value}} + Add a radio button named \var{name} to the group, at the + coordinates \var{x}, \var{y}, \var{width}, \var{height}, and + with the label \var{text}. If \var{value} is omitted, it + defaults to \var{name}. +\end{methoddesc} + +\begin{classdesc}{Dialog}{db, name, x, y, w, h, attr, title, first, + default, cancel} + Return a new \class{Dialog} object. An entry in the \code{Dialog} table + is made, with the specified coordinates, dialog attributes, title, + name of the first, default, and cancel controls. +\end{classdesc} + +\begin{methoddesc}[Dialog]{control}{name, type, x, y, width, height, + attributes, property, text, control_next, help} + Return a new \class{Control} object. An entry in the \code{Control} table + is made with the specified parameters. + + This is a generic method; for specific types, specialized methods + are provided. +\end{methoddesc} + + +\begin{methoddesc}[Dialog]{text}{name, x, y, width, height, attributes, text} + Add and return a \code{Text} control. +\end{methoddesc} + +\begin{methoddesc}[Dialog]{bitmap}{name, x, y, width, height, text} + Add and return a \code{Bitmap} control. +\end{methoddesc} + +\begin{methoddesc}[Dialog]{line}{name, x, y, width, height} + Add and return a \code{Line} control. +\end{methoddesc} + +\begin{methoddesc}[Dialog]{pushbutton}{name, x, y, width, height, attributes, + text, next_control} + Add and return a \code{PushButton} control. +\end{methoddesc} + +\begin{methoddesc}[Dialog]{radiogroup}{name, x, y, width, height, + attributes, property, text, next_control} + Add and return a \code{RadioButtonGroup} control. +\end{methoddesc} + +\begin{methoddesc}[Dialog]{checkbox}{name, x, y, width, height, + attributes, property, text, next_control} + Add and return a \code{CheckBox} control. 
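+
+  A condensed sketch of these helpers in use; the database object
+  \code{db} and the coordinate and attribute values are illustrative:
+
+\begin{verbatim}
+from msilib import Dialog
+# db is assumed to be an open msilib database (e.g. from init_database()).
+dlg = Dialog(db, 'ExampleDlg', 50, 50, 370, 300, 3, 'Example installer',
+             'OK', 'OK', 'OK')
+dlg.text('Description', 15, 10, 320, 60, 3, 'A short description.')
+dlg.checkbox('Agree', 15, 200, 200, 17, 3, 'AGREE', 'I accept', None)
+dlg.pushbutton('OK', 304, 243, 56, 17, 3, 'OK', None)
+\end{verbatim}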
+\end{methoddesc} + +\begin{seealso} + \seetitle[http://msdn.microsoft.com/library/en-us/msi/setup/dialog_table.asp]{Dialog Table}{} + \seetitle[http://msdn.microsoft.com/library/en-us/msi/setup/control_table.asp]{Control Table}{} + \seetitle[http://msdn.microsoft.com/library/en-us/msi/setup/controls.asp]{Control Types}{} + \seetitle[http://msdn.microsoft.com/library/en-us/msi/setup/controlcondition_table.asp]{ControlCondition Table}{} + \seetitle[http://msdn.microsoft.com/library/en-us/msi/setup/controlevent_table.asp]{ControlEvent Table}{} + \seetitle[http://msdn.microsoft.com/library/en-us/msi/setup/eventmapping_table.asp]{EventMapping Table}{} + \seetitle[http://msdn.microsoft.com/library/en-us/msi/setup/radiobutton_table.asp]{RadioButton Table}{} +\end{seealso} + +\subsection{Precomputed tables\label{msi-tables}} + +\module{msilib} provides a few subpackages that contain +only schema and table definitions. Currently, these definitions +are based on MSI version 2.0. + +\begin{datadesc}{schema} + This is the standard MSI schema for MSI 2.0, with the + \var{tables} variable providing a list of table definitions, + and \var{_Validation_records} providing the data for + MSI validation. +\end{datadesc} + +\begin{datadesc}{sequence} + This module contains table contents for the standard sequence + tables: \var{AdminExecuteSequence}, \var{AdminUISequence}, + \var{AdvtExecuteSequence}, \var{InstallExecuteSequence}, and + \var{InstallUISequence}. +\end{datadesc} + +\begin{datadesc}{text} + This module contains definitions for the UIText and ActionText + tables, for the standard installer actions. +\end{datadesc} diff --git a/Doc/lib/liboperator.tex b/Doc/lib/liboperator.tex index 41da9b7..5ba3209 100644 --- a/Doc/lib/liboperator.tex +++ b/Doc/lib/liboperator.tex @@ -320,7 +320,7 @@ and \var{b} sequences. \begin{funcdesc}{irshift}{a, b} \funcline{__irshift__}{a, b} -\code{a = irshift(a, b)} is equivalent to \code{a >}\code{>= b}. +\code{a = irshift(a, b)} is equivalent to \code{a >>= b}. \versionadded{2.5} \end{funcdesc} @@ -499,7 +499,7 @@ symbols in the Python syntax and the functions in the {\code{neg(\var{a})}} \lineiii{Negation (Logical)}{\code{not \var{a}}} {\code{not_(\var{a})}} - \lineiii{Right Shift}{\code{\var{a} >\code{>} \var{b}}} + \lineiii{Right Shift}{\code{\var{a} >> \var{b}}} {\code{rshift(\var{a}, \var{b})}} \lineiii{Sequence Repitition}{\code{\var{seq} * \var{i}}} {\code{repeat(\var{seq}, \var{i})}} diff --git a/Doc/lib/liboptparse.tex b/Doc/lib/liboptparse.tex index 8aca501..ec43e3d 100644 --- a/Doc/lib/liboptparse.tex +++ b/Doc/lib/liboptparse.tex @@ -35,9 +35,9 @@ With these few lines of code, users of your script can now do the \end{verbatim} As it parses the command line, \code{optparse} sets attributes of the -\var{options} object returned by \method{parse{\_}args()} based on user-supplied +\code{options} object returned by \method{parse{\_}args()} based on user-supplied command-line values. When \method{parse{\_}args()} returns from parsing this -command line, \var{options.filename} will be \code{"outfile"} and +command line, \code{options.filename} will be \code{"outfile"} and \code{options.verbose} will be \code{False}. \code{optparse} supports both long and short options, allows short options to be merged together, and allows options to be associated with their arguments in a variety of @@ -100,8 +100,8 @@ options; the traditional \UNIX{} syntax is a hyphen (``-'') followed by a single letter, e.g. \code{"-x"} or \code{"-F"}. 
Also, traditional \UNIX{} syntax allows multiple options to be merged into a single argument, e.g. \code{"-x -F"} is equivalent to \code{"-xF"}. The GNU project -introduced \code{"{--}"} followed by a series of hyphen-separated words, -e.g. \code{"{--}file"} or \code{"{--}dry-run"}. These are the only two option +introduced \code{"-{}-"} followed by a series of hyphen-separated words, +e.g. \code{"-{}-file"} or \code{"-{}-dry-run"}. These are the only two option syntaxes provided by \module{optparse}. Some other option syntaxes that the world has seen include: @@ -170,7 +170,7 @@ For example, consider this hypothetical command-line: prog -v --report /tmp/report.txt foo bar \end{verbatim} -\code{"-v"} and \code{"{--}report"} are both options. Assuming that +\code{"-v"} and \code{"-{}-report"} are both options. Assuming that \longprogramopt{report} takes one argument, \code{"/tmp/report.txt"} is an option argument. \code{"foo"} and \code{"bar"} are positional arguments. @@ -287,12 +287,12 @@ but that's rarely necessary: by default it uses \code{sys.argv{[}1:]}.) \method{parse{\_}args()} returns two values: \begin{itemize} \item {} -\var{options}, an object containing values for all of your options{---}e.g. if \code{"-{}-file"} takes a single string argument, then -\var{options.file} will be the filename supplied by the user, or +\code{options}, an object containing values for all of your options{---}e.g. if \code{"-{}-file"} takes a single string argument, then +\code{options.file} will be the filename supplied by the user, or \code{None} if the user did not supply that option \item {} -\var{args}, the list of positional arguments leftover after parsing +\code{args}, the list of positional arguments leftover after parsing options \end{itemize} @@ -309,7 +309,7 @@ command line. There is a fixed set of actions hard-coded into \module{optparse} adding new actions is an advanced topic covered in section~\ref{optparse-extending}, Extending \module{optparse}. Most actions tell \module{optparse} to store a value in some variable{---}for example, take a string from the command line and store it in an -attribute of \var{options}. +attribute of \code{options}. If you don't specify an option action, \module{optparse} defaults to \code{store}. @@ -333,8 +333,8 @@ args = ["-f", "foo.txt"] \end{verbatim} When \module{optparse} sees the option string \code{"-f"}, it consumes the next -argument, \code{"foo.txt"}, and stores it in \var{options.filename}. So, -after this call to \method{parse{\_}args()}, \var{options.filename} is +argument, \code{"foo.txt"}, and stores it in \code{options.filename}. So, +after this call to \method{parse{\_}args()}, \code{options.filename} is \code{"foo.txt"}. Some other option types supported by \module{optparse} are \code{int} and \code{float}. @@ -379,7 +379,7 @@ types is covered in section~\ref{optparse-extending}, Extending \module{optparse Flag options{---}set a variable to true or false when a particular option is seen{---}are quite common. \module{optparse} supports them with two separate actions, \code{store{\_}true} and \code{store{\_}false}. For example, you might have a -\var{verbose} flag that is turned on with \code{"-v"} and off with \code{"-q"}: +\code{verbose} flag that is turned on with \code{"-v"} and off with \code{"-q"}: \begin{verbatim} parser.add_option("-v", action="store_true", dest="verbose") parser.add_option("-q", action="store_false", dest="verbose") @@ -421,7 +421,7 @@ want more control. 
\module{optparse} lets you supply a default value for each destination, which is assigned before the command line is parsed. First, consider the verbose/quiet example. If we want \module{optparse} to set -\var{verbose} to \code{True} unless \code{"-q"} is seen, then we can do this: +\code{verbose} to \code{True} unless \code{"-q"} is seen, then we can do this: \begin{verbatim} parser.add_option("-v", action="store_true", dest="verbose", default=True) parser.add_option("-q", action="store_false", dest="verbose") @@ -441,7 +441,7 @@ parser.add_option("-v", action="store_true", dest="verbose", default=False) parser.add_option("-q", action="store_false", dest="verbose", default=True) \end{verbatim} -Again, the default value for \var{verbose} will be \code{True}: the last +Again, the default value for \code{verbose} will be \code{True}: the last default value supplied for any particular destination is the one that counts. @@ -566,7 +566,7 @@ argument to OptionParser: parser = OptionParser(usage="%prog [-f] [-q]", version="%prog 1.0") \end{verbatim} -Note that \code{"{\%}prog"} is expanded just like it is in \var{usage}. Apart +Note that \code{"{\%}prog"} is expanded just like it is in \code{usage}. Apart from that, \code{version} can contain anything you like. When you supply it, \module{optparse} automatically adds a \code{"-{}-version"} option to your parser. If it encounters this option on the command line, it expands your @@ -580,14 +580,14 @@ foo 1.0 \end{verbatim} -\subsubsection{How \module{optparse} handles errors\label{optparse-how-optik-handles-errors}} +\subsubsection{How \module{optparse} handles errors\label{optparse-how-optparse-handles-errors}} There are two broad classes of errors that \module{optparse} has to worry about: programmer errors and user errors. Programmer errors are usually -erroneous calls to \code{parse.add{\_}option()}, e.g. invalid option strings, +erroneous calls to \code{parser.add{\_}option()}, e.g. invalid option strings, unknown option attributes, missing option attributes, etc. These are dealt with in the usual way: raise an exception (either -\exception{optparse.OptionError} or \exception{TypeError}) and let the program crash. +\code{optparse.OptionError} or \code{TypeError}) and let the program crash. Handling user errors is much more important, since they are guaranteed to happen no matter how stable your code is. \module{optparse} can automatically @@ -659,12 +659,66 @@ def main(): if __name__ == "__main__": main() \end{verbatim} -% $Id: tutorial.txt 415 2004-09-30 02:26:17Z greg $ +% $Id: tutorial.txt 505 2005-07-22 01:52:40Z gward $ \subsection{Reference Guide\label{optparse-reference-guide}} +\subsubsection{Creating the parser\label{optparse-creating-parser}} + +The first step in using \module{optparse} is to create an OptionParser instance: +\begin{verbatim} +parser = OptionParser(...) +\end{verbatim} + +The OptionParser constructor has no required arguments, but a number of +optional keyword arguments. You should always pass them as keyword +arguments, i.e. do not rely on the order in which the arguments are +declared. +\begin{quote} +\begin{description} +\item[\code{usage} (default: \code{"{\%}prog {[}options]"})] +The usage summary to print when your program is run incorrectly or +with a help option. When \module{optparse} prints the usage string, it expands +\code{{\%}prog} to \code{os.path.basename(sys.argv{[}0])} (or to \code{prog} if +you passed that keyword argument). 
To suppress a usage message, +pass the special value \code{optparse.SUPPRESS{\_}USAGE}. +\item[\code{option{\_}list} (default: \code{{[}]})] +A list of Option objects to populate the parser with. The options +in \code{option{\_}list} are added after any options in +\code{standard{\_}option{\_}list} (a class attribute that may be set by +OptionParser subclasses), but before any version or help options. +Deprecated; use \method{add{\_}option()} after creating the parser instead. +\item[\code{option{\_}class} (default: optparse.Option)] +Class to use when adding options to the parser in \method{add{\_}option()}. +\item[\code{version} (default: \code{None})] +A version string to print when the user supplies a version option. +If you supply a true value for \code{version}, \module{optparse} automatically adds +a version option with the single option string \code{"-{}-version"}. The +substring \code{"{\%}prog"} is expanded the same as for \code{usage}. +\item[\code{conflict{\_}handler} (default: \code{"error"})] +Specifies what to do when options with conflicting option strings +are added to the parser; see section~\ref{optparse-conflicts-between-options}, Conflicts between options. +\item[\code{description} (default: \code{None})] +A paragraph of text giving a brief overview of your program. \module{optparse} +reformats this paragraph to fit the current terminal width and +prints it when the user requests help (after \code{usage}, but before +the list of options). +\item[\code{formatter} (default: a new IndentedHelpFormatter)] +An instance of optparse.HelpFormatter that will be used for +printing help text. \module{optparse} provides two concrete classes for this +purpose: IndentedHelpFormatter and TitledHelpFormatter. +\item[\code{add{\_}help{\_}option} (default: \code{True})] +If true, \module{optparse} will add a help option (with option strings \code{"-h"} +and \code{"-{}-help"}) to the parser. +\item[\code{prog}] +The string to use when expanding \code{"{\%}prog"} in \code{usage} and +\code{version} instead of \code{os.path.basename(sys.argv{[}0])}. +\end{description} +\end{quote} + + \subsubsection{Populating the parser\label{optparse-populating-parser}} There are several ways to populate the parser with options. The @@ -708,38 +762,34 @@ strings, e.g. \programopt{-f} and \longprogramopt{file}. You can specify any number of short or long option strings, but you must specify at least one overall option string. -The canonical way to create an Option instance is by calling -\function{make{\_}option()}, so that is what will be shown here. However, the -most common and convenient way is to use \code{parser.add{\_}option()}. Note -that \function{make{\_}option()} and \code{parser.add{\_}option()} have identical call -signatures: +The canonical way to create an Option instance is with the +\method{add{\_}option()} method of \class{OptionParser}: \begin{verbatim} -make_option(opt_str, ..., attr=value, ...) -parser.add_option(opt_str, ..., attr=value, ...) +parser.add_option(opt_str[, ...], attr=value, ...) \end{verbatim} To define an option with only a short option string: \begin{verbatim} -make_option("-f", attr=value, ...) +parser.add_option("-f", attr=value, ...) \end{verbatim} And to define an option with only a long option string: \begin{verbatim} -make_option("--foo", attr=value, ...) +parser.add_option("--foo", attr=value, ...) \end{verbatim} -The \code{attr=value} keyword arguments define option attributes, -i.e. attributes of the Option object. 
The most important option -attribute is \member{action}, and it largely determines what other attributes -are relevant or required. If you pass irrelevant option attributes, or -fail to pass required ones, \module{optparse} raises an OptionError exception -explaining your mistake. +The keyword arguments define attributes of the new Option object. The +most important option attribute is \member{action}, and it largely determines +which other attributes are relevant or required. If you pass irrelevant +option attributes, or fail to pass required ones, \module{optparse} raises an +OptionError exception explaining your mistake. -An options's \emph{action} determines what \module{optparse} does when it encounters -this option on the command-line. The actions hard-coded into \module{optparse} are: +An options's \emph{action} determines what \module{optparse} does when it encounters this +option on the command-line. The standard option actions hard-coded into +\module{optparse} are: \begin{description} \item[\code{store}] -store this option's argument {[}default] +store this option's argument (default) \item[\code{store{\_}const}] store a constant value \item[\code{store{\_}true}] @@ -748,6 +798,8 @@ store a true value store a false value \item[\code{append}] append this option's argument to a list +\item[\code{append{\_}const}] +append a constant value to a list \item[\code{count}] increment a counter by one \item[\code{callback}] @@ -762,24 +814,25 @@ action, you may also supply \member{type} and \member{dest} option attributes; s below.) As you can see, most actions involve storing or updating a value -somewhere. \module{optparse} always creates an instance of \code{optparse.Values} -specifically for this purpose; we refer to this instance as \var{options}. -Option arguments (and various other values) are stored as attributes of -this object, according to the \member{dest} (destination) option attribute. +somewhere. \module{optparse} always creates a special object for this, +conventionally called \code{options} (it happens to be an instance of +\code{optparse.Values}). Option arguments (and various other values) are +stored as attributes of this object, according to the \member{dest} +(destination) option attribute. 
For example, when you call \begin{verbatim} parser.parse_args() \end{verbatim} -one of the first things \module{optparse} does is create the \var{options} object: +one of the first things \module{optparse} does is create the \code{options} object: \begin{verbatim} options = Values() \end{verbatim} If one of the options in this parser is defined with \begin{verbatim} -make_option("-f", "--file", action="store", type="string", dest="filename") +parser.add_option("-f", "--file", action="store", type="string", dest="filename") \end{verbatim} and the command-line being parsed includes any of the following: @@ -790,8 +843,7 @@ and the command-line being parsed includes any of the following: --file foo \end{verbatim} -then \module{optparse}, on seeing the \programopt{-f} or \longprogramopt{file} option, will do the -equivalent of +then \module{optparse}, on seeing this option, will do the equivalent of \begin{verbatim} options.filename = "foo" \end{verbatim} @@ -912,6 +964,13 @@ options.tracks.append(int("4")) \end{verbatim} \item {} +\code{append{\_}const} {[}required: \code{const}; relevant: \member{dest}] + +Like \code{store{\_}const}, but the value \code{const} is appended to \member{dest}; +as with \code{append}, \member{dest} defaults to \code{None}, and an an empty list is +automatically created the first time the option is encountered. + +\item {} \code{count} {[}relevant: \member{dest}] Increment the integer stored at \member{dest}. If no default value is @@ -939,14 +998,9 @@ options.verbosity += 1 \code{callback} {[}required: \code{callback}; relevant: \member{type}, \code{nargs}, \code{callback{\_}args}, \code{callback{\_}kwargs}] -Call the function specified by \code{callback}. The signature of -this function should be +Call the function specified by \code{callback}, which is called as \begin{verbatim} -func(option : Option, - opt : string, - value : any, - parser : OptionParser, - *args, **kwargs) +func(option, opt_str, value, parser, *args, **kwargs) \end{verbatim} See section~\ref{optparse-option-callbacks}, Option Callbacks for more detail. @@ -956,7 +1010,7 @@ See section~\ref{optparse-option-callbacks}, Option Callbacks for more detail. Prints a complete help message for all the options in the current option parser. The help message is constructed from -the \var{usage} string passed to OptionParser's constructor and +the \code{usage} string passed to OptionParser's constructor and the \member{help} string passed to every option. If no \member{help} string is supplied for an option, it will still be @@ -1007,6 +1061,87 @@ constructor. As with \member{help} options, you will rarely create \end{itemize} +\subsubsection{Option attributes\label{optparse-option-attributes}} + +The following option attributes may be passed as keyword arguments +to \code{parser.add{\_}option()}. If you pass an option attribute +that is not relevant to a particular option, or fail to pass a required +option attribute, \module{optparse} raises OptionError. +\begin{itemize} +\item {} +\member{action} (default: \code{"store"}) + +Determines \module{optparse}'s behaviour when this option is seen on the command +line; the available options are documented above. + +\item {} +\member{type} (default: \code{"string"}) + +The argument type expected by this option (e.g., \code{"string"} or +\code{"int"}); the available option types are documented below. 
+ +\item {} +\member{dest} (default: derived from option strings) + +If the option's action implies writing or modifying a value somewhere, +this tells \module{optparse} where to write it: \member{dest} names an attribute of the +\code{options} object that \module{optparse} builds as it parses the command line. + +\item {} +\code{default} (deprecated) + +The value to use for this option's destination if the option is not +seen on the command line. Deprecated; use \code{parser.set{\_}defaults()} +instead. + +\item {} +\code{nargs} (default: 1) + +How many arguments of type \member{type} should be consumed when this +option is seen. If {\textgreater} 1, \module{optparse} will store a tuple of values to +\member{dest}. + +\item {} +\code{const} + +For actions that store a constant value, the constant value to store. + +\item {} +\code{choices} + +For options of type \code{"choice"}, the list of strings the user +may choose from. + +\item {} +\code{callback} + +For options with action \code{"callback"}, the callable to call when this +option is seen. See section~\ref{optparse-option-callbacks}, Option Callbacks for detail on the arguments +passed to \code{callable}. + +\item {} +\code{callback{\_}args}, \code{callback{\_}kwargs} + +Additional positional and keyword arguments to pass to \code{callback} +after the four standard callback arguments. + +\item {} +\member{help} + +Help text to print for this option when listing all available options +after the user supplies a \member{help} option (such as \code{"-{}-help"}). +If no help text is supplied, the option will be listed without help +text. To hide this option, use the special value \code{SUPPRESS{\_}HELP}. + +\item {} +\code{metavar} (default: derived from option strings) + +Stand-in for the option argument(s) to use when printing help text. +See section~\ref{optparse-tutorial}, the tutorial for an example. + +\end{itemize} + + \subsubsection{Standard option types\label{optparse-standard-option-types}} \module{optparse} has six built-in option types: \code{string}, \code{int}, \code{long}, @@ -1017,22 +1152,74 @@ Arguments to string options are not checked or converted in any way: the text on the command line is stored in the destination (or passed to the callback) as-is. -Integer arguments are passed to \code{int()} to convert them to Python -integers. If \code{int()} fails, so will \module{optparse}, although with a more -useful error message. (Internally, \module{optparse} raises -\exception{OptionValueError}; OptionParser catches this exception higher -up and terminates your program with a useful error message.) +Integer arguments (type \code{int} or \code{long}) are parsed as follows: +\begin{quote} +\begin{itemize} +\item {} +if the number starts with \code{0x}, it is parsed as a hexadecimal number + +\item {} +if the number starts with \code{0}, it is parsed as an octal number + +\item {} +if the number starts with \code{0b}, is is parsed as a binary number + +\item {} +otherwise, the number is parsed as a decimal number + +\end{itemize} +\end{quote} + +The conversion is done by calling either \code{int()} or \code{long()} with +the appropriate base (2, 8, 10, or 16). If this fails, so will \module{optparse}, +although with a more useful error message. -Likewise, \code{float} arguments are passed to \code{float()} for conversion, -\code{long} arguments to \code{long()}, and \code{complex} arguments to -\code{complex()}. Apart from that, they are handled identically to integer -arguments. 
+\code{float} and \code{complex} option arguments are converted directly with +\code{float()} and \code{complex()}, with similar error-handling. \code{choice} options are a subtype of \code{string} options. The \code{choices} option attribute (a sequence of strings) defines the set of allowed -option arguments. \code{optparse.option.check{\_}choice()} compares +option arguments. \code{optparse.check{\_}choice()} compares user-supplied option arguments against this master list and raises -\exception{OptionValueError} if an invalid string is given. +OptionValueError if an invalid string is given. + + +\subsubsection{Parsing arguments\label{optparse-parsing-arguments}} + +The whole point of creating and populating an OptionParser is to call +its \method{parse{\_}args()} method: +\begin{verbatim} +(options, args) = parser.parse_args(args=None, options=None) +\end{verbatim} + +where the input parameters are +\begin{description} +\item[\code{args}] +the list of arguments to process (\code{sys.argv{[}1:]} by default) +\item[\code{options}] +object to store option arguments in (a new instance of +optparse.Values by default) +\end{description} + +and the return values are +\begin{description} +\item[\code{options}] +the same object as was passed in as \code{options}, or the new +optparse.Values instance created by \module{optparse} +\item[\code{args}] +the leftover positional arguments after all options have been +processed +\end{description} + +The most common usage is to supply neither keyword argument. If you +supply a \code{values} object, it will be repeatedly modified with a +\code{setattr()} call for every option argument written to an option +destination, and finally returned by \method{parse{\_}args()}. + +If \method{parse{\_}args()} encounters any errors in the argument list, it calls +the OptionParser's \method{error()} method with an appropriate end-user error +message. This ultimately terminates your process with an exit status of +2 (the traditional \UNIX{} exit status for command-line errors). \subsubsection{Querying and manipulating your option parser\label{optparse-querying-manipulating-option-parser}} @@ -1050,9 +1237,8 @@ Returns the Option instance with the option string \code{opt{\_}str}, or If the OptionParser has an option corresponding to \code{opt{\_}str}, that option is removed. If that option provided any other option strings, all of those option strings become invalid. - If \code{opt{\_}str} does not occur in any option belonging to this -OptionParser, raises \exception{ValueError}. +OptionParser, raises ValueError. \end{description} @@ -1074,20 +1260,20 @@ options. If it finds any, it invokes the current conflict-handling mechanism. 
You can set the conflict-handling mechanism either in the constructor: \begin{verbatim} -parser = OptionParser(..., conflict_handler="...") +parser = OptionParser(..., conflict_handler=handler) \end{verbatim} or with a separate call: \begin{verbatim} -parser.set_conflict_handler("...") +parser.set_conflict_handler(handler) \end{verbatim} -The available conflict-handling mechanisms are: +The available conflict handlers are: \begin{quote} \begin{description} \item[\code{error} (default)] assume option conflicts are a programming error and raise -\exception{OptionConflictError} +OptionConflictError \item[\code{resolve}] resolve option conflicts intelligently (see below) \end{description} @@ -1131,7 +1317,78 @@ options: -n, --noisy be noisy --dry-run new dry-run option \end{verbatim} -% $Id: reference.txt 415 2004-09-30 02:26:17Z greg $ + + +\subsubsection{Cleanup\label{optparse-cleanup}} + +OptionParser instances have several cyclic references. This should not +be a problem for Python's garbage collector, but you may wish to break +the cyclic references explicitly by calling \code{destroy()} on your +OptionParser once you are done with it. This is particularly useful in +long-running applications where large object graphs are reachable from +your OptionParser. + + +\subsubsection{Other methods\label{optparse-other-methods}} + +OptionParser supports several other public methods: +\begin{itemize} +\item {} +\code{set{\_}usage(usage)} + +Set the usage string according to the rules described above for the +\code{usage} constructor keyword argument. Passing \code{None} sets the +default usage string; use \code{SUPPRESS{\_}USAGE} to suppress a usage +message. + +\item {} +\code{enable{\_}interspersed{\_}args()}, \code{disable{\_}interspersed{\_}args()} + +Enable/disable positional arguments interspersed with options, similar +to GNU getopt (enabled by default). For example, if \code{"-a"} and +\code{"-b"} are both simple options that take no arguments, \module{optparse} +normally accepts this syntax: +\begin{verbatim} +prog -a arg1 -b arg2 +\end{verbatim} + +and treats it as equivalent to +\begin{verbatim} +prog -a -b arg1 arg2 +\end{verbatim} + +To disable this feature, call \code{disable{\_}interspersed{\_}args()}. This +restores traditional \UNIX{} syntax, where option parsing stops with the +first non-option argument. + +\item {} +\code{set{\_}defaults(dest=value, ...)} + +Set default values for several option destinations at once. Using +\method{set{\_}defaults()} is the preferred way to set default values for +options, since multiple options can share the same destination. 
For +example, if several ``mode'' options all set the same destination, any +one of them can set the default, and the last one wins: +\begin{verbatim} +parser.add_option("--advanced", action="store_const", + dest="mode", const="advanced", + default="novice") # overridden below +parser.add_option("--novice", action="store_const", + dest="mode", const="novice", + default="advanced") # overrides above setting +\end{verbatim} + +To avoid this confusion, use \method{set{\_}defaults()}: +\begin{verbatim} +parser.set_defaults(mode="advanced") +parser.add_option("--advanced", action="store_const", + dest="mode", const="advanced") +parser.add_option("--novice", action="store_const", + dest="mode", const="novice") +\end{verbatim} + +\end{itemize} +% $Id: reference.txt 505 2005-07-22 01:52:40Z gward $ \subsection{Option Callbacks\label{optparse-option-callbacks}} @@ -1234,7 +1491,7 @@ its instance attributes: the current list of leftover arguments, ie. arguments that have been consumed but are neither options nor option arguments. Feel free to modify \code{parser.largs}, e.g. by adding more -arguments to it. (This list will become \var{args}, the second +arguments to it. (This list will become \code{args}, the second return value of \method{parse{\_}args()}.) \item[\code{parser.rargs}] the current list of remaining arguments, ie. with \code{opt{\_}str} and @@ -1260,7 +1517,7 @@ is a dictionary of arbitrary keyword arguments supplied via \subsubsection{Raising errors in a callback\label{optparse-raising-errors-in-callback}} -The callback function should raise \exception{OptionValueError} if there are any +The callback function should raise OptionValueError if there are any problems with the option or its argument(s). \module{optparse} catches this and terminates the program, printing the error message you supply to stderr. Your message should be clear, concise, accurate, and mention diff --git a/Doc/lib/libpdb.tex b/Doc/lib/libpdb.tex index a5b36a6..b252aeb 100644 --- a/Doc/lib/libpdb.tex +++ b/Doc/lib/libpdb.tex @@ -178,12 +178,12 @@ most commands. \item[d(own)] Move the current frame one level down in the stack trace -(to an newer frame). +(to a newer frame). \item[u(p)] Move the current frame one level up in the stack trace -(to a older frame). +(to an older frame). \item[b(reak) \optional{\optional{\var{filename}:}\var{lineno}\code{\Large{|}}\var{function}\optional{, \var{condition}}}] diff --git a/Doc/lib/libposixpath.tex b/Doc/lib/libposixpath.tex index cea963e..9f0de1f 100644 --- a/Doc/lib/libposixpath.tex +++ b/Doc/lib/libposixpath.tex @@ -146,8 +146,9 @@ should detect mount points for all \UNIX{} and \POSIX{} variants. \end{funcdesc} \begin{funcdesc}{join}{path1\optional{, path2\optional{, ...}}} -Joins one or more path components intelligently. If any component is -an absolute path, all previous components are thrown away, and joining +Join one or more path components intelligently. If any component is +an absolute path, all previous components (on Windows, including the +previous drive letter, if there was one) are thrown away, and joining continues. The return value is the concatenation of \var{path1}, and optionally \var{path2}, etc., with exactly one directory separator (\code{os.sep}) inserted between components, unless \var{path2} is diff --git a/Doc/lib/libprofile.tex b/Doc/lib/libprofile.tex index 9ff5ba0..0108b21 100644 --- a/Doc/lib/libprofile.tex +++ b/Doc/lib/libprofile.tex @@ -384,14 +384,15 @@ arguments to supply the globals and locals dictionaries for the \var{command} string. 
\end{funcdesc} -Analysis of the profiler data is done using this class from the -\module{pstats} module: +Analysis of the profiler data is done using the \class{Stats} class. + +\note{The \class{Stats} class is defined in the \module{pstats} module.} % now switch modules.... % (This \stmodindex use may be hard to change ;-( ) \stmodindex{pstats} -\begin{classdesc}{Stats}{filename\optional{, \moreargs\optional{, stream=sys.stdout}}} +\begin{classdesc}{Stats}{filename\optional{, stream=sys.stdout\optional{, \moreargs}}} This class constructor creates an instance of a ``statistics object'' from a \var{filename} (or set of filenames). \class{Stats} objects are manipulated by methods, in order to print useful reports. You may specify @@ -409,6 +410,8 @@ functions will be coalesced, so that an overall view of several processes can be considered in a single report. If additional files need to be combined with data in an existing \class{Stats} object, the \method{add()} method can be used. + +\versionchanged[The \var{stream} parameter was added]{2.5} \end{classdesc} diff --git a/Doc/lib/librlcompleter.tex b/Doc/lib/librlcompleter.tex index b2a1eba7..cb2ac59 100644 --- a/Doc/lib/librlcompleter.tex +++ b/Doc/lib/librlcompleter.tex @@ -2,18 +2,17 @@ Completion function for GNU readline} \declaremodule{standard}{rlcompleter} - \platform{Unix} \sectionauthor{Moshe Zadka}{moshez@zadka.site.co.il} -\modulesynopsis{Python identifier completion for the GNU readline library.} +\modulesynopsis{Python identifier completion, suitable for the GNU readline library.} -The \module{rlcompleter} module defines a completion function for +The \module{rlcompleter} module defines a completion function suitable for the \refmodule{readline} module by completing valid Python identifiers and keywords. -This module is \UNIX-specific due to its dependence on the -\refmodule{readline} module. - -The \module{rlcompleter} module defines the \class{Completer} class. +When this module is imported on a \UNIX\ platform with the \module{readline} +module available, an instance of the \class{Completer} class is automatically +created and its \method{complete} method is set as the \module{readline} +completer. Example: @@ -44,6 +43,9 @@ else: \end{verbatim} +On platforms without \module{readline}, the \class{Completer} class defined +by this module can still be used for custom purposes. + \subsection{Completer Objects \label{completer-objects}} Completer objects have the following method: diff --git a/Doc/lib/librunpy.tex b/Doc/lib/librunpy.tex index 4be9901..c7a7e51 100644 --- a/Doc/lib/librunpy.tex +++ b/Doc/lib/librunpy.tex @@ -10,7 +10,7 @@ \versionadded{2.5} The \module{runpy} module is used to locate and run Python modules -without importing them first. It's main use is to implement the +without importing them first. Its main use is to implement the \programopt{-m} command line switch that allows scripts to be located using the Python module namespace rather than the filesystem. 
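As a minimal sketch of the programmatic interface (the module name
\code{"mymod"} is an assumption made for illustration; it must be importable
from \code{sys.path}), \function{run_module} executes a module and returns the
resulting module globals dictionary:

\begin{verbatim}
import runpy

# Run the (assumed) module "mymod" as if it had been named with -m;
# alter_sys=True temporarily updates sys.argv[0] and sys.modules while
# the module is running.
mod_globals = runpy.run_module("mymod", run_name="__main__", alter_sys=True)
\end{verbatim}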
diff --git a/Doc/lib/libsqlite3.tex b/Doc/lib/libsqlite3.tex new file mode 100644 index 0000000..8c80eb6 --- /dev/null +++ b/Doc/lib/libsqlite3.tex @@ -0,0 +1,503 @@ +\section{\module{sqlite3} --- + DB-API 2.0 interface for SQLite databases} + +\declaremodule{builtin}{sqlite3} +\modulesynopsis{A DB-API 2.0 implementation using SQLite 3.x.} +\sectionauthor{Gerhard Häring}{gh@ghaering.de} +\versionadded{2.5} + +\subsection{Module functions and constants\label{sqlite3-Module-Contents}} + +\begin{datadesc}{PARSE_DECLTYPES} +This constant is meant to be used with the \var{detect_types} parameter of the +\function{connect} function. + +Setting it makes the \module{sqlite3} module parse the declared type for each column it +returns. It will parse out the first word of the declared type, i. e. for +"integer primary key", it will parse out "integer". Then for that column, it +will look into the converters dictionary and use the converter function +registered for that type there. Converter names are case-sensitive! +\end{datadesc} + + +\begin{datadesc}{PARSE_COLNAMES} +This constant is meant to be used with the \var{detect_types} parameter of the +\function{connect} function. + +Setting this makes the SQLite interface parse the column name for each column +it returns. It will look for a string of the form [mytype] in there, and then +decide that 'mytype' is the type of the column. It will try to find an entry of +'mytype' in the converters dictionary and then use the converter function found +there to return the value. The column name found in \member{cursor.description} is only +the first word of the column name, i. e. if you use something like +\code{'as "x [datetime]"'} in your SQL, then we will parse out everything until the +first blank for the column name: the column name would simply be "x". +\end{datadesc} + +\begin{funcdesc}{connect}{database\optional{, timeout, isolation_level, detect_types, factory}} +Opens a connection to the SQLite database file \var{database}. You can use +\code{":memory:"} to open a database connection to a database that resides in +RAM instead of on disk. + +When a database is accessed by multiple connections, and one of the processes +modifies the database, the SQLite database is locked until that transaction is +committed. The \var{timeout} parameter specifies how long the connection should +wait for the lock to go away until raising an exception. The default for the +timeout parameter is 5.0 (five seconds). + +For the \var{isolation_level} parameter, please see the \member{isolation_level} +property of \class{Connection} objects (section \ref{sqlite3-Connection-IsolationLevel}). + +SQLite natively supports only the types TEXT, INTEGER, FLOAT, BLOB and NULL. If +you want to use other types, you have to add support for them yourself. +The \var{detect_types} parameter and using custom \strong{converters} registered with +the module-level \function{register_converter} function allow you to easily do that. + +\var{detect_types} defaults to 0 (i. e. off, no type detection); you can set it +to any combination of \constant{PARSE_DECLTYPES} and \constant{PARSE_COLNAMES} to turn type +detection on. + +By default, the \module{sqlite3} module uses its \class{Connection} class for the +connect call. You can, however, subclass the \class{Connection} class and make +\function{connect} use your class instead by providing your class for the +\var{factory} parameter. + +Consult the section \ref{sqlite3-Types} of this manual for details.
+ +The \module{sqlite3} module internally uses a statement cache to avoid SQL parsing +overhead. If you want to explicitly set the number of statements that are +cached for the connection, you can set the \var{cached_statements} parameter. +The currently implemented default is to cache 100 statements. +\end{funcdesc} + +\begin{funcdesc}{register_converter}{typename, callable} +Registers a callable to convert a bytestring from the database into a custom +Python type. The callable will be invoked for all database values that are of +the type \var{typename}. See the parameter \var{detect_types} of the +\function{connect} function for how the type detection works. Note that the case of +\var{typename} and the name of the type in your query must match! +\end{funcdesc} + +\begin{funcdesc}{register_adapter}{type, callable} +Registers a callable to convert the custom Python type \var{type} into one of +SQLite's supported types. The callable \var{callable} accepts the Python value as +its single parameter, and must return a value of one of the following types: +int, long, float, str (UTF-8 encoded), unicode or buffer. +\end{funcdesc} + +\begin{funcdesc}{complete_statement}{sql} +Returns \constant{True} if the string \var{sql} contains one or more complete SQL +statements terminated by semicolons. It does not verify if the SQL is +syntactically correct, only if there are no unclosed string literals and if the +statement is terminated by a semicolon. + +This can be used to build a shell for SQLite, like in the following example: + + \verbatiminput{sqlite3/complete_statement.py} +\end{funcdesc} + +\subsection{Connection Objects \label{sqlite3-Connection-Objects}} + +A \class{Connection} instance has the following attributes and methods: + +\label{sqlite3-Connection-IsolationLevel} +\begin{memberdesc}{isolation_level} + Get or set the current isolation level. None for autocommit mode or one of + "DEFERRED", "IMMEDIATE" or "EXCLUSIVE". See Controlling Transactions + (section \ref{sqlite3-Controlling-Transactions}) for a more detailed explanation. +\end{memberdesc} + +\begin{methoddesc}{cursor}{\optional{cursorClass}} + The cursor method accepts a single optional parameter \var{cursorClass}. + This is a custom cursor class which must extend \class{sqlite3.Cursor}. +\end{methoddesc} + +\begin{methoddesc}{execute}{sql, \optional{parameters}} +This is a nonstandard shortcut that creates an intermediate cursor object by +calling the cursor method, then calls the cursor's \method{execute} method with the +parameters given. +\end{methoddesc} + +\begin{methoddesc}{executemany}{sql, \optional{parameters}} +This is a nonstandard shortcut that creates an intermediate cursor object by +calling the cursor method, then calls the cursor's \method{executemany} method with the +parameters given. +\end{methoddesc} + +\begin{methoddesc}{executescript}{sql_script} +This is a nonstandard shortcut that creates an intermediate cursor object by +calling the cursor method, then calls the cursor's \method{executescript} method with the +parameters given. +\end{methoddesc} + +\begin{methoddesc}{create_function}{name, num_params, func} + +Creates a user-defined function that you can later use from within SQL +statements under the function name \var{name}. \var{num_params} is the number +of parameters the function accepts, and \var{func} is a Python callable that is +called as a SQL function. + +The function can return any of the types supported by SQLite: unicode, str, +int, long, float, buffer and None.
Exceptions in the function are ignored and +they are handled as if the function returned None. + +Example: + + \verbatiminput{sqlite3/md5func.py} +\end{methoddesc} + +\begin{methoddesc}{create_aggregate}{name, num_params, aggregate_class} + +Creates a user-defined aggregate function. + +The aggregate class must implement a \code{step} method, which accepts +\var{num_params} parameters, and a \code{finalize} method which +will return the final result of the aggregate. + +The \code{finalize} method can return any of the types supported by SQLite: +unicode, str, int, long, float, buffer and None. Any exceptions are ignored. + +Example: + + \verbatiminput{sqlite3/mysumaggr.py} +\end{methoddesc} + +\begin{methoddesc}{create_collation}{name, callable} + +Creates a collation with the specified \var{name} and \var{callable}. The +callable will be passed two string arguments. It should return -1 if the first +is ordered lower than the second, 0 if they are ordered equal, and 1 if the +first is ordered higher than the second. Note that this controls sorting +(ORDER BY in SQL) so your comparisons don't affect other SQL operations. + +Note that the callable will get its parameters as Python bytestrings, which +will normally be encoded in UTF-8. + +The following example shows a custom collation that sorts "the wrong way": + + \verbatiminput{sqlite3/collation_reverse.py} + +To remove a collation, call \code{create_collation} with None as callable: + +\begin{verbatim} + con.create_collation("reverse", None) +\end{verbatim} +\end{methoddesc} + + +\begin{memberdesc}{row_factory} + You can change this attribute to a callable that accepts the cursor and + the original row as a tuple and will return the real result row. This + way, you can implement more advanced ways of returning results, like + ones that can also access columns by name. + + Example: + + \verbatiminput{sqlite3/row_factory.py} + + If the standard tuple types don't suffice for you, and you want name-based + access to columns, you should consider setting \member{row_factory} to the + highly-optimized sqlite3.Row type. It provides both + index-based and case-insensitive name-based access to columns with almost + no memory overhead. It is much better than your own custom dictionary-based + approach or even a db_row based solution. +\end{memberdesc} + +\begin{memberdesc}{text_factory} + Using this attribute you can control what objects are returned for the + TEXT data type. By default, this attribute is set to \class{unicode} and + the \module{sqlite3} module will return Unicode objects for TEXT. If you want to return + bytestrings instead, you can set it to \class{str}. + + For efficiency reasons, there's also a way to return Unicode objects only + for non-ASCII data, and bytestrings otherwise. To activate it, set this + attribute to \constant{sqlite3.OptimizedUnicode}. + + You can also set it to any other callable that accepts a single bytestring + parameter and returns the result object. + + See the following example code for illustration: + + \verbatiminput{sqlite3/text_factory.py} +\end{memberdesc} + +\begin{memberdesc}{total_changes} + Returns the total number of database rows that have been modified, inserted, + or deleted since the database connection was opened. +\end{memberdesc} + + + + + +\subsection{Cursor Objects \label{sqlite3-Cursor-Objects}} + +A \class{Cursor} instance has the following attributes and methods: + +\begin{methoddesc}{execute}{sql, \optional{parameters}} + +Executes a SQL statement.
The SQL statement may be parametrized (i. e. +placeholders instead of SQL literals). The \module{sqlite3} module supports two kinds of +placeholders: question marks (qmark style) and named placeholders (named +style). + +This example shows how to use parameters with qmark style: + + \verbatiminput{sqlite3/execute_1.py} + +This example shows how to use the named style: + + \verbatiminput{sqlite3/execute_2.py} + + \method{execute} will only execute a single SQL statement. If you try to + execute more than one statement with it, it will raise a Warning. Use + \method{executescript} if you want to execute multiple SQL statements with one + call. +\end{methoddesc} + + +\begin{methoddesc}{executemany}{sql, seq_of_parameters} +Executes a SQL command against all parameter sequences or mappings found in the +sequence \var{seq_of_parameters}. The \module{sqlite3} module also allows +using an iterator yielding parameters instead of a sequence. + +\verbatiminput{sqlite3/executemany_1.py} + +Here's a shorter example using a generator: + +\verbatiminput{sqlite3/executemany_2.py} +\end{methoddesc} + +\begin{methoddesc}{executescript}{sql_script} + +This is a nonstandard convenience method for executing multiple SQL statements +at once. It issues a COMMIT statement first, then executes the SQL script it +gets as a parameter. + +\var{sql_script} can be a bytestring or a Unicode string. + +Example: + +\verbatiminput{sqlite3/executescript.py} +\end{methoddesc} + +\begin{memberdesc}{rowcount} + Although the \class{Cursor} class of the \module{sqlite3} module implements this + attribute, the database engine's own support for the determination of "rows + affected"/"rows selected" is quirky. + + For \code{SELECT} statements, \member{rowcount} is always None because we cannot + determine the number of rows a query produced until all rows have been fetched. + + For \code{DELETE} statements, SQLite reports \member{rowcount} as 0 if you make a + \code{DELETE FROM table} without any condition. + + For \method{executemany} statements, the number of modifications is summed + up into \member{rowcount}. + + As required by the Python DB API Spec, the \member{rowcount} attribute "is -1 + in case no executeXX() has been performed on the cursor or the rowcount + of the last operation is not determinable by the interface". +\end{memberdesc} + +\subsection{SQLite and Python types\label{sqlite3-Types}} + +\subsubsection{Introduction} + +SQLite natively supports the following types: NULL, INTEGER, REAL, TEXT, BLOB. + +The following Python types can thus be sent to SQLite without any problem: + +\begin{tableii} {c|l}{code}{Python type}{SQLite type} +\lineii{None}{NULL} +\lineii{int}{INTEGER} +\lineii{long}{INTEGER} +\lineii{float}{REAL} +\lineii{str (UTF8-encoded)}{TEXT} +\lineii{unicode}{TEXT} +\lineii{buffer}{BLOB} +\end{tableii} + +This is how SQLite types are converted to Python types by default: + +\begin{tableii} {c|l}{code}{SQLite type}{Python type} +\lineii{NULL}{None} +\lineii{INTEGER}{int or long, depending on size} +\lineii{REAL}{float} +\lineii{TEXT}{depends on text_factory, unicode by default} +\lineii{BLOB}{buffer} +\end{tableii} + +The type system of the \module{sqlite3} module is extensible in two ways: you can store +additional Python types in a SQLite database via object adaptation, and you can +let the \module{sqlite3} module convert SQLite types to different Python types via +converters.
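+
+A round trip through an in-memory database illustrates these default
+mappings. The following snippet is only an illustrative sketch (the table
+and column names are made up, and it is not one of the \file{sqlite3/}
+example files used elsewhere in this section):
+
+\begin{verbatim}
+import sqlite3
+
+# Illustrative sketch only: table and column names are invented.
+con = sqlite3.connect(":memory:")
+cur = con.cursor()
+cur.execute("create table sketch(i, f, s, b)")
+cur.execute("insert into sketch(i, f, s, b) values (?, ?, ?, ?)",
+            (42, 3.14, u"some text", buffer("some bytes")))
+cur.execute("select i, f, s, b from sketch")
+row = cur.fetchone()
+# With the default settings this prints int, float, unicode and buffer.
+print [type(value) for value in row]
+con.close()
+\end{verbatim}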
+ +\subsubsection{Using adapters to store additional Python types in SQLite databases} + +As described before, SQLite supports only a limited set of types natively. To +use other Python types with SQLite, you must \strong{adapt} them to one of the sqlite3 +module's supported types for SQLite, that is, one of NoneType, int, long, float, +str, unicode or buffer. + +The \module{sqlite3} module uses Python object adaptation, as described in PEP 246, +for this. The protocol to use is \class{PrepareProtocol}. + +There are two ways to enable the \module{sqlite3} module to adapt a custom Python type +to one of the supported ones. + +\paragraph{Letting your object adapt itself} + +This is a good approach if you write the class yourself. Let's suppose you have +a class like this: + +\begin{verbatim} +class Point(object): + def __init__(self, x, y): + self.x, self.y = x, y +\end{verbatim} + +Now you want to store the point in a single SQLite column. You'll first have to +choose one of the supported types to represent the point. +Let's just use str and separate the coordinates using a semicolon. Then you +need to give your class a method \code{__conform__(self, protocol)} which must +return the converted value. The parameter \var{protocol} will be +\class{PrepareProtocol}. + +\verbatiminput{sqlite3/adapter_point_1.py} + +\paragraph{Registering an adapter callable} + +The other possibility is to create a function that converts the type to the +string representation and register the function with \method{register_adapter}. + + \verbatiminput{sqlite3/adapter_point_2.py} + +\begin{notice} +The type/class to adapt must be a new-style class, i. e. it must have +\class{object} as one of its bases. +\end{notice} + +The \module{sqlite3} module has two default adapters for Python's builtin +\class{datetime.date} and \class{datetime.datetime} types. Now let's suppose we +want to store \class{datetime.datetime} objects not in ISO representation, but +as a Unix timestamp. + + \verbatiminput{sqlite3/adapter_datetime.py} + +\subsubsection{Converting SQLite values to custom Python types} + +Being able to send custom Python types to SQLite is all nice and dandy. +But to make it really useful we need to make the Python to SQLite to Python +roundtrip work. + +Enter converters. + +Let's go back to the Point class. We stored the x and y coordinates separated +by semicolons as strings in SQLite. + +Let's first define a converter function that accepts the string as a parameter and constructs a Point object from it. + +\begin{notice} +Converter functions \strong{always} get called with a string, no matter +under which data type you sent the value to SQLite. +\end{notice} + +\begin{notice} +Converter names are looked up in a case-sensitive manner. +\end{notice} + + +\begin{verbatim} + def convert_point(s): + x, y = map(float, s.split(";")) + return Point(x, y) +\end{verbatim} + +Now you need to let the \module{sqlite3} module know that what you select from the +database is actually a point. There are two ways of doing this: + +\begin{itemize} + \item Implicitly via the declared type + \item Explicitly via the column name +\end{itemize} + +Both ways are described in section \ref{sqlite3-Module-Contents}, in the text explaining +the constants \constant{PARSE_DECLTYPES} and \constant{PARSE_COLNAMES}. + + +The following example illustrates both ways.
+ + \verbatiminput{sqlite3/converter_point.py} + +\subsubsection{Default adapters and converters} + +There are default adapters for the date and datetime types in the datetime +module. They will be sent as ISO dates/ISO timestamps to SQLite. + +The default converters are registered under the name "date" for datetime.date +and under the name "timestamp" for datetime.datetime. + +This way, you can use date/timestamps from Python without any additional +fiddling in most cases. The format of the adapters is also compatible with the +experimental SQLite date/time functions. + +The following example demonstrates this. + + \verbatiminput{sqlite3/pysqlite_datetime.py} + +\subsection{Controlling Transactions \label{sqlite3-Controlling-Transactions}} + +By default, the \module{sqlite3} module opens transactions implicitly before a DML +statement (INSERT/UPDATE/DELETE/REPLACE), and commits transactions implicitly +before a non-DML, non-DQL statement (i. e. anything other than +SELECT/INSERT/UPDATE/DELETE/REPLACE). + +So if you are within a transaction, and issue a command like \code{CREATE TABLE +...}, \code{VACUUM}, \code{PRAGMA}, the \module{sqlite3} module will commit implicitly +before executing that command. There are two reasons for doing that. The first +is that some of these commands don't work within transactions. The other reason +is that pysqlite needs to keep track of the transaction state (if a transaction +is active or not). + +You can control which kind of "BEGIN" statements pysqlite implicitly executes +(or none at all) via the \var{isolation_level} parameter to the +\function{connect} call, or via the \member{isolation_level} property of +connections. + +If you want \strong{autocommit mode}, then set \member{isolation_level} to None. + +Otherwise leave it at its default, which will result in a plain "BEGIN" +statement, or set it to one of SQLite's supported isolation levels: DEFERRED, +IMMEDIATE or EXCLUSIVE. + +As the \module{sqlite3} module needs to keep track of the transaction state, you should +not use \code{OR ROLLBACK} or \code{ON CONFLICT ROLLBACK} in your SQL. Instead, +catch the \exception{IntegrityError} and call the \method{rollback} method of +the connection yourself. + +\subsection{Using pysqlite efficiently} + +\subsubsection{Using shortcut methods} + +Using the nonstandard \method{execute}, \method{executemany} and +\method{executescript} methods of the \class{Connection} object, your code can +be written more concisely, because you don't have to create the (often +superfluous) \class{Cursor} objects explicitly. Instead, the \class{Cursor} +objects are created implicitly and these shortcut methods return the cursor +objects. This way, you can for example execute a SELECT statement and iterate +over it directly using only a single call on the \class{Connection} object. + + \verbatiminput{sqlite3/shortcut_methods.py} + +\subsubsection{Accessing columns by name instead of by index} + +One cool feature of the \module{sqlite3} module is the builtin \class{sqlite3.Row} class +designed to be used as a row factory. + +Rows wrapped with this class can be accessed both by index (like tuples) and +case-insensitively by name: + + \verbatiminput{sqlite3/rowclass.py} + + diff --git a/Doc/lib/libstdtypes.tex b/Doc/lib/libstdtypes.tex index 8d011fd..f44360b 100644 --- a/Doc/lib/libstdtypes.tex +++ b/Doc/lib/libstdtypes.tex @@ -1,12 +1,11 @@ \section{Built-in Types \label{types}} The following sections describe the standard types that are built into -the interpreter.
Historically, Python's built-in types have differed -from user-defined types because it was not possible to use the built-in -types as the basis for object-oriented inheritance. With the 2.2 -release this situation has started to change, although the intended -unification of user-defined and built-in types is as yet far from -complete. +the interpreter. +\note{Historically (until release 2.2), Python's built-in types have +differed from user-defined types because it was not possible to use +the built-in types as the basis for object-oriented inheritance. +This limitation does not exist any longer.} The principal built-in types are numerics, sequences, mappings, files classes, instances and exceptions. @@ -19,7 +18,7 @@ the equivalent \function{repr()} function, or the slightly different \function{str()} function). The latter function is implicitly used when an object is written by the \keyword{print}\stindex{print} statement. -(Information on \ulink{\keyword{print} statement}{../ref/print.html} +(Information on the \ulink{\keyword{print} statement}{../ref/print.html} and other language statements can be found in the \citetitle[../ref/ref.html]{Python Reference Manual} and the \citetitle[../tut/tut.html]{Python Tutorial}.) @@ -728,6 +727,15 @@ a prefix; rather, all combinations of its values are stripped: \versionchanged[Support for the \var{chars} argument]{2.2.2} \end{methoddesc} +\begin{methoddesc}[string]{partition}{sep} +Split the string at the first occurrence of \var{sep}, and return +a 3-tuple containing the part before the separator, the separator +itself, and the part after the separator. If the separator is not +found, return a 3-tuple containing the string itself, followed by +two empty strings. +\versionadded{2.5} +\end{methoddesc} + \begin{methoddesc}[string]{replace}{old, new\optional{, count}} Return a copy of the string with all occurrences of substring \var{old} replaced by \var{new}. If the optional argument @@ -755,6 +763,15 @@ The original string is returned if \versionchanged[Support for the \var{fillchar} argument]{2.4} \end{methoddesc} +\begin{methoddesc}[string]{rpartition}{sep} +Split the string at the last occurrence of \var{sep}, and return +a 3-tuple containing the part before the separator, the separator +itself, and the part after the separator. If the separator is not +found, return a 3-tuple containing the string itself, followed by +two empty strings. +\versionadded{2.5} +\end{methoddesc} + \begin{methoddesc}[string]{rsplit}{\optional{sep \optional{,maxsplit}}} Return a list of the words in the string, using \var{sep} as the delimiter string. 
If \var{maxsplit} is given, at most \var{maxsplit} @@ -971,20 +988,22 @@ The conversion types are: \lineiii{u}{Unsigned decimal.}{} \lineiii{x}{Unsigned hexadecimal (lowercase).}{(2)} \lineiii{X}{Unsigned hexadecimal (uppercase).}{(2)} - \lineiii{e}{Floating point exponential format (lowercase).}{} - \lineiii{E}{Floating point exponential format (uppercase).}{} - \lineiii{f}{Floating point decimal format.}{} - \lineiii{F}{Floating point decimal format.}{} - \lineiii{g}{Same as \character{e} if exponent is greater than -4 or - less than precision, \character{f} otherwise.}{} - \lineiii{G}{Same as \character{E} if exponent is greater than -4 or - less than precision, \character{F} otherwise.}{} + \lineiii{e}{Floating point exponential format (lowercase).}{(3)} + \lineiii{E}{Floating point exponential format (uppercase).}{(3)} + \lineiii{f}{Floating point decimal format.}{(3)} + \lineiii{F}{Floating point decimal format.}{(3)} + \lineiii{g}{Floating point format. Uses exponential format + if exponent is greater than -4 or less than precision, + decimal format otherwise.}{(4)} + \lineiii{G}{Floating point format. Uses exponential format + if exponent is greater than -4 or less than precision, + decimal format otherwise.}{(4)} \lineiii{c}{Single character (accepts integer or single character string).}{} \lineiii{r}{String (converts any python object using - \function{repr()}).}{(3)} + \function{repr()}).}{(5)} \lineiii{s}{String (converts any python object using - \function{str()}).}{(4)} + \function{str()}).}{(6)} \lineiii{\%}{No argument is converted, results in a \character{\%} character in the result.}{} \end{tableiii} @@ -1004,10 +1023,27 @@ Notes: formatting of the number if the leading character of the result is not already a zero. \item[(3)] - The \code{\%r} conversion was added in Python 2.0. + The alternate form causes the result to always contain a decimal + point, even if no digits follow it. + + The precision determines the number of digits after the decimal + point and defaults to 6. \item[(4)] + The alternate form causes the result to always contain a decimal + point, and trailing zeroes are not removed as they would + otherwise be. + + The precision determines the number of significant digits before + and after the decimal point and defaults to 6. + \item[(5)] + The \code{\%r} conversion was added in Python 2.0. + + The precision determines the maximal number of characters used. + \item[(6)] If the object or format provided is a \class{unicode} string, the resulting string will also be \class{unicode}. + + The precision determines the maximal number of characters used. \end{description} % XXX Examples? @@ -1747,6 +1783,87 @@ implemented in C will have to provide a writable \end{memberdesc} +\subsection{Context Manager Types \label{typecontextmanager}} + +\versionadded{2.5} +\index{context manager} +\index{context management protocol} +\index{protocol!context management} + +Python's \keyword{with} statement supports the concept of a runtime +context defined by a context manager. This is implemented using +two separate methods that allow user-defined classes to define +a runtime context that is entered before the statement body is +executed and exited when the statement ends. 
+ +The \dfn{context management protocol} consists of a pair of +methods that need to be provided for a context manager object to +define a runtime context: + +\begin{methoddesc}[context manager]{__enter__}{} + Enter the runtime context and return either this object or another + object related to the runtime context. The value returned by this + method is bound to the identifier in the \keyword{as} clause of + \keyword{with} statements using this context manager. + + An example of a context manager that returns itself is a file object. + File objects return themselves from __enter__() to allow + \function{open()} to be used as the context expression in a + \keyword{with} statement. + + An example of a context manager that returns a related + object is the one returned by \code{decimal.Context.get_manager()}. + These managers set the active decimal context to a copy of the + original decimal context and then return the copy. This allows + changes to be made to the current decimal context in the body of + the \keyword{with} statement without affecting code outside + the \keyword{with} statement. +\end{methoddesc} + +\begin{methoddesc}[context manager]{__exit__}{exc_type, exc_val, exc_tb} + Exit the runtime context and return a Boolean flag indicating if any + exception that occurred should be suppressed. If an exception + occurred while executing the body of the \keyword{with} statement, the + arguments contain the exception type, value and traceback information. + Otherwise, all three arguments are \code{None}. + + Returning a true value from this method will cause the \keyword{with} + statement to suppress the exception and continue execution with the + statement immediately following the \keyword{with} statement. Otherwise + the exception continues propagating after this method has finished + executing. Exceptions that occur during execution of this method will + replace any exception that occurred in the body of the \keyword{with} + statement. + + The exception passed in should never be reraised explicitly; instead, + this method should return a false value to indicate that the method + completed successfully and does not want to suppress the raised + exception. This allows context management code (such as + \code{contextlib.nested}) to easily detect whether or not an + \method{__exit__()} method has actually failed. +\end{methoddesc} + +Python defines several context managers to support easy thread +synchronisation, prompt closure of files or other objects, and +simpler manipulation of the active decimal arithmetic +context. The specific types are not treated specially beyond +their implementation of the context management protocol. + +Python's generators and the \code{contextlib.contextfactory} decorator +provide a convenient way to implement these protocols. If a generator +function is decorated with the \code{contextlib.contextfactory} +decorator, it will return a context manager implementing the necessary +\method{__enter__()} and \method{__exit__()} methods, rather than the +iterator produced by an undecorated generator function. + +Note that there is no specific slot for any of these methods in the +type structure for Python objects in the Python/C API. Extension +types wanting to define these methods must provide them as normal +Python accessible methods. Compared to the overhead of setting up the +runtime context, the overhead of a single class dictionary lookup +is negligible.
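+
+As an illustrative sketch of the protocol (the \class{Timer} class below is
+invented for this example and is not part of the standard library; under
+Python 2.5 the \keyword{with} statement must first be enabled with a
+\code{__future__} import), a minimal context manager can be written as an
+ordinary class:
+
+\begin{verbatim}
+from __future__ import with_statement
+import time
+
+class Timer(object):
+    # Hypothetical example class, not a standard library type.
+    # __enter__ returns the object that the 'as' clause binds.
+    def __enter__(self):
+        self.start = time.time()
+        return self
+
+    # __exit__ returns a false value, so an exception raised in the
+    # body of the 'with' statement is not suppressed.
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self.elapsed = time.time() - self.start
+        return False
+
+def do_something():
+    time.sleep(0.1)      # stand-in for real work
+
+with Timer() as t:
+    do_something()
+print t.elapsed
+\end{verbatim}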
+ + \subsection{Other Built-in Types \label{typesother}} The interpreter supports several other kinds of objects. diff --git a/Doc/lib/libsubprocess.tex b/Doc/lib/libsubprocess.tex index 4417797..bde92eb 100644 --- a/Doc/lib/libsubprocess.tex +++ b/Doc/lib/libsubprocess.tex @@ -70,10 +70,10 @@ value for \var{bufsize} is \constant{0} (unbuffered). The \var{executable} argument specifies the program to execute. It is very seldom needed: Usually, the program to execute is defined by the -\var{args} argument. If \var{shell=True}, the \var{executable} +\var{args} argument. If \code{shell=True}, the \var{executable} argument specifies which shell to use. On \UNIX{}, the default shell -is /bin/sh. On Windows, the default shell is specified by the COMSPEC -environment variable. +is \file{/bin/sh}. On Windows, the default shell is specified by the +\envvar{COMSPEC} environment variable. \var{stdin}, \var{stdout} and \var{stderr} specify the executed programs' standard input, standard output and standard error file @@ -88,16 +88,19 @@ handle as for stdout. If \var{preexec_fn} is set to a callable object, this object will be called in the child process just before the child is executed. +(\UNIX{} only) If \var{close_fds} is true, all file descriptors except \constant{0}, \constant{1} and \constant{2} will be closed before the child process is -executed. +executed. (\UNIX{} only) If \var{shell} is \constant{True}, the specified command will be executed through the shell. -If \var{cwd} is not \code{None}, the current directory will be changed -to cwd before the child is executed. +If \var{cwd} is not \code{None}, the child's current directory will be +changed to \var{cwd} before it is executed. Note that this directory +is not considered when searching the executable, so you can't specify +the program's path relative to \var{cwd}. If \var{env} is not \code{None}, it defines the environment variables for the new process. diff --git a/Doc/lib/libsys.tex b/Doc/lib/libsys.tex index 1a57da4..6b5b755 100644 --- a/Doc/lib/libsys.tex +++ b/Doc/lib/libsys.tex @@ -410,7 +410,7 @@ else: Strings specifying the primary and secondary prompt of the interpreter. These are only defined if the interpreter is in interactive mode. Their initial values in this case are - \code{'>\code{>}> '} and \code{'... '}. If a non-string object is + \code{'>>>~'} and \code{'...~'}. If a non-string object is assigned to either variable, its \function{str()} is re-evaluated each time the interpreter prepares to read a new interactive command; this can be used to implement a dynamic prompt. diff --git a/Doc/lib/libtarfile.tex b/Doc/lib/libtarfile.tex index f705e8d..ca6e65a 100644 --- a/Doc/lib/libtarfile.tex +++ b/Doc/lib/libtarfile.tex @@ -128,7 +128,7 @@ Some facts and figures: \seemodule{zipfile}{Documentation of the \refmodule{zipfile} standard module.} - \seetitle[http://www.gnu.org/software/tar/manual/html_chapter/tar_8.html\#SEC134] + \seetitle[http://www.gnu.org/software/tar/manual/html_node/tar_134.html\#SEC134] {GNU tar manual, Basic Tar Format}{Documentation for tar archive files, including GNU tar extensions.} \end{seealso} @@ -334,8 +334,12 @@ the file's data itself. Create and return a \class{TarInfo} object from a string buffer. \end{methoddesc} -\begin{methoddesc}{tobuf}{} +\begin{methoddesc}{tobuf}{posix} Create a string buffer from a \class{TarInfo} object. + See \class{TarFile}'s \member{posix} attribute for information + on the \var{posix} argument. It defaults to \constant{False}. 
+ + \versionadded[The \var{posix} parameter]{2.5} \end{methoddesc} A \code{TarInfo} object has the following public data attributes: diff --git a/Doc/lib/libthread.tex b/Doc/lib/libthread.tex index 9e0c202..9573ab3 100644 --- a/Doc/lib/libthread.tex +++ b/Doc/lib/libthread.tex @@ -44,8 +44,8 @@ then the thread exits (but other threads continue to run). \end{funcdesc} \begin{funcdesc}{interrupt_main}{} -Raise a KeyboardInterrupt in the main thread. A subthread can use this -function to interrupt the main thread. +Raise a \exception{KeyboardInterrupt} exception in the main thread. A subthread +can use this function to interrupt the main thread. \versionadded{2.3} \end{funcdesc} diff --git a/Doc/lib/libtokenize.tex b/Doc/lib/libtokenize.tex index cdbb4b8..8c9ad3e 100644 --- a/Doc/lib/libtokenize.tex +++ b/Doc/lib/libtokenize.tex @@ -47,7 +47,7 @@ An older entry point is retained for backward compatibility: call to the function should return one line of input as a string. Alternately, \var{readline} may be a callable object that signals completion by raising \exception{StopIteration}. - \versionchanged[Added StopIteration support]{2.5} + \versionchanged[Added \exception{StopIteration} support]{2.5} The second parameter, \var{tokeneater}, must also be a callable object. It is called once for each token, with five arguments, diff --git a/Doc/lib/libtrace.tex b/Doc/lib/libtrace.tex new file mode 100644 index 0000000..2465aac --- /dev/null +++ b/Doc/lib/libtrace.tex @@ -0,0 +1,125 @@ +\section{\module{trace} --- + Trace or track Python statement execution} + +\declaremodule{standard}{trace} +\modulesynopsis{Trace or track Python statement execution.} + +The \module{trace} module allows you to trace program execution, generate +annotated statement coverage listings, print caller/callee relationships and +list functions executed during a program run. It can be used in another +program or from the command line. + +\subsection{Command Line Usage\label{trace-cli}} + +The \module{trace} module can be invoked from the command line. It can be +as simple as + +\begin{verbatim} +python -m trace --count somefile.py ... +\end{verbatim} + +The above will generate annotated listings of all Python modules imported +during the execution of \file{somefile.py}. + +The following command-line arguments are supported: + +\begin{description} +\item[\longprogramopt{trace}, \programopt{-t}] +Display lines as they are executed. + +\item[\longprogramopt{count}, \programopt{-c}] +Produce a set of annotated listing files upon program +completion that shows how many times each statement was executed. + +\item[\longprogramopt{report}, \programopt{-r}] +Produce an annotated list from an earlier program run that +used the \longprogramopt{count} and \longprogramopt{file} arguments. + +\item[\longprogramopt{no-report}, \programopt{-R}] +Do not generate annotated listings. This is useful if you intend to make +several runs with \longprogramopt{count} then produce a single set +of annotated listings at the end. + +\item[\longprogramopt{listfuncs}, \programopt{-l}] +List the functions executed by running the program. + +\item[\longprogramopt{trackcalls}, \programopt{-T}] +Generate calling relationships exposed by running the program. + +\item[\longprogramopt{file}, \programopt{-f}] +Name a file containing (or to contain) counts. + +\item[\longprogramopt{coverdir}, \programopt{-C}] +Name a directory in which to save annotated listing files. 
+ +\item[\longprogramopt{missing}, \programopt{-m}] +When generating annotated listings, mark lines which +were not executed with `\code{>>>>>>}'. + +\item[\longprogramopt{summary}, \programopt{-s}] +When using \longprogramopt{count} or \longprogramopt{report}, write a +brief summary to stdout for each file processed. + +\item[\longprogramopt{ignore-module}] +Ignore the named module and its submodules (if it is +a package). May be given multiple times. + +\item[\longprogramopt{ignore-dir}] +Ignore all modules and packages in the named directory +and subdirectories. May be given multiple times. +\end{description} + +\subsection{Programming Interface\label{trace-api}} + +\begin{classdesc}{Trace}{\optional{count=1\optional{, trace=1\optional{, + countfuncs=0\optional{, countcallers=0\optional{, + ignoremods=()\optional{, ignoredirs=()\optional{, + infile=None\optional{, outfile=None}}}}}}}}} +Create an object to trace execution of a single statement or expression. +All parameters are optional. \var{count} enables counting of line numbers. +\var{trace} enables line execution tracing. \var{countfuncs} enables +listing of the functions called during the run. \var{countcallers} enables +call relationship tracking. \var{ignoremods} is a list of modules or +packages to ignore. \var{ignoredirs} is a list of directories whose modules +or packages should be ignored. \var{infile} is the file from which to read +stored count information. \var{outfile} is a file in which to write updated +count information. +\end{classdesc} + +\begin{methoddesc}[Trace]{run}{cmd} +Run \var{cmd} under control of the Trace object with the current tracing +parameters. +\end{methoddesc} + +\begin{methoddesc}[Trace]{runctx}{cmd\optional{, globals=None\optional{, + locals=None}}} +Run \var{cmd} under control of the Trace object with the current tracing +parameters in the defined global and local environments. If not defined, +\var{globals} and \var{locals} default to empty dictionaries. +\end{methoddesc} + +\begin{methoddesc}[Trace]{runfunc}{func, *args, **kwds} +Call \var{func} with the given arguments under control of the +\class{Trace} object with the current tracing parameters. +\end{methoddesc} + +This is a simple example showing the use of this module: + +\begin{verbatim} +import sys +import trace + +# create a Trace object, telling it what to ignore, and whether to +# do tracing or line-counting or both. +tracer = trace.Trace( + ignoredirs=[sys.prefix, sys.exec_prefix], + trace=0, + count=1) + +# run the new command using the given tracer +tracer.run('main()') + +# make a report, placing output in /tmp +r = tracer.results() +r.write_results(show_missing=True, coverdir="/tmp") +\end{verbatim} diff --git a/Doc/lib/libunittest.tex b/Doc/lib/libunittest.tex index 6c8769d..51b321e 100644 --- a/Doc/lib/libunittest.tex +++ b/Doc/lib/libunittest.tex @@ -226,7 +226,7 @@ runs, an exception will be raised, and the testing framework will identify the test case as a \dfn{failure}. Other exceptions that do not arise from checks made through the \method{assert*()} and \method{fail*()} methods are identified by the testing framework as -dfn{errors}. +\dfn{errors}. The way to run a test case will be described later. For now, note that to construct an instance of such a test case, we call its diff --git a/Doc/lib/liburllib2.tex b/Doc/lib/liburllib2.tex index e0c4568..7c8ad5d 100644 --- a/Doc/lib/liburllib2.tex +++ b/Doc/lib/liburllib2.tex @@ -621,14 +621,20 @@ user/password. 
\subsection{AbstractBasicAuthHandler Objects \label{abstract-basic-auth-handler}} -\begin{methoddesc}[AbstractBasicAuthHandler]{handle_authentication_request} +\begin{methoddesc}[AbstractBasicAuthHandler]{http_error_auth_reqed} {authreq, host, req, headers} Handle an authentication request by getting a user/password pair, and re-trying the request. \var{authreq} should be the name of the header where the information about the realm is included in the request, -\var{host} is the host to authenticate to, \var{req} should be the -(failed) \class{Request} object, and \var{headers} should be the error -headers. +\var{host} specifies the URL and path to authenticate for, \var{req} +should be the (failed) \class{Request} object, and \var{headers} +should be the error headers. + +\var{host} is either an authority (e.g. \code{"python.org"}) or a URL +containing an authority component (e.g. \code{"http://python.org/"}). +In either case, the authority must not contain a userinfo component +(so, \code{"python.org"} and \code{"python.org:80"} are fine, +\code{"joe:password@python.org"} is not). \end{methoddesc} @@ -653,7 +659,7 @@ Retry the request with authentication information, if available. \subsection{AbstractDigestAuthHandler Objects \label{abstract-digest-auth-handler}} -\begin{methoddesc}[AbstractDigestAuthHandler]{handle_authentication_request} +\begin{methoddesc}[AbstractDigestAuthHandler]{http_error_auth_reqed} {authreq, host, req, headers} \var{authreq} should be the name of the header where the information about the realm is included in the request, \var{host} should be the host to diff --git a/Doc/lib/libweakref.tex b/Doc/lib/libweakref.tex index 840b674..fc949e6 100644 --- a/Doc/lib/libweakref.tex +++ b/Doc/lib/libweakref.tex @@ -147,6 +147,24 @@ information. to vanish "by magic" (as a side effect of garbage collection).} \end{classdesc} +\class{WeakKeyDictionary} objects have the following additional +methods. These expose the internal references directly. The +references are not guaranteed to be ``live'' at the time they are +used, so the result of calling the references needs to be checked +before being used. This can be used to avoid creating references that +will cause the garbage collector to keep the keys around longer than +needed. + +\begin{methoddesc}{iterkeyrefs}{} + Return an iterator that yields the weak references to the keys. + \versionadded{2.5} +\end{methoddesc} + +\begin{methoddesc}{keyrefs}{} + Return a list of weak references to the keys. + \versionadded{2.5} +\end{methoddesc} + \begin{classdesc}{WeakValueDictionary}{\optional{dict}} Mapping class that references values weakly. Entries in the dictionary will be discarded when no strong reference to the value @@ -160,6 +178,21 @@ information. to vanish "by magic" (as a side effect of garbage collection).} \end{classdesc} +\class{WeakValueDictionary} objects have the following additional +methods. These method have the same issues as the +\method{iterkeyrefs()} and \method{keyrefs()} methods of +\class{WeakKeyDictionary} objects. + +\begin{methoddesc}{itervaluerefs}{} + Return an iterator that yields the weak references to the values. + \versionadded{2.5} +\end{methoddesc} + +\begin{methoddesc}{valuerefs}{} + Return a list of weak references to the values. + \versionadded{2.5} +\end{methoddesc} + \begin{datadesc}{ReferenceType} The type object for weak references objects. 
\end{datadesc} diff --git a/Doc/lib/libxmlrpclib.tex b/Doc/lib/libxmlrpclib.tex index 1c36f99..3645b82 100644 --- a/Doc/lib/libxmlrpclib.tex +++ b/Doc/lib/libxmlrpclib.tex @@ -81,9 +81,11 @@ Python type): This is the full set of data types supported by XML-RPC. Method calls may also raise a special \exception{Fault} instance, used to signal XML-RPC server errors, or \exception{ProtocolError} used to signal an -error in the HTTP/HTTPS transport layer. Note that even though starting -with Python 2.2 you can subclass builtin types, the xmlrpclib module -currently does not marshal instances of such subclasses. +error in the HTTP/HTTPS transport layer. Both \exception{Fault} and +\exception{ProtocolError} derive from a base class called +\exception{Error}. Note that even though starting with Python 2.2 you +can subclass builtin types, the xmlrpclib module currently does not +marshal instances of such subclasses. When passing strings, characters special to XML such as \samp{<}, \samp{>}, and \samp{\&} will be automatically escaped. However, it's @@ -340,6 +342,7 @@ objects, they are converted to \class{DateTime} objects internally, so only \begin{verbatim} # simple test program (from the XML-RPC specification) +from xmlrpclib import ServerProxy, Error # server = ServerProxy("http://localhost:8000") # local server server = ServerProxy("http://betty.userland.com") diff --git a/Doc/lib/libzlib.tex b/Doc/lib/libzlib.tex index dfbb43d..876f8c0 100644 --- a/Doc/lib/libzlib.tex +++ b/Doc/lib/libzlib.tex @@ -123,6 +123,12 @@ prevents compressing any more data. After calling action is to delete the object. \end{methoddesc} +\begin{methoddesc}[Compress]{copy}{} +Returns a copy of the compression object. This can be used to efficiently +compress a set of data that share a common initial prefix. +\versionadded{2.5} +\end{methoddesc} + Decompression objects support the following methods, and two attributes: \begin{memberdesc}{unused_data} @@ -176,6 +182,13 @@ The optional parameter \var{length} sets the initial size of the output buffer. \end{methoddesc} +\begin{methoddesc}[Decompress]{copy}{} +Returns a copy of the decompression object. This can be used to save the +state of the decompressor midway through the data stream in order to speed up +random seeks into the stream at a future point. 
+\versionadded{2.5} +\end{methoddesc} + \begin{seealso} \seemodule{gzip}{Reading and writing \program{gzip}-format files.} \seeurl{http://www.zlib.net}{The zlib library home page.} diff --git a/Doc/lib/sqlite3/adapter_datetime.py b/Doc/lib/sqlite3/adapter_datetime.py new file mode 100644 index 0000000..3460498 --- /dev/null +++ b/Doc/lib/sqlite3/adapter_datetime.py @@ -0,0 +1,14 @@ +import sqlite3 +import datetime, time + +def adapt_datetime(ts): + return time.mktime(ts.timetuple()) + +sqlite3.register_adapter(datetime.datetime, adapt_datetime) + +con = sqlite3.connect(":memory:") +cur = con.cursor() + +now = datetime.datetime.now() +cur.execute("select ?", (now,)) +print cur.fetchone()[0] diff --git a/Doc/lib/sqlite3/adapter_point_1.py b/Doc/lib/sqlite3/adapter_point_1.py new file mode 100644 index 0000000..a741f6c --- /dev/null +++ b/Doc/lib/sqlite3/adapter_point_1.py @@ -0,0 +1,16 @@ +import sqlite3 + +class Point(object): + def __init__(self, x, y): + self.x, self.y = x, y + + def __conform__(self, protocol): + if protocol is sqlite3.PrepareProtocol: + return "%f;%f" % (self.x, self.y) + +con = sqlite3.connect(":memory:") +cur = con.cursor() + +p = Point(4.0, -3.2) +cur.execute("select ?", (p,)) +print cur.fetchone()[0] diff --git a/Doc/lib/sqlite3/adapter_point_2.py b/Doc/lib/sqlite3/adapter_point_2.py new file mode 100644 index 0000000..200a064 --- /dev/null +++ b/Doc/lib/sqlite3/adapter_point_2.py @@ -0,0 +1,17 @@ +import sqlite3 + +class Point(object): + def __init__(self, x, y): + self.x, self.y = x, y + +def adapt_point(point): + return "%f;%f" % (point.x, point.y) + +sqlite3.register_adapter(Point, adapt_point) + +con = sqlite3.connect(":memory:") +cur = con.cursor() + +p = Point(4.0, -3.2) +cur.execute("select ?", (p,)) +print cur.fetchone()[0] diff --git a/Doc/lib/sqlite3/collation_reverse.py b/Doc/lib/sqlite3/collation_reverse.py new file mode 100644 index 0000000..e956402 --- /dev/null +++ b/Doc/lib/sqlite3/collation_reverse.py @@ -0,0 +1,15 @@ +import sqlite3 + +def collate_reverse(string1, string2): + return -cmp(string1, string2) + +con = sqlite3.connect(":memory:") +con.create_collation("reverse", collate_reverse) + +cur = con.cursor() +cur.execute("create table test(x)") +cur.executemany("insert into test(x) values (?)", [("a",), ("b",)]) +cur.execute("select x from test order by x collate reverse") +for row in cur: + print row +con.close() diff --git a/Doc/lib/sqlite3/complete_statement.py b/Doc/lib/sqlite3/complete_statement.py new file mode 100644 index 0000000..89fc250 --- /dev/null +++ b/Doc/lib/sqlite3/complete_statement.py @@ -0,0 +1,30 @@ +# A minimal SQLite shell for experiments + +import sqlite3 + +con = sqlite3.connect(":memory:") +con.isolation_level = None +cur = con.cursor() + +buffer = "" + +print "Enter your SQL commands to execute in sqlite3." +print "Enter a blank line to exit." 
+ +while True: + line = raw_input() + if line == "": + break + buffer += line + if sqlite3.complete_statement(buffer): + try: + buffer = buffer.strip() + cur.execute(buffer) + + if buffer.lstrip().upper().startswith("SELECT"): + print cur.fetchall() + except sqlite3.Error, e: + print "An error occurred:", e.args[0] + buffer = "" + +con.close() diff --git a/Doc/lib/sqlite3/connect_db_1.py b/Doc/lib/sqlite3/connect_db_1.py new file mode 100644 index 0000000..1b97523 --- /dev/null +++ b/Doc/lib/sqlite3/connect_db_1.py @@ -0,0 +1,3 @@ +import sqlite3 + +con = sqlite3.connect("mydb") diff --git a/Doc/lib/sqlite3/connect_db_2.py b/Doc/lib/sqlite3/connect_db_2.py new file mode 100644 index 0000000..f9728b36 --- /dev/null +++ b/Doc/lib/sqlite3/connect_db_2.py @@ -0,0 +1,3 @@ +import sqlite3 + +con = sqlite3.connect(":memory:") diff --git a/Doc/lib/sqlite3/converter_point.py b/Doc/lib/sqlite3/converter_point.py new file mode 100644 index 0000000..e220e9b --- /dev/null +++ b/Doc/lib/sqlite3/converter_point.py @@ -0,0 +1,47 @@ +import sqlite3 + +class Point(object): + def __init__(self, x, y): + self.x, self.y = x, y + + def __repr__(self): + return "(%f;%f)" % (self.x, self.y) + +def adapt_point(point): + return "%f;%f" % (point.x, point.y) + +def convert_point(s): + x, y = map(float, s.split(";")) + return Point(x, y) + +# Register the adapter +sqlite3.register_adapter(Point, adapt_point) + +# Register the converter +sqlite3.register_converter("point", convert_point) + +p = Point(4.0, -3.2) + +######################### +# 1) Using declared types +con = sqlite3.connect(":memory:", detect_types=sqlite3.PARSE_DECLTYPES) +cur = con.cursor() +cur.execute("create table test(p point)") + +cur.execute("insert into test(p) values (?)", (p,)) +cur.execute("select p from test") +print "with declared types:", cur.fetchone()[0] +cur.close() +con.close() + +####################### +# 2) Using column names +con = sqlite3.connect(":memory:", detect_types=sqlite3.PARSE_COLNAMES) +cur = con.cursor() +cur.execute("create table test(p)") + +cur.execute("insert into test(p) values (?)", (p,)) +cur.execute('select p as "p [point]" from test') +print "with column names:", cur.fetchone()[0] +cur.close() +con.close() diff --git a/Doc/lib/sqlite3/countcursors.py b/Doc/lib/sqlite3/countcursors.py new file mode 100644 index 0000000..df04cad --- /dev/null +++ b/Doc/lib/sqlite3/countcursors.py @@ -0,0 +1,15 @@ +import sqlite3 + +class CountCursorsConnection(sqlite3.Connection): + def __init__(self, *args, **kwargs): + sqlite3.Connection.__init__(self, *args, **kwargs) + self.numcursors = 0 + + def cursor(self, *args, **kwargs): + self.numcursors += 1 + return sqlite3.Connection.cursor(self, *args, **kwargs) + +con = sqlite3.connect(":memory:", factory=CountCursorsConnection) +cur1 = con.cursor() +cur2 = con.cursor() +print con.numcursors diff --git a/Doc/lib/sqlite3/createdb.py b/Doc/lib/sqlite3/createdb.py new file mode 100644 index 0000000..ee2950b --- /dev/null +++ b/Doc/lib/sqlite3/createdb.py @@ -0,0 +1,28 @@ +# Not referenced from the documentation, but builds the database file the other +# code snippets expect.
+ +import sqlite3 +import os + +DB_FILE = "mydb" + +if os.path.exists(DB_FILE): + os.remove(DB_FILE) + +con = sqlite3.connect(DB_FILE) +cur = con.cursor() +cur.execute(""" + create table people + ( + name_last varchar(20), + age integer + ) + """) + +cur.execute("insert into people (name_last, age) values ('Yeltsin', 72)") +cur.execute("insert into people (name_last, age) values ('Putin', 51)") + +con.commit() + +cur.close() +con.close() diff --git a/Doc/lib/sqlite3/execsql_fetchonerow.py b/Doc/lib/sqlite3/execsql_fetchonerow.py new file mode 100644 index 0000000..8044ecf --- /dev/null +++ b/Doc/lib/sqlite3/execsql_fetchonerow.py @@ -0,0 +1,17 @@ +import sqlite3 + +con = sqlite3.connect("mydb") + +cur = con.cursor() +SELECT = "select name_last, age from people order by age, name_last" + +# 1. Iterate over the rows available from the cursor, unpacking the +# resulting sequences to yield their elements (name_last, age): +cur.execute(SELECT) +for (name_last, age) in cur: + print '%s is %d years old.' % (name_last, age) + +# 2. Equivalently: +cur.execute(SELECT) +for row in cur: + print '%s is %d years old.' % (row[0], row[1]) diff --git a/Doc/lib/sqlite3/execsql_printall_1.py b/Doc/lib/sqlite3/execsql_printall_1.py new file mode 100644 index 0000000..d27d735 --- /dev/null +++ b/Doc/lib/sqlite3/execsql_printall_1.py @@ -0,0 +1,13 @@ +import sqlite3 + +# Create a connection to the database file "mydb": +con = sqlite3.connect("mydb") + +# Get a Cursor object that operates in the context of Connection con: +cur = con.cursor() + +# Execute the SELECT statement: +cur.execute("select * from people order by age") + +# Retrieve all rows as a sequence and print that sequence: +print cur.fetchall() diff --git a/Doc/lib/sqlite3/execute_1.py b/Doc/lib/sqlite3/execute_1.py new file mode 100644 index 0000000..fb3784f --- /dev/null +++ b/Doc/lib/sqlite3/execute_1.py @@ -0,0 +1,11 @@ +import sqlite3 + +con = sqlite3.connect("mydb") + +cur = con.cursor() + +who = "Yeltsin" +age = 72 + +cur.execute("select name_last, age from people where name_last=? 
and age=?", (who, age)) +print cur.fetchone() diff --git a/Doc/lib/sqlite3/execute_2.py b/Doc/lib/sqlite3/execute_2.py new file mode 100644 index 0000000..df6c894 --- /dev/null +++ b/Doc/lib/sqlite3/execute_2.py @@ -0,0 +1,12 @@ +import sqlite3 + +con = sqlite3.connect("mydb") + +cur = con.cursor() + +who = "Yeltsin" +age = 72 + +cur.execute("select name_last, age from people where name_last=:who and age=:age", + {"who": who, "age": age}) +print cur.fetchone() diff --git a/Doc/lib/sqlite3/execute_3.py b/Doc/lib/sqlite3/execute_3.py new file mode 100644 index 0000000..b64621f --- /dev/null +++ b/Doc/lib/sqlite3/execute_3.py @@ -0,0 +1,12 @@ +import sqlite3 + +con = sqlite3.connect("mydb") + +cur = con.cursor() + +who = "Yeltsin" +age = 72 + +cur.execute("select name_last, age from people where name_last=:who and age=:age", + locals()) +print cur.fetchone() diff --git a/Doc/lib/sqlite3/executemany_1.py b/Doc/lib/sqlite3/executemany_1.py new file mode 100644 index 0000000..24357c5 --- /dev/null +++ b/Doc/lib/sqlite3/executemany_1.py @@ -0,0 +1,24 @@ +import sqlite3 + +class IterChars: + def __init__(self): + self.count = ord('a') + + def __iter__(self): + return self + + def next(self): + if self.count > ord('z'): + raise StopIteration + self.count += 1 + return (chr(self.count - 1),) # this is a 1-tuple + +con = sqlite3.connect(":memory:") +cur = con.cursor() +cur.execute("create table characters(c)") + +theIter = IterChars() +cur.executemany("insert into characters(c) values (?)", theIter) + +cur.execute("select c from characters") +print cur.fetchall() diff --git a/Doc/lib/sqlite3/executemany_2.py b/Doc/lib/sqlite3/executemany_2.py new file mode 100644 index 0000000..05857c0 --- /dev/null +++ b/Doc/lib/sqlite3/executemany_2.py @@ -0,0 +1,15 @@ +import sqlite3 + +def char_generator(): + import string + for c in string.letters[:26]: + yield (c,) + +con = sqlite3.connect(":memory:") +cur = con.cursor() +cur.execute("create table characters(c)") + +cur.executemany("insert into characters(c) values (?)", char_generator()) + +cur.execute("select c from characters") +print cur.fetchall() diff --git a/Doc/lib/sqlite3/executescript.py b/Doc/lib/sqlite3/executescript.py new file mode 100644 index 0000000..0795b47 --- /dev/null +++ b/Doc/lib/sqlite3/executescript.py @@ -0,0 +1,24 @@ +import sqlite3 + +con = sqlite3.connect(":memory:") +cur = con.cursor() +cur.executescript(""" + create table person( + firstname, + lastname, + age + ); + + create table book( + title, + author, + published + ); + + insert into book(title, author, published) + values ( + 'Dirk Gently''s Holistic Detective Agency + 'Douglas Adams', + 1987 + ); + """) diff --git a/Doc/lib/sqlite3/insert_more_people.py b/Doc/lib/sqlite3/insert_more_people.py new file mode 100644 index 0000000..edbc79e --- /dev/null +++ b/Doc/lib/sqlite3/insert_more_people.py @@ -0,0 +1,16 @@ +import sqlite3 + +con = sqlite3.connect("mydb") + +cur = con.cursor() + +newPeople = ( + ('Lebed' , 53), + ('Zhirinovsky' , 57), + ) + +for person in newPeople: + cur.execute("insert into people (name_last, age) values (?, ?)", person) + +# The changes will not be saved unless the transaction is committed explicitly: +con.commit() diff --git a/Doc/lib/sqlite3/md5func.py b/Doc/lib/sqlite3/md5func.py new file mode 100644 index 0000000..5769687 --- /dev/null +++ b/Doc/lib/sqlite3/md5func.py @@ -0,0 +1,11 @@ +import sqlite3 +import md5 + +def md5sum(t): + return md5.md5(t).hexdigest() + +con = sqlite3.connect(":memory:") +con.create_function("md5", 1, md5sum) +cur = 
con.cursor() +cur.execute("select md5(?)", ("foo",)) +print cur.fetchone()[0] diff --git a/Doc/lib/sqlite3/mysumaggr.py b/Doc/lib/sqlite3/mysumaggr.py new file mode 100644 index 0000000..6d0cd55 --- /dev/null +++ b/Doc/lib/sqlite3/mysumaggr.py @@ -0,0 +1,20 @@ +import sqlite3 + +class MySum: + def __init__(self): + self.count = 0 + + def step(self, value): + self.count += value + + def finalize(self): + return self.count + +con = sqlite3.connect(":memory:") +con.create_aggregate("mysum", 1, MySum) +cur = con.cursor() +cur.execute("create table test(i)") +cur.execute("insert into test(i) values (1)") +cur.execute("insert into test(i) values (2)") +cur.execute("select mysum(i) from test") +print cur.fetchone()[0] diff --git a/Doc/lib/sqlite3/parse_colnames.py b/Doc/lib/sqlite3/parse_colnames.py new file mode 100644 index 0000000..fcded00 --- /dev/null +++ b/Doc/lib/sqlite3/parse_colnames.py @@ -0,0 +1,8 @@ +import sqlite3 +import datetime + +con = sqlite3.connect(":memory:", detect_types=sqlite3.PARSE_COLNAMES) +cur = con.cursor() +cur.execute('select ? as "x [timestamp]"', (datetime.datetime.now(),)) +dt = cur.fetchone()[0] +print dt, type(dt) diff --git a/Doc/lib/sqlite3/pysqlite_datetime.py b/Doc/lib/sqlite3/pysqlite_datetime.py new file mode 100644 index 0000000..efa4b06 --- /dev/null +++ b/Doc/lib/sqlite3/pysqlite_datetime.py @@ -0,0 +1,20 @@ +import sqlite3 +import datetime + +con = sqlite3.connect(":memory:", detect_types=sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES) +cur = con.cursor() +cur.execute("create table test(d date, ts timestamp)") + +today = datetime.date.today() +now = datetime.datetime.now() + +cur.execute("insert into test(d, ts) values (?, ?)", (today, now)) +cur.execute("select d, ts from test") +row = cur.fetchone() +print today, "=>", row[0], type(row[0]) +print now, "=>", row[1], type(row[1]) + +cur.execute('select current_date as "d [date]", current_timestamp as "ts [timestamp]"') +row = cur.fetchone() +print "current_date", row[0], type(row[0]) +print "current_timestamp", row[1], type(row[1]) diff --git a/Doc/lib/sqlite3/row_factory.py b/Doc/lib/sqlite3/row_factory.py new file mode 100644 index 0000000..64676c8 --- /dev/null +++ b/Doc/lib/sqlite3/row_factory.py @@ -0,0 +1,13 @@ +import sqlite3 + +def dict_factory(cursor, row): + d = {} + for idx, col in enumerate(cursor.description): + d[col[0]] = row[idx] + return d + +con = sqlite3.connect(":memory:") +con.row_factory = dict_factory +cur = con.cursor() +cur.execute("select 1 as a") +print cur.fetchone()["a"] diff --git a/Doc/lib/sqlite3/rowclass.py b/Doc/lib/sqlite3/rowclass.py new file mode 100644 index 0000000..3fa0b87 --- /dev/null +++ b/Doc/lib/sqlite3/rowclass.py @@ -0,0 +1,12 @@ +import sqlite3 + +con = sqlite3.connect("mydb") +con.row_factory = sqlite3.Row + +cur = con.cursor() +cur.execute("select name_last, age from people") +for row in cur: + assert row[0] == row["name_last"] + assert row["name_last"] == row["nAmE_lAsT"] + assert row[1] == row["age"] + assert row[1] == row["AgE"] diff --git a/Doc/lib/sqlite3/shared_cache.py b/Doc/lib/sqlite3/shared_cache.py new file mode 100644 index 0000000..bf1d7b4 --- /dev/null +++ b/Doc/lib/sqlite3/shared_cache.py @@ -0,0 +1,6 @@ +import sqlite3 + +# The shared cache is only available in SQLite versions 3.3.3 or later +# See the SQLite documentation for details.
+ +sqlite3.enable_shared_cache(True) diff --git a/Doc/lib/sqlite3/shortcut_methods.py b/Doc/lib/sqlite3/shortcut_methods.py new file mode 100644 index 0000000..72ed4b3 --- /dev/null +++ b/Doc/lib/sqlite3/shortcut_methods.py @@ -0,0 +1,21 @@ +import sqlite3 + +persons = [ + ("Hugo", "Boss"), + ("Calvin", "Klein") + ] + +con = sqlite3.connect(":memory:") + +# Create the table +con.execute("create table person(firstname, lastname)") + +# Fill the table +con.executemany("insert into person(firstname, lastname) values (?, ?)", persons) + +# Print the table contents +for row in con.execute("select firstname, lastname from person"): + print row + +# Using a dummy WHERE clause to not let SQLite take the shortcut table deletes. +print "I just deleted", con.execute("delete from person where 1=1").rowcount, "rows" diff --git a/Doc/lib/sqlite3/simple_tableprinter.py b/Doc/lib/sqlite3/simple_tableprinter.py new file mode 100644 index 0000000..67ea6a2 --- /dev/null +++ b/Doc/lib/sqlite3/simple_tableprinter.py @@ -0,0 +1,26 @@ +import sqlite3 + +FIELD_MAX_WIDTH = 20 +TABLE_NAME = 'people' +SELECT = 'select * from %s order by age, name_last' % TABLE_NAME + +con = sqlite3.connect("mydb") + +cur = con.cursor() +cur.execute(SELECT) + +# Print a header. +for fieldDesc in cur.description: + print fieldDesc[0].ljust(FIELD_MAX_WIDTH) , +print # Finish the header with a newline. +print '-' * 78 + +# For each row, print the value of each field left-justified within +# the maximum possible width of that field. +fieldIndices = range(len(cur.description)) +for row in cur: + for fieldIndex in fieldIndices: + fieldValue = str(row[fieldIndex]) + print fieldValue.ljust(FIELD_MAX_WIDTH) , + + print # Finish the row with a newline. diff --git a/Doc/lib/sqlite3/text_factory.py b/Doc/lib/sqlite3/text_factory.py new file mode 100644 index 0000000..3e157a8 --- /dev/null +++ b/Doc/lib/sqlite3/text_factory.py @@ -0,0 +1,42 @@ +import sqlite3 + +con = sqlite3.connect(":memory:") +cur = con.cursor() + +# Create the table +con.execute("create table person(lastname, firstname)") + +AUSTRIA = u"\xd6sterreich" + +# by default, rows are returned as Unicode +cur.execute("select ?", (AUSTRIA,)) +row = cur.fetchone() +assert row[0] == AUSTRIA + +# but we can make pysqlite always return bytestrings ... +con.text_factory = str +cur.execute("select ?", (AUSTRIA,)) +row = cur.fetchone() +assert type(row[0]) == str +# the bytestrings will be encoded in UTF-8, unless you stored garbage in the +# database ... +assert row[0] == AUSTRIA.encode("utf-8") + +# we can also implement a custom text_factory ... +# here we implement one that will ignore Unicode characters that cannot be +# decoded from UTF-8 +con.text_factory = lambda x: unicode(x, "utf-8", "ignore") +cur.execute("select ?", ("this is latin1 and would normally create errors" + u"\xe4\xf6\xfc".encode("latin1"),)) +row = cur.fetchone() +assert type(row[0]) == unicode + +# pysqlite offers a builtin optimized text_factory that will return bytestring +# objects, if the data is in ASCII only, and otherwise return unicode objects +con.text_factory = sqlite3.OptimizedUnicode +cur.execute("select ?", (AUSTRIA,)) +row = cur.fetchone() +assert type(row[0]) == unicode + +cur.execute("select ?", ("Germany",)) +row = cur.fetchone() +assert type(row[0]) == str diff --git a/Doc/mac/scripting.tex b/Doc/mac/scripting.tex index a6d5df7..5ec4978 100644 --- a/Doc/mac/scripting.tex +++ b/Doc/mac/scripting.tex @@ -12,7 +12,7 @@ read Apple's documentation. 
The "Applescript Language Guide" explains the conceptual model and the terminology, and documents the standard suite. The "Open Scripting Architecture" document explains how to use OSA from an application programmers point of view. In the Apple Help -Viewer these book sare located in the Developer Documentation, Core +Viewer these books are located in the Developer Documentation, Core Technologies section. @@ -49,7 +49,7 @@ line. The generated output is a package with a number of modules, one for every suite used in the program plus an \module{__init__} module to glue it all together. The Python inheritance graph follows the AppleScript -inheritance graph, so if a programs dictionary specifies that it +inheritance graph, so if a program's dictionary specifies that it includes support for the Standard Suite, but extends one or two verbs with extra arguments then the output suite will contain a module \module{Standard_Suite} that imports and re-exports everything from diff --git a/Doc/perl/l2hinit.perl b/Doc/perl/l2hinit.perl index 89deed0..7c5d123 100644 --- a/Doc/perl/l2hinit.perl +++ b/Doc/perl/l2hinit.perl @@ -4,7 +4,17 @@ package main; use L2hos; -$HTML_VERSION = 4.0; +$HTML_VERSION = 4.01; +$LOWER_CASE_TAGS = 1; +$NO_FRENCH_QUOTES = 1; + +# '' in \code{...} is still converted, so we can't use this yet. +#$USE_CURLY_QUOTES = 1; + +# Force Unicode support to be loaded; request UTF-8 output. +do_require_extension('unicode'); +do_require_extension('utf8'); +$HTML_OPTIONS = 'utf8'; $MAX_LINK_DEPTH = 2; $ADDRESS = ''; @@ -106,6 +116,13 @@ sub custom_driver_hook { $ENV{'TEXINPUTS'} = undef; } print "\nSetting \$TEXINPUTS to $TEXINPUTS\n"; + + # Not sure why we need to deal with this both here and at the top, + # but this is needed to actually make it work. + do_require_extension('utf8'); + $charset = $utf8_str; + $CHARSET = $utf8_str; + $USE_UTF = 1; } diff --git a/Doc/perl/python.perl b/Doc/perl/python.perl index 437c5cb..ab93c7c 100644 --- a/Doc/perl/python.perl +++ b/Doc/perl/python.perl @@ -530,7 +530,6 @@ sub add_index_entry($$){ sub new_link_name_info(){ my $name = "l2h-" . ++$globals{'max_id'}; - my $aname = "
"; my $ahref = gen_link($CURRENT_FILE, $name); return ($name, $ahref); } diff --git a/Doc/ref/ref2.tex b/Doc/ref/ref2.tex index 34e8a9e..2ed8a5d 100644 --- a/Doc/ref/ref2.tex +++ b/Doc/ref/ref2.tex @@ -308,22 +308,28 @@ identifiers. They must be spelled exactly as written here:% \index{reserved word} \begin{verbatim} -and del for is raise -assert elif from lambda return -break else global not try -class except if or while -continue exec import pass yield -def finally in print +and del from not while +as elif global or with +assert else if pass yield +break except import print +class exec in raise +continue finally is return +def for lambda try \end{verbatim} % When adding keywords, use reswords.py for reformatting -Note that although the identifier \code{as} can be used as part of the -syntax of \keyword{import} statements, it is not currently a reserved -word. +\versionchanged[\constant{None} became a constant and is now +recognized by the compiler as a name for the built-in object +\constant{None}. Although it is not a keyword, you cannot assign +a different object to it]{2.4} -In some future version of Python, the identifiers \code{as} and -\code{None} will both become keywords. +\versionchanged[Both \keyword{as} and \keyword{with} are only recognized +when the \code{with_statement} future feature has been enabled. +It will always be enabled in Python 2.6. See section~\ref{with} for +details. Note that using \keyword{as} and \keyword{with} as identifiers +will always issue a warning, even when the \code{with_statement} future +directive is not in effect]{2.5} \subsection{Reserved classes of identifiers\label{id-classes}} @@ -652,7 +658,7 @@ Some examples of floating point literals: \end{verbatim} Note that numeric literals do not include a sign; a phrase like -\code{-1} is actually an expression composed of the operator +\code{-1} is actually an expression composed of the unary operator \code{-} and the literal \code{1}. diff --git a/Doc/ref/ref3.tex b/Doc/ref/ref3.tex index 964013f..d0c8ccf 100644 --- a/Doc/ref/ref3.tex +++ b/Doc/ref/ref3.tex @@ -1875,8 +1875,8 @@ These methods are called to implement the binary arithmetic operations (\code{+}, \code{-}, \code{*}, \code{//}, \code{\%}, \function{divmod()}\bifuncindex{divmod}, -\function{pow()}\bifuncindex{pow}, \code{**}, \code{<}\code{<}, -\code{>}\code{>}, \code{\&}, \code{\^}, \code{|}). For instance, to +\function{pow()}\bifuncindex{pow}, \code{**}, \code{<<}, +\code{>>}, \code{\&}, \code{\^}, \code{|}). For instance, to evaluate the expression \var{x}\code{+}\var{y}, where \var{x} is an instance of a class that has an \method{__add__()} method, \code{\var{x}.__add__(\var{y})} is called. The \method{__divmod__()} @@ -1915,8 +1915,8 @@ These methods are called to implement the binary arithmetic operations (\code{+}, \code{-}, \code{*}, \code{/}, \code{\%}, \function{divmod()}\bifuncindex{divmod}, -\function{pow()}\bifuncindex{pow}, \code{**}, \code{<}\code{<}, -\code{>}\code{>}, \code{\&}, \code{\^}, \code{|}) with reflected +\function{pow()}\bifuncindex{pow}, \code{**}, \code{<<}, +\code{>>}, \code{\&}, \code{\^}, \code{|}) with reflected (swapped) operands. These functions are only called if the left operand does not support the corresponding operation. For instance, to evaluate the expression \var{x}\code{-}\var{y}, where \var{y} is an @@ -1942,7 +1942,7 @@ complicated). 
\methodline[numeric object]{__ior__}{self, other} These methods are called to implement the augmented arithmetic operations (\code{+=}, \code{-=}, \code{*=}, \code{/=}, \code{\%=}, -\code{**=}, \code{<}\code{<=}, \code{>}\code{>=}, \code{\&=}, +\code{**=}, \code{<<=}, \code{>>=}, \code{\&=}, \code{\textasciicircum=}, \code{|=}). These methods should attempt to do the operation in-place (modifying \var{self}) and return the result (which could be, but does not have to be, \var{self}). If a specific method @@ -1983,9 +1983,9 @@ Called to implement the built-in functions \end{methoddesc} \begin{methoddesc}[numeric object]{__index__}{self} -Called to implement operator.index(). Also called whenever Python -needs an integer object (such as in slicing). Must return an integer -(int or long). +Called to implement \function{operator.index()}. Also called whenever +Python needs an integer object (such as in slicing). Must return an +integer (int or long). \versionadded{2.5} \end{methoddesc} @@ -2112,49 +2112,41 @@ implement a \method{__coerce__()} method, for use by the built-in \end{itemize} -\subsection{Context Managers and Contexts\label{context-managers}} +\subsection{With Statement Context Managers\label{context-managers}} \versionadded{2.5} -A \dfn{context manager} is an object that manages the entry to, and exit -from, a \dfn{context} surrounding a block of code. Context managers are -normally invoked using the \keyword{with} statement (described in -section~\ref{with}), but can also be used by directly invoking their -methods. +A \dfn{context manager} is an object that defines the runtime +context to be established when executing a \keyword{with} +statement. The context manager handles the entry into, +and the exit from, the desired runtime context for the execution +of the block of code. Context managers are normally invoked using +the \keyword{with} statement (described in section~\ref{with}), but +can also be used by directly invoking their methods. + \stindex{with} \index{context manager} -\index{context} - -Typical uses of context managers include saving and restoring various -kinds of global state, locking and unlocking resources, closing opened -files, etc. - -\begin{methoddesc}[context manager]{__context__}{self} -Invoked when the object is used as the context expression of a -\keyword{with} statement. The return value must implement -\method{__enter__()} and \method{__exit__()} methods. Simple context -managers that wish to directly -implement \method{__enter__()} and \method{__exit__()} should just -return \var{self}. - -Context managers written in Python can also implement this method using -a generator function decorated with the -\function{contextlib.contextmanager} decorator, as this can be simpler -than writing individual \method{__enter__()} and \method{__exit__()} -methods when the state to be managed is complex. -\end{methoddesc} -\begin{methoddesc}[context]{__enter__}{self} -Enter the context defined by this object. The \keyword{with} statement -will bind this method's return value to the target(s) specified in the -\keyword{as} clause of the statement, if any. +Typical uses of context managers include saving and +restoring various kinds of global state, locking and unlocking +resources, closing opened files, etc. + +For more information on context managers, see +``\ulink{Context Types}{../lib/typecontextmanager.html}'' in the +\citetitle[../lib/lib.html]{Python Library Reference}. 
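As an illustration (a minimal sketch that is not part of the reference text; the class and variable names are invented for the example), a context manager that temporarily changes the current working directory could be written like this:

\begin{verbatim}
from __future__ import with_statement   # required in Python 2.5
import os

class changed_dir(object):
    # Hypothetical example class implementing the protocol described below.
    def __init__(self, path):
        self.path = path
    def __enter__(self):
        self.saved = os.getcwd()
        os.chdir(self.path)
        return self.path              # bound to the 'as' target, if any
    def __exit__(self, exc_type, exc_value, traceback):
        os.chdir(self.saved)
        return False                  # a false value lets exceptions propagate

with changed_dir("/tmp") as where:
    print os.listdir(where)
\end{verbatim}

The two methods used by the \keyword{with} statement are described below.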
+ +\begin{methoddesc}[context manager]{__enter__}{self} +Enter the runtime context related to this object. The \keyword{with} +statement will bind this method's return value to the target(s) +specified in the \keyword{as} clause of the statement, if any. \end{methoddesc} -\begin{methoddesc}[context]{__exit__}{exc_type, exc_value, traceback} -Exit the context defined by this object. The parameters describe the -exception that caused the context to be exited. If the context was -exited without an exception, all three arguments will be -\constant{None}. +\begin{methoddesc}[context manager]{__exit__} +{self, exc_type, exc_value, traceback} +Exit the runtime context related to this object. The parameters +describe the exception that caused the context to be exited. If +the context was exited without an exception, all three arguments +will be \constant{None}. If an exception is supplied, and the method wishes to suppress the exception (i.e., prevent it from being propagated), it should return a diff --git a/Doc/ref/ref4.tex b/Doc/ref/ref4.tex index 6a3a4ef..dcdc823 100644 --- a/Doc/ref/ref4.tex +++ b/Doc/ref/ref4.tex @@ -127,7 +127,7 @@ to delete the name. An error will be reported at compile time. If the wild card form of import --- \samp{import *} --- is used in a function and the function contains or is a nested block with free -variables, the compiler will raise a SyntaxError. +variables, the compiler will raise a \exception{SyntaxError}. If \keyword{exec} is used in a function and the function contains or is a nested block with free variables, the compiler will raise a diff --git a/Doc/ref/ref5.tex b/Doc/ref/ref5.tex index 1f2dc5e..89f9977 100644 --- a/Doc/ref/ref5.tex +++ b/Doc/ref/ref5.tex @@ -22,9 +22,9 @@ are the same as for \code{othername}. When a description of an arithmetic operator below uses the phrase ``the numeric arguments are converted to a common type,'' the -arguments are coerced using the coercion rules listed at the end of -chapter \ref{datamodel}. If both arguments are standard numeric -types, the following coercions are applied: +arguments are coerced using the coercion rules listed at +~\ref{coercion-rules}. If both arguments are standard numeric types, +the following coercions are applied: \begin{itemize} \item If either argument is a complex number, the other is converted @@ -391,7 +391,8 @@ type but a string of exactly one character. A slicing selects a range of items in a sequence object (e.g., a string, tuple or list). Slicings may be used as expressions or as -targets in assignment or del statements. The syntax for a slicing: +targets in assignment or \keyword{del} statements. The syntax for a +slicing: \obindex{sequence} \obindex{string} \obindex{tuple} @@ -1158,7 +1159,7 @@ have the same precedence and chain from left to right --- see section \hline \lineii{\code{\&}} {Bitwise AND} \hline - \lineii{\code{<}\code{<}, \code{>}\code{>}} {Shifts} + \lineii{\code{<<}, \code{>>}} {Shifts} \hline \lineii{\code{+}, \code{-}}{Addition and subtraction} \hline diff --git a/Doc/ref/ref6.tex b/Doc/ref/ref6.tex index 1eb1258..04db013 100644 --- a/Doc/ref/ref6.tex +++ b/Doc/ref/ref6.tex @@ -377,7 +377,7 @@ right type (but even this is determined by the sliced object). 
\begin{productionlist} \production{print_stmt} {"print" ( \optional{\token{expression} ("," \token{expression})* \optional{","}}} - \productioncont{| ">\code{>}" \token{expression} + \productioncont{| ">>" \token{expression} \optional{("," \token{expression})+ \optional{","}} )} \end{productionlist} @@ -417,7 +417,7 @@ exception is raised. \keyword{print} also has an extended\index{extended print statement} form, defined by the second portion of the syntax described above. This form is sometimes referred to as ``\keyword{print} chevron.'' -In this form, the first expression after the \code{>}\code{>} must +In this form, the first expression after the \code{>>} must evaluate to a ``file-like'' object, specifically an object that has a \method{write()} method as described above. With this extended form, the subsequent expressions are printed to this file object. If the @@ -809,13 +809,14 @@ import __future__ [as name] That is not a future statement; it's an ordinary import statement with no special semantics or syntax restrictions. -Code compiled by an exec statement or calls to the builtin functions +Code compiled by an \keyword{exec} statement or calls to the builtin functions \function{compile()} and \function{execfile()} that occur in a module \module{M} containing a future statement will, by default, use the new syntax or semantics associated with the future statement. This can, starting with Python 2.2 be controlled by optional arguments to -\function{compile()} --- see the documentation of that function in the -library reference for details. +\function{compile()} --- see the documentation of that function in the +\citetitle[../lib/built-in-funcs.html]{Python Library Reference} for +details. A future statement typed at an interactive interpreter prompt will take effect for the rest of the interpreter session. If an diff --git a/Doc/ref/ref7.tex b/Doc/ref/ref7.tex index a2d46a8..6bc0b08 100644 --- a/Doc/ref/ref7.tex +++ b/Doc/ref/ref7.tex @@ -281,11 +281,8 @@ and is not handled, the exception is temporarily saved. The it is re-raised at the end of the \keyword{finally} clause. If the \keyword{finally} clause raises another exception or executes a \keyword{return} or \keyword{break} statement, the saved -exception is lost. A \keyword{continue} statement is illegal in the -\keyword{finally} clause. (The reason is a problem with the current -implementation -- this restriction may be lifted in the future). The -exception information is not available to the program during execution of -the \keyword{finally} clause. +exception is lost. The exception information is not available to the +program during execution of the \keyword{finally} clause. \kwindex{finally} When a \keyword{return}, \keyword{break} or \keyword{continue} statement is @@ -312,38 +309,34 @@ The \keyword{with} statement is used to wrap the execution of a block with methods defined by a context manager (see section~\ref{context-managers}). This allows common \keyword{try}...\keyword{except}...\keyword{finally} usage patterns to -be encapsulated as context managers for convenient reuse. +be encapsulated for convenient reuse. \begin{productionlist} \production{with_stmt} - {"with" \token{expression} ["as" target_list] ":" \token{suite}} + {"with" \token{expression} ["as" target] ":" \token{suite}} \end{productionlist} The execution of the \keyword{with} statement proceeds as follows: \begin{enumerate} -\item The expression is evaluated, to obtain a context manager -object. 
+\item The context expression is evaluated to obtain a context manager. -\item The context manager's \method{__context__()} method is invoked to -obtain a context object. +\item The context manager's \method{__enter__()} method is invoked. -\item The context object's \method{__enter__()} method is invoked. - -\item If a target list was included in the \keyword{with} +\item If a target was included in the \keyword{with} statement, the return value from \method{__enter__()} is assigned to it. \note{The \keyword{with} statement guarantees that if the \method{__enter__()} method returns without an error, then \method{__exit__()} will always be called. Thus, if an error occurs during the assignment to the target list, it will be treated the same as -an error occurring within the suite would be. See step 6 below.} +an error occurring within the suite would be. See step 5 below.} \item The suite is executed. -\item The context object's \method{__exit__()} method is invoked. If an -exception caused the suite to be exited, its type, value, and +\item The context manager's \method{__exit__()} method is invoked. If +an exception caused the suite to be exited, its type, value, and traceback are passed as arguments to \method{__exit__()}. Otherwise, three \constant{None} arguments are supplied. diff --git a/Doc/texinputs/python.sty b/Doc/texinputs/python.sty index 4cb02de..3ce62f4 100644 --- a/Doc/texinputs/python.sty +++ b/Doc/texinputs/python.sty @@ -848,8 +848,17 @@ % but only if we actually used hyperref: \ifpdf \newcommand{\url}[1]{{% - \py@pdfstartlink attr{/Border [0 0 0]} user{/S /URI /URI (#1)}% - \py@LinkColor% color of the link text + \py@pdfstartlink% + attr{ /Border [0 0 0] }% + user{% + /Subtype/Link% + /A<<% + /Type/Action% + /S/URI% + /URI(#1)% + >>% + }% + \py@LinkColor% color of the link text \py@smallsize\sf #1% \py@NormalColor% Turn it back off; these are declarative \pdfendlink}% and don't appear bound to the current @@ -925,7 +934,16 @@ \ifpdf \newcommand{\ulink}[2]{{% % For PDF, we *should* only generate a link when the URL is absolute. - \py@pdfstartlink attr{/Border [0 0 0]} user{/S /URI /URI (#2)}% + \py@pdfstartlink% + attr{ /Border [0 0 0] }% + user{% + /Subtype/Link% + /A<<% + /Type/Action% + /S/URI% + /URI(#2)% + >>% + }% \py@LinkColor% color of the link text #1% \py@NormalColor% Turn it back off; these are declarative diff --git a/Doc/tut/glossary.tex b/Doc/tut/glossary.tex index c8082d5..17cc767 100644 --- a/Doc/tut/glossary.tex +++ b/Doc/tut/glossary.tex @@ -7,7 +7,7 @@ \index{>>>} -\item[\code{>\code{>}>}] +\item[\code{>>>}] The typical Python prompt of the interactive shell. Often seen for code examples that can be tried right away in the interpreter. diff --git a/Doc/tut/tut.tex b/Doc/tut/tut.tex index 7f9a7ee..4e0a26b 100644 --- a/Doc/tut/tut.tex +++ b/Doc/tut/tut.tex @@ -264,7 +264,7 @@ the command or module to handle. When commands are read from a tty, the interpreter is said to be in \emph{interactive mode}. In this mode it prompts for the next command with the \emph{primary prompt}, usually three greater-than signs -(\samp{>\code{>}>~}); for continuation lines it prompts with the +(\samp{>>>~}); for continuation lines it prompts with the \emph{secondary prompt}, by default three dots (\samp{...~}). 
The interpreter prints a welcome message stating its version number and a copyright notice before printing the first prompt: @@ -423,7 +423,7 @@ if filename and os.path.isfile(filename): \chapter{An Informal Introduction to Python \label{informal}} In the following examples, input and output are distinguished by the -presence or absence of prompts (\samp{>\code{>}>~} and \samp{...~}): to repeat +presence or absence of prompts (\samp{>>>~} and \samp{...~}): to repeat the example, you must type everything after the prompt, when the prompt appears; lines that do not begin with a prompt are output from the interpreter. % @@ -455,7 +455,7 @@ STRING = "# This is not a comment." \section{Using Python as a Calculator \label{calculator}} Let's try some simple Python commands. Start the interpreter and wait -for the primary prompt, \samp{>\code{>}>~}. (It shouldn't take long.) +for the primary prompt, \samp{>>>~}. (It shouldn't take long.) \subsection{Numbers \label{numbers}} @@ -2723,7 +2723,7 @@ standard module \module{__builtin__}\refbimodindex{__builtin__}: 'FloatingPointError', 'FutureWarning', 'IOError', 'ImportError', 'IndentationError', 'IndexError', 'KeyError', 'KeyboardInterrupt', 'LookupError', 'MemoryError', 'NameError', 'None', 'NotImplemented', - 'NotImplementedError', 'OSError', 'OverflowError', 'OverflowWarning', + 'NotImplementedError', 'OSError', 'OverflowError', 'PendingDeprecationWarning', 'ReferenceError', 'RuntimeError', 'RuntimeWarning', 'StandardError', 'StopIteration', 'SyntaxError', 'SyntaxWarning', 'SystemError', 'SystemExit', 'TabError', 'True', @@ -3763,6 +3763,38 @@ for releasing external resources (such as files or network connections), regardless of whether the use of the resource was successful. +\section{Predefined Clean-up Actions \label{cleanup-with}} + +Some objects define standard clean-up actions to be undertaken when +the object is no longer needed, regardless of whether or not the +operation using the object succeeded or failed. +Look at the following example, which tries to open a file and print +its contents to the screen. + +\begin{verbatim} +for line in open("myfile.txt"): + print line +\end{verbatim} + +The problem with this code is that it leaves the file open for an +indeterminate amount of time after the code has finished executing. +This is not an issue in simple scripts, but can be a problem for +larger applications. The \keyword{with} statement allows +objects like files to be used in a way that ensures they are +always cleaned up promptly and correctly. + +\begin{verbatim} +with open("myfile.txt") as f: + for line in f: + print line +\end{verbatim} + +After the statement is executed, the file \var{f} is always closed, +even if a problem was encountered while processing the lines. Other +objects which provide predefined clean-up actions will indicate +this in their documentation. + + \chapter{Classes \label{classes}} Python's class mechanism adds classes to the language with a minimum @@ -4757,7 +4789,7 @@ for sending mail: \begin{verbatim} >>> import urllib2 >>> for line in urllib2.urlopen('http://tycho.usno.navy.mil/cgi-bin/timer.pl'): -... if 'EST' in line: # look for Eastern Standard Time +... if 'EST' in line or 'EDT' in line: # look for Eastern Time ... print line
Nov. 25, 09:43:32 PM EST diff --git a/Doc/whatsnew/whatsnew20.tex b/Doc/whatsnew/whatsnew20.tex index bf458fa..56d15b8 100644 --- a/Doc/whatsnew/whatsnew20.tex +++ b/Doc/whatsnew/whatsnew20.tex @@ -330,7 +330,7 @@ List comprehensions have the form: [ expression for expr in sequence1 for expr2 in sequence2 ... for exprN in sequenceN - if condition + if condition ] \end{verbatim} The \keyword{for}...\keyword{in} clauses contain the sequences to be @@ -356,7 +356,7 @@ for expr1 in sequence1: # resulting list. \end{verbatim} -This means that when there are \keyword{for}...\keyword{in} clauses, +This means that when there are multiple \keyword{for}...\keyword{in} clauses, the resulting list will be equal to the product of the lengths of all the sequences. If you have two lists of length 3, the output list is 9 elements long: @@ -400,7 +400,7 @@ statement \code{a += 2} increments the value of the variable % The empty groups below prevent conversion to guillemets. The full list of supported assignment operators is \code{+=}, \code{-=}, \code{*=}, \code{/=}, \code{\%=}, \code{**=}, \code{\&=}, -\code{|=}, \verb|^=|, \code{>{}>=}, and \code{<{}<=}. Python classes can +\code{|=}, \verb|^=|, \code{>>=}, and \code{<<=}. Python classes can override the augmented assignment operators by defining methods named \method{__iadd__}, \method{__isub__}, etc. For example, the following \class{Number} class stores a number and supports using += to create a diff --git a/Doc/whatsnew/whatsnew23.tex b/Doc/whatsnew/whatsnew23.tex index e29ecdd..a122083 100644 --- a/Doc/whatsnew/whatsnew23.tex +++ b/Doc/whatsnew/whatsnew23.tex @@ -318,7 +318,7 @@ Hisao and Martin von~L\"owis.} %====================================================================== -\section{PEP 273: Importing Modules from Zip Archives} +\section{PEP 273: Importing Modules from ZIP Archives} The new \module{zipimport} module adds support for importing modules from a ZIP-format archive. You don't need to import the diff --git a/Doc/whatsnew/whatsnew25.tex b/Doc/whatsnew/whatsnew25.tex index 750162f..3006624 100644 --- a/Doc/whatsnew/whatsnew25.tex +++ b/Doc/whatsnew/whatsnew25.tex @@ -2,13 +2,11 @@ \usepackage{distutils} % $Id$ -% The easy_install stuff -% Describe the pkgutil module % Fix XXX comments % Count up the patches and bugs \title{What's New in Python 2.5} -\release{0.1} +\release{0.2} \author{A.M. Kuchling} \authoraddress{\email{amk@amk.ca}} @@ -34,32 +32,6 @@ rationale, refer to the PEP for a particular new feature. %====================================================================== -\section{PEP 243: Uploading Modules to PyPI\label{pep-243}} - -PEP 243 describes an HTTP-based protocol for submitting software -packages to a central archive. The Python package index at -\url{http://cheeseshop.python.org} now supports package uploads, and -the new \command{upload} Distutils command will upload a package to the -repository. - -Before a package can be uploaded, you must be able to build a -distribution using the \command{sdist} Distutils command. Once that -works, you can run \code{python setup.py upload} to add your package -to the PyPI archive. Optionally you can GPG-sign the package by -supplying the \longprogramopt{sign} and -\longprogramopt{identity} options. - -\begin{seealso} - -\seepep{243}{Module Repository Upload Mechanism}{PEP written by -Sean Reifschneider; implemented by Martin von~L\"owis -and Richard Jones. 
Note that the PEP doesn't exactly -describe what's implemented in PyPI.} - -\end{seealso} - - -%====================================================================== \section{PEP 308: Conditional Expressions\label{pep-308}} For a long time, people have been requesting a way to write @@ -236,6 +208,20 @@ setup(name='PyPackage', % VERSION), ) \end{verbatim} + +Another new enhancement to the Python package index at +\url{http://cheeseshop.python.org} is storing source and binary +archives for a package. The new \command{upload} Distutils command +will upload a package to the repository. + +Before a package can be uploaded, you must be able to build a +distribution using the \command{sdist} Distutils command. Once that +works, you can run \code{python setup.py upload} to add your package +to the PyPI archive. Optionally you can GPG-sign the package by +supplying the \longprogramopt{sign} and +\longprogramopt{identity} options. + +Package uploading was implemented by Martin von~L\"owis and Richard Jones. \begin{seealso} @@ -394,13 +380,17 @@ finally: \end{verbatim} The code in \var{block-1} is executed. If the code raises an -exception, the handlers are tried in order: \var{handler-1}, -\var{handler-2}, ... If no exception is raised, the \var{else-block} -is executed. No matter what happened previously, the -\var{final-block} is executed once the code block is complete and any -raised exceptions handled. Even if there's an error in an exception -handler or the \var{else-block} and a new exception is raised, the -\var{final-block} is still executed. +exception, the various \keyword{except} blocks are tested: if the +exception is of class \class{Exception1}, \var{handler-1} is executed; +otherwise if it's of class \class{Exception2}, \var{handler-2} is +executed, and so forth. If no exception is raised, the +\var{else-block} is executed. + +No matter what happened previously, the \var{final-block} is executed +once the code block is complete and any raised exceptions handled. +Even if there's an error in an exception handler or the +\var{else-block} and a new exception is raised, the +code in the \var{final-block} is still run. \begin{seealso} @@ -415,7 +405,7 @@ implementation by Thomas Lee.} Python 2.5 adds a simple way to pass values \emph{into} a generator. As introduced in Python 2.3, generators only produce output; once a -generator's code is invoked to create an iterator, there's no way to +generator's code was invoked to create an iterator, there was no way to pass any new information into the function when its execution is resumed. Sometimes the ability to pass in some information would be useful. Hackish solutions to this include making the generator's code @@ -522,9 +512,9 @@ generators: \exception{GeneratorExit} or \exception{StopIteration}; catching the exception and doing anything else is illegal and will trigger a \exception{RuntimeError}. \method{close()} will also be called by - Python's garbage collection when the generator is garbage-collected. + Python's garbage collector when the generator is garbage-collected. - If you need to run cleanup code in case of a \exception{GeneratorExit}, + If you need to run cleanup code when a \exception{GeneratorExit} occurs, I suggest using a \code{try: ... finally:} suite instead of catching \exception{GeneratorExit}. @@ -535,8 +525,8 @@ one-way producers of information into both producers and consumers. Generators also become \emph{coroutines}, a more generalized form of subroutines. 
Subroutines are entered at one point and exited at -another point (the top of the function, and a \keyword{return -statement}), but coroutines can be entered, exited, and resumed at +another point (the top of the function, and a \keyword{return} +statement), but coroutines can be entered, exited, and resumed at many different points (the \keyword{yield} statements). We'll have to figure out patterns for using coroutines effectively in Python. @@ -579,14 +569,12 @@ Sugalski.} %====================================================================== \section{PEP 343: The 'with' statement\label{pep-343}} -The '\keyword{with}' statement allows a clearer version of code that -uses \code{try...finally} blocks to ensure that clean-up code is -executed. - -In this section, I'll discuss the statement as it will commonly be -used. In the next section, I'll examine the implementation details -and show how to write objects called ``context managers'' and -``contexts'' for use with this statement. +The '\keyword{with}' statement clarifies code that previously would +use \code{try...finally} blocks to ensure that clean-up code is +executed. In this section, I'll discuss the statement as it will +commonly be used. In the next section, I'll examine the +implementation details and show how to write objects for use with this +statement. The '\keyword{with}' statement is a new control-flow structure whose basic structure is: @@ -596,13 +584,13 @@ with expression [as variable]: with-block \end{verbatim} -The expression is evaluated, and it should result in a type of object -that's called a context manager. The context manager can return a +The expression is evaluated, and it should result in an object that +supports the context management protocol. This object may return a value that can optionally be bound to the name \var{variable}. (Note -carefully: \var{variable} is \emph{not} assigned the result of -\var{expression}.) One method of the context manager is run before -\var{with-block} is executed, and another method is run after the -block is done, even if the block raised an exception. +carefully that \var{variable} is \emph{not} assigned the result of +\var{expression}.) The object can then run set-up code +before \var{with-block} is executed and some clean-up code +is executed after the block is done, even if the block raised an exception. To enable the statement in Python 2.5, you need to add the following directive to your module: @@ -613,7 +601,8 @@ from __future__ import with_statement The statement will always be enabled in Python 2.6. -Some standard Python objects can now behave as context managers. File +Some standard Python objects now support the context management +protocol and can be used with the '\keyword{with}' statement. File objects are one example: \begin{verbatim} @@ -637,12 +626,12 @@ with lock: ... \end{verbatim} -The lock is acquired before the block is executed, and always released once +The lock is acquired before the block is executed and always released once the block is complete. The \module{decimal} module's contexts, which encapsulate the desired -precision and rounding characteristics for computations, can also be -used as context managers. 
+precision and rounding characteristics for computations, provide a +\method{context_manager()} method for getting a context manager: \begin{verbatim} import decimal @@ -651,7 +640,8 @@ import decimal v1 = decimal.Decimal('578') print v1.sqrt() -with decimal.Context(prec=16): +ctx = decimal.Context(prec=16) +with ctx.context_manager(): # All code in this block uses a precision of 16 digits. # The original context is restored on exiting the block. print v1.sqrt() @@ -660,47 +650,45 @@ with decimal.Context(prec=16): \subsection{Writing Context Managers\label{context-managers}} Under the hood, the '\keyword{with}' statement is fairly complicated. -Most people will only use '\keyword{with}' in company with -existing objects that are documented to work as context managers, and -don't need to know these details, so you can skip the following section if -you like. Authors of new context managers will need to understand the -details of the underlying implementation. +Most people will only use '\keyword{with}' in company with existing +objects and don't need to know these details, so you can skip the rest +of this section if you like. Authors of new objects will need to +understand the details of the underlying implementation and should +keep reading. A high-level explanation of the context management protocol is: \begin{itemize} -\item The expression is evaluated and should result in an object -that's a context manager, meaning that it has a -\method{__context__()} method. -\item This object's \method{__context__()} method is called, and must -return a context object. +\item The expression is evaluated and should result in an object +called a ``context manager''. The context manager must have +\method{__enter__()} and \method{__exit__()} methods. -\item The context's \method{__enter__()} method is called. -The value returned is assigned to \var{VAR}. If no \code{'as \var{VAR}'} -clause is present, the value is simply discarded. +\item The context manager's \method{__enter__()} method is called. The value +returned is assigned to \var{VAR}. If no \code{'as \var{VAR}'} clause +is present, the value is simply discarded. \item The code in \var{BLOCK} is executed. -\item If \var{BLOCK} raises an exception, the context object's +\item If \var{BLOCK} raises an exception, the \method{__exit__(\var{type}, \var{value}, \var{traceback})} is called -with the exception's information, the same values returned by -\function{sys.exc_info()}. The method's return value -controls whether the exception is re-raised: any false value -re-raises the exception, and \code{True} will result in suppressing it. -You'll only rarely want to suppress the exception; the -author of the code containing the '\keyword{with}' statement will -never realize anything went wrong. +with the exception details, the same values returned by +\function{sys.exc_info()}. The method's return value controls whether +the exception is re-raised: any false value re-raises the exception, +and \code{True} will result in suppressing it. You'll only rarely +want to suppress the exception, because if you do +the author of the code containing the +'\keyword{with}' statement will never realize anything went wrong. \item If \var{BLOCK} didn't raise an exception, -the context object's \method{__exit__()} is still called, +the \method{__exit__()} method is still called, but \var{type}, \var{value}, and \var{traceback} are all \code{None}. \end{itemize} Let's think through an example. I won't present detailed code but -will only sketch the necessary code. 
The example will be writing a -context manager for a database that supports transactions. +will only sketch the methods necessary for a database that supports +transactions. (For people unfamiliar with database terminology: a set of changes to the database are grouped into a transaction. Transactions can be @@ -721,22 +709,13 @@ with db_connection as cursor: # ... more operations ... \end{verbatim} -The transaction should either be committed if the code in the block -runs flawlessly, or rolled back if there's an exception. - -First, the \class{DatabaseConnection} needs a \method{__context__()} -method. Sometimes an object can be its own context manager and can -simply return \code{self}; the \module{threading} module's lock objects -can do this. For our database example, though, we need to -create a new object; I'll call this class \class{DatabaseContext}. -Our \method{__context__()} must therefore look like this: +The transaction should be committed if the code in the block +runs flawlessly or rolled back if there's an exception. +Here's the basic interface +for \class{DatabaseConnection} that I'll assume: \begin{verbatim} class DatabaseConnection: - ... - def __context__ (self): - return DatabaseContext(self) - # Database interface def cursor (self): "Returns a cursor object and starts a new transaction" @@ -746,29 +725,18 @@ class DatabaseConnection: "Rolls back current transaction" \end{verbatim} -The context needs the connection object so that the connection -object's \method{commit()} or \method{rollback()} methods can be -called: +The \method {__enter__()} method is pretty easy, having only to start +a new transaction. For this application the resulting cursor object +would be a useful result, so the method will return it. The user can +then add \code{as cursor} to their '\keyword{with}' statement to bind +the cursor to a variable name. \begin{verbatim} -class DatabaseContext: - def __init__ (self, connection): - self.connection = connection -\end{verbatim} - -The \method {__enter__()} method is pretty easy, having only -to start a new transaction. In this example, -the resulting cursor object would be a useful result, -so the method will return it. The user can -then add \code{as cursor} to their '\keyword{with}' statement -to bind the cursor to a variable name. - -\begin{verbatim} -class DatabaseContext: +class DatabaseConnection: ... def __enter__ (self): # Code to start a new transaction - cursor = self.connection.cursor() + cursor = self.cursor() return cursor \end{verbatim} @@ -776,21 +744,23 @@ The \method{__exit__()} method is the most complicated because it's where most of the work has to be done. The method has to check if an exception occurred. If there was no exception, the transaction is committed. The transaction is rolled back if there was an exception. -Here the code will just fall off the end of the function, returning -the default value of \code{None}. \code{None} is false, so the exception -will be re-raised automatically. If you wished, you could be more explicit -and add a \keyword{return} at the marked location. + +In the code below, execution will just fall off the end of the +function, returning the default value of \code{None}. \code{None} is +false, so the exception will be re-raised automatically. If you +wished, you could be more explicit and add a \keyword{return} +statement at the marked location. \begin{verbatim} -class DatabaseContext: +class DatabaseConnection: ... 
def __exit__ (self, type, value, tb): if tb is None: # No exception, so commit - self.connection.commit() + self.commit() else: # Exception occurred, so rollback. - self.connection.rollback() + self.rollback() # return False \end{verbatim} @@ -798,25 +768,26 @@ class DatabaseContext: \subsection{The contextlib module\label{module-contextlib}} The new \module{contextlib} module provides some functions and a -decorator that are useful for writing context managers. - -The decorator is called \function{contextmanager}, and lets you write -a simple context manager as a generator. The generator should yield -exactly one value. The code up to the \keyword{yield} will be -executed as the \method{__enter__()} method, and the value yielded -will be the method's return value that will get bound to the variable -in the '\keyword{with}' statement's \keyword{as} clause, if any. The -code after the \keyword{yield} will be executed in the -\method{__exit__()} method. Any exception raised in the block -will be raised by the \keyword{yield} statement. +decorator that are useful for writing objects for use with the +'\keyword{with}' statement. + +The decorator is called \function{contextfactory}, and lets you write +a single generator function instead of defining a new class. The generator +should yield exactly one value. The code up to the \keyword{yield} +will be executed as the \method{__enter__()} method, and the value +yielded will be the method's return value that will get bound to the +variable in the '\keyword{with}' statement's \keyword{as} clause, if +any. The code after the \keyword{yield} will be executed in the +\method{__exit__()} method. Any exception raised in the block will be +raised by the \keyword{yield} statement. Our database example from the previous section could be written using this decorator as: \begin{verbatim} -from contextlib import contextmanager +from contextlib import contextfactory -@contextmanager +@contextfactory def db_transaction (connection): cursor = connection.cursor() try: @@ -832,29 +803,11 @@ with db_transaction(db) as cursor: ... \end{verbatim} -You can also use this decorator to write the \method{__context__()} method -for a class without creating a new class for the context: - -\begin{verbatim} -class DatabaseConnection: - - @contextmanager - def __context__ (self): - cursor = self.cursor() - try: - yield cursor - except: - self.rollback() - raise - else: - self.commit() -\end{verbatim} - - -There's a \function{nested(\var{mgr1}, \var{mgr2}, ...)} manager that -combines a number of context managers so you don't need to write -nested '\keyword{with}' statements. This example statement does two -things, starting a database transaction and acquiring a thread lock: +The \module{contextlib} module also has a \function{nested(\var{mgr1}, +\var{mgr2}, ...)} function that combines a number of context managers so you +don't need to write nested '\keyword{with}' statements. In this +example, the single '\keyword{with}' statement both starts a database +transaction and acquires a thread lock: \begin{verbatim} lock = threading.Lock() @@ -862,7 +815,7 @@ with nested (db_transaction(db), lock) as (cursor, locked): ... \end{verbatim} -Finally, the \function{closing(\var{object})} context manager +Finally, the \function{closing(\var{object})} function returns \var{object} so that it can be bound to a variable, and calls \code{\var{object}.close()} at the end of the block. 
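To make the behaviour of a \function{contextfactory}-decorated
generator concrete, here is a small sketch.  The helper name
\function{ignoring} is invented purely for illustration, and the
decorator is spelled \function{contextfactory} as in the text above
(some snapshots of \module{contextlib} still spell it
\function{contextmanager}).  The code before the \keyword{yield} runs
as \method{__enter__()}, an exception raised in the block surfaces at
the \keyword{yield} statement, and catching it there suppresses it:

\begin{verbatim}
from __future__ import with_statement   # needed to enable 'with' in 2.5
from contextlib import contextfactory

@contextfactory
def ignoring (exc_type):
    # Code up to the yield runs as the __enter__() method.
    try:
        yield            # nothing useful to bind, so no value is yielded
    except exc_type:
        # An exception raised in the with-block is re-raised here;
        # catching it and falling off the end suppresses it.
        pass

with ignoring(ZeroDivisionError):
    1/0
print 'this line is still reached'
\end{verbatim}

As noted earlier, silently suppressing exceptions like this should be
rare; the sketch is only meant to show how the pieces of the generator
map onto the context management protocol.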
@@ -880,8 +833,7 @@ with closing(urllib.urlopen('http://www.yahoo.com')) as f: \seepep{343}{The ``with'' statement}{PEP written by Guido van~Rossum and Nick Coghlan; implemented by Mike Bland, Guido van~Rossum, and Neal Norwitz. The PEP shows the code generated for a '\keyword{with}' -statement, which can be helpful in learning how context managers -work.} +statement, which can be helpful in learning how the statement works.} \seeurl{../lib/module-contextlib.html}{The documentation for the \module{contextlib} module.} @@ -1064,7 +1016,7 @@ and implemented by Travis Oliphant.} %====================================================================== -\section{Other Language Changes} +\section{Other Language Changes\label{other-lang}} Here are all of the changes that Python 2.5 makes to the core Python language. @@ -1090,6 +1042,36 @@ print d[1], d[2] # Prints 1, 2 print d[3], d[4] # Prints 0, 0 \end{verbatim} +\item Both 8-bit and Unicode strings have new \method{partition(sep)} +and \method{rpartition(sep)} methods that simplify a common use case. +The \method{find(S)} method is often used to get an index which is +then used to slice the string and obtain the pieces that are before +and after the separator. + +\method{partition(sep)} condenses this +pattern into a single method call that returns a 3-tuple containing +the substring before the separator, the separator itself, and the +substring after the separator. If the separator isn't found, the +first element of the tuple is the entire string and the other two +elements are empty. \method{rpartition(sep)} also returns a 3-tuple +but starts searching from the end of the string; the \samp{r} stands +for 'reverse'. + +Some examples: + +\begin{verbatim} +>>> ('http://www.python.org').partition('://') +('http', '://', 'www.python.org') +>>> (u'Subject: a quick question').partition(':') +(u'Subject', u':', u' a quick question') +>>> ('file:/usr/share/doc/index.html').partition('://') +('file:/usr/share/doc/index.html', '', '') +>>> 'www.python.org'.rpartition('.') +('www.python', '.', 'org') +\end{verbatim} + +(Implemented by Fredrik Lundh following a suggestion by Raymond Hettinger.) + \item The \function{min()} and \function{max()} built-in functions gained a \code{key} keyword parameter analogous to the \code{key} argument for \method{sort()}. This parameter supplies a function that @@ -1127,6 +1109,14 @@ a line like this near the top of the source file: # -*- coding: latin1 -*- \end{verbatim} +\item One error that Python programmers sometimes make is forgetting +to include an \file{__init__.py} module in a package directory. +Debugging this mistake can be confusing, and usually requires running +Python with the \programopt{-v} switch to log all the paths searched. +In Python 2.5, a new \exception{ImportWarning} warning is raised when +an import would have picked up a directory as a package but no +\file{__init__.py} was found. (Implemented by Thomas Wouters.) + \item The list of base classes in a class definition can now be empty. As an example, this is now legal: @@ -1140,7 +1130,7 @@ class C(): %====================================================================== -\subsection{Interactive Interpreter Changes} +\subsection{Interactive Interpreter Changes\label{interactive}} In the interactive interpreter, \code{quit} and \code{exit} have long been strings so that new users get a somewhat helpful message @@ -1158,7 +1148,14 @@ interpreter as they expect. (Implemented by Georg Brandl.) 
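A quick interactive sketch of the change (the exact message text is
illustrative and may differ between platforms and versions):

\begin{verbatim}
>>> quit
Use quit() or Ctrl-D (i.e. EOF) to exit
>>> quit()        # now really leaves the interpreter
\end{verbatim}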
%====================================================================== -\subsection{Optimizations} +\subsection{Optimizations\label{opts}} + +Several of the optimizations were developed at the NeedForSpeed +sprint, an event held in Reykjavik, Iceland, from May 21--28 2006. +The sprint focused on speed enhancements to the CPython implementation +and was funded by EWT LLC with local support from CCP Games. Those +optimizations added at this sprint are specially marked in the +following list. \begin{itemize} @@ -1169,15 +1166,53 @@ In 2.5 the internal data structure has been customized for implementing sets, and as a result sets will use a third less memory and are somewhat faster. (Implemented by Raymond Hettinger.) -\item The performance of some Unicode operations, such as -character map decoding, has been improved. +\item The speed of some Unicode operations, such as +finding substrings, string splitting, and character map decoding, has +been improved. (Substring search and splitting improvements were +added by Fredrik Lundh and Andrew Dalke at the NeedForSpeed +sprint. Character map decoding was improved by Walter D\"orwald.) % Patch 1313939 +\item The \function{long(\var{str}, \var{base})} function is now +faster on long digit strings because fewer intermediate results are +calculated. The peak is for strings of around 800--1000 digits where +the function is 6 times faster. +(Contributed by Alan McIntyre and committed at the NeedForSpeed sprint.) +% Patch 1442927 + +\item The \module{struct} module now compiles structure format +strings into an internal representation and caches this +representation, yielding a 20\% speedup. (Contributed by Bob Ippolito +at the NeedForSpeed sprint.) + \item The code generator's peephole optimizer now performs simple constant folding in expressions. If you write something like \code{a = 2+3}, the code generator will do the arithmetic and produce code corresponding to \code{a = 5}. +\item Function calls are now faster because code objects now keep +the most recently finished frame (a ``zombie frame'') in an internal +field of the code object, reusing it the next time the code object is +invoked. (Original patch by Michael Hudson, modified by Armin Rigo +and Richard Jones; committed at the NeedForSpeed sprint.) +% Patch 876206 + +Frame objects are also slightly smaller, which may improve cache locality +and reduce memory usage a bit. (Contributed by Neal Norwitz.) +% Patch 1337051 + +\item Python's built-in exceptions are now new-style classes, a change +that speeds up instantiation considerably. Exception handling in +Python 2.5 is therefore about 30\% faster than in 2.4. +(Contributed by Richard Jones, Georg Brandl and Sean Reifschneider at +the NeedForSpeed sprint.) + +\item Importing now caches the paths tried, recording whether +they exist or not so that the interpreter makes fewer +\cfunction{open()} and \cfunction{stat()} calls on startup. +(Contributed by Martin von~L\"owis and Georg Brandl.) +% Patch 921466 + \end{itemize} The net result of the 2.5 optimizations is that Python 2.5 runs the @@ -1185,7 +1220,7 @@ pystone benchmark around XXX\% faster than Python 2.4. %====================================================================== -\section{New, Improved, and Removed Modules} +\section{New, Improved, and Removed Modules\label{modules}} The standard library received many enhancements and bug fixes in Python 2.5. 
Here's a partial list of the most notable changes, sorted @@ -1255,7 +1290,6 @@ raising \exception{ValueError} if the value isn't found. \item New module: The \module{contextlib} module contains helper functions for use with the new '\keyword{with}' statement. See section~\ref{module-contextlib} for more about this module. -(Contributed by Phillip J. Eby.) \item New module: The \module{cProfile} module is a C implementation of the existing \module{profile} module that has much lower overhead. @@ -1266,8 +1300,8 @@ which is also written in C but doesn't match the \module{profile} module's interface, will continue to be maintained in future versions of Python. (Contributed by Armin Rigo.) -Also, the \module{pstats} module used to analyze the data measured by -the profiler now supports directing the output to any file stream +Also, the \module{pstats} module for analyzing the data measured by +the profiler now supports directing the output to any file object by supplying a \var{stream} argument to the \class{Stats} constructor. (Contributed by Skip Montanaro.) @@ -1295,6 +1329,11 @@ ts = datetime.strptime('10:13:15 2006-03-07', '%H:%M:%S %Y-%m-%d') \end{verbatim} +\item The \module{doctest} module gained a \code{SKIP} option that +keeps an example from being executed at all. This is intended for +code snippets that are usage examples intended for the reader and +aren't actually test cases. + \item The \module{fileinput} module was made more flexible. Unicode filenames are now supported, and a \var{mode} parameter that defaults to \code{"r"} was added to the @@ -1344,6 +1383,35 @@ itertools.islice(iterable, s.start, s.stop, s.step) (Contributed by Raymond Hettinger.) +\item The \module{mailbox} module underwent a massive rewrite to add +the capability to modify mailboxes in addition to reading them. A new +set of classes that include \class{mbox}, \class{MH}, and +\class{Maildir} are used to read mailboxes, and have an +\method{add(\var{message})} method to add messages, +\method{remove(\var{key})} to remove messages, and +\method{lock()}/\method{unlock()} to lock/unlock the mailbox. The +following example converts a maildir-format mailbox into an mbox-format one: + +\begin{verbatim} +import mailbox + +# 'factory=None' uses email.Message.Message as the class representing +# individual messages. +src = mailbox.Maildir('maildir', factory=None) +dest = mailbox.mbox('/tmp/mbox') + +for msg in src: + dest.add(msg) +\end{verbatim} + +(Contributed by Gregory K. Johnson. Funding was provided by Google's +2005 Summer of Code.) + +\item New module: the \module{msilib} module allows creating +Microsoft Installer \file{.msi} files and CAB files. Some support +for reading the \file{.msi} database is also included. +(Contributed by Martin von~L\"owis.) + \item The \module{nis} module now supports accessing domains other than the system default domain by supplying a \var{domain} argument to the \function{nis.match()} and \function{nis.maps()} functions. @@ -1358,6 +1426,11 @@ this new feature with the \method{sort()} method's \code{key} parameter lets you easily sort lists using multiple fields. (Contributed by Raymond Hettinger.) +\item The \module{optparse} module was updated to version 1.5.1 of the +Optik library. The \class{OptionParser} class gained an +\member{epilog} attribute, a string that will be printed after the +help message, and a \method{destroy()} method to break reference +cycles created by the object. (Contributed by Greg Ward.) \item The \module{os} module underwent several changes. 
The \member{stat_float_times} variable now defaults to true, meaning that @@ -1389,12 +1462,35 @@ The \member{st_flags} member is also available, if the platform supports it. (Contributed by Antti Louko and Diego Petten\`o.) % (Patch 1180695, 1212117) +\item The Python debugger provided by the \module{pdb} module +can now store lists of commands to execute when a breakpoint is +reached and execution stops. Once breakpoint \#1 has been created, +enter \samp{commands 1} and enter a series of commands to be executed, +finishing the list with \samp{end}. The command list can include +commands that resume execution, such as \samp{continue} or +\samp{next}. (Contributed by Gr\'egoire Dooms.) +% Patch 790710 + \item The \module{pickle} and \module{cPickle} modules no longer accept a return value of \code{None} from the \method{__reduce__()} method; the method must return a tuple of arguments instead. The ability to return \code{None} was deprecated in Python 2.4, so this completes the removal of the feature. +\item The \module{pkgutil} module, containing various utility +functions for finding packages, was enhanced to support PEP 302's +import hooks and now also works for packages stored in ZIP-format archives. +(Contributed by Phillip J. Eby.) + +\item The pybench benchmark suite by Marc-Andr\'e~Lemburg is now +included in the \file{Tools/pybench} directory. The pybench suite is +an improvement on the commonly used \file{pystone.py} program because +pybench provides a more detailed measurement of the interpreter's +speed. It times particular operations such as function calls, +tuple slicing, method lookups, and numeric operations, instead of +performing many different operations and reducing the result to a +single number as \file{pystone.py} does. + \item The old \module{regex} and \module{regsub} modules, which have been deprecated ever since Python 2.0, have finally been deleted. Other deleted modules: \module{statcache}, \module{tzparse}, @@ -1406,6 +1502,12 @@ which includes ancient modules such as \module{dircmp} and \code{sys.path}, so unless your programs explicitly added the directory to \code{sys.path}, this removal shouldn't affect your code. +\item The \module{rlcompleter} module is no longer +dependent on importing the \module{readline} module and +therefore now works on non-{\UNIX} platforms. +(Patch from Robert Kiendl.) +% Patch #1472854 + \item The \module{socket} module now supports \constant{AF_NETLINK} sockets on Linux, thanks to a patch from Philippe Biondi. Netlink sockets are a Linux-specific mechanism for communications @@ -1414,20 +1516,52 @@ article about them is at \url{http://www.linuxjournal.com/article/7356}. In Python code, netlink addresses are represented as a tuple of 2 integers, \code{(\var{pid}, \var{group_mask})}. -Socket objects also gained accessor methods \method{getfamily()}, -\method{gettype()}, and \method{getproto()} methods to retrieve the -family, type, and protocol values for the socket. +Two new methods on socket objects, \method{recv_buf(\var{buffer})} and +\method{recvfrom_buf(\var{buffer})}, store the received data in an object +that supports the buffer protocol instead of returning the data as a +string. This means you can put the data directly into an array or a +memory-mapped file. + +Socket objects also gained \method{getfamily()}, \method{gettype()}, +and \method{getproto()} accessor methods to retrieve the family, type, +and protocol values for the socket. 
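A tiny sketch of the new accessor methods (a hypothetical session; the
accessors are spelled as described above, and the spelling may have
changed in later releases):

\begin{verbatim}
import socket

s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
print s.getfamily() == socket.AF_INET     # True
print s.gettype() == socket.SOCK_STREAM   # True
print s.getproto()                        # usually 0, the default protocol
\end{verbatim}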
\item New module: the \module{spwd} module provides functions for accessing the shadow password database on systems that support shadow passwords. +\item The \module{struct} is now faster because it +compiles format strings into \class{Struct} objects +with \method{pack()} and \method{unpack()} methods. This is similar +to how the \module{re} module lets you create compiled regular +expression objects. You can still use the module-level +\function{pack()} and \function{unpack()} functions; they'll create +\class{Struct} objects and cache them. Or you can use +\class{Struct} instances directly: + +\begin{verbatim} +s = struct.Struct('ih3s') + +data = s.pack(1972, 187, 'abc') +year, number, name = s.unpack(data) +\end{verbatim} + +You can also pack and unpack data to and from buffer objects directly +using the \method{pack_to(\var{buffer}, \var{offset}, \var{v1}, +\var{v2}, ...)} and \method{unpack_from(\var{buffer}, \var{offset})} +methods. This lets you store data directly into an array or a +memory-mapped file. + +(\class{Struct} objects were implemented by Bob Ippolito at the +NeedForSpeed sprint. Support for buffer objects was added by Martin +Blais, also at the NeedForSpeed sprint.) + \item The Python developers switched from CVS to Subversion during the 2.5 -development process. Information about the exact build version is -available as the \code{sys.subversion} variable, a 3-tuple -of \code{(\var{interpreter-name}, \var{branch-name}, \var{revision-range})}. -For example, at the time of writing -my copy of 2.5 was reporting \code{('CPython', 'trunk', '45313:45315')}. +development process. Information about the exact build version is +available as the \code{sys.subversion} variable, a 3-tuple of +\code{(\var{interpreter-name}, \var{branch-name}, +\var{revision-range})}. For example, at the time of writing my copy +of 2.5 was reporting \code{('CPython', 'trunk', '45313:45315')}. This information is also available to C extensions via the \cfunction{Py_GetBuildInfo()} function that returns a @@ -1449,7 +1583,7 @@ using the mode \code{'r|*'}. \item The \module{unicodedata} module has been updated to use version 4.1.0 of the Unicode character database. Version 3.2.0 is required by some specifications, so it's still available as -\member{unicodedata.db_3_2_0}. +\member{unicodedata.ucd_3_2_0}. \item The \module{webbrowser} module received a number of enhancements. @@ -1474,13 +1608,19 @@ Brandl.) (Contributed by Skip Montanaro.) % Patch 1120353 +\item The \module{zlib} module's \class{Compress} and \class{Decompress} +objects now support a \method{copy()} method that makes a copy of the +object's internal state and returns a new +\class{Compress} or \class{Decompress} object. +(Contributed by Chris AtLee.) +% Patch 1435422 \end{itemize} %====================================================================== -\subsection{The ctypes package} +\subsection{The ctypes package\label{module-ctypes}} The \module{ctypes} package, written by Thomas Heller, has been added to the standard library. \module{ctypes} lets you call arbitrary functions @@ -1562,10 +1702,10 @@ of extension modules, now that \module{ctypes} is included with core Python. %====================================================================== -\subsection{The ElementTree package} +\subsection{The ElementTree package\label{module-etree}} A subset of Fredrik Lundh's ElementTree library for processing XML has -been added to the standard library as \module{xmlcore.etree}. The +been added to the standard library as \module{xml.etree}. 
The available modules are \module{ElementTree}, \module{ElementPath}, and \module{ElementInclude} from ElementTree 1.2.6. @@ -1587,7 +1727,7 @@ takes either a string (assumed to contain a filename) or a file-like object and returns an \class{ElementTree} instance: \begin{verbatim} -from xmlcore.etree import ElementTree as ET +from xml.etree import ElementTree as ET tree = ET.parse('ex-1.xml') @@ -1605,7 +1745,7 @@ This function provides a tidy way to incorporate XML fragments, approaching the convenience of an XML literal: \begin{verbatim} -svg = et.XML(""" +svg = ET.XML(""" """) svg.set('height', '320px') svg.append(elem1) @@ -1619,7 +1759,7 @@ values, and list-like operations are used to access child nodes. \lineii{elem[n]}{Returns n'th child element.} \lineii{elem[m:n]}{Returns list of m'th through n'th child elements.} \lineii{len(elem)}{Returns number of child elements.} - \lineii{elem.getchildren()}{Returns list of child elements.} + \lineii{list(elem)}{Returns list of child elements.} \lineii{elem.append(elem2)}{Adds \var{elem2} as a child.} \lineii{elem.insert(index, elem2)}{Inserts \var{elem2} at the specified location.} \lineii{del elem[n]}{Deletes n'th child element.} @@ -1651,14 +1791,15 @@ tree.write('output.xml') # Encoding is UTF-8 f = open('output.xml', 'w') -tree.write(f, 'utf-8') +tree.write(f, encoding='utf-8') \end{verbatim} -(Caution: the default encoding used for output is ASCII, which isn't -very useful for general XML work, raising an exception if there are -any characters with values greater than 127. You should always -specify a different encoding such as UTF-8 that can handle any Unicode -character.) +(Caution: the default encoding used for output is ASCII. For general +XML work, where an element's name may contain arbitrary Unicode +characters, ASCII isn't a very useful encoding because it will raise +an exception if an element's name contains any characters with values +greater than 127. Therefore, it's best to specify a different +encoding such as UTF-8 that can handle any Unicode character.) This section is only a partial description of the ElementTree interfaces. Please read the package's official documentation for more details. @@ -1673,7 +1814,7 @@ Please read the package's official documentation for more details. %====================================================================== -\subsection{The hashlib package} +\subsection{The hashlib package\label{module-hashlib}} A new \module{hashlib} module, written by Gregory P. Smith, has been added to replace the @@ -1721,7 +1862,7 @@ and \method{copy()} returns a new hashing object with the same digest state. %====================================================================== -\subsection{The sqlite3 package} +\subsection{The sqlite3 package\label{module-sqlite}} The pysqlite module (\url{http://www.pysqlite.org}), a wrapper for the SQLite embedded database, has been added to the standard library under @@ -1786,7 +1927,7 @@ c.execute("... 
where symbol = '%s'" % symbol) # Do this instead t = (symbol,) -c.execute('select * from stocks where symbol=?', ('IBM',)) +c.execute('select * from stocks where symbol=?', t) # Larger example for t in (('2006-03-28', 'BUY', 'IBM', 1000, 45.00), @@ -1835,7 +1976,7 @@ Marc-Andr\'e Lemburg.} % ====================================================================== -\section{Build and C API Changes} +\section{Build and C API Changes\label{build-api}} Changes to Python's build process and to the C API include: @@ -1901,6 +2042,22 @@ string of build information like this: \code{"trunk:45355:45356M, Apr 13 2006, 07:42:19"}. (Contributed by Barry Warsaw.) +\item Two new macros can be used to indicate C functions that are +local to the current file so that a faster calling convention can be +used. \cfunction{Py_LOCAL(\var{type})} declares the function as +returning a value of the specified \var{type} and uses a fast-calling +qualifier. \cfunction{Py_LOCAL_INLINE(\var{type})} does the same thing +and also requests the function be inlined. If +\cfunction{PY_LOCAL_AGGRESSIVE} is defined before \file{python.h} is +included, a set of more aggressive optimizations are enabled for the +module; you should benchmark the results to find out if these +optimizations actually make the code faster. (Contributed by Fredrik +Lundh at the NeedForSpeed sprint.) + +\item \cfunction{PyErr_NewException(\var{name}, \var{base}, +\var{dict})} can now accept a tuple of base classes as its \var{base} +argument. (Contributed by Georg Brandl.) + \item The CPython interpreter is still written in C, but the code can now be compiled with a {\Cpp} compiler without errors. (Implemented by Anthony Baxter, Martin von~L\"owis, Skip Montanaro.) @@ -1913,7 +2070,7 @@ error checking. %====================================================================== -\subsection{Port-Specific Changes} +\subsection{Port-Specific Changes\label{ports}} \begin{itemize} @@ -1921,6 +2078,11 @@ error checking. now uses the \cfunction{dlopen()} function instead of MacOS-specific functions. +\item MacOS X: a \longprogramopt{enable-universalsdk} switch was added +to the \program{configure} script that compiles the interpreter as a +universal binary able to run on both PowerPC and Intel processors. +(Contributed by Ronald Oussoren.) + \item Windows: \file{.dll} is no longer supported as a filename extension for extension modules. \file{.pyd} is now the only filename extension that will be searched for. @@ -1977,7 +2139,7 @@ carefully test your C extension modules with Python 2.5. %====================================================================== -\section{Porting to Python 2.5} +\section{Porting to Python 2.5\label{porting}} This section lists previously described changes that may require changes to your code: @@ -2023,7 +2185,7 @@ freed with the corresponding family's \cfunction{*_Free()} function. The author would like to thank the following people for offering suggestions, corrections and assistance with various drafts of this -article: Phillip J. Eby, Kent Johnson, Martin von~L\"owis, Gustavo -Niemeyer, Mike Rovner, Thomas Wouters. +article: Phillip J. Eby, Kent Johnson, Martin von~L\"owis, Fredrik Lundh, +Gustavo Niemeyer, James Pryor, Mike Rovner, Scott Weikart, Thomas Wouters. 
\end{document} diff --git a/Grammar/Grammar b/Grammar/Grammar index a613de6..7a7f6bc 100644 --- a/Grammar/Grammar +++ b/Grammar/Grammar @@ -50,7 +50,7 @@ yield_stmt: yield_expr raise_stmt: 'raise' [test [',' test [',' test]]] import_stmt: import_name | import_from import_name: 'import' dotted_as_names -import_from: ('from' ('.'* dotted_name | '.') +import_from: ('from' ('.'* dotted_name | '.'+) 'import' ('*' | '(' import_as_names ')' | import_as_names)) import_as_name: NAME ['as' NAME] dotted_as_name: dotted_name ['as' NAME] diff --git a/Include/Python.h b/Include/Python.h index fffc688..ca16c64 100644 --- a/Include/Python.h +++ b/Include/Python.h @@ -35,7 +35,9 @@ #endif #include +#ifndef DONT_HAVE_ERRNO_H #include +#endif #include #ifdef HAVE_UNISTD_H #include diff --git a/Include/code.h b/Include/code.h index 23d9e17..432ec8e 100644 --- a/Include/code.h +++ b/Include/code.h @@ -24,6 +24,7 @@ typedef struct { PyObject *co_name; /* string (name, for reference) */ int co_firstlineno; /* first source line number */ PyObject *co_lnotab; /* string (encoding addr<->lineno mapping) */ + void *co_zombieframe; /* for optimization only (see frameobject.c) */ } PyCodeObject; /* Masks for co_flags above */ diff --git a/Include/frameobject.h b/Include/frameobject.h index 7dc14e3..cce598b 100644 --- a/Include/frameobject.h +++ b/Include/frameobject.h @@ -26,7 +26,16 @@ typedef struct _frame { to the current stack top. */ PyObject **f_stacktop; PyObject *f_trace; /* Trace function */ + + /* If an exception is raised in this frame, the next three are used to + * record the exception info (if any) originally in the thread state. See + * comments before set_exc_info() -- it's not obvious. + * Invariant: if _type is NULL, then so are _value and _traceback. + * Desired invariant: all three are NULL, or all three are non-NULL. That + * one isn't currently true, but "should be". + */ PyObject *f_exc_type, *f_exc_value, *f_exc_traceback; + PyThreadState *f_tstate; int f_lasti; /* Last instruction if called */ /* As of 2.3 f_lineno is only valid when tracing is active (i.e. when @@ -36,10 +45,6 @@ typedef struct _frame { in this scope */ int f_iblock; /* index in f_blockstack */ PyTryBlock f_blockstack[CO_MAXBLOCKS]; /* for try and loop blocks */ - int f_nlocals; /* number of locals */ - int f_ncells; - int f_nfreevars; - int f_stacksize; /* size of value stack */ PyObject *f_localsplus[1]; /* locals+stack, dynamically sized */ } PyFrameObject; diff --git a/Include/longobject.h b/Include/longobject.h index 77544ef..eef4e9b 100644 --- a/Include/longobject.h +++ b/Include/longobject.h @@ -25,6 +25,7 @@ PyAPI_FUNC(unsigned long) PyLong_AsUnsignedLongMask(PyObject *); PyAPI_FUNC(Py_ssize_t) _PyLong_AsSsize_t(PyObject *); PyAPI_FUNC(PyObject *) _PyLong_FromSize_t(size_t); PyAPI_FUNC(PyObject *) _PyLong_FromSsize_t(Py_ssize_t); +PyAPI_DATA(int) _PyLong_DigitValue[256]; /* _PyLong_AsScaledDouble returns a double x and an exponent e such that the true value is approximately equal to x * 2**(SHIFT*e). e is >= 0. 
diff --git a/Include/osdefs.h b/Include/osdefs.h index 8190a75..6937659 100644 --- a/Include/osdefs.h +++ b/Include/osdefs.h @@ -37,8 +37,12 @@ extern "C" { /* Max pathname length */ #ifndef MAXPATHLEN +#if defined(PATH_MAX) && PATH_MAX > 1024 +#define MAXPATHLEN PATH_MAX +#else #define MAXPATHLEN 1024 #endif +#endif /* Search path entry delimiter */ #ifndef DELIM diff --git a/Include/pyerrors.h b/Include/pyerrors.h index 0e7718c..6006ac7 100644 --- a/Include/pyerrors.h +++ b/Include/pyerrors.h @@ -4,6 +4,72 @@ extern "C" { #endif +/* Error objects */ + +typedef struct { + PyObject_HEAD + PyObject *dict; + PyObject *args; + PyObject *message; +} PyBaseExceptionObject; + +typedef struct { + PyObject_HEAD + PyObject *dict; + PyObject *args; + PyObject *message; + PyObject *msg; + PyObject *filename; + PyObject *lineno; + PyObject *offset; + PyObject *text; + PyObject *print_file_and_line; +} PySyntaxErrorObject; + +#ifdef Py_USING_UNICODE +typedef struct { + PyObject_HEAD + PyObject *dict; + PyObject *args; + PyObject *message; + PyObject *encoding; + PyObject *object; + PyObject *start; + PyObject *end; + PyObject *reason; +} PyUnicodeErrorObject; +#endif + +typedef struct { + PyObject_HEAD + PyObject *dict; + PyObject *args; + PyObject *message; + PyObject *code; +} PySystemExitObject; + +typedef struct { + PyObject_HEAD + PyObject *dict; + PyObject *args; + PyObject *message; + PyObject *myerrno; + PyObject *strerror; + PyObject *filename; +} PyEnvironmentErrorObject; + +#ifdef MS_WINDOWS +typedef struct { + PyObject_HEAD + PyObject *dict; + PyObject *args; + PyObject *message; + PyObject *myerrno; + PyObject *strerror; + PyObject *filename; + PyObject *winerror; +} PyWindowsErrorObject; +#endif /* Error handling definitions */ @@ -97,10 +163,9 @@ PyAPI_DATA(PyObject *) PyExc_UserWarning; PyAPI_DATA(PyObject *) PyExc_DeprecationWarning; PyAPI_DATA(PyObject *) PyExc_PendingDeprecationWarning; PyAPI_DATA(PyObject *) PyExc_SyntaxWarning; -/* PyExc_OverflowWarning will go away for Python 2.5 */ -PyAPI_DATA(PyObject *) PyExc_OverflowWarning; PyAPI_DATA(PyObject *) PyExc_RuntimeWarning; PyAPI_DATA(PyObject *) PyExc_FutureWarning; +PyAPI_DATA(PyObject *) PyExc_ImportWarning; /* Convenience functions */ diff --git a/Include/pyport.h b/Include/pyport.h index 2bce415..74ce993 100644 --- a/Include/pyport.h +++ b/Include/pyport.h @@ -137,6 +137,43 @@ typedef Py_intptr_t Py_ssize_t; # endif #endif +/* Py_LOCAL can be used instead of static to get the fastest possible calling + * convention for functions that are local to a given module. + * + * Py_LOCAL_INLINE does the same thing, and also explicitly requests inlining, + * for platforms that support that. + * + * If PY_LOCAL_AGGRESSIVE is defined before python.h is included, more + * "aggressive" inlining/optimizaion is enabled for the entire module. This + * may lead to code bloat, and may slow things down for those reasons. It may + * also lead to errors, if the code relies on pointer aliasing. Use with + * care. + * + * NOTE: You can only use this for functions that are entirely local to a + * module; functions that are exported via method tables, callbacks, etc, + * should keep using static. + */ + +#undef USE_INLINE /* XXX - set via configure? 
*/ + +#if defined(_MSC_VER) +#if defined(PY_LOCAL_AGGRESSIVE) +/* enable more aggressive optimization for visual studio */ +#pragma optimize("agtw", on) +#endif +/* ignore warnings if the compiler decides not to inline a function */ +#pragma warning(disable: 4710) +/* fastest possible local call under MSVC */ +#define Py_LOCAL(type) static type __fastcall +#define Py_LOCAL_INLINE(type) static __inline type __fastcall +#elif defined(USE_INLINE) +#define Py_LOCAL(type) static type +#define Py_LOCAL_INLINE(type) static inline type +#else +#define Py_LOCAL(type) static type +#define Py_LOCAL_INLINE(type) static type +#endif + #include #include /* Moved here from the math section, before extern "C" */ @@ -295,6 +332,15 @@ extern "C" { #define Py_IS_INFINITY(X) ((X) && (X)*0.5 == (X)) #endif +/* Py_IS_FINITE(X) + * Return 1 if float or double arg is neither infinite nor NAN, else 0. + * Some compilers (e.g. VisualStudio) have intrisics for this, so a special + * macro for this particular test is useful + */ +#ifndef Py_IS_FINITE +#define Py_IS_FINITE(X) (!Py_IS_INFINITY(X) && !Py_IS_NAN(X)) +#endif + /* HUGE_VAL is supposed to expand to a positive double infinity. Python * uses Py_HUGE_VAL instead because some platforms are broken in this * respect. We used to embed code in pyport.h to try to worm around that, @@ -685,4 +731,16 @@ typedef struct fd_set { #pragma error_messages (off,E_END_OF_LOOP_CODE_NOT_REACHED) #endif +/* + * Older Microsoft compilers don't support the C99 long long literal suffixes, + * so these will be defined in PC/pyconfig.h for those compilers. + */ +#ifndef Py_LL +#define Py_LL(x) x##LL +#endif + +#ifndef Py_ULL +#define Py_ULL(x) Py_LL(x##U) +#endif + #endif /* Py_PYPORT_H */ diff --git a/Include/unicodeobject.h b/Include/unicodeobject.h index 9012257..0531aed 100644 --- a/Include/unicodeobject.h +++ b/Include/unicodeobject.h @@ -184,11 +184,13 @@ typedef PY_UNICODE_TYPE Py_UNICODE; # define PyUnicode_GetMax PyUnicodeUCS2_GetMax # define PyUnicode_GetSize PyUnicodeUCS2_GetSize # define PyUnicode_Join PyUnicodeUCS2_Join +# define PyUnicode_Partition PyUnicodeUCS2_Partition +# define PyUnicode_RPartition PyUnicodeUCS2_RPartition +# define PyUnicode_RSplit PyUnicodeUCS2_RSplit # define PyUnicode_Replace PyUnicodeUCS2_Replace # define PyUnicode_Resize PyUnicodeUCS2_Resize # define PyUnicode_SetDefaultEncoding PyUnicodeUCS2_SetDefaultEncoding # define PyUnicode_Split PyUnicodeUCS2_Split -# define PyUnicode_RSplit PyUnicodeUCS2_RSplit # define PyUnicode_Splitlines PyUnicodeUCS2_Splitlines # define PyUnicode_Tailmatch PyUnicodeUCS2_Tailmatch # define PyUnicode_Translate PyUnicodeUCS2_Translate @@ -259,6 +261,9 @@ typedef PY_UNICODE_TYPE Py_UNICODE; # define PyUnicode_GetMax PyUnicodeUCS4_GetMax # define PyUnicode_GetSize PyUnicodeUCS4_GetSize # define PyUnicode_Join PyUnicodeUCS4_Join +# define PyUnicode_Partition PyUnicodeUCS4_Partition +# define PyUnicode_RPartition PyUnicodeUCS4_RPartition +# define PyUnicode_RSplit PyUnicodeUCS4_RSplit # define PyUnicode_Replace PyUnicodeUCS4_Replace # define PyUnicode_Resize PyUnicodeUCS4_Resize # define PyUnicode_SetDefaultEncoding PyUnicodeUCS4_SetDefaultEncoding @@ -352,17 +357,27 @@ typedef PY_UNICODE_TYPE Py_UNICODE; Py_UNICODE_ISDIGIT(ch) || \ Py_UNICODE_ISNUMERIC(ch)) -#define Py_UNICODE_COPY(target, source, length)\ - (memcpy((target), (source), (length)*sizeof(Py_UNICODE))) +/* memcpy has a considerable setup overhead on many platforms; use a + loop for short strings (the "16" below is pretty arbitary) */ +#define 
Py_UNICODE_COPY(target, source, length) do\ + {Py_ssize_t i_; Py_UNICODE *t_ = (target); const Py_UNICODE *s_ = (source);\ + if (length > 16)\ + memcpy(t_, s_, (length)*sizeof(Py_UNICODE));\ + else\ + for (i_ = 0; i_ < (length); i_++) t_[i_] = s_[i_];\ + } while (0) #define Py_UNICODE_FILL(target, value, length) do\ - {int i; for (i = 0; i < (length); i++) (target)[i] = (value);}\ - while (0) + {Py_ssize_t i_; Py_UNICODE *t_ = (target); Py_UNICODE v_ = (value);\ + for (i_ = 0; i_ < (length); i_++) t_[i_] = v_;\ + } while (0) -#define Py_UNICODE_MATCH(string, offset, substring)\ - ((*((string)->str + (offset)) == *((substring)->str)) &&\ - !memcmp((string)->str + (offset), (substring)->str,\ - (substring)->length*sizeof(Py_UNICODE))) +/* check if substring matches at given offset. the offset must be + valid, and the substring must not be empty */ +#define Py_UNICODE_MATCH(string, offset, substring) \ + ((*((string)->str + (offset)) == *((substring)->str)) && \ + ((*((string)->str + (offset) + (substring)->length-1) == *((substring)->str + (substring)->length-1))) && \ + !memcmp((string)->str + (offset), (substring)->str, (substring)->length*sizeof(Py_UNICODE))) #ifdef __cplusplus extern "C" { @@ -1008,6 +1023,21 @@ PyAPI_FUNC(PyObject*) PyUnicode_Splitlines( int keepends /* If true, line end markers are included */ ); +/* Partition a string using a given separator. */ + +PyAPI_FUNC(PyObject*) PyUnicode_Partition( + PyObject *s, /* String to partition */ + PyObject *sep /* String separator */ + ); + +/* Partition a string using a given separator, searching from the end of the + string. */ + +PyAPI_FUNC(PyObject*) PyUnicode_RPartition( + PyObject *s, /* String to partition */ + PyObject *sep /* String separator */ + ); + /* Split a string giving a list of Unicode strings. If sep is NULL, splitting will be done at all whitespace diff --git a/Lib/UserString.py b/Lib/UserString.py index 94eb66d..473ee88 100755 --- a/Lib/UserString.py +++ b/Lib/UserString.py @@ -102,6 +102,8 @@ class UserString: return self.__class__(self.data.ljust(width, *args)) def lower(self): return self.__class__(self.data.lower()) def lstrip(self, chars=None): return self.__class__(self.data.lstrip(chars)) + def partition(self, sep): + return self.data.partition(sep) def replace(self, old, new, maxsplit=-1): return self.__class__(self.data.replace(old, new, maxsplit)) def rfind(self, sub, start=0, end=sys.maxint): @@ -110,6 +112,8 @@ class UserString: return self.data.rindex(sub, start, end) def rjust(self, width, *args): return self.__class__(self.data.rjust(width, *args)) + def rpartition(self, sep): + return self.data.rpartition(sep) def rstrip(self, chars=None): return self.__class__(self.data.rstrip(chars)) def split(self, sep=None, maxsplit=-1): return self.data.split(sep, maxsplit) diff --git a/Lib/_LWPCookieJar.py b/Lib/_LWPCookieJar.py index 6d5ce18..2a4fa7b 100644 --- a/Lib/_LWPCookieJar.py +++ b/Lib/_LWPCookieJar.py @@ -11,10 +11,11 @@ libwww-perl, I hope. """ -import time, re, logging -from cookielib import (reraise_unmasked_exceptions, FileCookieJar, LoadError, - Cookie, MISSING_FILENAME_TEXT, join_header_words, split_header_words, - iso2time, time2isoz) +import time, re +from cookielib import (_warn_unhandled_exception, FileCookieJar, LoadError, + Cookie, MISSING_FILENAME_TEXT, + join_header_words, split_header_words, + iso2time, time2isoz) def lwp_cookie_str(cookie): """Return string representation of Cookie in an the LWP cookie file format. 
@@ -92,7 +93,8 @@ class LWPCookieJar(FileCookieJar): def _really_load(self, f, filename, ignore_discard, ignore_expires): magic = f.readline() if not re.search(self.magic_re, magic): - msg = "%s does not seem to contain cookies" % filename + msg = ("%r does not look like a Set-Cookie3 (LWP) format " + "file" % filename) raise LoadError(msg) now = time.time() @@ -159,6 +161,10 @@ class LWPCookieJar(FileCookieJar): if not ignore_expires and c.is_expired(now): continue self.set_cookie(c) - except: - reraise_unmasked_exceptions((IOError,)) - raise LoadError("invalid Set-Cookie3 format file %s" % filename) + + except IOError: + raise + except Exception: + _warn_unhandled_exception() + raise LoadError("invalid Set-Cookie3 format file %r: %r" % + (filename, line)) diff --git a/Lib/_MozillaCookieJar.py b/Lib/_MozillaCookieJar.py index 4f2f375..1776b93 100644 --- a/Lib/_MozillaCookieJar.py +++ b/Lib/_MozillaCookieJar.py @@ -1,9 +1,9 @@ """Mozilla / Netscape cookie loading / saving.""" -import re, time, logging +import re, time -from cookielib import (reraise_unmasked_exceptions, FileCookieJar, LoadError, - Cookie, MISSING_FILENAME_TEXT) +from cookielib import (_warn_unhandled_exception, FileCookieJar, LoadError, + Cookie, MISSING_FILENAME_TEXT) class MozillaCookieJar(FileCookieJar): """ @@ -51,7 +51,7 @@ class MozillaCookieJar(FileCookieJar): if not re.search(self.magic_re, magic): f.close() raise LoadError( - "%s does not look like a Netscape format cookies file" % + "%r does not look like a Netscape format cookies file" % filename) try: @@ -104,9 +104,11 @@ class MozillaCookieJar(FileCookieJar): continue self.set_cookie(c) - except: - reraise_unmasked_exceptions((IOError,)) - raise LoadError("invalid Netscape format file %s: %s" % + except IOError: + raise + except Exception: + _warn_unhandled_exception() + raise LoadError("invalid Netscape format cookies file %r: %r" % (filename, line)) def save(self, filename=None, ignore_discard=False, ignore_expires=False): diff --git a/Lib/bdb.py b/Lib/bdb.py index 08b48c3..0c56b63 100644 --- a/Lib/bdb.py +++ b/Lib/bdb.py @@ -473,7 +473,9 @@ class Breakpoint: def disable(self): self.enabled = 0 - def bpprint(self): + def bpprint(self, out=None): + if out is None: + out = sys.stdout if self.temporary: disp = 'del ' else: @@ -482,17 +484,17 @@ class Breakpoint: disp = disp + 'yes ' else: disp = disp + 'no ' - print '%-4dbreakpoint %s at %s:%d' % (self.number, disp, - self.file, self.line) + print >>out, '%-4dbreakpoint %s at %s:%d' % (self.number, disp, + self.file, self.line) if self.cond: - print '\tstop only if %s' % (self.cond,) + print >>out, '\tstop only if %s' % (self.cond,) if self.ignore: - print '\tignore next %d hits' % (self.ignore) + print >>out, '\tignore next %d hits' % (self.ignore) if (self.hits): if (self.hits > 1): ss = 's' else: ss = '' - print ('\tbreakpoint already hit %d time%s' % - (self.hits, ss)) + print >>out, ('\tbreakpoint already hit %d time%s' % + (self.hits, ss)) # -----------end of Breakpoint class---------- diff --git a/Lib/binhex.py b/Lib/binhex.py index 16985fb..4f3882a 100644 --- a/Lib/binhex.py +++ b/Lib/binhex.py @@ -217,7 +217,11 @@ class BinHex: def _writecrc(self): # XXXX Should this be here?? 
# self.crc = binascii.crc_hqx('\0\0', self.crc) - self.ofp.write(struct.pack('>h', self.crc)) + if self.crc < 0: + fmt = '>h' + else: + fmt = '>H' + self.ofp.write(struct.pack(fmt, self.crc)) self.crc = 0 def write(self, data): diff --git a/Lib/bsddb/test/test_thread.py b/Lib/bsddb/test/test_thread.py index 44e3e9c..31964f0 100644 --- a/Lib/bsddb/test/test_thread.py +++ b/Lib/bsddb/test/test_thread.py @@ -24,6 +24,12 @@ try: except ImportError: have_threads = False +try: + WindowsError +except NameError: + class WindowsError(Exception): + pass + import unittest from test_all import verbose diff --git a/Lib/calendar.py b/Lib/calendar.py index 7800aae..00948ef 100644 --- a/Lib/calendar.py +++ b/Lib/calendar.py @@ -484,9 +484,6 @@ class TimeEncoding: def __init__(self, locale): self.locale = locale - def __context__(self): - return self - def __enter__(self): self.oldlocale = locale.setlocale(locale.LC_TIME, self.locale) return locale.getlocale(locale.LC_TIME)[1] diff --git a/Lib/codeop.py b/Lib/codeop.py index daa7eb8..5616d92 100644 --- a/Lib/codeop.py +++ b/Lib/codeop.py @@ -95,15 +95,7 @@ def _maybe_compile(compiler, source, filename, symbol): if code: return code - try: - e1 = err1.__dict__ - except AttributeError: - e1 = err1 - try: - e2 = err2.__dict__ - except AttributeError: - e2 = err2 - if not code1 and e1 == e2: + if not code1 and repr(err1) == repr(err2): raise SyntaxError, err1 def _compile(source, filename, symbol): diff --git a/Lib/compiler/pycodegen.py b/Lib/compiler/pycodegen.py index aac2dda..c093128 100644 --- a/Lib/compiler/pycodegen.py +++ b/Lib/compiler/pycodegen.py @@ -831,8 +831,6 @@ class CodeGenerator: self.__with_count += 1 self.set_lineno(node) self.visit(node.expr) - self.emit('LOAD_ATTR', '__context__') - self.emit('CALL_FUNCTION', 0) self.emit('DUP_TOP') self.emit('LOAD_ATTR', '__exit__') self._implicitNameOp('STORE', exitvar) diff --git a/Lib/compiler/transformer.py b/Lib/compiler/transformer.py index 604c57a..96bcce3 100644 --- a/Lib/compiler/transformer.py +++ b/Lib/compiler/transformer.py @@ -841,17 +841,15 @@ class Transformer: names.append(self.com_fpdef(node)) i = i + 1 - if i >= len(nodelist): - break - - if nodelist[i][0] == token.EQUAL: + if i < len(nodelist) and nodelist[i][0] == token.EQUAL: defaults.append(self.com_node(nodelist[i + 1])) i = i + 2 elif len(defaults): - # XXX This should be a syntax error. - # Treat "(a=1, b)" as "(a=1, b=None)" - defaults.append(Const(None)) + # we have already seen an argument with default, but here + # came one without + raise SyntaxError, "non-default argument follows default argument" + # skip the comma i = i + 1 return names, defaults, flags diff --git a/Lib/contextlib.py b/Lib/contextlib.py index aa5335d..a807c42 100644 --- a/Lib/contextlib.py +++ b/Lib/contextlib.py @@ -10,9 +10,6 @@ class GeneratorContextManager(object): def __init__(self, gen): self.gen = gen - def __context__(self): - return self - def __enter__(self): try: return self.gen.next() @@ -88,7 +85,7 @@ def contextmanager(func): @contextmanager -def nested(*contexts): +def nested(*managers): """Support multiple context managers in a single with-statement. 
Code like this: @@ -109,8 +106,7 @@ def nested(*contexts): exc = (None, None, None) try: try: - for context in contexts: - mgr = context.__context__() + for mgr in managers: exit = mgr.__exit__ enter = mgr.__enter__ vars.append(enter()) @@ -127,12 +123,14 @@ def nested(*contexts): except: exc = sys.exc_info() if exc != (None, None, None): - raise + # Don't rely on sys.exc_info() still containing + # the right information. Another exception may + # have been raised and caught by an exit method + raise exc[0], exc[1], exc[2] -@contextmanager -def closing(thing): - """Context manager to automatically close something at the end of a block. +class closing(object): + """Context to automatically close something at the end of a block. Code like this: @@ -148,7 +146,9 @@ def closing(thing): f.close() """ - try: - yield thing - finally: - thing.close() + def __init__(self, thing): + self.thing = thing + def __enter__(self): + return self.thing + def __exit__(self, *exc_info): + self.thing.close() diff --git a/Lib/cookielib.py b/Lib/cookielib.py index f0a89a5..e8fee0e 100644 --- a/Lib/cookielib.py +++ b/Lib/cookielib.py @@ -7,9 +7,9 @@ Docstrings, comments and debug strings in this code refer to the attributes of the HTTP cookie system as cookie-attributes, to distinguish them clearly from Python attributes. -Class diagram (note that the classes which do not derive from -FileCookieJar are not distributed with the Python standard library, but -are available from http://wwwsearch.sf.net/): +Class diagram (note that BSDDBCookieJar and the MSIE* classes are not +distributed with the Python standard library, but are available from +http://wwwsearch.sf.net/): CookieJar____ / \ \ @@ -25,7 +25,10 @@ are available from http://wwwsearch.sf.net/): """ -import sys, re, urlparse, copy, time, urllib, logging +__all__ = ['Cookie', 'CookieJar', 'CookiePolicy', 'DefaultCookiePolicy', + 'FileCookieJar', 'LWPCookieJar', 'LoadError', 'MozillaCookieJar'] + +import re, urlparse, copy, time, urllib try: import threading as _threading except ImportError: @@ -33,21 +36,27 @@ except ImportError: import httplib # only for the default HTTP port from calendar import timegm -debug = logging.getLogger("cookielib").debug +debug = False # set to True to enable debugging via the logging module +logger = None + +def _debug(*args): + if not debug: + return + global logger + if not logger: + import logging + logger = logging.getLogger("cookielib") + return logger.debug(*args) + DEFAULT_HTTP_PORT = str(httplib.HTTP_PORT) MISSING_FILENAME_TEXT = ("a filename was not supplied (nor was the CookieJar " "instance initialised with one)") -def reraise_unmasked_exceptions(unmasked=()): +def _warn_unhandled_exception(): # There are a few catch-all except: statements in this module, for - # catching input that's bad in unexpected ways. - # This function re-raises some exceptions we don't want to trap. - unmasked = unmasked + (KeyboardInterrupt, SystemExit, MemoryError) - etype = sys.exc_info()[0] - if issubclass(etype, unmasked): - raise - # swallowed an exception + # catching input that's bad in unexpected ways. Warn if any + # exceptions are caught there. import warnings, traceback, StringIO f = StringIO.StringIO() traceback.print_exc(None, f) @@ -613,7 +622,7 @@ def request_port(request): try: int(port) except ValueError: - debug("nonnumeric port: '%s'", port) + _debug("nonnumeric port: '%s'", port) return None else: port = DEFAULT_HTTP_PORT @@ -904,7 +913,7 @@ class DefaultCookiePolicy(CookiePolicy): strict about which cookies to accept). 
""" - debug(" - checking cookie %s=%s", cookie.name, cookie.value) + _debug(" - checking cookie %s=%s", cookie.name, cookie.value) assert cookie.name is not None @@ -920,25 +929,25 @@ class DefaultCookiePolicy(CookiePolicy): if cookie.version is None: # Version is always set to 0 by parse_ns_headers if it's a Netscape # cookie, so this must be an invalid RFC 2965 cookie. - debug(" Set-Cookie2 without version attribute (%s=%s)", - cookie.name, cookie.value) + _debug(" Set-Cookie2 without version attribute (%s=%s)", + cookie.name, cookie.value) return False if cookie.version > 0 and not self.rfc2965: - debug(" RFC 2965 cookies are switched off") + _debug(" RFC 2965 cookies are switched off") return False elif cookie.version == 0 and not self.netscape: - debug(" Netscape cookies are switched off") + _debug(" Netscape cookies are switched off") return False return True def set_ok_verifiability(self, cookie, request): if request.is_unverifiable() and is_third_party(request): if cookie.version > 0 and self.strict_rfc2965_unverifiable: - debug(" third-party RFC 2965 cookie during " + _debug(" third-party RFC 2965 cookie during " "unverifiable transaction") return False elif cookie.version == 0 and self.strict_ns_unverifiable: - debug(" third-party Netscape cookie during " + _debug(" third-party Netscape cookie during " "unverifiable transaction") return False return True @@ -948,7 +957,7 @@ class DefaultCookiePolicy(CookiePolicy): # servers that know both V0 and V1 protocols. if (cookie.version == 0 and self.strict_ns_set_initial_dollar and cookie.name.startswith("$")): - debug(" illegal name (starts with '$'): '%s'", cookie.name) + _debug(" illegal name (starts with '$'): '%s'", cookie.name) return False return True @@ -958,33 +967,36 @@ class DefaultCookiePolicy(CookiePolicy): if ((cookie.version > 0 or (cookie.version == 0 and self.strict_ns_set_path)) and not req_path.startswith(cookie.path)): - debug(" path attribute %s is not a prefix of request " - "path %s", cookie.path, req_path) + _debug(" path attribute %s is not a prefix of request " + "path %s", cookie.path, req_path) return False return True def set_ok_domain(self, cookie, request): if self.is_blocked(cookie.domain): - debug(" domain %s is in user block-list", cookie.domain) + _debug(" domain %s is in user block-list", cookie.domain) return False if self.is_not_allowed(cookie.domain): - debug(" domain %s is not in user allow-list", cookie.domain) + _debug(" domain %s is not in user allow-list", cookie.domain) return False if cookie.domain_specified: req_host, erhn = eff_request_host(request) domain = cookie.domain if self.strict_domain and (domain.count(".") >= 2): + # XXX This should probably be compared with the Konqueror + # (kcookiejar.cpp) and Mozilla implementations, but it's a + # losing battle. 
i = domain.rfind(".") j = domain.rfind(".", 0, i) if j == 0: # domain like .foo.bar tld = domain[i+1:] sld = domain[j+1:i] - if (sld.lower() in ( - "co", "ac", - "com", "edu", "org", "net", "gov", "mil", "int") and - len(tld) == 2): + if sld.lower() in ("co", "ac", "com", "edu", "org", "net", + "gov", "mil", "int", "aero", "biz", "cat", "coop", + "info", "jobs", "mobi", "museum", "name", "pro", + "travel", "eu") and len(tld) == 2: # domain like .co.uk - debug(" country-code second level domain %s", domain) + _debug(" country-code second level domain %s", domain) return False if domain.startswith("."): undotted_domain = domain[1:] @@ -992,30 +1004,30 @@ class DefaultCookiePolicy(CookiePolicy): undotted_domain = domain embedded_dots = (undotted_domain.find(".") >= 0) if not embedded_dots and domain != ".local": - debug(" non-local domain %s contains no embedded dot", - domain) + _debug(" non-local domain %s contains no embedded dot", + domain) return False if cookie.version == 0: if (not erhn.endswith(domain) and (not erhn.startswith(".") and not ("."+erhn).endswith(domain))): - debug(" effective request-host %s (even with added " - "initial dot) does not end end with %s", - erhn, domain) + _debug(" effective request-host %s (even with added " + "initial dot) does not end end with %s", + erhn, domain) return False if (cookie.version > 0 or (self.strict_ns_domain & self.DomainRFC2965Match)): if not domain_match(erhn, domain): - debug(" effective request-host %s does not domain-match " - "%s", erhn, domain) + _debug(" effective request-host %s does not domain-match " + "%s", erhn, domain) return False if (cookie.version > 0 or (self.strict_ns_domain & self.DomainStrictNoDots)): host_prefix = req_host[:-len(domain)] if (host_prefix.find(".") >= 0 and not IPV4_RE.search(req_host)): - debug(" host prefix %s for domain %s contains a dot", - host_prefix, domain) + _debug(" host prefix %s for domain %s contains a dot", + host_prefix, domain) return False return True @@ -1030,13 +1042,13 @@ class DefaultCookiePolicy(CookiePolicy): try: int(p) except ValueError: - debug(" bad port %s (not numeric)", p) + _debug(" bad port %s (not numeric)", p) return False if p == req_port: break else: - debug(" request port (%s) not found in %s", - req_port, cookie.port) + _debug(" request port (%s) not found in %s", + req_port, cookie.port) return False return True @@ -1049,7 +1061,7 @@ class DefaultCookiePolicy(CookiePolicy): """ # Path has already been checked by .path_return_ok(), and domain # blocking done by .domain_return_ok(). 
- debug(" - checking cookie %s=%s", cookie.name, cookie.value) + _debug(" - checking cookie %s=%s", cookie.name, cookie.value) for n in "version", "verifiability", "secure", "expires", "port", "domain": fn_name = "return_ok_"+n @@ -1060,34 +1072,34 @@ class DefaultCookiePolicy(CookiePolicy): def return_ok_version(self, cookie, request): if cookie.version > 0 and not self.rfc2965: - debug(" RFC 2965 cookies are switched off") + _debug(" RFC 2965 cookies are switched off") return False elif cookie.version == 0 and not self.netscape: - debug(" Netscape cookies are switched off") + _debug(" Netscape cookies are switched off") return False return True def return_ok_verifiability(self, cookie, request): if request.is_unverifiable() and is_third_party(request): if cookie.version > 0 and self.strict_rfc2965_unverifiable: - debug(" third-party RFC 2965 cookie during unverifiable " - "transaction") + _debug(" third-party RFC 2965 cookie during unverifiable " + "transaction") return False elif cookie.version == 0 and self.strict_ns_unverifiable: - debug(" third-party Netscape cookie during unverifiable " - "transaction") + _debug(" third-party Netscape cookie during unverifiable " + "transaction") return False return True def return_ok_secure(self, cookie, request): if cookie.secure and request.get_type() != "https": - debug(" secure cookie with non-secure request") + _debug(" secure cookie with non-secure request") return False return True def return_ok_expires(self, cookie, request): if cookie.is_expired(self._now): - debug(" cookie expired") + _debug(" cookie expired") return False return True @@ -1100,8 +1112,8 @@ class DefaultCookiePolicy(CookiePolicy): if p == req_port: break else: - debug(" request port %s does not match cookie port %s", - req_port, cookie.port) + _debug(" request port %s does not match cookie port %s", + req_port, cookie.port) return False return True @@ -1113,17 +1125,17 @@ class DefaultCookiePolicy(CookiePolicy): if (cookie.version == 0 and (self.strict_ns_domain & self.DomainStrictNonDomain) and not cookie.domain_specified and domain != erhn): - debug(" cookie with unspecified domain does not string-compare " - "equal to request domain") + _debug(" cookie with unspecified domain does not string-compare " + "equal to request domain") return False if cookie.version > 0 and not domain_match(erhn, domain): - debug(" effective request-host name %s does not domain-match " - "RFC 2965 cookie domain %s", erhn, domain) + _debug(" effective request-host name %s does not domain-match " + "RFC 2965 cookie domain %s", erhn, domain) return False if cookie.version == 0 and not ("."+erhn).endswith(domain): - debug(" request-host %s does not match Netscape cookie domain " - "%s", req_host, domain) + _debug(" request-host %s does not match Netscape cookie domain " + "%s", req_host, domain) return False return True @@ -1136,24 +1148,24 @@ class DefaultCookiePolicy(CookiePolicy): if not erhn.startswith("."): erhn = "."+erhn if not (req_host.endswith(domain) or erhn.endswith(domain)): - #debug(" request domain %s does not match cookie domain %s", - # req_host, domain) + #_debug(" request domain %s does not match cookie domain %s", + # req_host, domain) return False if self.is_blocked(domain): - debug(" domain %s is in user block-list", domain) + _debug(" domain %s is in user block-list", domain) return False if self.is_not_allowed(domain): - debug(" domain %s is not in user allow-list", domain) + _debug(" domain %s is not in user allow-list", domain) return False return True def 
path_return_ok(self, path, request): - debug("- checking cookie path=%s", path) + _debug("- checking cookie path=%s", path) req_path = request_path(request) if not req_path.startswith(path): - debug(" %s does not path-match %s", req_path, path) + _debug(" %s does not path-match %s", req_path, path) return False return True @@ -1215,7 +1227,7 @@ class CookieJar: cookies = [] if not self._policy.domain_return_ok(domain, request): return [] - debug("Checking %s for cookies to return", domain) + _debug("Checking %s for cookies to return", domain) cookies_by_path = self._cookies[domain] for path in cookies_by_path.keys(): if not self._policy.path_return_ok(path, request): @@ -1223,9 +1235,9 @@ class CookieJar: cookies_by_name = cookies_by_path[path] for cookie in cookies_by_name.values(): if not self._policy.return_ok(cookie, request): - debug(" not returning cookie") + _debug(" not returning cookie") continue - debug(" it's a match") + _debug(" it's a match") cookies.append(cookie) return cookies @@ -1302,7 +1314,7 @@ class CookieJar: The Cookie2 header is also added unless policy.hide_cookie2 is true. """ - debug("add_cookie_header") + _debug("add_cookie_header") self._cookies_lock.acquire() self._policy._now = self._now = int(time.time()) @@ -1379,7 +1391,7 @@ class CookieJar: continue if k == "domain": if v is None: - debug(" missing value for domain attribute") + _debug(" missing value for domain attribute") bad_cookie = True break # RFC 2965 section 3.3.3 @@ -1389,7 +1401,7 @@ class CookieJar: # Prefer max-age to expires (like Mozilla) continue if v is None: - debug(" missing or invalid value for expires " + _debug(" missing or invalid value for expires " "attribute: treating as session cookie") continue if k == "max-age": @@ -1397,7 +1409,7 @@ class CookieJar: try: v = int(v) except ValueError: - debug(" missing or invalid (non-numeric) value for " + _debug(" missing or invalid (non-numeric) value for " "max-age attribute") bad_cookie = True break @@ -1410,7 +1422,7 @@ class CookieJar: if (k in value_attrs) or (k in boolean_attrs): if (v is None and k not in ("port", "comment", "commenturl")): - debug(" missing value for %s attribute" % k) + _debug(" missing value for %s attribute" % k) bad_cookie = True break standard[k] = v @@ -1496,8 +1508,8 @@ class CookieJar: self.clear(domain, path, name) except KeyError: pass - debug("Expiring cookie, domain='%s', path='%s', name='%s'", - domain, path, name) + _debug("Expiring cookie, domain='%s', path='%s', name='%s'", + domain, path, name) return None return Cookie(version, @@ -1552,8 +1564,8 @@ class CookieJar: try: cookies = self._cookies_from_attrs_set( split_header_words(rfc2965_hdrs), request) - except: - reraise_unmasked_exceptions() + except Exception: + _warn_unhandled_exception() cookies = [] if ns_hdrs and netscape: @@ -1561,8 +1573,8 @@ class CookieJar: # RFC 2109 and Netscape cookies ns_cookies = self._cookies_from_attrs_set( parse_ns_headers(ns_hdrs), request) - except: - reraise_unmasked_exceptions() + except Exception: + _warn_unhandled_exception() ns_cookies = [] self._process_rfc2109_cookies(ns_cookies) @@ -1612,13 +1624,13 @@ class CookieJar: def extract_cookies(self, response, request): """Extract cookies from response, where allowable given the request.""" - debug("extract_cookies: %s", response.info()) + _debug("extract_cookies: %s", response.info()) self._cookies_lock.acquire() self._policy._now = self._now = int(time.time()) for cookie in self.make_cookies(response, request): if self._policy.set_ok(cookie, request): - 
debug(" setting cookie: %s", cookie) + _debug(" setting cookie: %s", cookie) self.set_cookie(cookie) self._cookies_lock.release() diff --git a/Lib/ctypes/__init__.py b/Lib/ctypes/__init__.py index 28ac180..f2ddbaa 100644 --- a/Lib/ctypes/__init__.py +++ b/Lib/ctypes/__init__.py @@ -3,7 +3,7 @@ import os as _os, sys as _sys from itertools import chain as _chain -__version__ = "0.9.9.4" +__version__ = "0.9.9.6" from _ctypes import Union, Structure, Array from _ctypes import _Pointer @@ -23,8 +23,6 @@ if _os.name in ("nt", "ce"): from _ctypes import FUNCFLAG_CDECL as _FUNCFLAG_CDECL, \ FUNCFLAG_PYTHONAPI as _FUNCFLAG_PYTHONAPI -from ctypes._loader import LibraryLoader - """ WINOLEAPI -> HRESULT WINOLEAPI_(type) @@ -72,9 +70,11 @@ def CFUNCTYPE(restype, *argtypes): The function prototype can be called in three ways to create a callable object: - prototype(funct) - returns a C callable function calling funct - prototype(vtbl_index, method_name[, paramflags]) - a Python callable that calls a COM method - prototype(funct_name, dll[, paramflags]) - a Python callable that calls an exported function in a dll + prototype(integer address) -> foreign function + prototype(callable) -> create and return a C callable function from callable + prototype(integer index, method name[, paramflags]) -> foreign function calling a COM method + prototype((ordinal number, dll object)[, paramflags]) -> foreign function exported by ordinal + prototype((function name, dll object)[, paramflags]) -> foreign function exported by name """ try: return _c_functype_cache[(restype, argtypes)] @@ -352,6 +352,23 @@ if _os.name in ("nt", "ce"): _flags_ = _FUNCFLAG_STDCALL _restype_ = HRESULT +class LibraryLoader(object): + def __init__(self, dlltype): + self._dlltype = dlltype + + def __getattr__(self, name): + if name[0] == '_': + raise AttributeError(name) + dll = self._dlltype(name) + setattr(self, name, dll) + return dll + + def __getitem__(self, name): + return getattr(self, name) + + def LoadLibrary(self, name): + return self._dlltype(name) + cdll = LibraryLoader(CDLL) pydll = LibraryLoader(PyDLL) @@ -402,7 +419,12 @@ def PYFUNCTYPE(restype, *argtypes): _restype_ = restype _flags_ = _FUNCFLAG_CDECL | _FUNCFLAG_PYTHONAPI return CFunctionType -cast = PYFUNCTYPE(py_object, c_void_p, py_object)(_cast_addr) +_cast = PYFUNCTYPE(py_object, c_void_p, py_object)(_cast_addr) + +def cast(obj, typ): + result = _cast(obj, typ) + result.__keepref = obj + return result _string_at = CFUNCTYPE(py_object, c_void_p, c_int)(_string_at_addr) def string_at(ptr, size=0): diff --git a/Lib/ctypes/_loader.py b/Lib/ctypes/_loader.py deleted file mode 100644 index 7a48c1c..0000000 --- a/Lib/ctypes/_loader.py +++ /dev/null @@ -1,262 +0,0 @@ -import sys, os -import ctypes - -if os.name in ("nt", "ce"): - from _ctypes import LoadLibrary as dlopen -else: - from _ctypes import dlopen -from _ctypes import RTLD_LOCAL, RTLD_GLOBAL - -# _findLib(name) returns an iterable of possible names for a library. 
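The ctypes/__init__.py changes above replace the old _loader machinery (whose deletion follows) with the small LibraryLoader class and wrap _cast so that its result keeps the object it was created from. A minimal usage sketch, with "libm.so.6" as an assumed library name rather than anything the patch references:

    from ctypes import cdll, CDLL, cast, POINTER, c_int, addressof

    # cdll is LibraryLoader(CDLL); attribute and item access construct CDLL(name)
    # and cache it on the loader, while LoadLibrary(name) always builds a new one.
    libm = cdll.LoadLibrary("libm.so.6")
    libm_again = cdll["libm.so.6"]

    arr = (c_int * 3)(1, 2, 3)
    p = cast(arr, POINTER(c_int))             # result keeps a reference to arr alive
    q = cast(addressof(arr), POINTER(c_int))  # plain integer addresses also work now
    assert [p[i] for i in range(3)] == [1, 2, 3]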
-if os.name in ("nt", "ce"): - def _findLib(name): - return [name] - -if os.name == "posix" and sys.platform == "darwin": - from ctypes.macholib.dyld import dyld_find as _dyld_find - def _findLib(name): - possible = ['lib%s.dylib' % name, - '%s.dylib' % name, - '%s.framework/%s' % (name, name)] - for name in possible: - try: - return [_dyld_find(name)] - except ValueError: - continue - return [] - -elif os.name == "posix": - # Andreas Degert's find functions, using gcc, /sbin/ldconfig, objdump - import re, tempfile - - def _findLib_gcc(name): - expr = '[^\(\)\s]*lib%s\.[^\(\)\s]*' % name - cmd = 'if type gcc &>/dev/null; then CC=gcc; else CC=cc; fi;' \ - '$CC -Wl,-t -o /dev/null 2>&1 -l' + name - try: - fdout, outfile = tempfile.mkstemp() - fd = os.popen(cmd) - trace = fd.read() - err = fd.close() - finally: - try: - os.unlink(outfile) - except OSError, e: - if e.errno != errno.ENOENT: - raise - res = re.search(expr, trace) - if not res: - return None - return res.group(0) - - def _findLib_ld(name): - expr = '/[^\(\)\s]*lib%s\.[^\(\)\s]*' % name - res = re.search(expr, os.popen('/sbin/ldconfig -p 2>/dev/null').read()) - if not res: - cmd = 'ldd %s 2>/dev/null' % sys.executable - res = re.search(expr, os.popen(cmd).read()) - if not res: - return None - return res.group(0) - - def _get_soname(f): - cmd = "objdump -p -j .dynamic 2>/dev/null " + f - res = re.search(r'\sSONAME\s+([^\s]+)', os.popen(cmd).read()) - if not res: - return f - return res.group(1) - - def _findLib(name): - lib = _findLib_ld(name) - if not lib: - lib = _findLib_gcc(name) - if not lib: - return [name] - return [_get_soname(lib)] - -class LibraryLoader(object): - """Loader for shared libraries. - - Shared libraries are accessed when compiling/linking a program, - and when the program is run. The purpose of the 'find' method is - to locate a library similar to what the compiler does (on machines - with several versions of a shared library the most recent should - be loaded), while 'load' acts like when the program is run, and - uses the runtime loader directly. 'load_version' works like - 'load' but tries to be platform independend (for cases where this - makes sense). Loading via attribute access is a shorthand - notation especially useful for interactive use.""" - - - def __init__(self, dlltype, mode=RTLD_LOCAL): - """Create a library loader instance which loads libraries by - creating an instance of 'dlltype'. 'mode' can be RTLD_LOCAL - or RTLD_GLOBAL, it is ignored on Windows. - """ - self._dlltype = dlltype - self._mode = mode - - def load(self, libname, mode=None): - """Load and return the library with the given libname. On - most systems 'libname' is the filename of the shared library; - when it's not a pathname it will be searched in a system - dependend list of locations (on many systems additional search - paths can be specified by an environment variable). Sometimes - the extension (like '.dll' on Windows) can be omitted. - - 'mode' allows to override the default flags specified in the - constructor, it is ignored on Windows. - """ - if mode is None: - mode = self._mode - return self._load(libname, mode) - - def load_library(self, libname, mode=None): - """Load and return the library with the given libname. This - method passes the specified 'libname' directly to the - platform's library loading function (dlopen, or LoadLibrary). - - 'mode' allows to override the default flags specified in the - constructor, it is ignored on Windows. 
- """ - if mode is None: - mode = self._mode - return self._dlltype(libname, mode) - - # alias name for backwards compatiblity - LoadLibrary = load_library - - # Helpers for load and load_version - assembles a filename from name and filename - if os.name in ("nt", "ce"): - # Windows (XXX what about cygwin?) - def _plat_load_version(self, name, version, mode): - # not sure if this makes sense - if version is not None: - return self.load(name + version, mode) - return self.load(name, mode) - - _load = load_library - - elif os.name == "posix" and sys.platform == "darwin": - # Mac OS X - def _plat_load_version(self, name, version, mode): - if version: - return self.load("lib%s.%s.dylib" % (name, version), mode) - return self.load("lib%s.dylib" % name, mode) - - def _load(self, libname, mode): - # _dyld_find raises ValueError, convert this into OSError - try: - pathname = _dyld_find(libname) - except ValueError: - raise OSError("Library %s could not be found" % libname) - return self.load_library(pathname, mode) - - elif os.name == "posix": - # Posix - def _plat_load_version(self, name, version, mode): - if version: - return self.load("lib%s.so.%s" % (name, version), mode) - return self.load("lib%s.so" % name, mode) - - _load = load_library - - else: - # Others, TBD - def _plat_load_version(self, name, version, mode=None): - return self.load(name, mode) - - _load = load_library - - def load_version(self, name, version=None, mode=None): - """Build a (system dependend) filename from 'name' and - 'version', then load and return it. 'name' is the library - name without any prefix like 'lib' and suffix like '.so' or - '.dylib'. This method should be used if a library is - available on different platforms, using the particular naming - convention of each platform. - - 'mode' allows to override the default flags specified in the - constructor, it is ignored on Windows. - """ - return self._plat_load_version(name, version, mode) - - def find(self, name, mode=None): - """Try to find a library, load and return it. 'name' is the - library name without any prefix like 'lib', suffix like '.so', - '.dylib' or version number (this is the form used for the - posix linker option '-l'). - - 'mode' allows to override the default flags specified in the - constructor, it is ignored on Windows. - - On windows, this method does the same as the 'load' method. - - On other platforms, this function might call other programs - like the compiler to find the library. When using ctypes to - write a shared library wrapping, consider using .load() or - .load_version() instead. - """ - for libname in _findLib(name): - try: - return self.load(libname, mode) - except OSError: - continue - raise OSError("Library %r not found" % name) - - def __getattr__(self, name): - """Load a library via attribute access. Calls - .load_version(). 
The result is cached.""" - if name.startswith("_"): - raise AttributeError(name) - dll = self.load_version(name) - setattr(self, name, dll) - return dll - -################################################################ -# test code - -class CDLL(object): - def __init__(self, name, mode): - self._handle = dlopen(name, mode) - self._name = name - - def __repr__(self): - return "<%s '%s', handle %x at %x>" % \ - (self.__class__.__name__, self._name, - (self._handle & (sys.maxint*2 + 1)), - id(self)) - -cdll = LibraryLoader(CDLL) - -def test(): - if os.name == "nt": - print cdll.msvcrt - print cdll.load("msvcrt") - # load_version looks more like an artefact: - print cdll.load_version("msvcr", "t") - print cdll.find("msvcrt") - - if os.name == "posix": - # find and load_version - print cdll.find("m") - print cdll.find("c") - print cdll.load_version("crypto", "0.9.7") - - # getattr - print cdll.m - print cdll.bz2 - - # load - if sys.platform == "darwin": - print cdll.load("libm.dylib") - print cdll.load("libcrypto.dylib") - print cdll.load("libSystem.dylib") - print cdll.load("System.framework/System") - else: - print cdll.load("libm.so") - print cdll.load("libcrypt.so") - print cdll.find("crypt") - -if __name__ == "__main__": - test() diff --git a/Lib/ctypes/test/test_bitfields.py b/Lib/ctypes/test/test_bitfields.py index 54ea839..92c4669 100644 --- a/Lib/ctypes/test/test_bitfields.py +++ b/Lib/ctypes/test/test_bitfields.py @@ -24,7 +24,7 @@ class BITS(Structure): ("R", c_short, 6), ("S", c_short, 7)] -func = cdll.load(_ctypes_test.__file__).unpack_bitfields +func = CDLL(_ctypes_test.__file__).unpack_bitfields func.argtypes = POINTER(BITS), c_char ##for n in "ABCDEFGHIMNOPQRS": diff --git a/Lib/ctypes/test/test_byteswap.py b/Lib/ctypes/test/test_byteswap.py index d0ada40..1f68992 100644 --- a/Lib/ctypes/test/test_byteswap.py +++ b/Lib/ctypes/test/test_byteswap.py @@ -15,7 +15,7 @@ def bin(s): class Test(unittest.TestCase): def X_test(self): - print sys.byteorder + print >> sys.stderr, sys.byteorder for i in range(32): bits = BITS() setattr(bits, "i%s" % i, 1) diff --git a/Lib/ctypes/test/test_callbacks.py b/Lib/ctypes/test/test_callbacks.py index a6ee150..9d96a54 100644 --- a/Lib/ctypes/test/test_callbacks.py +++ b/Lib/ctypes/test/test_callbacks.py @@ -115,7 +115,7 @@ class SampleCallbacksTestCase(unittest.TestCase): def test_integrate(self): # Derived from some then non-working code, posted by David Foster - dll = cdll.load(_ctypes_test.__file__) + dll = CDLL(_ctypes_test.__file__) # The function prototype called by 'integrate': double func(double); CALLBACK = CFUNCTYPE(c_double, c_double) diff --git a/Lib/ctypes/test/test_cast.py b/Lib/ctypes/test/test_cast.py index 6f25feb..821ce3f 100644 --- a/Lib/ctypes/test/test_cast.py +++ b/Lib/ctypes/test/test_cast.py @@ -23,33 +23,24 @@ class Test(unittest.TestCase): def test_address2pointer(self): array = (c_int * 3)(42, 17, 2) - # on AMD64, sizeof(int) == 4 and sizeof(void *) == 8. - # By default, cast would convert a Python int (or long) into - # a C int, which would be too short to represent a pointer - # on this platform. - - # So we have to wrap the address into a c_void_p for this to work. - # - # XXX Better would be to hide the differences in the cast function. 
address = addressof(array) ptr = cast(c_void_p(address), POINTER(c_int)) self.failUnlessEqual([ptr[i] for i in range(3)], [42, 17, 2]) + ptr = cast(address, POINTER(c_int)) + self.failUnlessEqual([ptr[i] for i in range(3)], [42, 17, 2]) + def test_ptr2array(self): array = (c_int * 3)(42, 17, 2) -## # Hm, already tested above. -## ptr = cast(array, POINTER(c_int)) -## self.failUnlessEqual([ptr[i] for i in range(3)], [42, 17, 2]) + from sys import getrefcount -# print cast(addressof(array), c_int * 3)[:] -## ptr = cast(addressof(ptr) - -## print ptr[0], ptr[1], ptr[2] -## ptr = POINTER(c_int).from_address(addressof(array)) -## # XXX this crashes: -## print ptr[0], ptr[1], ptr[2] + before = getrefcount(array) + ptr = cast(array, POINTER(c_int)) + self.failUnlessEqual(getrefcount(array), before + 1) + del ptr + self.failUnlessEqual(getrefcount(array), before) if __name__ == "__main__": unittest.main() diff --git a/Lib/ctypes/test/test_cfuncs.py b/Lib/ctypes/test/test_cfuncs.py index 6e0798d..9d8db1f 100644 --- a/Lib/ctypes/test/test_cfuncs.py +++ b/Lib/ctypes/test/test_cfuncs.py @@ -7,7 +7,7 @@ from ctypes import * import _ctypes_test class CFunctions(unittest.TestCase): - _dll = cdll.load(_ctypes_test.__file__) + _dll = CDLL(_ctypes_test.__file__) def S(self): return c_longlong.in_dll(self._dll, "last_tf_arg_s").value diff --git a/Lib/ctypes/test/test_checkretval.py b/Lib/ctypes/test/test_checkretval.py index 344d0bc..e055c49 100644 --- a/Lib/ctypes/test/test_checkretval.py +++ b/Lib/ctypes/test/test_checkretval.py @@ -14,7 +14,7 @@ class Test(unittest.TestCase): def test_checkretval(self): import _ctypes_test - dll = cdll.load(_ctypes_test.__file__) + dll = CDLL(_ctypes_test.__file__) self.failUnlessEqual(42, dll._testfunc_p_p(42)) dll._testfunc_p_p.restype = CHECKED diff --git a/Lib/ctypes/test/test_find.py b/Lib/ctypes/test/test_find.py new file mode 100644 index 0000000..810467f --- /dev/null +++ b/Lib/ctypes/test/test_find.py @@ -0,0 +1,104 @@ +import unittest +import os, sys +from ctypes import * +from ctypes.util import find_library +from ctypes.test import is_resource_enabled + +if sys.platform == "win32": + lib_gl = find_library("OpenGL32") + lib_glu = find_library("Glu32") + lib_glut = find_library("glut32") + lib_gle = None +elif sys.platform == "darwin": + lib_gl = lib_glu = find_library("OpenGL") + lib_glut = find_library("GLUT") + lib_gle = None +else: + lib_gl = find_library("GL") + lib_glu = find_library("GLU") + lib_glut = find_library("glut") + lib_gle = find_library("gle") + +## print, for debugging +if is_resource_enabled("printing"): + if lib_gl or lib_glu or lib_glut or lib_gle: + print "OpenGL libraries:" + for item in (("GL", lib_gl), + ("GLU", lib_glu), + ("glut", lib_glut), + ("gle", lib_gle)): + print "\t", item + + +# On some systems, loading the OpenGL libraries needs the RTLD_GLOBAL mode. +class Test_OpenGL_libs(unittest.TestCase): + def setUp(self): + self.gl = self.glu = self.gle = self.glut = None + if lib_gl: + self.gl = CDLL(lib_gl, mode=RTLD_GLOBAL) + if lib_glu: + self.glu = CDLL(lib_glu, RTLD_GLOBAL) + if lib_glut: + # On some systems, additional libraries seem to be + # required, loading glut fails with + # "OSError: /usr/lib/libglut.so.3: undefined symbol: XGetExtensionVersion" + # I cannot figure out how to repair the test on these + # systems (red hat), so we ignore it when the glut or gle + # libraries cannot be loaded. 
See also: + # https://sourceforge.net/tracker/?func=detail&atid=105470&aid=1478253&group_id=5470 + # http://mail.python.org/pipermail/python-dev/2006-May/064789.html + try: + self.glut = CDLL(lib_glut) + except OSError: + pass + if lib_gle: + try: + self.gle = CDLL(lib_gle) + except OSError: + pass + + if lib_gl: + def test_gl(self): + if self.gl: + self.gl.glClearIndex + + if lib_glu: + def test_glu(self): + if self.glu: + self.glu.gluBeginCurve + + if lib_glut: + def test_glut(self): + if self.glut: + self.glut.glutWireTetrahedron + + if lib_gle: + def test_gle(self): + if self.gle: + self.gle.gleGetJoinStyle + +##if os.name == "posix" and sys.platform != "darwin": + +## # On platforms where the default shared library suffix is '.so', +## # at least some libraries can be loaded as attributes of the cdll +## # object, since ctypes now tries loading the lib again +## # with '.so' appended of the first try fails. +## # +## # Won't work for libc, unfortunately. OTOH, it isn't +## # needed for libc since this is already mapped into the current +## # process (?) +## # +## # On MAC OSX, it won't work either, because dlopen() needs a full path, +## # and the default suffix is either none or '.dylib'. + +## class LoadLibs(unittest.TestCase): +## def test_libm(self): +## import math +## libm = cdll.libm +## sqrt = libm.sqrt +## sqrt.argtypes = (c_double,) +## sqrt.restype = c_double +## self.failUnlessEqual(sqrt(2), math.sqrt(2)) + +if __name__ == "__main__": + unittest.main() diff --git a/Lib/ctypes/test/test_funcptr.py b/Lib/ctypes/test/test_funcptr.py index 89b93c4..7ea873f 100644 --- a/Lib/ctypes/test/test_funcptr.py +++ b/Lib/ctypes/test/test_funcptr.py @@ -8,7 +8,7 @@ except NameError: WINFUNCTYPE = CFUNCTYPE import _ctypes_test -lib = cdll.load(_ctypes_test.__file__) +lib = CDLL(_ctypes_test.__file__) class CFuncPtrTestCase(unittest.TestCase): def test_basic(self): diff --git a/Lib/ctypes/test/test_functions.py b/Lib/ctypes/test/test_functions.py index ada9def..bfa0cad 100644 --- a/Lib/ctypes/test/test_functions.py +++ b/Lib/ctypes/test/test_functions.py @@ -15,9 +15,9 @@ except NameError: WINFUNCTYPE = CFUNCTYPE import _ctypes_test -dll = cdll.load(_ctypes_test.__file__) +dll = CDLL(_ctypes_test.__file__) if sys.platform == "win32": - windll = windll.load(_ctypes_test.__file__) + windll = WinDLL(_ctypes_test.__file__) class POINT(Structure): _fields_ = [("x", c_int), ("y", c_int)] diff --git a/Lib/ctypes/test/test_libc.py b/Lib/ctypes/test/test_libc.py index 8fd2789..c39f350 100644 --- a/Lib/ctypes/test/test_libc.py +++ b/Lib/ctypes/test/test_libc.py @@ -4,7 +4,7 @@ import unittest from ctypes import * import _ctypes_test -lib = cdll.load(_ctypes_test.__file__) +lib = CDLL(_ctypes_test.__file__) class LibTest(unittest.TestCase): def test_sqrt(self): diff --git a/Lib/ctypes/test/test_loading.py b/Lib/ctypes/test/test_loading.py index 4558417..45585ae 100644 --- a/Lib/ctypes/test/test_loading.py +++ b/Lib/ctypes/test/test_loading.py @@ -1,6 +1,8 @@ from ctypes import * import sys, unittest import os, StringIO +from ctypes.util import find_library +from ctypes.test import is_resource_enabled libc_name = None if os.name == "nt": @@ -18,39 +20,49 @@ else: libc_name = line.split()[4] else: libc_name = line.split()[2] -## print "libc_name is", libc_name break +if is_resource_enabled("printing"): + print "libc_name is", libc_name + class LoaderTest(unittest.TestCase): unknowndll = "xxrandomnamexx" if libc_name is not None: def test_load(self): - cdll.load(libc_name) - 
cdll.load(os.path.basename(libc_name)) - self.assertRaises(OSError, cdll.load, self.unknowndll) + CDLL(libc_name) + CDLL(os.path.basename(libc_name)) + self.assertRaises(OSError, CDLL, self.unknowndll) if libc_name is not None and os.path.basename(libc_name) == "libc.so.6": def test_load_version(self): - cdll.load_version("c", "6") + cdll.LoadLibrary("libc.so.6") # linux uses version, libc 9 should not exist - self.assertRaises(OSError, cdll.load_version, "c", "9") - self.assertRaises(OSError, cdll.load_version, self.unknowndll, "") + self.assertRaises(OSError, cdll.LoadLibrary, "libc.so.9") + self.assertRaises(OSError, cdll.LoadLibrary, self.unknowndll) - def test_find(self): - name = "c" - cdll.find(name) - self.assertRaises(OSError, cdll.find, self.unknowndll) + def test_find(self): + for name in ("c", "m"): + lib = find_library(name) + if lib: + cdll.LoadLibrary(lib) + CDLL(lib) if os.name in ("nt", "ce"): def test_load_library(self): + if is_resource_enabled("printing"): + print find_library("kernel32") + print find_library("user32") + if os.name == "nt": - windll.load_library("kernel32").GetModuleHandleW + windll.kernel32.GetModuleHandleW + windll["kernel32"].GetModuleHandleW windll.LoadLibrary("kernel32").GetModuleHandleW WinDLL("kernel32").GetModuleHandleW elif os.name == "ce": - windll.load_library("coredll").GetModuleHandleW + windll.coredll.GetModuleHandleW + windll["coredll"].GetModuleHandleW windll.LoadLibrary("coredll").GetModuleHandleW WinDLL("coredll").GetModuleHandleW diff --git a/Lib/ctypes/test/test_pointers.py b/Lib/ctypes/test/test_pointers.py index 3a324a6..600bb75 100644 --- a/Lib/ctypes/test/test_pointers.py +++ b/Lib/ctypes/test/test_pointers.py @@ -20,7 +20,7 @@ class PointersTestCase(unittest.TestCase): self.failUnlessRaises(TypeError, A, c_ulong(33)) def test_pass_pointers(self): - dll = cdll.load(_ctypes_test.__file__) + dll = CDLL(_ctypes_test.__file__) func = dll._testfunc_p_p func.restype = c_long @@ -35,7 +35,7 @@ class PointersTestCase(unittest.TestCase): self.failUnlessEqual(res[0], 12345678) def test_change_pointers(self): - dll = cdll.load(_ctypes_test.__file__) + dll = CDLL(_ctypes_test.__file__) func = dll._testfunc_p_p i = c_int(87654) @@ -70,7 +70,7 @@ class PointersTestCase(unittest.TestCase): return 0 callback = PROTOTYPE(func) - dll = cdll.load(_ctypes_test.__file__) + dll = CDLL(_ctypes_test.__file__) # This function expects a function pointer, # and calls this with an integer pointer as parameter. 
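The same mechanical substitution runs through the rest of these test updates: the removed cdll.load()/cdll.find()/cdll.load_version() helpers give way to direct CDLL construction plus ctypes.util.find_library. Roughly, assuming the _ctypes_test helper extension is importable as it is in the tests:

    import _ctypes_test
    from ctypes import CDLL, cdll
    from ctypes.util import find_library

    dll = CDLL(_ctypes_test.__file__)   # was: cdll.load(_ctypes_test.__file__)
    name = find_library("m")            # was: cdll.find("m") / cdll.load_version("m", ...)
    if name is not None:
        libm = cdll.LoadLibrary(name)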
# The int pointer points to a table containing the numbers 1..10 @@ -133,30 +133,9 @@ class PointersTestCase(unittest.TestCase): self.failUnlessEqual(p[0], 42) self.failUnlessEqual(p.contents.value, 42) - def test_incomplete(self): - lpcell = POINTER("cell") - class cell(Structure): - _fields_ = [("value", c_int), - ("next", lpcell)] - SetPointerType(lpcell, cell) - - # Make a structure containing a pointer to itself: - c = cell() - c.value = 42 - c.next = pointer(c) - - result = [] - for i in range(8): - result.append(c.value) - c = c.next[0] - self.failUnlessEqual(result, [42] * 8) - - from ctypes import _pointer_type_cache - del _pointer_type_cache[cell] - def test_charpp( self ): """Test that a character pointer-to-pointer is correctly passed""" - dll = cdll.load(_ctypes_test.__file__) + dll = CDLL(_ctypes_test.__file__) func = dll._testfunc_c_p_p func.restype = c_char_p argv = (c_char_p * 2)() diff --git a/Lib/ctypes/test/test_posix.py b/Lib/ctypes/test/test_posix.py deleted file mode 100644 index fe0a40a..0000000 --- a/Lib/ctypes/test/test_posix.py +++ /dev/null @@ -1,40 +0,0 @@ -import unittest, os, sys -from ctypes import * - -if os.name == "posix" and sys.platform == "linux2": - # I don't really know on which platforms this works, - # later it should use the find_library stuff to avoid - # hardcoding the names. - - class TestRTLD_GLOBAL(unittest.TestCase): - def test_GL(self): - if os.path.exists('/usr/lib/libGL.so'): - cdll.load('libGL.so', mode=RTLD_GLOBAL) - if os.path.exists('/usr/lib/libGLU.so'): - cdll.load('libGLU.so') - -##if os.name == "posix" and sys.platform != "darwin": - -## # On platforms where the default shared library suffix is '.so', -## # at least some libraries can be loaded as attributes of the cdll -## # object, since ctypes now tries loading the lib again -## # with '.so' appended of the first try fails. -## # -## # Won't work for libc, unfortunately. OTOH, it isn't -## # needed for libc since this is already mapped into the current -## # process (?) -## # -## # On MAC OSX, it won't work either, because dlopen() needs a full path, -## # and the default suffix is either none or '.dylib'. - -## class LoadLibs(unittest.TestCase): -## def test_libm(self): -## import math -## libm = cdll.libm -## sqrt = libm.sqrt -## sqrt.argtypes = (c_double,) -## sqrt.restype = c_double -## self.failUnlessEqual(sqrt(2), math.sqrt(2)) - -if __name__ == "__main__": - unittest.main() diff --git a/Lib/ctypes/test/test_prototypes.py b/Lib/ctypes/test/test_prototypes.py index 47f5da1..aaaa47a 100644 --- a/Lib/ctypes/test/test_prototypes.py +++ b/Lib/ctypes/test/test_prototypes.py @@ -22,7 +22,7 @@ import unittest # In this case, there would have to be an additional reference to the argument... import _ctypes_test -testdll = cdll.load(_ctypes_test.__file__) +testdll = CDLL(_ctypes_test.__file__) # Return machine address `a` as a (possibly long) non-negative integer. # Starting with Python 2.5, id(anything) is always non-negative, and diff --git a/Lib/ctypes/test/test_python_api.py b/Lib/ctypes/test/test_python_api.py index c29b721..78e0231 100644 --- a/Lib/ctypes/test/test_python_api.py +++ b/Lib/ctypes/test/test_python_api.py @@ -1,5 +1,6 @@ from ctypes import * import unittest, sys +from ctypes.test import is_resource_enabled ################################################################ # This section should be moved into ctypes\__init__.py, when it's ready. 
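The next hunk guards the refcount-sensitive PyInt checks behind a test resource. is_resource_enabled comes from the ctypes.test package (imported at the top of this file's diff), and the guard is simply a conditional method definition; a stripped-down sketch with a placeholder body:

    import unittest
    from ctypes.test import is_resource_enabled

    class PythonAPITestCase(unittest.TestCase):
        if is_resource_enabled("refcount"):
            # Only defined when the "refcount" resource is enabled; it is off by
            # default because other code may change the refcount of small ints.
            def test_PyInt_Long(self):
                pass  # placeholder for the real assertions in the hunk below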
@@ -33,20 +34,24 @@ class PythonAPITestCase(unittest.TestCase): del pyob self.failUnlessEqual(grc(s), refcnt) - def test_PyInt_Long(self): - ref42 = grc(42) - pythonapi.PyInt_FromLong.restype = py_object - self.failUnlessEqual(pythonapi.PyInt_FromLong(42), 42) + if is_resource_enabled("refcount"): + # This test is unreliable, because it is possible that code in + # unittest changes the refcount of the '42' integer. So, it + # is disabled by default. + def test_PyInt_Long(self): + ref42 = grc(42) + pythonapi.PyInt_FromLong.restype = py_object + self.failUnlessEqual(pythonapi.PyInt_FromLong(42), 42) - self.failUnlessEqual(grc(42), ref42) + self.failUnlessEqual(grc(42), ref42) - pythonapi.PyInt_AsLong.argtypes = (py_object,) - pythonapi.PyInt_AsLong.restype = c_long + pythonapi.PyInt_AsLong.argtypes = (py_object,) + pythonapi.PyInt_AsLong.restype = c_long - res = pythonapi.PyInt_AsLong(42) - self.failUnlessEqual(grc(res), ref42 + 1) - del res - self.failUnlessEqual(grc(42), ref42) + res = pythonapi.PyInt_AsLong(42) + self.failUnlessEqual(grc(res), ref42 + 1) + del res + self.failUnlessEqual(grc(42), ref42) def test_PyObj_FromPtr(self): s = "abc def ghi jkl" diff --git a/Lib/ctypes/test/test_refcounts.py b/Lib/ctypes/test/test_refcounts.py index 0c62bf2..448f292 100644 --- a/Lib/ctypes/test/test_refcounts.py +++ b/Lib/ctypes/test/test_refcounts.py @@ -6,7 +6,7 @@ MyCallback = ctypes.CFUNCTYPE(ctypes.c_int, ctypes.c_int) OtherCallback = ctypes.CFUNCTYPE(ctypes.c_int, ctypes.c_int, ctypes.c_ulonglong) import _ctypes_test -dll = ctypes.cdll.load(_ctypes_test.__file__) +dll = ctypes.CDLL(_ctypes_test.__file__) class RefcountTestCase(unittest.TestCase): diff --git a/Lib/ctypes/test/test_returnfuncptrs.py b/Lib/ctypes/test/test_returnfuncptrs.py index ef1f6fd..88dccf2 100644 --- a/Lib/ctypes/test/test_returnfuncptrs.py +++ b/Lib/ctypes/test/test_returnfuncptrs.py @@ -8,7 +8,7 @@ class ReturnFuncPtrTestCase(unittest.TestCase): def test_with_prototype(self): # The _ctypes_test shared lib/dll exports quite some functions for testing. # The get_strchr function returns a *pointer* to the C strchr function. 
- dll = cdll.load(_ctypes_test.__file__) + dll = CDLL(_ctypes_test.__file__) get_strchr = dll.get_strchr get_strchr.restype = CFUNCTYPE(c_char_p, c_char_p, c_char) strchr = get_strchr() @@ -18,7 +18,7 @@ class ReturnFuncPtrTestCase(unittest.TestCase): self.assertRaises(TypeError, strchr, "abcdef") def test_without_prototype(self): - dll = cdll.load(_ctypes_test.__file__) + dll = CDLL(_ctypes_test.__file__) get_strchr = dll.get_strchr # the default 'c_int' would not work on systems where sizeof(int) != sizeof(void *) get_strchr.restype = c_void_p diff --git a/Lib/ctypes/test/test_slicing.py b/Lib/ctypes/test/test_slicing.py index 306c585..08c811e 100644 --- a/Lib/ctypes/test/test_slicing.py +++ b/Lib/ctypes/test/test_slicing.py @@ -37,18 +37,21 @@ class SlicesTestCase(unittest.TestCase): def test_char_ptr(self): s = "abcdefghijklmnopqrstuvwxyz\0" - dll = cdll.load(_ctypes_test.__file__) + dll = CDLL(_ctypes_test.__file__) dll.my_strdup.restype = POINTER(c_char) + dll.my_free.restype = None res = dll.my_strdup(s) self.failUnlessEqual(res[:len(s)], s) import operator self.assertRaises(TypeError, operator.setslice, res, 0, 5, u"abcde") + dll.my_free(res) dll.my_strdup.restype = POINTER(c_byte) res = dll.my_strdup(s) self.failUnlessEqual(res[:len(s)-1], range(ord("a"), ord("z")+1)) + dll.my_free(res) def test_char_array(self): s = "abcdefghijklmnopqrstuvwxyz\0" @@ -65,15 +68,17 @@ class SlicesTestCase(unittest.TestCase): def test_wchar_ptr(self): s = u"abcdefghijklmnopqrstuvwxyz\0" - dll = cdll.load(_ctypes_test.__file__) + dll = CDLL(_ctypes_test.__file__) dll.my_wcsdup.restype = POINTER(c_wchar) dll.my_wcsdup.argtypes = POINTER(c_wchar), + dll.my_free.restype = None res = dll.my_wcsdup(s) self.failUnlessEqual(res[:len(s)], s) import operator self.assertRaises(TypeError, operator.setslice, res, 0, 5, u"abcde") + dll.my_free(res) if sizeof(c_wchar) == sizeof(c_short): dll.my_wcsdup.restype = POINTER(c_short) @@ -81,8 +86,11 @@ class SlicesTestCase(unittest.TestCase): dll.my_wcsdup.restype = POINTER(c_int) elif sizeof(c_wchar) == sizeof(c_long): dll.my_wcsdup.restype = POINTER(c_long) + else: + return res = dll.my_wcsdup(s) self.failUnlessEqual(res[:len(s)-1], range(ord("a"), ord("z")+1)) + dll.my_free(res) ################################################################ diff --git a/Lib/ctypes/test/test_stringptr.py b/Lib/ctypes/test/test_stringptr.py index 183a60c..6ee6ae0 100644 --- a/Lib/ctypes/test/test_stringptr.py +++ b/Lib/ctypes/test/test_stringptr.py @@ -3,7 +3,7 @@ from ctypes import * import _ctypes_test -lib = cdll.load(_ctypes_test.__file__) +lib = CDLL(_ctypes_test.__file__) class StringPtrTestCase(unittest.TestCase): diff --git a/Lib/ctypes/test/test_structures.py b/Lib/ctypes/test/test_structures.py index b6eaac4..5340f79 100644 --- a/Lib/ctypes/test/test_structures.py +++ b/Lib/ctypes/test/test_structures.py @@ -294,20 +294,20 @@ class StructureTestCase(unittest.TestCase): # In Python 2.5, Exception is a new-style class, and the repr changed if issubclass(Exception, object): self.failUnlessEqual(msg, - "(Phone) : " + "(Phone) : " "expected string or Unicode object, int found") else: self.failUnlessEqual(msg, - "(Phone) exceptions.TypeError: " + "(Phone) TypeError: " "expected string or Unicode object, int found") cls, msg = self.get_except(Person, "Someone", ("a", "b", "c")) self.failUnlessEqual(cls, RuntimeError) if issubclass(Exception, object): self.failUnlessEqual(msg, - "(Phone) : too many initializers") + "(Phone) : too many initializers") else: - 
self.failUnlessEqual(msg, "(Phone) exceptions.ValueError: too many initializers") + self.failUnlessEqual(msg, "(Phone) ValueError: too many initializers") def get_except(self, func, *args): diff --git a/Lib/ctypes/test/test_unicode.py b/Lib/ctypes/test/test_unicode.py index bb39746..78c5cf8 100644 --- a/Lib/ctypes/test/test_unicode.py +++ b/Lib/ctypes/test/test_unicode.py @@ -8,7 +8,7 @@ except AttributeError: pass else: import _ctypes_test - dll = ctypes.cdll.load(_ctypes_test.__file__) + dll = ctypes.CDLL(_ctypes_test.__file__) wcslen = dll.my_wcslen wcslen.argtypes = [ctypes.c_wchar_p] @@ -66,7 +66,7 @@ else: self.failUnlessEqual(buf[:], u"ab\0\0\0\0") import _ctypes_test - func = ctypes.cdll.load(_ctypes_test.__file__)._testfunc_p_p + func = ctypes.CDLL(_ctypes_test.__file__)._testfunc_p_p class StringTestCase(UnicodeTestCase): def setUp(self): diff --git a/Lib/ctypes/test/test_values.py b/Lib/ctypes/test/test_values.py index 1f25f9b..7ba3e21 100644 --- a/Lib/ctypes/test/test_values.py +++ b/Lib/ctypes/test/test_values.py @@ -10,7 +10,7 @@ import _ctypes_test class ValuesTestCase(unittest.TestCase): def test_an_integer(self): - ctdll = cdll.load(_ctypes_test.__file__) + ctdll = CDLL(_ctypes_test.__file__) an_integer = c_int.in_dll(ctdll, "an_integer") x = an_integer.value self.failUnlessEqual(x, ctdll.get_an_integer()) @@ -18,7 +18,7 @@ class ValuesTestCase(unittest.TestCase): self.failUnlessEqual(x*2, ctdll.get_an_integer()) def test_undefined(self): - ctdll = cdll.load(_ctypes_test.__file__) + ctdll = CDLL(_ctypes_test.__file__) self.assertRaises(ValueError, c_int.in_dll, ctdll, "Undefined_Symbol") class Win_ValuesTestCase(unittest.TestCase): diff --git a/Lib/ctypes/test/test_win32.py b/Lib/ctypes/test/test_win32.py index 3d0b825..8247d37 100644 --- a/Lib/ctypes/test/test_win32.py +++ b/Lib/ctypes/test/test_win32.py @@ -54,7 +54,7 @@ class Structures(unittest.TestCase): ("right", c_long), ("bottom", c_long)] - dll = cdll.load(_ctypes_test.__file__) + dll = CDLL(_ctypes_test.__file__) pt = POINT(10, 10) rect = RECT(0, 0, 20, 20) diff --git a/Lib/ctypes/util.py b/Lib/ctypes/util.py new file mode 100644 index 0000000..d756c1c --- /dev/null +++ b/Lib/ctypes/util.py @@ -0,0 +1,122 @@ +import sys, os +import ctypes + +# find_library(name) returns the pathname of a library, or None. +if os.name == "nt": + def find_library(name): + # See MSDN for the REAL search order. + for directory in os.environ['PATH'].split(os.pathsep): + fname = os.path.join(directory, name) + if os.path.exists(fname): + return fname + if fname.lower().endswith(".dll"): + continue + fname = fname + ".dll" + if os.path.exists(fname): + return fname + return None + +if os.name == "ce": + # search path according to MSDN: + # - absolute path specified by filename + # - The .exe launch directory + # - the Windows directory + # - ROM dll files (where are they?) 
+ # - OEM specified search path: HKLM\Loader\SystemPath + def find_library(name): + return name + +if os.name == "posix" and sys.platform == "darwin": + from ctypes.macholib.dyld import dyld_find as _dyld_find + def find_library(name): + possible = ['lib%s.dylib' % name, + '%s.dylib' % name, + '%s.framework/%s' % (name, name)] + for name in possible: + try: + return _dyld_find(name) + except ValueError: + continue + return None + +elif os.name == "posix": + # Andreas Degert's find functions, using gcc, /sbin/ldconfig, objdump + import re, tempfile + + def _findLib_gcc(name): + expr = '[^\(\)\s]*lib%s\.[^\(\)\s]*' % name + cmd = 'if type gcc &>/dev/null; then CC=gcc; else CC=cc; fi;' \ + '$CC -Wl,-t -o /dev/null 2>&1 -l' + name + try: + fdout, outfile = tempfile.mkstemp() + fd = os.popen(cmd) + trace = fd.read() + err = fd.close() + finally: + try: + os.unlink(outfile) + except OSError, e: + if e.errno != errno.ENOENT: + raise + res = re.search(expr, trace) + if not res: + return None + return res.group(0) + + def _findLib_ld(name): + expr = '/[^\(\)\s]*lib%s\.[^\(\)\s]*' % name + res = re.search(expr, os.popen('/sbin/ldconfig -p 2>/dev/null').read()) + if not res: + # Hm, this works only for libs needed by the python executable. + cmd = 'ldd %s 2>/dev/null' % sys.executable + res = re.search(expr, os.popen(cmd).read()) + if not res: + return None + return res.group(0) + + def _get_soname(f): + cmd = "objdump -p -j .dynamic 2>/dev/null " + f + res = re.search(r'\sSONAME\s+([^\s]+)', os.popen(cmd).read()) + if not res: + return None + return res.group(1) + + def find_library(name): + lib = _findLib_ld(name) or _findLib_gcc(name) + if not lib: + return None + return _get_soname(lib) + +################################################################ +# test code + +def test(): + from ctypes import cdll + if os.name == "nt": + print cdll.msvcrt + print cdll.load("msvcrt") + print find_library("msvcrt") + + if os.name == "posix": + # find and load_version + print find_library("m") + print find_library("c") + print find_library("bz2") + + # getattr +## print cdll.m +## print cdll.bz2 + + # load + if sys.platform == "darwin": + print cdll.LoadLibrary("libm.dylib") + print cdll.LoadLibrary("libcrypto.dylib") + print cdll.LoadLibrary("libSystem.dylib") + print cdll.LoadLibrary("System.framework/System") + else: + print cdll.LoadLibrary("libm.so") + print cdll.LoadLibrary("libcrypt.so") + print find_library("crypt") + +if __name__ == "__main__": + test() diff --git a/Lib/decimal.py b/Lib/decimal.py index 9815ab3..210db52 100644 --- a/Lib/decimal.py +++ b/Lib/decimal.py @@ -731,7 +731,7 @@ class Decimal(object): """x.__hash__() <==> hash(x)""" # Decimal integers must hash the same as the ints # Non-integer decimals are normalized and hashed as strings - # Normalization assures that hast(100E-1) == hash(10) + # Normalization assures that hash(100E-1) == hash(10) if self._is_special: if self._isnan(): raise TypeError('Cannot hash a NaN value.') @@ -2246,7 +2246,7 @@ class Context(object): s.append('traps=[' + ', '.join([t.__name__ for t, v in self.traps.items() if v]) + ']') return ', '.join(s) + ')' - def __context__(self): + def get_manager(self): return ContextManager(self.copy()) def clear_flags(self): diff --git a/Lib/distutils/ccompiler.py b/Lib/distutils/ccompiler.py index 6dad757..1349abe 100644 --- a/Lib/distutils/ccompiler.py +++ b/Lib/distutils/ccompiler.py @@ -15,7 +15,6 @@ from distutils.spawn import spawn from distutils.file_util import move_file from distutils.dir_util import mkpath from 
distutils.dep_util import newer_pairwise, newer_group -from distutils.sysconfig import python_build from distutils.util import split_quoted, execute from distutils import log @@ -368,7 +367,7 @@ class CCompiler: # Get the list of expected output (object) files objects = self.object_filenames(sources, - strip_dir=python_build, + strip_dir=0, output_dir=outdir) assert len(objects) == len(sources) @@ -475,8 +474,7 @@ class CCompiler: which source files can be skipped. """ # Get the list of expected output (object) files - objects = self.object_filenames(sources, strip_dir=python_build, - output_dir=output_dir) + objects = self.object_filenames(sources, output_dir=output_dir) assert len(objects) == len(sources) if self.force: diff --git a/Lib/distutils/command/bdist_msi.py b/Lib/distutils/command/bdist_msi.py index f05d66c..75db877 100644 --- a/Lib/distutils/command/bdist_msi.py +++ b/Lib/distutils/command/bdist_msi.py @@ -1,5 +1,5 @@ # -*- coding: iso-8859-1 -*- -# Copyright (C) 2005 Martin v. Löwis +# Copyright (C) 2005, 2006 Martin v. Löwis # Licensed to PSF under a Contributor Agreement. # The bdist_wininst command proper # based on bdist_wininst @@ -16,7 +16,7 @@ from distutils.version import StrictVersion from distutils.errors import DistutilsOptionError from distutils import log import msilib -from msilib import schema, sequence, uisample +from msilib import schema, sequence, text from msilib import Directory, Feature, Dialog, add_data class PyDialog(Dialog): @@ -374,8 +374,8 @@ class bdist_msi (Command): ("MaintenanceTypeDlg", "Installed AND NOT RESUME AND NOT Preselected", 1250), ("ProgressDlg", None, 1280)]) - add_data(db, 'ActionText', uisample.ActionText) - add_data(db, 'UIText', uisample.UIText) + add_data(db, 'ActionText', text.ActionText) + add_data(db, 'UIText', text.UIText) ##################################################################### # Standard dialogs: FatalError, UserExit, ExitDialog fatal=PyDialog(db, "FatalError", x, y, w, h, modal, title, @@ -502,9 +502,9 @@ class bdist_msi (Command): seldlg.back("< Back", None, active=0) c = seldlg.next("Next >", "Cancel") - c.event("SetTargetPath", "TARGETDIR", order=1) - c.event("SpawnWaitDialog", "WaitForCostingDlg", order=2) - c.event("EndDialog", "Return", order=3) + c.event("SetTargetPath", "TARGETDIR", ordering=1) + c.event("SpawnWaitDialog", "WaitForCostingDlg", ordering=2) + c.event("EndDialog", "Return", ordering=3) c = seldlg.cancel("Cancel", "DirectoryCombo") c.event("SpawnDialog", "CancelDlg") @@ -561,7 +561,7 @@ class bdist_msi (Command): c = whichusers.next("Next >", "Cancel") c.event("[ALLUSERS]", "1", 'WhichUsers="ALL"', 1) - c.event("EndDialog", "Return", order = 2) + c.event("EndDialog", "Return", ordering = 2) c = whichusers.cancel("Cancel", "AdminInstall") c.event("SpawnDialog", "CancelDlg") diff --git a/Lib/distutils/command/build_ext.py b/Lib/distutils/command/build_ext.py index 5771252..9626710 100644 --- a/Lib/distutils/command/build_ext.py +++ b/Lib/distutils/command/build_ext.py @@ -689,6 +689,11 @@ class build_ext (Command): # don't extend ext.libraries, it may be shared with other # extensions, it is a reference to the original list return ext.libraries + [pythonlib, "m"] + extra + + elif sys.platform == 'darwin': + # Don't use the default code below + return ext.libraries + else: from distutils import sysconfig if sysconfig.get_config_var('Py_ENABLE_SHARED'): diff --git a/Lib/distutils/command/upload.py b/Lib/distutils/command/upload.py index 6f4ce81..4a9ed39 100644 --- 
a/Lib/distutils/command/upload.py +++ b/Lib/distutils/command/upload.py @@ -6,7 +6,7 @@ from distutils.errors import * from distutils.core import Command from distutils.spawn import spawn from distutils import log -from md5 import md5 +from hashlib import md5 import os import socket import platform diff --git a/Lib/distutils/msvccompiler.py b/Lib/distutils/msvccompiler.py index f88f365..d24d0ac 100644 --- a/Lib/distutils/msvccompiler.py +++ b/Lib/distutils/msvccompiler.py @@ -618,7 +618,7 @@ class MSVCCompiler (CCompiler) : "but the expected registry settings are not present.\n" "You must at least run the Visual Studio GUI once " "so that these entries are created.") - break + break return [] def set_path_env_var(self, name): diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py index 49536f0..e1397a1 100644 --- a/Lib/distutils/sysconfig.py +++ b/Lib/distutils/sysconfig.py @@ -366,8 +366,8 @@ def _init_posix(): # MACOSX_DEPLOYMENT_TARGET: configure bases some choices on it so # it needs to be compatible. # If it isn't set we set it to the configure-time value - if sys.platform == 'darwin' and g.has_key('CONFIGURE_MACOSX_DEPLOYMENT_TARGET'): - cfg_target = g['CONFIGURE_MACOSX_DEPLOYMENT_TARGET'] + if sys.platform == 'darwin' and g.has_key('MACOSX_DEPLOYMENT_TARGET'): + cfg_target = g['MACOSX_DEPLOYMENT_TARGET'] cur_target = os.getenv('MACOSX_DEPLOYMENT_TARGET', '') if cur_target == '': cur_target = cfg_target @@ -500,6 +500,21 @@ def get_config_vars(*args): _config_vars['prefix'] = PREFIX _config_vars['exec_prefix'] = EXEC_PREFIX + if sys.platform == 'darwin': + kernel_version = os.uname()[2] # Kernel version (8.4.3) + major_version = int(kernel_version.split('.')[0]) + + if major_version < 8: + # On Mac OS X before 10.4, check if -arch and -isysroot + # are in CFLAGS or LDFLAGS and remove them if they are. + # This is needed when building extensions on a 10.3 system + # using a universal build of python. + for key in ('LDFLAGS', 'BASECFLAGS'): + flags = _config_vars[key] + flags = re.sub('-arch\s+\w+\s', ' ', flags) + flags = re.sub('-isysroot [^ \t]* ', ' ', flags) + _config_vars[key] = flags + if args: vals = [] for name in args: diff --git a/Lib/distutils/unixccompiler.py b/Lib/distutils/unixccompiler.py index 56998c3..324819d 100644 --- a/Lib/distutils/unixccompiler.py +++ b/Lib/distutils/unixccompiler.py @@ -42,6 +42,48 @@ from distutils import log # should just happily stuff them into the preprocessor/compiler/linker # options and carry on. +def _darwin_compiler_fixup(compiler_so, cc_args): + """ + This function will strip '-isysroot PATH' and '-arch ARCH' from the + compile flags if the user has specified one them in extra_compile_flags. + + This is needed because '-arch ARCH' adds another architecture to the + build, without a way to remove an architecture. Furthermore GCC will + barf if multiple '-isysroot' arguments are present. + """ + stripArch = stripSysroot = 0 + + compiler_so = list(compiler_so) + kernel_version = os.uname()[2] # 8.4.3 + major_version = int(kernel_version.split('.')[0]) + + if major_version < 8: + # OSX before 10.4.0, these don't support -arch and -isysroot at + # all. 
+ stripArch = stripSysroot = True + else: + stripArch = '-arch' in cc_args + stripSysroot = '-isysroot' in cc_args + + if stripArch: + while 1: + try: + index = compiler_so.index('-arch') + # Strip this argument and the next one: + del compiler_so[index:index+2] + except ValueError: + break + + if stripSysroot: + try: + index = compiler_so.index('-isysroot') + # Strip this argument and the next one: + del compiler_so[index:index+1] + except ValueError: + pass + + return compiler_so + class UnixCCompiler(CCompiler): compiler_type = 'unix' @@ -108,8 +150,11 @@ class UnixCCompiler(CCompiler): raise CompileError, msg def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): + compiler_so = self.compiler_so + if sys.platform == 'darwin': + compiler_so = _darwin_compiler_fixup(compiler_so, cc_args + extra_postargs) try: - self.spawn(self.compiler_so + cc_args + [src, '-o', obj] + + self.spawn(compiler_so + cc_args + [src, '-o', obj] + extra_postargs) except DistutilsExecError, msg: raise CompileError, msg @@ -172,7 +217,22 @@ class UnixCCompiler(CCompiler): else: linker = self.linker_so[:] if target_lang == "c++" and self.compiler_cxx: - linker[0] = self.compiler_cxx[0] + # skip over environment variable settings if /usr/bin/env + # is used to set up the linker's environment. + # This is needed on OSX. Note: this assumes that the + # normal and C++ compiler have the same environment + # settings. + i = 0 + if os.path.basename(linker[0]) == "env": + i = 1 + while '=' in linker[i]: + i = i + 1 + + linker[i] = self.compiler_cxx[i] + + if sys.platform == 'darwin': + linker = _darwin_compiler_fixup(linker, ld_args) + self.spawn(linker + ld_args) except DistutilsExecError, msg: raise LinkError, msg diff --git a/Lib/distutils/util.py b/Lib/distutils/util.py index 889bf13..1265f4c 100644 --- a/Lib/distutils/util.py +++ b/Lib/distutils/util.py @@ -45,6 +45,7 @@ def get_platform (): osname = string.lower(osname) osname = string.replace(osname, '/', '') machine = string.replace(machine, ' ', '_') + machine = string.replace(machine, '/', '-') if osname[:5] == "linux": # At least on Linux/Intel, 'machine' is the processor -- @@ -66,6 +67,54 @@ def get_platform (): m = rel_re.match(release) if m: release = m.group() + elif osname[:6] == "darwin": + # + # For our purposes, we'll assume that the system version from + # distutils' perspective is what MACOSX_DEPLOYMENT_TARGET is set + # to. This makes the compatibility story a bit more sane because the + # machine is going to compile and link as if it were + # MACOSX_DEPLOYMENT_TARGET. + from distutils.sysconfig import get_config_vars + cfgvars = get_config_vars() + + macver = os.environ.get('MACOSX_DEPLOYMENT_TARGET') + if not macver: + macver = cfgvars.get('MACOSX_DEPLOYMENT_TARGET') + + if not macver: + # Get the system version. Reading this plist is a documented + # way to get the system version (see the documentation for + # the Gestalt Manager) + try: + f = open('/System/Library/CoreServices/SystemVersion.plist') + except IOError: + # We're on a plain darwin box, fall back to the default + # behaviour. + pass + else: + m = re.search( + r'ProductUserVisibleVersion\s*' + + r'(.*?)', f.read()) + f.close() + if m is not None: + macver = '.'.join(m.group(1).split('.')[:2]) + # else: fall back to the default behaviour + + if macver: + from distutils.sysconfig import get_config_vars + release = macver + osname = "macosx" + + + if (release + '.') < '10.4.' 
and \ + get_config_vars().get('UNIVERSALSDK', '').strip(): + # The universal build will build fat binaries, but not on + # systems before 10.4 + machine = 'fat' + + elif machine in ('PowerPC', 'Power_Macintosh'): + # Pick a sane name for the PPC architecture. + machine = 'ppc' return "%s-%s-%s" % (osname, release, machine) diff --git a/Lib/doctest.py b/Lib/doctest.py index 70c355a..857bc1a 100644 --- a/Lib/doctest.py +++ b/Lib/doctest.py @@ -54,6 +54,7 @@ __all__ = [ 'DONT_ACCEPT_BLANKLINE', 'NORMALIZE_WHITESPACE', 'ELLIPSIS', + 'SKIP', 'IGNORE_EXCEPTION_DETAIL', 'COMPARISON_FLAGS', 'REPORT_UDIFF', @@ -128,20 +129,21 @@ warnings.filterwarnings("ignore", "is_private", DeprecationWarning, OPTIONFLAGS_BY_NAME = {} def register_optionflag(name): - flag = 1 << len(OPTIONFLAGS_BY_NAME) - OPTIONFLAGS_BY_NAME[name] = flag - return flag + # Create a new flag unless `name` is already known. + return OPTIONFLAGS_BY_NAME.setdefault(name, 1 << len(OPTIONFLAGS_BY_NAME)) DONT_ACCEPT_TRUE_FOR_1 = register_optionflag('DONT_ACCEPT_TRUE_FOR_1') DONT_ACCEPT_BLANKLINE = register_optionflag('DONT_ACCEPT_BLANKLINE') NORMALIZE_WHITESPACE = register_optionflag('NORMALIZE_WHITESPACE') ELLIPSIS = register_optionflag('ELLIPSIS') +SKIP = register_optionflag('SKIP') IGNORE_EXCEPTION_DETAIL = register_optionflag('IGNORE_EXCEPTION_DETAIL') COMPARISON_FLAGS = (DONT_ACCEPT_TRUE_FOR_1 | DONT_ACCEPT_BLANKLINE | NORMALIZE_WHITESPACE | ELLIPSIS | + SKIP | IGNORE_EXCEPTION_DETAIL) REPORT_UDIFF = register_optionflag('REPORT_UDIFF') @@ -350,7 +352,7 @@ class _OutputRedirectingPdb(pdb.Pdb): """ def __init__(self, out): self.__out = out - pdb.Pdb.__init__(self) + pdb.Pdb.__init__(self, stdout=out) def trace_dispatch(self, *args): # Redirect stdout to the given stream. @@ -1233,6 +1235,10 @@ class DocTestRunner: else: self.optionflags &= ~optionflag + # If 'SKIP' is set, then skip this example. + if self.optionflags & SKIP: + continue + # Record that we started this example. tries += 1 if not quiet: @@ -1792,6 +1798,7 @@ def testmod(m=None, name=None, globs=None, verbose=None, isprivate=None, DONT_ACCEPT_BLANKLINE NORMALIZE_WHITESPACE ELLIPSIS + SKIP IGNORE_EXCEPTION_DETAIL REPORT_UDIFF REPORT_CDIFF @@ -1914,6 +1921,7 @@ def testfile(filename, module_relative=True, name=None, package=None, DONT_ACCEPT_BLANKLINE NORMALIZE_WHITESPACE ELLIPSIS + SKIP IGNORE_EXCEPTION_DETAIL REPORT_UDIFF REPORT_CDIFF diff --git a/Lib/dummy_thread.py b/Lib/dummy_thread.py index d69d840..21fd03f 100644 --- a/Lib/dummy_thread.py +++ b/Lib/dummy_thread.py @@ -118,9 +118,6 @@ class LockType(object): def __exit__(self, typ, val, tb): self.release() - def __context__(self): - return self - def release(self): """Release the dummy lock.""" # XXX Perhaps shouldn't actually bother to test? 
Could lead diff --git a/Lib/easy_install.py b/Lib/easy_install.py deleted file mode 100644 index d87e984..0000000 --- a/Lib/easy_install.py +++ /dev/null @@ -1,5 +0,0 @@ -"""Run the EasyInstall command""" - -if __name__ == '__main__': - from setuptools.command.easy_install import main - main() diff --git a/Lib/email/_parseaddr.py b/Lib/email/_parseaddr.py index 5821ddf..a08c43e 100644 --- a/Lib/email/_parseaddr.py +++ b/Lib/email/_parseaddr.py @@ -367,6 +367,7 @@ class AddrlistClass: break elif allowcomments and self.field[self.pos] == '(': slist.append(self.getcomment()) + continue # have already advanced pos from getcomment elif self.field[self.pos] == '\\': quote = True else: diff --git a/Lib/email/test/test_email.py b/Lib/email/test/test_email.py index d977693..a197a36 100644 --- a/Lib/email/test/test_email.py +++ b/Lib/email/test/test_email.py @@ -2215,6 +2215,12 @@ class TestMiscellaneous(TestEmailBase): ['foo: ;', '"Jason R. Mastaler" ']), [('', ''), ('Jason R. Mastaler', 'jason@dom.ain')]) + def test_getaddresses_embedded_comment(self): + """Test proper handling of a nested comment""" + eq = self.assertEqual + addrs = Utils.getaddresses(['User ((nested comment)) ']) + eq(addrs[0][1], 'foo@bar.com') + def test_utils_quote_unquote(self): eq = self.assertEqual msg = Message() diff --git a/Lib/email/test/test_email_renamed.py b/Lib/email/test/test_email_renamed.py index 4ac2ee9..95d06cb 100644 --- a/Lib/email/test/test_email_renamed.py +++ b/Lib/email/test/test_email_renamed.py @@ -2221,6 +2221,12 @@ class TestMiscellaneous(TestEmailBase): ['foo: ;', '"Jason R. Mastaler" ']), [('', ''), ('Jason R. Mastaler', 'jason@dom.ain')]) + def test_getaddresses_embedded_comment(self): + """Test proper handling of a nested comment""" + eq = self.assertEqual + addrs = utils.getaddresses(['User ((nested comment)) ']) + eq(addrs[0][1], 'foo@bar.com') + def test_utils_quote_unquote(self): eq = self.assertEqual msg = Message() diff --git a/Lib/gzip.py b/Lib/gzip.py index 3c1ebf2..860accc 100644 --- a/Lib/gzip.py +++ b/Lib/gzip.py @@ -107,6 +107,8 @@ class GzipFile: self.extrabuf = "" self.extrasize = 0 self.filename = filename + # Starts small, scales exponentially + self.min_readsize = 100 elif mode[0:1] == 'w' or mode[0:1] == 'a': self.mode = WRITE @@ -381,32 +383,35 @@ class GzipFile: self.read(count % 1024) def readline(self, size=-1): - if size < 0: size = sys.maxint + if size < 0: + size = sys.maxint + readsize = self.min_readsize + else: + readsize = size bufs = [] - readsize = min(100, size) # Read from the file in small chunks - while True: - if size == 0: - return "".join(bufs) # Return resulting line - + while size != 0: c = self.read(readsize) i = c.find('\n') - if size is not None: - # We set i=size to break out of the loop under two - # conditions: 1) there's no newline, and the chunk is - # larger than size, or 2) there is a newline, but the - # resulting line would be longer than 'size'. - if i==-1 and len(c) > size: i=size-1 - elif size <= i: i = size -1 + + # We set i=size to break out of the loop under two + # conditions: 1) there's no newline, and the chunk is + # larger than size, or 2) there is a newline, but the + # resulting line would be longer than 'size'. 
+ if (size <= i) or (i == -1 and len(c) > size): + i = size - 1 if i >= 0 or c == '': - bufs.append(c[:i+1]) # Add portion of last chunk - self._unread(c[i+1:]) # Push back rest of chunk - return ''.join(bufs) # Return resulting line + bufs.append(c[:i + 1]) # Add portion of last chunk + self._unread(c[i + 1:]) # Push back rest of chunk + break # Append chunk to list, decrease 'size', bufs.append(c) size = size - len(c) readsize = min(size, readsize * 2) + if readsize > self.min_readsize: + self.min_readsize = min(readsize, self.min_readsize * 2, 512) + return ''.join(bufs) # Return resulting line def readlines(self, sizehint=0): # Negative numbers result in reading all the lines diff --git a/Lib/httplib.py b/Lib/httplib.py index b4bd536..36381de 100644 --- a/Lib/httplib.py +++ b/Lib/httplib.py @@ -796,11 +796,20 @@ class HTTPConnection: nil, netloc, nil, nil, nil = urlsplit(url) if netloc: - self.putheader('Host', netloc.encode("idna")) - elif self.port == HTTP_PORT: - self.putheader('Host', self.host.encode("idna")) + try: + netloc_enc = netloc.encode("ascii") + except UnicodeEncodeError: + netloc_enc = netloc.encode("idna") + self.putheader('Host', netloc_enc) else: - self.putheader('Host', "%s:%s" % (self.host.encode("idna"), self.port)) + try: + host_enc = self.host.encode("ascii") + except UnicodeEncodeError: + host_enc = self.host.encode("idna") + if self.port == HTTP_PORT: + self.putheader('Host', host_enc) + else: + self.putheader('Host', "%s:%s" % (host_enc, self.port)) # note: we are assuming that clients will not attempt to set these # headers since *this* library must deal with the diff --git a/Lib/idlelib/NEWS.txt b/Lib/idlelib/NEWS.txt index 8163330..25e5d40 100644 --- a/Lib/idlelib/NEWS.txt +++ b/Lib/idlelib/NEWS.txt @@ -1,3 +1,8 @@ +What's New in IDLE 1.2a2? +========================= + +*Release date: 27-APR-2006* + What's New in IDLE 1.2a1? ========================= diff --git a/Lib/idlelib/configHelpSourceEdit.py b/Lib/idlelib/configHelpSourceEdit.py index 3db1e0a..8924f79 100644 --- a/Lib/idlelib/configHelpSourceEdit.py +++ b/Lib/idlelib/configHelpSourceEdit.py @@ -151,6 +151,7 @@ class GetHelpSourceDialog(Toplevel): pass else: # Mac Safari insists on using the URI form for local files + self.result = list(self.result) self.result[1] = "file://" + path self.destroy() diff --git a/Lib/idlelib/idlever.py b/Lib/idlelib/idlever.py index fbde56c..b7deb3f 100644 --- a/Lib/idlelib/idlever.py +++ b/Lib/idlelib/idlever.py @@ -1 +1 @@ -IDLE_VERSION = "1.2a1" +IDLE_VERSION = "1.2a2" diff --git a/Lib/imputil.py b/Lib/imputil.py index e6ad7ec..8a49bb1 100644 --- a/Lib/imputil.py +++ b/Lib/imputil.py @@ -131,9 +131,12 @@ class ImportManager: if importer: return importer._finish_import(top_module, parts[1:], fromlist) - # Grrr, some people "import os.path" + # Grrr, some people "import os.path" or do "from os.path import ..." if len(parts) == 2 and hasattr(top_module, parts[1]): - return top_module + if fromlist: + return getattr(top_module, parts[1]) + else: + return top_module # If the importer does not exist, then we have to bail. A missing # importer means that something else imported the module, and we have diff --git a/Lib/inspect.py b/Lib/inspect.py index 2e4d987..bf7f006 100644 --- a/Lib/inspect.py +++ b/Lib/inspect.py @@ -353,7 +353,13 @@ def getsourcefile(object): if 'b' in mode and string.lower(filename[-len(suffix):]) == suffix: # Looks like a binary file. We want to only return a text file. 
return None - if os.path.exists(filename) or hasattr(getmodule(object), '__loader__'): + if os.path.exists(filename): + return filename + # Ugly but necessary - '<stdin>' and '<string>' mean that getmodule() + # would infinitely recurse, because they're not real files nor loadable + # Note that this means that writing a PEP 302 loader that uses '<' + # at the start of a filename is now not a good idea. :( + if filename[:1]!='<' and hasattr(getmodule(object), '__loader__'): + return filename def getabsfile(object): @@ -406,7 +412,11 @@ def findsource(object): in the file and the line number indexes a line in that list. An IOError is raised if the source code cannot be retrieved.""" file = getsourcefile(object) or getfile(object) - lines = linecache.getlines(file, getmodule(object).__dict__) + module = getmodule(object) + if module: + lines = linecache.getlines(file, module.__dict__) + else: + lines = linecache.getlines(file) if not lines: raise IOError('could not get source code') diff --git a/Lib/lib-tk/turtle.py b/Lib/lib-tk/turtle.py index a395613..d68e405 100644 --- a/Lib/lib-tk/turtle.py +++ b/Lib/lib-tk/turtle.py @@ -1,8 +1,24 @@ # LogoMation-like turtle graphics +""" +Turtle graphics is a popular way for introducing programming to +kids. It was part of the original Logo programming language developed +by Wally Feurzeig and Seymour Papert in 1966. + +Imagine a robotic turtle starting at (0, 0) in the x-y plane. Give it +the command turtle.forward(15), and it moves (on-screen!) 15 pixels in +the direction it is facing, drawing a line as it moves. Give it the +command turtle.left(25), and it rotates in-place 25 degrees counterclockwise. + +By combining these and similar commands, intricate shapes and +pictures can easily be drawn. +""" + from math import * # Also for export import Tkinter +speeds = ['fastest', 'fast', 'normal', 'slow', 'slowest'] + class Error(Exception): pass @@ -13,17 +29,42 @@ class RawPen: self._items = [] self._tracing = 1 self._arrow = 0 + self._delay = 10 # default delay for drawing self.degrees() self.reset() def degrees(self, fullcircle=360.0): + """ Set angle measurement units to degrees. + + Example: + >>> turtle.degrees() + """ self._fullcircle = fullcircle self._invradian = pi / (fullcircle * 0.5) def radians(self): + """ Set the angle measurement units to radians. + + Example: + >>> turtle.radians() + """ self.degrees(2.0*pi) def reset(self): + """ Clear the screen, re-center the pen, and set variables to + the default values. + + Example: + >>> turtle.position() + [0.0, -22.0] + >>> turtle.heading() + 100.0 + >>> turtle.reset() + >>> turtle.position() + [0.0, 0.0] + >>> turtle.heading() + 0.0 + """ canvas = self._canvas self._canvas.update() width = canvas.winfo_width() @@ -45,6 +86,11 @@ class RawPen: canvas._root().tkraise() def clear(self): + """ Clear the screen. The turtle does not move. + + Example: + >>> turtle.clear() + """ self.fill(0) canvas = self._canvas items = self._items @@ -55,37 +101,130 @@ class RawPen: self._draw_turtle() def tracer(self, flag): + """ Set tracing on if flag is True, and off if it is False. + Tracing means lines are drawn more slowly, with an + animation of an arrow along the line. + + Example: + >>> turtle.tracer(False) # turns off Tracer + """ self._tracing = flag if not self._tracing: self._delete_turtle() self._draw_turtle() def forward(self, distance): + """ Go forward distance steps.
+ + Example: + >>> turtle.position() + [0.0, 0.0] + >>> turtle.forward(25) + >>> turtle.position() + [25.0, 0.0] + >>> turtle.forward(-75) + >>> turtle.position() + [-50.0, 0.0] + """ x0, y0 = start = self._position x1 = x0 + distance * cos(self._angle*self._invradian) y1 = y0 - distance * sin(self._angle*self._invradian) self._goto(x1, y1) def backward(self, distance): + """ Go backwards distance steps. + + The turtle's heading does not change. + + Example: + >>> turtle.position() + [0.0, 0.0] + >>> turtle.backward(30) + >>> turtle.position() + [-30.0, 0.0] + """ self.forward(-distance) def left(self, angle): + """ Turn left angle units (units are by default degrees, + but can be set via the degrees() and radians() functions.) + + When viewed from above, the turning happens in-place around + its front tip. + + Example: + >>> turtle.heading() + 22 + >>> turtle.left(45) + >>> turtle.heading() + 67.0 + """ self._angle = (self._angle + angle) % self._fullcircle self._draw_turtle() def right(self, angle): + """ Turn right angle units (units are by default degrees, + but can be set via the degrees() and radians() functions.) + + When viewed from above, the turning happens in-place around + its front tip. + + Example: + >>> turtle.heading() + 22 + >>> turtle.right(45) + >>> turtle.heading() + 337.0 + """ self.left(-angle) def up(self): + """ Pull the pen up -- no drawing when moving. + + Example: + >>> turtle.up() + """ self._drawing = 0 def down(self): + """ Put the pen down -- draw when moving. + + Example: + >>> turtle.down() + """ self._drawing = 1 def width(self, width): + """ Set the line thickness to width. + + Example: + >>> turtle.width(10) + """ self._width = float(width) def color(self, *args): + """ Set the pen color. + + Three input formats are allowed: + + color(s) + s is a Tk specification string, such as "red" or "yellow" + + color((r, g, b)) + *a tuple* of r, g, and b, which represent an RGB color, + and each of r, g, and b are in the range [0..1] + + color(r, g, b) + r, g, and b represent an RGB color, and each of r, g, and b + are in the range [0..1] + + Example: + + >>> turtle.color('brown') + >>> tup = (0.2, 0.8, 0.55) + >>> turtle.color(tup) + >>> turtle.color(0, .5, 0) + """ if not args: raise Error, "no color arguments" if len(args) == 1: @@ -118,11 +257,20 @@ class RawPen: self._color = color self._draw_turtle() - def write(self, arg, move=0): - x, y = start = self._position + def write(self, text, move=False): + """ Write text at the current pen position. + + If move is true, the pen is moved to the bottom-right corner + of the text. By default, move is False. + + Example: + >>> turtle.write('The race is on!') + >>> turtle.write('Home = (0, 0)', True) + """ + x, y = self._position x = x-1 # correction -- calibrated for Windows item = self._canvas.create_text(x, y, - text=str(arg), anchor="sw", + text=str(text), anchor="sw", fill=self._color) self._items.append(item) if move: @@ -131,6 +279,20 @@ class RawPen: self._draw_turtle() def fill(self, flag): + """ Call fill(1) before drawing the shape you + want to fill, and fill(0) when done.
+ + Example: + >>> turtle.fill(1) + >>> turtle.forward(100) + >>> turtle.left(90) + >>> turtle.forward(100) + >>> turtle.left(90) + >>> turtle.forward(100) + >>> turtle.left(90) + >>> turtle.forward(100) + >>> turtle.fill(0) + """ if self._filling: path = tuple(self._path) smooth = self._filling < 0 @@ -139,7 +301,6 @@ class RawPen: {'fill': self._color, 'smooth': smooth}) self._items.append(item) - self._canvas.lower(item) if self._tofill: for item in self._tofill: self._canvas.itemconfigure(item, fill=self._color) @@ -151,16 +312,62 @@ class RawPen: self._path.append(self._position) self.forward(0) + def begin_fill(self): + """ Called just before drawing a shape to be filled. + + Example: + >>> turtle.begin_fill() + >>> turtle.forward(100) + >>> turtle.left(90) + >>> turtle.forward(100) + >>> turtle.left(90) + >>> turtle.forward(100) + >>> turtle.left(90) + >>> turtle.forward(100) + >>> turtle.end_fill() + """ + self.fill(1) + + def end_fill(self): + """ Called after drawing a shape to be filled. + + Example: + >>> turtle.begin_fill() + >>> turtle.forward(100) + >>> turtle.left(90) + >>> turtle.forward(100) + >>> turtle.left(90) + >>> turtle.forward(100) + >>> turtle.left(90) + >>> turtle.forward(100) + >>> turtle.end_fill() + """ + self.fill(0) + def circle(self, radius, extent=None): + """ Draw a circle with given radius. + The center is radius units left of the turtle; extent + determines which part of the circle is drawn. If not given, + the entire circle is drawn. + + If extent is not a full circle, one endpoint of the arc is the + current pen position. The arc is drawn in a counter clockwise + direction if radius is positive, otherwise in a clockwise + direction. In the process, the direction of the turtle is + changed by the amount of the extent. + + >>> turtle.circle(50) + >>> turtle.circle(120, 180) # half a circle + """ if extent is None: extent = self._fullcircle x0, y0 = self._position xc = x0 - radius * sin(self._angle * self._invradian) yc = y0 - radius * cos(self._angle * self._invradian) if radius >= 0.0: - start = self._angle - 90.0 + start = self._angle - (self._fullcircle / 4.0) else: - start = self._angle + 90.0 + start = self._angle + (self._fullcircle / 4.0) extent = -extent if self._filling: if abs(extent) >= self._fullcircle: @@ -202,40 +409,145 @@ class RawPen: self._draw_turtle() def heading(self): + """ Return the turtle's current heading. + + Example: + >>> turtle.heading() + 67.0 + """ return self._angle def setheading(self, angle): + """ Set the turtle facing the given angle. + + Here are some common directions in degrees: + + 0 - east + 90 - north + 180 - west + 270 - south + + Example: + >>> turtle.setheading(90) + >>> turtle.heading() + 90 + >>> turtle.setheading(128) + >>> turtle.heading() + 128 + """ self._angle = angle self._draw_turtle() def window_width(self): + """ Returns the width of the turtle window. + + Example: + >>> turtle.window_width() + 640 + """ width = self._canvas.winfo_width() if width <= 1: # the window isn't managed by a geometry manager width = self._canvas['width'] return width def window_height(self): + """ Return the height of the turtle window. + + Example: + >>> turtle.window_height() + 768 + """ height = self._canvas.winfo_height() if height <= 1: # the window isn't managed by a geometry manager height = self._canvas['height'] return height def position(self): + """ Return the current (x, y) location of the turtle. 
+ + Example: + >>> turtle.position() + [0.0, 240.0] + """ x0, y0 = self._origin x1, y1 = self._position return [x1-x0, -y1+y0] def setx(self, xpos): + """ Set the turtle's x coordinate to be xpos. + + Example: + >>> turtle.position() + [10.0, 240.0] + >>> turtle.setx(10) + >>> turtle.position() + [10.0, 240.0] + """ x0, y0 = self._origin x1, y1 = self._position self._goto(x0+xpos, y1) def sety(self, ypos): + """ Set the turtle's y coordinate to be ypos. + + Example: + >>> turtle.position() + [0.0, 0.0] + >>> turtle.sety(-22) + >>> turtle.position() + [0.0, -22.0] + """ x0, y0 = self._origin x1, y1 = self._position self._goto(x1, y0-ypos) + def towards(self, *args): + """Returns the angle, which corresponds to the line + from turtle-position to point (x,y). + + Argument can be two coordinates or one pair of coordinates + or a RawPen/Pen instance. + + Example: + >>> turtle.position() + [10.0, 10.0] + >>> turtle.towards(0,0) + 225.0 + """ + if len(args) == 2: + x, y = args + else: + arg = args[0] + if isinstance(arg, RawPen): + x, y = arg.position() + else: + x, y = arg + x0, y0 = self.position() + dx = x - x0 + dy = y - y0 + return (atan2(dy,dx) / self._invradian) % self._fullcircle + def goto(self, *args): + """ Go to the given point. + + If the pen is down, then a line will be drawn. The turtle's + orientation does not change. + + Two input formats are accepted: + + goto(x, y) + go to point (x, y) + + goto((x, y)) + go to point (x, y) + + Example: + >>> turtle.position() + [0.0, 0.0] + >>> turtle.goto(50, -45) + >>> turtle.position() + [50.0, -45.0] + """ if len(args) == 1: try: x, y = args[0] @@ -250,7 +562,7 @@ class RawPen: self._goto(x0+x, y0-y) def _goto(self, x1, y1): - x0, y0 = start = self._position + x0, y0 = self._position self._position = map(float, (x1, y1)) if self._filling: self._path.append(self._position) @@ -270,7 +582,7 @@ class RawPen: self._canvas.coords(item, x0, y0, x, y) self._draw_turtle((x,y)) self._canvas.update() - self._canvas.after(10) + self._canvas.after(self._delay) # in case nhops==0 self._canvas.coords(item, x0, y0, x1, y1) self._canvas.itemconfigure(item, arrow="none") @@ -285,7 +597,42 @@ class RawPen: self._items.append(item) self._draw_turtle() - def _draw_turtle(self,position=[]): + def speed(self, speed): + """ Set the turtle's speed. + + speed must be one of these five strings: + + 'fastest' is a 0 ms delay + 'fast' is a 5 ms delay + 'normal' is a 10 ms delay + 'slow' is a 15 ms delay + 'slowest' is a 20 ms delay + + Example: + >>> turtle.speed('slow') + """ + try: + speed = speed.strip().lower() + self._delay = speeds.index(speed) * 5 + except: + raise ValueError("%r is not a valid speed. speed must be " + "one of %s" % (speed, speeds)) + + + def delay(self, delay): + """ Set the drawing delay in milliseconds.
+ + This is intended to allow finer control of the drawing speed + than the speed() method. + + Example: + >>> turtle.delay(15) + """ + if int(delay) < 0: + raise ValueError("delay must be greater than or equal to 0") + self._delay = int(delay) + + def _draw_turtle(self, position=[]): if not self._tracing: return if position == []: @@ -305,13 +652,17 @@ class RawPen: def _delete_turtle(self): if self._arrow != 0: self._canvas.delete(self._arrow) - self._arrow = 0 - + self._arrow = 0 _root = None _canvas = None _pen = None +_width = 0.50 # 50% of window width +_height = 0.75 # 75% of window height +_startx = None +_starty = None +_title = "Turtle Graphics" # default title class Pen(RawPen): @@ -320,10 +671,15 @@ class Pen(RawPen): if _root is None: _root = Tkinter.Tk() _root.wm_protocol("WM_DELETE_WINDOW", self._destroy) + _root.title(_title) + if _canvas is None: # XXX Should have scroll bars _canvas = Tkinter.Canvas(_root, background="white") _canvas.pack(expand=1, fill="both") + + setup(width=_width, height=_height, startx=_startx, starty=_starty) + RawPen.__init__(self, _canvas) def _destroy(self): @@ -335,13 +691,18 @@ class Pen(RawPen): _canvas = None root.destroy() - def _getpen(): global _pen - pen = _pen - if not pen: - _pen = pen = Pen() - return pen + if not _pen: + _pen = Pen() + return _pen + +class Turtle(Pen): + pass + +"""For documentation of the following functions see + the RawPen methods with the same names +""" def degrees(): _getpen().degrees() def radians(): _getpen().radians() @@ -358,6 +719,8 @@ def width(width): _getpen().width(width) def color(*args): _getpen().color(*args) def write(arg, move=0): _getpen().write(arg, move) def fill(flag): _getpen().fill(flag) +def begin_fill(): _getpen().begin_fill() +def end_fill(): _getpen().end_fill() def circle(radius, extent=None): _getpen().circle(radius, extent) def goto(*args): _getpen().goto(*args) def heading(): return _getpen().heading() @@ -367,6 +730,106 @@ def window_width(): return _getpen().window_width() def window_height(): return _getpen().window_height() def setx(xpos): _getpen().setx(xpos) def sety(ypos): _getpen().sety(ypos) +def towards(*args): return _getpen().towards(*args) + +def done(): _root.mainloop() +def delay(delay): return _getpen().delay(delay) +def speed(speed): return _getpen().speed(speed) + +for methodname in dir(RawPen): + """ copies RawPen docstrings to module functions of same name """ + if not methodname.startswith("_"): + eval(methodname).__doc__ = RawPen.__dict__[methodname].__doc__ + + +def setup(**geometry): + """ Sets the size and position of the main window. + + Keywords are width, height, startx and starty + + width: either a size in pixels or a fraction of the screen. + Default is 50% of screen. + height: either the height in pixels or a fraction of the screen. + Default is 75% of screen. + + Setting either width or height to None before drawing will force + use of default geometry as in older versions of turtle.py + + startx: starting position in pixels from the left edge of the screen. + Default is to center window. Setting startx to None is the default + and centers window horizontally on screen. + + starty: starting position in pixels from the top edge of the screen. + Default is to center window. Setting starty to None is the default + and centers window vertically on screen.
+ + Examples: + >>> setup(width=200, height=200, startx=0, starty=0) + + sets window to 200x200 pixels, in upper left of screen + + >>> setup(width=.75, height=0.5, startx=None, starty=None) + + sets window to 75% of screen by 50% of screen and centers + + >>> setup(width=None) + + forces use of default geometry as in older versions of turtle.py + """ + + global _width, _height, _startx, _starty + + width = geometry.get('width',_width) + if width >= 0 or width == None: + _width = width + else: + raise ValueError, "width can not be less than 0" + + height = geometry.get('height',_height) + if height >= 0 or height == None: + _height = height + else: + raise ValueError, "height can not be less than 0" + + startx = geometry.get('startx', _startx) + if startx >= 0 or startx == None: + _startx = startx + else: + raise ValueError, "startx can not be less than 0" + + starty = geometry.get('starty', _starty) + if starty >= 0 or starty == None: + _starty = starty + else: + raise ValueError, "starty can not be less than 0" + + + if _root and _width and _height: + if 0 < _width <= 1: + _width = _root.winfo_screenwidth() * _width + if 0 < _height <= 1: + _height = _root.winfo_screenheight() * _height + + # center window on screen + if _startx is None: + _startx = (_root.winfo_screenwidth() - _width) / 2 + + if _starty is None: + _starty = (_root.winfo_screenheight() - _height) / 2 + + _root.geometry("%dx%d+%d+%d" % (_width, _height, _startx, _starty)) + +def title(title): + """ Set the window title. + + By default this is set to 'Turtle Graphics' + + Example: + >>> title("My Window") + """ + + global _title + _title = title def demo(): reset() @@ -417,10 +880,94 @@ def demo(): forward(20) right(90) fill(0) + tracer(1) # more text write("end") - if __name__ == '__main__': - _root.mainloop() + +def demo2(): + # exercises some new and improved features + speed('fast') + width(3) + + # draw a segmented half-circle + setheading(towards(0,0)) + x,y = position() + r = (x**2+y**2)**.5/2.0 + right(90) + pendown = True + for i in range(18): + if pendown: + up() + pendown = False + else: + down() + pendown = True + circle(r,10) + sleep(2) + + reset() + left(90) + + # draw a series of triangles + l = 10 + color("green") + width(3) + left(180) + sp = 5 + for i in range(-2,16): + if i > 0: + color(1.0-0.05*i,0,0.05*i) + fill(1) + color("green") + for j in range(3): + forward(l) + left(120) + l += 10 + left(15) + if sp > 0: + sp = sp-1 + speed(speeds[sp]) + color(0.25,0,0.75) + fill(0) + color("green") + + left(130) + up() + forward(90) + color("red") + speed('fastest') + down() + + # create a second turtle and make the original pursue and catch it + turtle=Turtle() + turtle.reset() + turtle.left(90) + turtle.speed('normal') + turtle.up() + turtle.goto(280,40) + turtle.left(24) + turtle.down() + turtle.speed('fast') + turtle.color("blue") + turtle.width(2) + speed('fastest') + + # turn default turtle towards new turtle object + setheading(towards(turtle)) + while ( abs(position()[0]-turtle.position()[0])>4 or + abs(position()[1]-turtle.position()[1])>4): + turtle.forward(3.5) + turtle.left(0.6) + # turn default turtle towards new turtle object + setheading(towards(turtle)) + forward(4) + write("CAUGHT! ", move=True) + + if __name__ == '__main__': + from time import sleep demo() + sleep(3) + demo2() + done() diff --git a/Lib/locale.py b/Lib/locale.py index cfc69b1..fd549bb 100644 --- a/Lib/locale.py +++ b/Lib/locale.py @@ -88,13 +88,16 @@ except ImportError: ### Number formatting APIs # Author: Martin von Loewis +# improved by Georg Brandl #perform the grouping from right to left -def _group(s): - conv=localeconv() - grouping=conv['grouping'] - if not grouping:return (s, 0) - result="" +def _group(s, monetary=False): + conv = localeconv() + thousands_sep = conv[monetary and 'mon_thousands_sep' or 'thousands_sep'] + grouping = conv[monetary and 'mon_grouping' or 'grouping'] + if not grouping: + return (s, 0) + result = "" seps = 0 spaces = "" if s[-1] == ' ': @@ -103,63 +106,142 @@ def _group(s): s = s[:sp] while s and grouping: # if grouping is -1, we are done - if grouping[0]==CHAR_MAX: + if grouping[0] == CHAR_MAX: break # 0: re-use last group ad infinitum - elif grouping[0]!=0: + elif grouping[0] != 0: #process last group - group=grouping[0] - grouping=grouping[1:] + group = grouping[0] + grouping = grouping[1:] if result: - result=s[-group:]+conv['thousands_sep']+result + result = s[-group:] + thousands_sep + result seps += 1 else: - result=s[-group:] - s=s[:-group] + result = s[-group:] + s = s[:-group] if s and s[-1] not in "0123456789": # the leading string is only spaces and signs - return s+result+spaces,seps + return s + result + spaces, seps if not result: - return s+spaces,seps + return s + spaces, seps if s: - result=s+conv['thousands_sep']+result + result = s + thousands_sep + result seps += 1 - return result+spaces,seps - -def format(f,val,grouping=0): - """Formats a value in the same way that the % formatting would use, + return result + spaces, seps +
+def format(percent, value, grouping=False, monetary=False, *additional): + """Returns the locale-aware substitution of a %? specifier + (percent). + + additional is for format strings which contain one or more + '*' modifiers.""" + # this is only for one-percent-specifier strings and this should be checked + if percent[0] != '%': + raise ValueError("format() must be given exactly one %char " + "format specifier") + if additional: + formatted = percent % ((value,) + additional) + else: + formatted = percent % value + # floats and decimal ints need special action! + if percent[-1] in 'eEfFgG': + seps = 0 + parts = formatted.split('.') + if grouping: + parts[0], seps = _group(parts[0], monetary=monetary) + decimal_point = localeconv()[monetary and 'mon_decimal_point' + or 'decimal_point'] + formatted = decimal_point.join(parts) + while seps: + sp = formatted.find(' ') + if sp == -1: break + formatted = formatted[:sp] + formatted[sp+1:] + seps -= 1 + elif percent[-1] in 'diu': + if grouping: + formatted = _group(formatted, monetary=monetary)[0] + return formatted + +import re, operator +_percent_re = re.compile(r'%(?:\((?P<key>.*?)\))?' + r'(?P<modifiers>[-#0-9 +*.hlL]*?)[eEfFgGdiouxXcrs%]') + +def format_string(f, val, grouping=False): + """Formats a string in the same way that the % formatting would use, but takes the current locale into account.
Grouping is applied if the third parameter is true.""" - result = f % val - fields = result.split(".") - seps = 0 - if grouping: - fields[0],seps=_group(fields[0]) - if len(fields)==2: - result = fields[0]+localeconv()['decimal_point']+fields[1] - elif len(fields)==1: - result = fields[0] + percents = list(_percent_re.finditer(f)) + new_f = _percent_re.sub('%s', f) + + if isinstance(val, tuple): + new_val = list(val) + i = 0 + for perc in percents: + starcount = perc.group('modifiers').count('*') + new_val[i] = format(perc.group(), new_val[i], grouping, False, *new_val[i+1:i+1+starcount]) + del new_val[i+1:i+1+starcount] + i += (1 + starcount) + val = tuple(new_val) + elif operator.isMappingType(val): + for perc in percents: + key = perc.group("key") + val[key] = format(perc.group(), val[key], grouping) else: - raise Error, "Too many decimal points in result string" + # val is a single value + val = format(percents[0].group(), val, grouping) + + return new_f % val + +def currency(val, symbol=True, grouping=False, international=False): + """Formats val according to the currency settings + in the current locale.""" + conv = localeconv() - while seps: - # If the number was formatted for a specific width, then it - # might have been filled with spaces to the left or right. If - # so, kill as much spaces as there where separators. - # Leading zeroes as fillers are not yet dealt with, as it is - # not clear how they should interact with grouping. - sp = result.find(" ") - if sp==-1:break - result = result[:sp]+result[sp+1:] - seps -= 1 + # check for illegal values + digits = conv[international and 'int_frac_digits' or 'frac_digits'] + if digits == 127: + raise ValueError("Currency formatting is not possible using " + "the 'C' locale.") + + s = format('%%.%if' % digits, abs(val), grouping, monetary=True) + # '<' and '>' are markers if the sign must be inserted between symbol and value + s = '<' + s + '>' + + if symbol: + smb = conv[international and 'int_curr_symbol' or 'currency_symbol'] + precedes = conv[val<0 and 'n_cs_precedes' or 'p_cs_precedes'] + separated = conv[val<0 and 'n_sep_by_space' or 'p_sep_by_space'] + + if precedes: + s = smb + (separated and ' ' or '') + s + else: + s = s + (separated and ' ' or '') + smb + + sign_pos = conv[val<0 and 'n_sign_posn' or 'p_sign_posn'] + sign = conv[val<0 and 'negative_sign' or 'positive_sign'] + + if sign_pos == 0: + s = '(' + s + ')' + elif sign_pos == 1: + s = sign + s + elif sign_pos == 2: + s = s + sign + elif sign_pos == 3: + s = s.replace('<', sign) + elif sign_pos == 4: + s = s.replace('>', sign) + else: + # the default if nothing specified; + # this should be the most fitting sign position + s = sign + s - return result + return s.replace('<', '').replace('>', '') def str(val): """Convert float to integer, taking the locale into account.""" - return format("%.12g",val) + return format("%.12g", val) -def atof(string,func=float): +def atof(string, func=float): "Parses a string as a float according to the locale settings." 
#First, get rid of the grouping ts = localeconv()['thousands_sep'] @@ -179,10 +261,10 @@ def atoi(str): def _test(): setlocale(LC_ALL, "") #do grouping - s1=format("%d", 123456789,1) + s1 = format("%d", 123456789,1) print s1, "is", atoi(s1) #standard formatting - s1=str(3.14) + s1 = str(3.14) print s1, "is", atof(s1) ### Locale name aliasing engine diff --git a/Lib/logging/__init__.py b/Lib/logging/__init__.py index 9798931..c65d07f 100644 --- a/Lib/logging/__init__.py +++ b/Lib/logging/__init__.py @@ -40,7 +40,7 @@ except ImportError: thread = None __author__ = "Vinay Sajip " -__status__ = "beta" +__status__ = "production" __version__ = "0.4.9.9" __date__ = "06 February 2006" diff --git a/Lib/logging/handlers.py b/Lib/logging/handlers.py index 8e569a7..e0da254 100644 --- a/Lib/logging/handlers.py +++ b/Lib/logging/handlers.py @@ -44,6 +44,8 @@ DEFAULT_HTTP_LOGGING_PORT = 9022 DEFAULT_SOAP_LOGGING_PORT = 9023 SYSLOG_UDP_PORT = 514 +_MIDNIGHT = 24 * 60 * 60 # number of seconds in a day + class BaseRotatingHandler(logging.FileHandler): """ Base class for handlers that rotate log files at a certain point. @@ -217,12 +219,8 @@ class TimedRotatingFileHandler(BaseRotatingHandler): currentMinute = t[4] currentSecond = t[5] # r is the number of seconds left between now and midnight - if (currentMinute == 0) and (currentSecond == 0): - r = (24 - currentHour) * 60 * 60 # number of hours in seconds - else: - r = (23 - currentHour) * 60 * 60 - r = r + (59 - currentMinute) * 60 # plus the number of minutes (in secs) - r = r + (60 - currentSecond) # plus the number of seconds + r = _MIDNIGHT - ((currentHour * 60 + currentMinute) * 60 + + currentSecond) self.rolloverAt = currentTime + r # If we are rolling over on a certain day, add in the number of days until # the next rollover, but offset by 1 since we just calculated the time diff --git a/Lib/mailbox.py b/Lib/mailbox.py index c89c1a4..bb115e1 100755 --- a/Lib/mailbox.py +++ b/Lib/mailbox.py @@ -1,93 +1,1907 @@ #! 
/usr/bin/env python -"""Classes to handle Unix style, MMDF style, and MH style mailboxes.""" +"""Read/write support for Maildir, mbox, MH, Babyl, and MMDF mailboxes.""" - -import rfc822 +import sys import os +import time +import calendar +import socket +import errno +import copy +import email +import email.Message +import email.Generator +import rfc822 +import StringIO +try: + import fcntl +except ImportError: + fcntl = None -__all__ = ["UnixMailbox","MmdfMailbox","MHMailbox","Maildir","BabylMailbox", - "PortableUnixMailbox"] +__all__ = [ 'Mailbox', 'Maildir', 'mbox', 'MH', 'Babyl', 'MMDF', + 'Message', 'MaildirMessage', 'mboxMessage', 'MHMessage', + 'BabylMessage', 'MMDFMessage', 'UnixMailbox', + 'PortableUnixMailbox', 'MmdfMailbox', 'MHMailbox', 'BabylMailbox' ] -class _Mailbox: +class Mailbox: + """A group of messages in a particular place.""" - def __init__(self, fp, factory=rfc822.Message): - self.fp = fp - self.seekp = 0 - self.factory = factory + def __init__(self, path, factory=None, create=True): + """Initialize a Mailbox instance.""" + self._path = os.path.abspath(os.path.expanduser(path)) + self._factory = factory + + def add(self, message): + """Add message and return assigned key.""" + raise NotImplementedError('Method must be implemented by subclass') + + def remove(self, key): + """Remove the keyed message; raise KeyError if it doesn't exist.""" + raise NotImplementedError('Method must be implemented by subclass') + + def __delitem__(self, key): + self.remove(key) + + def discard(self, key): + """If the keyed message exists, remove it.""" + try: + self.remove(key) + except KeyError: + pass + + def __setitem__(self, key, message): + """Replace the keyed message; raise KeyError if it doesn't exist.""" + raise NotImplementedError('Method must be implemented by subclass') + + def get(self, key, default=None): + """Return the keyed message, or default if it doesn't exist.""" + try: + return self.__getitem__(key) + except KeyError: + return default + + def __getitem__(self, key): + """Return the keyed message; raise KeyError if it doesn't exist.""" + if not self._factory: + return self.get_message(key) + else: + return self._factory(self.get_file(key)) + + def get_message(self, key): + """Return a Message representation or raise a KeyError.""" + raise NotImplementedError('Method must be implemented by subclass') + + def get_string(self, key): + """Return a string representation or raise a KeyError.""" + raise NotImplementedError('Method must be implemented by subclass') + + def get_file(self, key): + """Return a file-like representation or raise a KeyError.""" + raise NotImplementedError('Method must be implemented by subclass') + + def iterkeys(self): + """Return an iterator over keys.""" + raise NotImplementedError('Method must be implemented by subclass') + + def keys(self): + """Return a list of keys.""" + return list(self.iterkeys()) + + def itervalues(self): + """Return an iterator over all messages.""" + for key in self.iterkeys(): + try: + value = self[key] + except KeyError: + continue + yield value def __iter__(self): - return iter(self.next, None) + return self.itervalues() + + def values(self): + """Return a list of messages. Memory intensive.""" + return list(self.itervalues()) + + def iteritems(self): + """Return an iterator over (key, message) tuples.""" + for key in self.iterkeys(): + try: + value = self[key] + except KeyError: + continue + yield (key, value) + + def items(self): + """Return a list of (key, message) tuples.
Memory intensive.""" + return list(self.iteritems()) + + def has_key(self, key): + """Return True if the keyed message exists, False otherwise.""" + raise NotImplementedError('Method must be implemented by subclass') + + def __contains__(self, key): + return self.has_key(key) + + def __len__(self): + """Return a count of messages in the mailbox.""" + raise NotImplementedError('Method must be implemented by subclass') + + def clear(self): + """Delete all messages.""" + for key in self.iterkeys(): + self.discard(key) + + def pop(self, key, default=None): + """Delete the keyed message and return it, or default.""" + try: + result = self[key] + except KeyError: + return default + self.discard(key) + return result + + def popitem(self): + """Delete an arbitrary (key, message) pair and return it.""" + for key in self.iterkeys(): + return (key, self.pop(key)) # This is only run once. + else: + raise KeyError('No messages in mailbox') + + def update(self, arg=None): + """Change the messages that correspond to certain keys.""" + if hasattr(arg, 'iteritems'): + source = arg.iteritems() + elif hasattr(arg, 'items'): + source = arg.items() + else: + source = arg + bad_key = False + for key, message in source: + try: + self[key] = message + except KeyError: + bad_key = True + if bad_key: + raise KeyError('No message with key(s)') + + def flush(self): + """Write any pending changes to the disk.""" + raise NotImplementedError('Method must be implemented by subclass') + + def lock(self): + """Lock the mailbox.""" + raise NotImplementedError('Method must be implemented by subclass') + + def unlock(self): + """Unlock the mailbox if it is locked.""" + raise NotImplementedError('Method must be implemented by subclass') + + def close(self): + """Flush and close the mailbox.""" + raise NotImplementedError('Method must be implemented by subclass') + def _dump_message(self, message, target, mangle_from_=False): + # Most files are opened in binary mode to allow predictable seeking. + # To get native line endings on disk, the user-friendly \n line endings + # used in strings and by email.Message are translated here. 
+ """Dump message contents to target file.""" + if isinstance(message, email.Message.Message): + buffer = StringIO.StringIO() + gen = email.Generator.Generator(buffer, mangle_from_, 0) + gen.flatten(message) + buffer.seek(0) + target.write(buffer.read().replace('\n', os.linesep)) + elif isinstance(message, str): + if mangle_from_: + message = message.replace('\nFrom ', '\n>From ') + message = message.replace('\n', os.linesep) + target.write(message) + elif hasattr(message, 'read'): + while True: + line = message.readline() + if line == '': + break + if mangle_from_ and line.startswith('From '): + line = '>From ' + line[5:] + line = line.replace('\n', os.linesep) + target.write(line) + else: + raise TypeError('Invalid message type: %s' % type(message)) + + +class Maildir(Mailbox): + """A qmail-style Maildir mailbox.""" + + colon = ':' + + def __init__(self, dirname, factory=rfc822.Message, create=True): + """Initialize a Maildir instance.""" + Mailbox.__init__(self, dirname, factory, create) + if not os.path.exists(self._path): + if create: + os.mkdir(self._path, 0700) + os.mkdir(os.path.join(self._path, 'tmp'), 0700) + os.mkdir(os.path.join(self._path, 'new'), 0700) + os.mkdir(os.path.join(self._path, 'cur'), 0700) + else: + raise NoSuchMailboxError(self._path) + self._toc = {} + + def add(self, message): + """Add message and return assigned key.""" + tmp_file = self._create_tmp() + try: + self._dump_message(message, tmp_file) + finally: + tmp_file.close() + if isinstance(message, MaildirMessage): + subdir = message.get_subdir() + suffix = self.colon + message.get_info() + if suffix == self.colon: + suffix = '' + else: + subdir = 'new' + suffix = '' + uniq = os.path.basename(tmp_file.name).split(self.colon)[0] + dest = os.path.join(self._path, subdir, uniq + suffix) + os.rename(tmp_file.name, dest) + if isinstance(message, MaildirMessage): + os.utime(dest, (os.path.getatime(dest), message.get_date())) + return uniq + + def remove(self, key): + """Remove the keyed message; raise KeyError if it doesn't exist.""" + os.remove(os.path.join(self._path, self._lookup(key))) + + def discard(self, key): + """If the keyed message exists, remove it.""" + # This overrides an inapplicable implementation in the superclass. + try: + self.remove(key) + except KeyError: + pass + except OSError, e: + if e.errno != errno.ENOENT: + raise + + def __setitem__(self, key, message): + """Replace the keyed message; raise KeyError if it doesn't exist.""" + old_subpath = self._lookup(key) + temp_key = self.add(message) + temp_subpath = self._lookup(temp_key) + if isinstance(message, MaildirMessage): + # temp's subdir and suffix were specified by message. + dominant_subpath = temp_subpath + else: + # temp's subdir and suffix were defaults from add(). 
+ dominant_subpath = old_subpath + subdir = os.path.dirname(dominant_subpath) + if self.colon in dominant_subpath: + suffix = self.colon + dominant_subpath.split(self.colon)[-1] + else: + suffix = '' + self.discard(key) + new_path = os.path.join(self._path, subdir, key + suffix) + os.rename(os.path.join(self._path, temp_subpath), new_path) + if isinstance(message, MaildirMessage): + os.utime(new_path, (os.path.getatime(new_path), + message.get_date())) + + def get_message(self, key): + """Return a Message representation or raise a KeyError.""" + subpath = self._lookup(key) + f = open(os.path.join(self._path, subpath), 'r') + try: + msg = MaildirMessage(f) + finally: + f.close() + subdir, name = os.path.split(subpath) + msg.set_subdir(subdir) + if self.colon in name: + msg.set_info(name.split(self.colon)[-1]) + msg.set_date(os.path.getmtime(os.path.join(self._path, subpath))) + return msg + + def get_string(self, key): + """Return a string representation or raise a KeyError.""" + f = open(os.path.join(self._path, self._lookup(key)), 'r') + try: + return f.read() + finally: + f.close() + + def get_file(self, key): + """Return a file-like representation or raise a KeyError.""" + f = open(os.path.join(self._path, self._lookup(key)), 'rb') + return _ProxyFile(f) + + def iterkeys(self): + """Return an iterator over keys.""" + self._refresh() + for key in self._toc: + try: + self._lookup(key) + except KeyError: + continue + yield key + + def has_key(self, key): + """Return True if the keyed message exists, False otherwise.""" + self._refresh() + return key in self._toc + + def __len__(self): + """Return a count of messages in the mailbox.""" + self._refresh() + return len(self._toc) + + def flush(self): + """Write any pending changes to disk.""" + return # Maildir changes are always written immediately. + + def lock(self): + """Lock the mailbox.""" + return + + def unlock(self): + """Unlock the mailbox if it is locked.""" + return + + def close(self): + """Flush and close the mailbox.""" + return + + def list_folders(self): + """Return a list of folder names.""" + result = [] + for entry in os.listdir(self._path): + if len(entry) > 1 and entry[0] == '.' and \ + os.path.isdir(os.path.join(self._path, entry)): + result.append(entry[1:]) + return result + + def get_folder(self, folder): + """Return a Maildir instance for the named folder.""" + return Maildir(os.path.join(self._path, '.' + folder), create=False) + + def add_folder(self, folder): + """Create a folder and return a Maildir instance representing it.""" + path = os.path.join(self._path, '.' + folder) + result = Maildir(path) + maildirfolder_path = os.path.join(path, 'maildirfolder') + if not os.path.exists(maildirfolder_path): + os.close(os.open(maildirfolder_path, os.O_CREAT | os.O_WRONLY)) + return result + + def remove_folder(self, folder): + """Delete the named folder, which must be empty.""" + path = os.path.join(self._path, '.' 
+ folder) + for entry in os.listdir(os.path.join(path, 'new')) + \ + os.listdir(os.path.join(path, 'cur')): + if len(entry) < 1 or entry[0] != '.': + raise NotEmptyError('Folder contains message(s): %s' % folder) + for entry in os.listdir(path): + if entry != 'new' and entry != 'cur' and entry != 'tmp' and \ + os.path.isdir(os.path.join(path, entry)): + raise NotEmptyError("Folder contains subdirectory '%s': %s" % + (folder, entry)) + for root, dirs, files in os.walk(path, topdown=False): + for entry in files: + os.remove(os.path.join(root, entry)) + for entry in dirs: + os.rmdir(os.path.join(root, entry)) + os.rmdir(path) + + def clean(self): + """Delete old files in "tmp".""" + now = time.time() + for entry in os.listdir(os.path.join(self._path, 'tmp')): + path = os.path.join(self._path, 'tmp', entry) + if now - os.path.getatime(path) > 129600: # 60 * 60 * 36 + os.remove(path) + + _count = 1 # This is used to generate unique file names. + + def _create_tmp(self): + """Create a file in the tmp subdirectory and open and return it.""" + now = time.time() + hostname = socket.gethostname() + if '/' in hostname: + hostname = hostname.replace('/', r'\057') + if ':' in hostname: + hostname = hostname.replace(':', r'\072') + uniq = "%s.M%sP%sQ%s.%s" % (int(now), int(now % 1 * 1e6), os.getpid(), + Maildir._count, hostname) + path = os.path.join(self._path, 'tmp', uniq) + try: + os.stat(path) + except OSError, e: + if e.errno == errno.ENOENT: + Maildir._count += 1 + return open(path, 'wb+') + else: + raise + else: + raise ExternalClashError('Name clash prevented file creation: %s' % + path) + + def _refresh(self): + """Update table of contents mapping.""" + self._toc = {} + for subdir in ('new', 'cur'): + for entry in os.listdir(os.path.join(self._path, subdir)): + uniq = entry.split(self.colon)[0] + self._toc[uniq] = os.path.join(subdir, entry) + + def _lookup(self, key): + """Use TOC to return subpath for given key, or raise a KeyError.""" + try: + if os.path.exists(os.path.join(self._path, self._toc[key])): + return self._toc[key] + except KeyError: + pass + self._refresh() + try: + return self._toc[key] + except KeyError: + raise KeyError('No message with key: %s' % key) + + # This method is for backward compatibility only. def next(self): - while 1: - self.fp.seek(self.seekp) + """Return the next message in a one-time iteration.""" + if not hasattr(self, '_onetime_keys'): + self._onetime_keys = self.iterkeys() + while True: try: - self._search_start() - except EOFError: - self.seekp = self.fp.tell() + return self[self._onetime_keys.next()] + except StopIteration: return None - start = self.fp.tell() - self._search_end() - self.seekp = stop = self.fp.tell() - if start != stop: + except KeyError: + continue + + +class _singlefileMailbox(Mailbox): + """A single-file mailbox.""" + + def __init__(self, path, factory=None, create=True): + """Initialize a single-file mailbox.""" + Mailbox.__init__(self, path, factory, create) + try: + f = open(self._path, 'rb+') + except IOError, e: + if e.errno == errno.ENOENT: + if create: + f = open(self._path, 'wb+') + else: + raise NoSuchMailboxError(self._path) + elif e.errno == errno.EACCES: + f = open(self._path, 'rb') + else: + raise + self._file = f + self._toc = None + self._next_key = 0 + self._pending = False # No changes require rewriting the file. 
+ self._locked = False + + def add(self, message): + """Add message and return assigned key.""" + self._lookup() + self._toc[self._next_key] = self._append_message(message) + self._next_key += 1 + self._pending = True + return self._next_key - 1 + + def remove(self, key): + """Remove the keyed message; raise KeyError if it doesn't exist.""" + self._lookup(key) + del self._toc[key] + self._pending = True + + def __setitem__(self, key, message): + """Replace the keyed message; raise KeyError if it doesn't exist.""" + self._lookup(key) + self._toc[key] = self._append_message(message) + self._pending = True + + def iterkeys(self): + """Return an iterator over keys.""" + self._lookup() + for key in self._toc.keys(): + yield key + + def has_key(self, key): + """Return True if the keyed message exists, False otherwise.""" + self._lookup() + return key in self._toc + + def __len__(self): + """Return a count of messages in the mailbox.""" + self._lookup() + return len(self._toc) + + def lock(self): + """Lock the mailbox.""" + if not self._locked: + _lock_file(self._file) + self._locked = True + + def unlock(self): + """Unlock the mailbox if it is locked.""" + if self._locked: + _unlock_file(self._file) + self._locked = False + + def flush(self): + """Write any pending changes to disk.""" + if not self._pending: + return + self._lookup() + new_file = _create_temporary(self._path) + try: + new_toc = {} + self._pre_mailbox_hook(new_file) + for key in sorted(self._toc.keys()): + start, stop = self._toc[key] + self._file.seek(start) + self._pre_message_hook(new_file) + new_start = new_file.tell() + while True: + buffer = self._file.read(min(4096, + stop - self._file.tell())) + if buffer == '': + break + new_file.write(buffer) + new_toc[key] = (new_start, new_file.tell()) + self._post_message_hook(new_file) + except: + new_file.close() + os.remove(new_file.name) + raise + new_file.close() + self._file.close() + try: + os.rename(new_file.name, self._path) + except OSError, e: + if e.errno == errno.EEXIST: + os.remove(self._path) + os.rename(new_file.name, self._path) + else: + raise + self._file = open(self._path, 'rb+') + self._toc = new_toc + self._pending = False + if self._locked: + _lock_file(new_file, dotlock=False) + + def _pre_mailbox_hook(self, f): + """Called before writing the mailbox to file f.""" + return + + def _pre_message_hook(self, f): + """Called before writing each message to file f.""" + return + + def _post_message_hook(self, f): + """Called after writing each message to file f.""" + return + + def close(self): + """Flush and close the mailbox.""" + self.flush() + if self._locked: + self.unlock() + self._file.close() + + def _lookup(self, key=None): + """Return (start, stop) or raise KeyError.""" + if self._toc is None: + self._generate_toc() + if key is not None: + try: + return self._toc[key] + except KeyError: + raise KeyError('No message with key: %s' % key) + + def _append_message(self, message): + """Append message to mailbox and return (start, stop) offsets.""" + self._file.seek(0, 2) + self._pre_message_hook(self._file) + offsets = self._install_message(message) + self._post_message_hook(self._file) + self._file.flush() + return offsets + + + +class _mboxMMDF(_singlefileMailbox): + """An mbox or MMDF mailbox.""" + + _mangle_from_ = True + + def get_message(self, key): + """Return a Message representation or raise a KeyError.""" + start, stop = self._lookup(key) + self._file.seek(start) + from_line = self._file.readline().replace(os.linesep, '') + string = self._file.read(stop 
- self._file.tell()) + msg = self._message_factory(string.replace(os.linesep, '\n')) + msg.set_from(from_line[5:]) + return msg + + def get_string(self, key, from_=False): + """Return a string representation or raise a KeyError.""" + start, stop = self._lookup(key) + self._file.seek(start) + if not from_: + self._file.readline() + string = self._file.read(stop - self._file.tell()) + return string.replace(os.linesep, '\n') + + def get_file(self, key, from_=False): + """Return a file-like representation or raise a KeyError.""" + start, stop = self._lookup(key) + self._file.seek(start) + if not from_: + self._file.readline() + return _PartialFile(self._file, self._file.tell(), stop) + + def _install_message(self, message): + """Format a message and blindly write to self._file.""" + from_line = None + if isinstance(message, str) and message.startswith('From '): + newline = message.find('\n') + if newline != -1: + from_line = message[:newline] + message = message[newline + 1:] + else: + from_line = message + message = '' + elif isinstance(message, _mboxMMDFMessage): + from_line = 'From ' + message.get_from() + elif isinstance(message, email.Message.Message): + from_line = message.get_unixfrom() # May be None. + if from_line is None: + from_line = 'From MAILER-DAEMON %s' % time.asctime(time.gmtime()) + start = self._file.tell() + self._file.write(from_line + os.linesep) + self._dump_message(message, self._file, self._mangle_from_) + stop = self._file.tell() + return (start, stop) + + +class mbox(_mboxMMDF): + """A classic mbox mailbox.""" + + _mangle_from_ = True + + def __init__(self, path, factory=None, create=True): + """Initialize an mbox mailbox.""" + self._message_factory = mboxMessage + _mboxMMDF.__init__(self, path, factory, create) + + def _pre_message_hook(self, f): + """Called before writing each message to file f.""" + if f.tell() != 0: + f.write(os.linesep) + + def _generate_toc(self): + """Generate key-to-(start, stop) table of contents.""" + starts, stops = [], [] + self._file.seek(0) + while True: + line_pos = self._file.tell() + line = self._file.readline() + if line.startswith('From '): + if len(stops) < len(starts): + stops.append(line_pos - len(os.linesep)) + starts.append(line_pos) + elif line == '': + stops.append(line_pos) break - return self.factory(_Subfile(self.fp, start, stop)) + self._toc = dict(enumerate(zip(starts, stops))) + self._next_key = len(self._toc) -class _Subfile: +class MMDF(_mboxMMDF): + """An MMDF mailbox.""" - def __init__(self, fp, start, stop): - self.fp = fp - self.start = start - self.stop = stop - self.pos = self.start + def __init__(self, path, factory=None, create=True): + """Initialize an MMDF mailbox.""" + self._message_factory = MMDFMessage + _mboxMMDF.__init__(self, path, factory, create) + def _pre_message_hook(self, f): + """Called before writing each message to file f.""" + f.write('\001\001\001\001' + os.linesep) - def _read(self, length, read_function): - if self.pos >= self.stop: - return '' - remaining = self.stop - self.pos - if length is None or length < 0 or length > remaining: - length = remaining - self.fp.seek(self.pos) - data = read_function(length) - self.pos = self.fp.tell() - return data - - def read(self, length = None): - return self._read(length, self.fp.read) - - def readline(self, length = None): - return self._read(length, self.fp.readline) - - def readlines(self, sizehint = -1): - lines = [] - while 1: - line = self.readline() - if not line: + def _post_message_hook(self, f): + """Called after writing each message to 
file f.""" + f.write(os.linesep + '\001\001\001\001' + os.linesep) + + def _generate_toc(self): + """Generate key-to-(start, stop) table of contents.""" + starts, stops = [], [] + self._file.seek(0) + next_pos = 0 + while True: + line_pos = next_pos + line = self._file.readline() + next_pos = self._file.tell() + if line.startswith('\001\001\001\001' + os.linesep): + starts.append(next_pos) + while True: + line_pos = next_pos + line = self._file.readline() + next_pos = self._file.tell() + if line == '\001\001\001\001' + os.linesep: + stops.append(line_pos - len(os.linesep)) + break + elif line == '': + stops.append(line_pos) + break + elif line == '': + break + self._toc = dict(enumerate(zip(starts, stops))) + self._next_key = len(self._toc) + + +class MH(Mailbox): + """An MH mailbox.""" + + def __init__(self, path, factory=None, create=True): + """Initialize an MH instance.""" + Mailbox.__init__(self, path, factory, create) + if not os.path.exists(self._path): + if create: + os.mkdir(self._path, 0700) + os.close(os.open(os.path.join(self._path, '.mh_sequences'), + os.O_CREAT | os.O_EXCL | os.O_WRONLY, 0600)) + else: + raise NoSuchMailboxError(self._path) + self._locked = False + + def add(self, message): + """Add message and return assigned key.""" + keys = self.keys() + if len(keys) == 0: + new_key = 1 + else: + new_key = max(keys) + 1 + new_path = os.path.join(self._path, str(new_key)) + f = _create_carefully(new_path) + try: + if self._locked: + _lock_file(f) + try: + self._dump_message(message, f) + if isinstance(message, MHMessage): + self._dump_sequences(message, new_key) + finally: + if self._locked: + _unlock_file(f) + finally: + f.close() + return new_key + + def remove(self, key): + """Remove the keyed message; raise KeyError if it doesn't exist.""" + path = os.path.join(self._path, str(key)) + try: + f = open(path, 'rb+') + except IOError, e: + if e.errno == errno.ENOENT: + raise KeyError('No message with key: %s' % key) + else: + raise + try: + if self._locked: + _lock_file(f) + try: + f.close() + os.remove(os.path.join(self._path, str(key))) + finally: + if self._locked: + _unlock_file(f) + finally: + f.close() + + def __setitem__(self, key, message): + """Replace the keyed message; raise KeyError if it doesn't exist.""" + path = os.path.join(self._path, str(key)) + try: + f = open(path, 'rb+') + except IOError, e: + if e.errno == errno.ENOENT: + raise KeyError('No message with key: %s' % key) + else: + raise + try: + if self._locked: + _lock_file(f) + try: + os.close(os.open(path, os.O_WRONLY | os.O_TRUNC)) + self._dump_message(message, f) + if isinstance(message, MHMessage): + self._dump_sequences(message, key) + finally: + if self._locked: + _unlock_file(f) + finally: + f.close() + + def get_message(self, key): + """Return a Message representation or raise a KeyError.""" + try: + if self._locked: + f = open(os.path.join(self._path, str(key)), 'r+') + else: + f = open(os.path.join(self._path, str(key)), 'r') + except IOError, e: + if e.errno == errno.ENOENT: + raise KeyError('No message with key: %s' % key) + else: + raise + try: + if self._locked: + _lock_file(f) + try: + msg = MHMessage(f) + finally: + if self._locked: + _unlock_file(f) + finally: + f.close() + for name, key_list in self.get_sequences(): + if key in key_list: + msg.add_sequence(name) + return msg + + def get_string(self, key): + """Return a string representation or raise a KeyError.""" + try: + if self._locked: + f = open(os.path.join(self._path, str(key)), 'r+') + else: + f = open(os.path.join(self._path, 
str(key)), 'r') + except IOError, e: + if e.errno == errno.ENOENT: + raise KeyError('No message with key: %s' % key) + else: + raise + try: + if self._locked: + _lock_file(f) + try: + return f.read() + finally: + if self._locked: + _unlock_file(f) + finally: + f.close() + + def get_file(self, key): + """Return a file-like representation or raise a KeyError.""" + try: + f = open(os.path.join(self._path, str(key)), 'rb') + except IOError, e: + if e.errno == errno.ENOENT: + raise KeyError('No message with key: %s' % key) + else: + raise + return _ProxyFile(f) + + def iterkeys(self): + """Return an iterator over keys.""" + return iter(sorted(int(entry) for entry in os.listdir(self._path) + if entry.isdigit())) + + def has_key(self, key): + """Return True if the keyed message exists, False otherwise.""" + return os.path.exists(os.path.join(self._path, str(key))) + + def __len__(self): + """Return a count of messages in the mailbox.""" + return len(list(self.iterkeys())) + + def lock(self): + """Lock the mailbox.""" + if not self._locked: + self._file = open(os.path.join(self._path, '.mh_sequences'), 'rb+') + _lock_file(self._file) + self._locked = True + + def unlock(self): + """Unlock the mailbox if it is locked.""" + if self._locked: + _unlock_file(self._file) + self._file.close() + del self._file + self._locked = False + + def flush(self): + """Write any pending changes to the disk.""" + return + + def close(self): + """Flush and close the mailbox.""" + if self._locked: + self.unlock() + + def list_folders(self): + """Return a list of folder names.""" + result = [] + for entry in os.listdir(self._path): + if os.path.isdir(os.path.join(self._path, entry)): + result.append(entry) + return result + + def get_folder(self, folder): + """Return an MH instance for the named folder.""" + return MH(os.path.join(self._path, folder), create=False) + + def add_folder(self, folder): + """Create a folder and return an MH instance representing it.""" + return MH(os.path.join(self._path, folder)) + + def remove_folder(self, folder): + """Delete the named folder, which must be empty.""" + path = os.path.join(self._path, folder) + entries = os.listdir(path) + if entries == ['.mh_sequences']: + os.remove(os.path.join(path, '.mh_sequences')) + elif entries == []: + pass + else: + raise NotEmptyError('Folder not empty: %s' % self._path) + os.rmdir(path) + + def get_sequences(self): + """Return a name-to-key-list dictionary to define each sequence.""" + results = {} + f = open(os.path.join(self._path, '.mh_sequences'), 'r') + try: + all_keys = set(self.keys()) + for line in f: + try: + name, contents = line.split(':') + keys = set() + for spec in contents.split(): + if spec.isdigit(): + keys.add(int(spec)) + else: + start, stop = (int(x) for x in spec.split('-')) + keys.update(range(start, stop + 1)) + results[name] = [key for key in sorted(keys) \ + if key in all_keys] + if len(results[name]) == 0: + del results[name] + except ValueError: + raise FormatError('Invalid sequence specification: %s' % + line.rstrip()) + finally: + f.close() + return results + + def set_sequences(self, sequences): + """Set sequences using the given name-to-key-list dictionary.""" + f = open(os.path.join(self._path, '.mh_sequences'), 'r+') + try: + os.close(os.open(f.name, os.O_WRONLY | os.O_TRUNC)) + for name, keys in sequences.iteritems(): + if len(keys) == 0: + continue + f.write('%s:' % name) + prev = None + completing = False + for key in sorted(set(keys)): + if key - 1 == prev: + if not completing: + completing = True + 
f.write('-') + elif completing: + completing = False + f.write('%s %s' % (prev, key)) + else: + f.write(' %s' % key) + prev = key + if completing: + f.write(str(prev) + '\n') + else: + f.write('\n') + finally: + f.close() + + def pack(self): + """Re-name messages to eliminate numbering gaps. Invalidates keys.""" + sequences = self.get_sequences() + prev = 0 + changes = [] + for key in self.iterkeys(): + if key - 1 != prev: + changes.append((key, prev + 1)) + f = open(os.path.join(self._path, str(key)), 'r+') + try: + if self._locked: + _lock_file(f) + try: + if hasattr(os, 'link'): + os.link(os.path.join(self._path, str(key)), + os.path.join(self._path, str(prev + 1))) + os.unlink(os.path.join(self._path, str(key))) + else: + f.close() + os.rename(os.path.join(self._path, str(key)), + os.path.join(self._path, str(prev + 1))) + finally: + if self._locked: + _unlock_file(f) + finally: + f.close() + prev += 1 + self._next_key = prev + 1 + if len(changes) == 0: + return + for name, key_list in sequences.items(): + for old, new in changes: + if old in key_list: + key_list[key_list.index(old)] = new + self.set_sequences(sequences) + + def _dump_sequences(self, message, key): + """Inspect a new MHMessage and update sequences appropriately.""" + pending_sequences = message.get_sequences() + all_sequences = self.get_sequences() + for name, key_list in all_sequences.iteritems(): + if name in pending_sequences: + key_list.append(key) + elif key in key_list: + del key_list[key_list.index(key)] + for sequence in pending_sequences: + if sequence not in all_sequences: + all_sequences[sequence] = [key] + self.set_sequences(all_sequences) + + +class Babyl(_singlefileMailbox): + """An Rmail-style Babyl mailbox.""" + + _special_labels = frozenset(('unseen', 'deleted', 'filed', 'answered', + 'forwarded', 'edited', 'resent')) + + def __init__(self, path, factory=None, create=True): + """Initialize a Babyl mailbox.""" + _singlefileMailbox.__init__(self, path, factory, create) + self._labels = {} + + def add(self, message): + """Add message and return assigned key.""" + key = _singlefileMailbox.add(self, message) + if isinstance(message, BabylMessage): + self._labels[key] = message.get_labels() + return key + + def remove(self, key): + """Remove the keyed message; raise KeyError if it doesn't exist.""" + _singlefileMailbox.remove(self, key) + if key in self._labels: + del self._labels[key] + + def __setitem__(self, key, message): + """Replace the keyed message; raise KeyError if it doesn't exist.""" + _singlefileMailbox.__setitem__(self, key, message) + if isinstance(message, BabylMessage): + self._labels[key] = message.get_labels() + + def get_message(self, key): + """Return a Message representation or raise a KeyError.""" + start, stop = self._lookup(key) + self._file.seek(start) + self._file.readline() # Skip '1,' line specifying labels. 
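The run-length encoding in MH.set_sequences() above writes consecutive message keys as 'start-stop' spans (e.g. '1-3 7 9-10'), which get_sequences() expands again. A minimal stand-alone sketch of that encoding; the helper name _collapse_keys is illustrative only and not part of the patch:

    def _collapse_keys(keys):
        # Group sorted, de-duplicated keys into runs of consecutive integers.
        spans = []
        for key in sorted(set(keys)):
            if spans and key == spans[-1][1] + 1:
                spans[-1][1] = key            # extend the current run
            else:
                spans.append([key, key])      # start a new run
        # Single keys print as 'n', runs as 'start-stop', separated by spaces.
        return ' '.join(str(a) if a == b else '%s-%s' % (a, b)
                        for a, b in spans)

    # _collapse_keys([1, 2, 3, 7, 9, 10]) == '1-3 7 9-10'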
+ original_headers = StringIO.StringIO() + while True: + line = self._file.readline() + if line == '*** EOOH ***' + os.linesep or line == '': + break + original_headers.write(line.replace(os.linesep, '\n')) + visible_headers = StringIO.StringIO() + while True: + line = self._file.readline() + if line == os.linesep or line == '': + break + visible_headers.write(line.replace(os.linesep, '\n')) + body = self._file.read(stop - self._file.tell()).replace(os.linesep, + '\n') + msg = BabylMessage(original_headers.getvalue() + body) + msg.set_visible(visible_headers.getvalue()) + if key in self._labels: + msg.set_labels(self._labels[key]) + return msg + + def get_string(self, key): + """Return a string representation or raise a KeyError.""" + start, stop = self._lookup(key) + self._file.seek(start) + self._file.readline() # Skip '1,' line specifying labels. + original_headers = StringIO.StringIO() + while True: + line = self._file.readline() + if line == '*** EOOH ***' + os.linesep or line == '': break - lines.append(line) - if sizehint >= 0: - sizehint = sizehint - len(line) + original_headers.write(line.replace(os.linesep, '\n')) + while True: + line = self._file.readline() + if line == os.linesep or line == '': + break + return original_headers.getvalue() + \ + self._file.read(stop - self._file.tell()).replace(os.linesep, + '\n') + + def get_file(self, key): + """Return a file-like representation or raise a KeyError.""" + return StringIO.StringIO(self.get_string(key).replace('\n', + os.linesep)) + + def get_labels(self): + """Return a list of user-defined labels in the mailbox.""" + self._lookup() + labels = set() + for label_list in self._labels.values(): + labels.update(label_list) + labels.difference_update(self._special_labels) + return list(labels) + + def _generate_toc(self): + """Generate key-to-(start, stop) table of contents.""" + starts, stops = [], [] + self._file.seek(0) + next_pos = 0 + label_lists = [] + while True: + line_pos = next_pos + line = self._file.readline() + next_pos = self._file.tell() + if line == '\037\014' + os.linesep: + if len(stops) < len(starts): + stops.append(line_pos - len(os.linesep)) + starts.append(next_pos) + labels = [label.strip() for label + in self._file.readline()[1:].split(',') + if label.strip() != ''] + label_lists.append(labels) + elif line == '\037' or line == '\037' + os.linesep: + if len(stops) < len(starts): + stops.append(line_pos - len(os.linesep)) + elif line == '': + stops.append(line_pos - len(os.linesep)) + break + self._toc = dict(enumerate(zip(starts, stops))) + self._labels = dict(enumerate(label_lists)) + self._next_key = len(self._toc) + + def _pre_mailbox_hook(self, f): + """Called before writing the mailbox to file f.""" + f.write('BABYL OPTIONS:%sVersion: 5%sLabels:%s%s\037' % + (os.linesep, os.linesep, ','.join(self.get_labels()), + os.linesep)) + + def _pre_message_hook(self, f): + """Called before writing each message to file f.""" + f.write('\014' + os.linesep) + + def _post_message_hook(self, f): + """Called after writing each message to file f.""" + f.write(os.linesep + '\037') + + def _install_message(self, message): + """Write message contents and return (start, stop).""" + start = self._file.tell() + if isinstance(message, BabylMessage): + special_labels = [] + labels = [] + for label in message.get_labels(): + if label in self._special_labels: + special_labels.append(label) + else: + labels.append(label) + self._file.write('1') + for label in special_labels: + self._file.write(', ' + label) + self._file.write(',,') + 
for label in labels: + self._file.write(' ' + label + ',') + self._file.write(os.linesep) + else: + self._file.write('1,,' + os.linesep) + if isinstance(message, email.Message.Message): + orig_buffer = StringIO.StringIO() + orig_generator = email.Generator.Generator(orig_buffer, False, 0) + orig_generator.flatten(message) + orig_buffer.seek(0) + while True: + line = orig_buffer.readline() + self._file.write(line.replace('\n', os.linesep)) + if line == '\n' or line == '': + break + self._file.write('*** EOOH ***' + os.linesep) + if isinstance(message, BabylMessage): + vis_buffer = StringIO.StringIO() + vis_generator = email.Generator.Generator(vis_buffer, False, 0) + vis_generator.flatten(message.get_visible()) + while True: + line = vis_buffer.readline() + self._file.write(line.replace('\n', os.linesep)) + if line == '\n' or line == '': + break + else: + orig_buffer.seek(0) + while True: + line = orig_buffer.readline() + self._file.write(line.replace('\n', os.linesep)) + if line == '\n' or line == '': + break + while True: + buffer = orig_buffer.read(4096) # Buffer size is arbitrary. + if buffer == '': + break + self._file.write(buffer.replace('\n', os.linesep)) + elif isinstance(message, str): + body_start = message.find('\n\n') + 2 + if body_start - 2 != -1: + self._file.write(message[:body_start].replace('\n', + os.linesep)) + self._file.write('*** EOOH ***' + os.linesep) + self._file.write(message[:body_start].replace('\n', + os.linesep)) + self._file.write(message[body_start:].replace('\n', + os.linesep)) + else: + self._file.write('*** EOOH ***' + os.linesep + os.linesep) + self._file.write(message.replace('\n', os.linesep)) + elif hasattr(message, 'readline'): + original_pos = message.tell() + first_pass = True + while True: + line = message.readline() + self._file.write(line.replace('\n', os.linesep)) + if line == '\n' or line == '': + self._file.write('*** EOOH ***' + os.linesep) + if first_pass: + first_pass = False + message.seek(original_pos) + else: + break + while True: + buffer = message.read(4096) # Buffer size is arbitrary. + if buffer == '': + break + self._file.write(buffer.replace('\n', os.linesep)) + else: + raise TypeError('Invalid message type: %s' % type(message)) + stop = self._file.tell() + return (start, stop) + + +class Message(email.Message.Message): + """Message with mailbox-format-specific properties.""" + + def __init__(self, message=None): + """Initialize a Message instance.""" + if isinstance(message, email.Message.Message): + self._become_message(copy.deepcopy(message)) + if isinstance(message, Message): + message._explain_to(self) + elif isinstance(message, str): + self._become_message(email.message_from_string(message)) + elif hasattr(message, "read"): + self._become_message(email.message_from_file(message)) + elif message is None: + email.Message.Message.__init__(self) + else: + raise TypeError('Invalid message type: %s' % type(message)) + + def _become_message(self, message): + """Assume the non-format-specific state of message.""" + for name in ('_headers', '_unixfrom', '_payload', '_charset', + 'preamble', 'epilogue', 'defects', '_default_type'): + self.__dict__[name] = message.__dict__[name] + + def _explain_to(self, message): + """Copy format-specific state to message insofar as possible.""" + if isinstance(message, Message): + return # There's nothing format-specific to explain. 
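Because every format-specific message class implements _explain_to(), the Message constructor just defined can translate state between mailbox formats: passing an instance of one subclass to another subclass's constructor copies flags, sequences or labels through _explain_to(). A small usage sketch, assuming the patched mailbox module is importable; the expected output follows from the flag mappings in this patch:

    import mailbox

    m = mailbox.mboxMessage('From: a@example.com\n\nhello\n')
    m.set_flags('RA')                 # mbox/MMDF: R = read, A = answered
    d = mailbox.MaildirMessage(m)     # conversion runs through _explain_to()
    print d.get_flags()               # prints 'RS' (A -> 'R', R -> 'S')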
+ else: + raise TypeError('Cannot convert to specified type') + + +class MaildirMessage(Message): + """Message with Maildir-specific properties.""" + + def __init__(self, message=None): + """Initialize a MaildirMessage instance.""" + self._subdir = 'new' + self._info = '' + self._date = time.time() + Message.__init__(self, message) + + def get_subdir(self): + """Return 'new' or 'cur'.""" + return self._subdir + + def set_subdir(self, subdir): + """Set subdir to 'new' or 'cur'.""" + if subdir == 'new' or subdir == 'cur': + self._subdir = subdir + else: + raise ValueError("subdir must be 'new' or 'cur': %s" % subdir) + + def get_flags(self): + """Return as a string the flags that are set.""" + if self._info.startswith('2,'): + return self._info[2:] + else: + return '' + + def set_flags(self, flags): + """Set the given flags and unset all others.""" + self._info = '2,' + ''.join(sorted(flags)) + + def add_flag(self, flag): + """Set the given flag(s) without changing others.""" + self.set_flags(''.join(set(self.get_flags()) | set(flag))) + + def remove_flag(self, flag): + """Unset the given string flag(s) without changing others.""" + if self.get_flags() != '': + self.set_flags(''.join(set(self.get_flags()) - set(flag))) + + def get_date(self): + """Return delivery date of message, in seconds since the epoch.""" + return self._date + + def set_date(self, date): + """Set delivery date of message, in seconds since the epoch.""" + try: + self._date = float(date) + except ValueError: + raise TypeError("can't convert to float: %s" % date) + + def get_info(self): + """Get the message's "info" as a string.""" + return self._info + + def set_info(self, info): + """Set the message's "info" string.""" + if isinstance(info, str): + self._info = info + else: + raise TypeError('info must be a string: %s' % type(info)) + + def _explain_to(self, message): + """Copy Maildir-specific state to message insofar as possible.""" + if isinstance(message, MaildirMessage): + message.set_flags(self.get_flags()) + message.set_subdir(self.get_subdir()) + message.set_date(self.get_date()) + elif isinstance(message, _mboxMMDFMessage): + flags = set(self.get_flags()) + if 'S' in flags: + message.add_flag('R') + if self.get_subdir() == 'cur': + message.add_flag('O') + if 'T' in flags: + message.add_flag('D') + if 'F' in flags: + message.add_flag('F') + if 'R' in flags: + message.add_flag('A') + message.set_from('MAILER-DAEMON', time.gmtime(self.get_date())) + elif isinstance(message, MHMessage): + flags = set(self.get_flags()) + if 'S' not in flags: + message.add_sequence('unseen') + if 'R' in flags: + message.add_sequence('replied') + if 'F' in flags: + message.add_sequence('flagged') + elif isinstance(message, BabylMessage): + flags = set(self.get_flags()) + if 'S' not in flags: + message.add_label('unseen') + if 'T' in flags: + message.add_label('deleted') + if 'R' in flags: + message.add_label('answered') + if 'P' in flags: + message.add_label('forwarded') + elif isinstance(message, Message): + pass + else: + raise TypeError('Cannot convert to specified type: %s' % + type(message)) + + +class _mboxMMDFMessage(Message): + """Message with mbox- or MMDF-specific properties.""" + + def __init__(self, message=None): + """Initialize an mboxMMDFMessage instance.""" + self.set_from('MAILER-DAEMON', True) + if isinstance(message, email.Message.Message): + unixfrom = message.get_unixfrom() + if unixfrom is not None and unixfrom.startswith('From '): + self.set_from(unixfrom[5:]) + Message.__init__(self, message) + + def 
get_from(self): + """Return contents of "From " line.""" + return self._from + + def set_from(self, from_, time_=None): + """Set "From " line, formatting and appending time_ if specified.""" + if time_ is not None: + if time_ is True: + time_ = time.gmtime() + from_ += ' ' + time.asctime(time_) + self._from = from_ + + def get_flags(self): + """Return as a string the flags that are set.""" + return self.get('Status', '') + self.get('X-Status', '') + + def set_flags(self, flags): + """Set the given flags and unset all others.""" + flags = set(flags) + status_flags, xstatus_flags = '', '' + for flag in ('R', 'O'): + if flag in flags: + status_flags += flag + flags.remove(flag) + for flag in ('D', 'F', 'A'): + if flag in flags: + xstatus_flags += flag + flags.remove(flag) + xstatus_flags += ''.join(sorted(flags)) + try: + self.replace_header('Status', status_flags) + except KeyError: + self.add_header('Status', status_flags) + try: + self.replace_header('X-Status', xstatus_flags) + except KeyError: + self.add_header('X-Status', xstatus_flags) + + def add_flag(self, flag): + """Set the given flag(s) without changing others.""" + self.set_flags(''.join(set(self.get_flags()) | set(flag))) + + def remove_flag(self, flag): + """Unset the given string flag(s) without changing others.""" + if 'Status' in self or 'X-Status' in self: + self.set_flags(''.join(set(self.get_flags()) - set(flag))) + + def _explain_to(self, message): + """Copy mbox- or MMDF-specific state to message insofar as possible.""" + if isinstance(message, MaildirMessage): + flags = set(self.get_flags()) + if 'O' in flags: + message.set_subdir('cur') + if 'F' in flags: + message.add_flag('F') + if 'A' in flags: + message.add_flag('R') + if 'R' in flags: + message.add_flag('S') + if 'D' in flags: + message.add_flag('T') + del message['status'] + del message['x-status'] + maybe_date = ' '.join(self.get_from().split()[-5:]) + try: + message.set_date(calendar.timegm(time.strptime(maybe_date, + '%a %b %d %H:%M:%S %Y'))) + except (ValueError, OverflowError): + pass + elif isinstance(message, _mboxMMDFMessage): + message.set_flags(self.get_flags()) + message.set_from(self.get_from()) + elif isinstance(message, MHMessage): + flags = set(self.get_flags()) + if 'R' not in flags: + message.add_sequence('unseen') + if 'A' in flags: + message.add_sequence('replied') + if 'F' in flags: + message.add_sequence('flagged') + del message['status'] + del message['x-status'] + elif isinstance(message, BabylMessage): + flags = set(self.get_flags()) + if 'R' not in flags: + message.add_label('unseen') + if 'D' in flags: + message.add_label('deleted') + if 'A' in flags: + message.add_label('answered') + del message['status'] + del message['x-status'] + elif isinstance(message, Message): + pass + else: + raise TypeError('Cannot convert to specified type: %s' % + type(message)) + + +class mboxMessage(_mboxMMDFMessage): + """Message with mbox-specific properties.""" + + +class MHMessage(Message): + """Message with MH-specific properties.""" + + def __init__(self, message=None): + """Initialize an MHMessage instance.""" + self._sequences = [] + Message.__init__(self, message) + + def get_sequences(self): + """Return a list of sequences that include the message.""" + return self._sequences[:] + + def set_sequences(self, sequences): + """Set the list of sequences that include the message.""" + self._sequences = list(sequences) + + def add_sequence(self, sequence): + """Add sequence to list of sequences including the message.""" + if isinstance(sequence, str): + 
if not sequence in self._sequences: + self._sequences.append(sequence) + else: + raise TypeError('sequence must be a string: %s' % type(sequence)) + + def remove_sequence(self, sequence): + """Remove sequence from the list of sequences including the message.""" + try: + self._sequences.remove(sequence) + except ValueError: + pass + + def _explain_to(self, message): + """Copy MH-specific state to message insofar as possible.""" + if isinstance(message, MaildirMessage): + sequences = set(self.get_sequences()) + if 'unseen' in sequences: + message.set_subdir('cur') + else: + message.set_subdir('cur') + message.add_flag('S') + if 'flagged' in sequences: + message.add_flag('F') + if 'replied' in sequences: + message.add_flag('R') + elif isinstance(message, _mboxMMDFMessage): + sequences = set(self.get_sequences()) + if 'unseen' not in sequences: + message.add_flag('RO') + else: + message.add_flag('O') + if 'flagged' in sequences: + message.add_flag('F') + if 'replied' in sequences: + message.add_flag('A') + elif isinstance(message, MHMessage): + for sequence in self.get_sequences(): + message.add_sequence(sequence) + elif isinstance(message, BabylMessage): + sequences = set(self.get_sequences()) + if 'unseen' in sequences: + message.add_label('unseen') + if 'replied' in sequences: + message.add_label('answered') + elif isinstance(message, Message): + pass + else: + raise TypeError('Cannot convert to specified type: %s' % + type(message)) + + +class BabylMessage(Message): + """Message with Babyl-specific properties.""" + + def __init__(self, message=None): + """Initialize an BabylMessage instance.""" + self._labels = [] + self._visible = Message() + Message.__init__(self, message) + + def get_labels(self): + """Return a list of labels on the message.""" + return self._labels[:] + + def set_labels(self, labels): + """Set the list of labels on the message.""" + self._labels = list(labels) + + def add_label(self, label): + """Add label to list of labels on the message.""" + if isinstance(label, str): + if label not in self._labels: + self._labels.append(label) + else: + raise TypeError('label must be a string: %s' % type(label)) + + def remove_label(self, label): + """Remove label from the list of labels on the message.""" + try: + self._labels.remove(label) + except ValueError: + pass + + def get_visible(self): + """Return a Message representation of visible headers.""" + return Message(self._visible) + + def set_visible(self, visible): + """Set the Message representation of visible headers.""" + self._visible = Message(visible) + + def update_visible(self): + """Update and/or sensibly generate a set of visible headers.""" + for header in self._visible.keys(): + if header in self: + self._visible.replace_header(header, self[header]) + else: + del self._visible[header] + for header in ('Date', 'From', 'Reply-To', 'To', 'CC', 'Subject'): + if header in self and header not in self._visible: + self._visible[header] = self[header] + + def _explain_to(self, message): + """Copy Babyl-specific state to message insofar as possible.""" + if isinstance(message, MaildirMessage): + labels = set(self.get_labels()) + if 'unseen' in labels: + message.set_subdir('cur') + else: + message.set_subdir('cur') + message.add_flag('S') + if 'forwarded' in labels or 'resent' in labels: + message.add_flag('P') + if 'answered' in labels: + message.add_flag('R') + if 'deleted' in labels: + message.add_flag('T') + elif isinstance(message, _mboxMMDFMessage): + labels = set(self.get_labels()) + if 'unseen' not in labels: + 
message.add_flag('RO') + else: + message.add_flag('O') + if 'deleted' in labels: + message.add_flag('D') + if 'answered' in labels: + message.add_flag('A') + elif isinstance(message, MHMessage): + labels = set(self.get_labels()) + if 'unseen' in labels: + message.add_sequence('unseen') + if 'answered' in labels: + message.add_sequence('replied') + elif isinstance(message, BabylMessage): + message.set_visible(self.get_visible()) + for label in self.get_labels(): + message.add_label(label) + elif isinstance(message, Message): + pass + else: + raise TypeError('Cannot convert to specified type: %s' % + type(message)) + + +class MMDFMessage(_mboxMMDFMessage): + """Message with MMDF-specific properties.""" + + +class _ProxyFile: + """A read-only wrapper of a file.""" + + def __init__(self, f, pos=None): + """Initialize a _ProxyFile.""" + self._file = f + if pos is None: + self._pos = f.tell() + else: + self._pos = pos + + def read(self, size=None): + """Read bytes.""" + return self._read(size, self._file.read) + + def readline(self, size=None): + """Read a line.""" + return self._read(size, self._file.readline) + + def readlines(self, sizehint=None): + """Read multiple lines.""" + result = [] + for line in self: + result.append(line) + if sizehint is not None: + sizehint -= len(line) if sizehint <= 0: break - return lines + return result + + def __iter__(self): + """Iterate over lines.""" + return iter(self.readline, "") def tell(self): - return self.pos - self.start + """Return the position.""" + return self._pos + + def seek(self, offset, whence=0): + """Change position.""" + if whence == 1: + self._file.seek(self._pos) + self._file.seek(offset, whence) + self._pos = self._file.tell() - def seek(self, pos, whence=0): + def close(self): + """Close the file.""" + del self._file + + def _read(self, size, read_method): + """Read size bytes using read_method.""" + if size is None: + size = -1 + self._file.seek(self._pos) + result = read_method(size) + self._pos = self._file.tell() + return result + + +class _PartialFile(_ProxyFile): + """A read-only wrapper of part of a file.""" + + def __init__(self, f, start=None, stop=None): + """Initialize a _PartialFile.""" + _ProxyFile.__init__(self, f, start) + self._start = start + self._stop = stop + + def tell(self): + """Return the position with respect to start.""" + return _ProxyFile.tell(self) - self._start + + def seek(self, offset, whence=0): + """Change position, possibly with respect to start or stop.""" if whence == 0: - self.pos = self.start + pos - elif whence == 1: - self.pos = self.pos + pos + self._pos = self._start + whence = 1 elif whence == 2: - self.pos = self.stop + pos + self._pos = self._stop + whence = 1 + _ProxyFile.seek(self, offset, whence) - def close(self): - del self.fp + def _read(self, size, read_method): + """Read size bytes using read_method, honoring start and stop.""" + remaining = self._stop - self._pos + if remaining <= 0: + return '' + if size is None or size < 0 or size > remaining: + size = remaining + return _ProxyFile._read(self, size, read_method) + + +def _lock_file(f, dotlock=True): + """Lock file f using lockf, flock, and dot locking.""" + dotlock_done = False + try: + if fcntl: + try: + fcntl.lockf(f, fcntl.LOCK_EX | fcntl.LOCK_NB) + except IOError, e: + if e.errno == errno.EAGAIN: + raise ExternalClashError('lockf: lock unavailable: %s' % + f.name) + else: + raise + try: + fcntl.flock(f, fcntl.LOCK_EX | fcntl.LOCK_NB) + except IOError, e: + if e.errno == errno.EWOULDBLOCK: + raise ExternalClashError('flock: 
lock unavailable: %s' % + f.name) + else: + raise + if dotlock: + try: + pre_lock = _create_temporary(f.name + '.lock') + pre_lock.close() + except IOError, e: + if e.errno == errno.EACCES: + return # Without write access, just skip dotlocking. + else: + raise + try: + if hasattr(os, 'link'): + os.link(pre_lock.name, f.name + '.lock') + dotlock_done = True + os.unlink(pre_lock.name) + else: + os.rename(pre_lock.name, f.name + '.lock') + dotlock_done = True + except OSError, e: + if e.errno == errno.EEXIST: + os.remove(pre_lock.name) + raise ExternalClashError('dot lock unavailable: %s' % + f.name) + else: + raise + except: + if fcntl: + fcntl.lockf(f, fcntl.LOCK_UN) + fcntl.flock(f, fcntl.LOCK_UN) + if dotlock_done: + os.remove(f.name + '.lock') + raise + +def _unlock_file(f): + """Unlock file f using lockf, flock, and dot locking.""" + if fcntl: + fcntl.lockf(f, fcntl.LOCK_UN) + fcntl.flock(f, fcntl.LOCK_UN) + if os.path.exists(f.name + '.lock'): + os.remove(f.name + '.lock') + +def _create_carefully(path): + """Create a file if it doesn't exist and open for reading and writing.""" + fd = os.open(path, os.O_CREAT | os.O_EXCL | os.O_RDWR) + try: + return open(path, 'rb+') + finally: + os.close(fd) + +def _create_temporary(path): + """Create a temp file based on path and open for reading and writing.""" + return _create_carefully('%s.%s.%s.%s' % (path, int(time.time()), + socket.gethostname(), + os.getpid())) + + +## Start: classes from the original module (for backward compatibility). + +# Note that the Maildir class, whose name is unchanged, itself offers a next() +# method for backward compatibility. + +class _Mailbox: + + def __init__(self, fp, factory=rfc822.Message): + self.fp = fp + self.seekp = 0 + self.factory = factory + def __iter__(self): + return iter(self.next, None) + + def next(self): + while 1: + self.fp.seek(self.seekp) + try: + self._search_start() + except EOFError: + self.seekp = self.fp.tell() + return None + start = self.fp.tell() + self._search_end() + self.seekp = stop = self.fp.tell() + if start != stop: + break + return self.factory(_PartialFile(self.fp, start, stop)) # Recommended to use PortableUnixMailbox instead! class UnixMailbox(_Mailbox): @@ -213,36 +2027,6 @@ class MHMailbox: return msg -class Maildir: - # Qmail directory mailbox - - def __init__(self, dirname, factory=rfc822.Message): - self.dirname = dirname - self.factory = factory - - # check for new mail - newdir = os.path.join(self.dirname, 'new') - boxes = [os.path.join(newdir, f) - for f in os.listdir(newdir) if f[0] != '.'] - - # Now check for current mail in this maildir - curdir = os.path.join(self.dirname, 'cur') - boxes += [os.path.join(curdir, f) - for f in os.listdir(curdir) if f[0] != '.'] - boxes.reverse() - self.boxes = boxes - - def __iter__(self): - return iter(self.next, None) - - def next(self): - if not self.boxes: - return None - fn = self.boxes.pop() - fp = open(fn) - return self.factory(fp) - - class BabylMailbox(_Mailbox): def _search_start(self): @@ -263,59 +2047,20 @@ class BabylMailbox(_Mailbox): self.fp.seek(pos) return +## End: classes from the original module (for backward compatibility). -def _test(): - import sys - args = sys.argv[1:] - if not args: - for key in 'MAILDIR', 'MAIL', 'LOGNAME', 'USER': - if key in os.environ: - mbox = os.environ[key] - break - else: - print "$MAIL, $LOGNAME nor $USER set -- who are you?" 
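The _lock_file()/_unlock_file() pair above layers three mechanisms: fcntl.lockf(), fcntl.flock(), and "dot locking", where a companion file named <mailbox>.lock is claimed atomically by creating a uniquely named temporary file and hard-linking it into place. A stripped-down sketch of just the dot-lock step, assuming a POSIX filesystem with os.link(); the function names here are illustrative, not part of the patch:

    import errno, os, socket, time

    def acquire_dotlock(path):
        # Stage a unique temporary file, then try to link it to path + '.lock';
        # os.link() fails with EEXIST if another process already holds the lock.
        unique = '%s.%s.%s.%s' % (path, int(time.time()),
                                  socket.gethostname(), os.getpid())
        os.close(os.open(unique, os.O_CREAT | os.O_EXCL | os.O_WRONLY))
        try:
            os.link(unique, path + '.lock')
        except OSError, e:
            if e.errno == errno.EEXIST:
                return False              # somebody else holds the lock
            raise
        finally:
            os.unlink(unique)             # the staging file is no longer needed
        return True

    def release_dotlock(path):
        os.remove(path + '.lock')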
- return - else: - mbox = args[0] - if mbox[:1] == '+': - mbox = os.environ['HOME'] + '/Mail/' + mbox[1:] - elif not '/' in mbox: - if os.path.isfile('/var/mail/' + mbox): - mbox = '/var/mail/' + mbox - else: - mbox = '/usr/mail/' + mbox - if os.path.isdir(mbox): - if os.path.isdir(os.path.join(mbox, 'cur')): - mb = Maildir(mbox) - else: - mb = MHMailbox(mbox) - else: - fp = open(mbox, 'r') - mb = PortableUnixMailbox(fp) - - msgs = [] - while 1: - msg = mb.next() - if msg is None: - break - msgs.append(msg) - if len(args) <= 1: - msg.fp = None - if len(args) > 1: - num = int(args[1]) - print 'Message %d body:'%num - msg = msgs[num-1] - msg.rewindbody() - sys.stdout.write(msg.fp.read()) - else: - print 'Mailbox',mbox,'has',len(msgs),'messages:' - for msg in msgs: - f = msg.getheader('from') or "" - s = msg.getheader('subject') or "" - d = msg.getheader('date') or "" - print '-%20.20s %20.20s %-30.30s'%(f, d[5:], s) - - -if __name__ == '__main__': - _test() +class Error(Exception): + """Raised for module-specific errors.""" + +class NoSuchMailboxError(Error): + """The specified mailbox does not exist and won't be created.""" + +class NotEmptyError(Error): + """The specified mailbox is not empty and deletion was requested.""" + +class ExternalClashError(Error): + """Another process caused an action to fail.""" + +class FormatError(Error): + """A file appears to have an invalid format.""" diff --git a/Lib/msilib/__init__.py b/Lib/msilib/__init__.py index d847259..0881409 100644 --- a/Lib/msilib/__init__.py +++ b/Lib/msilib/__init__.py @@ -196,11 +196,9 @@ class CAB: self.filenames.add(logical) return logical - def append(self, full, file, logical = None): + def append(self, full, logical): if os.path.isdir(full): return - if not logical: - logical = self.gen_id(dir, file) self.index += 1 self.files.append((full, logical)) return self.index, logical @@ -330,7 +328,7 @@ class Directory: logical = self.keyfiles[file] else: logical = None - sequence, logical = self.cab.append(absolute, file, logical) + sequence, logical = self.cab.append(absolute, logical) assert logical not in self.ids self.ids.add(logical) short = self.make_short(file) @@ -400,13 +398,14 @@ class Control: self.dlg = dlg self.name = name - def event(self, ev, arg, cond = "1", order = None): + def event(self, event, argument, condition = "1", ordering = None): add_data(self.dlg.db, "ControlEvent", - [(self.dlg.name, self.name, ev, arg, cond, order)]) + [(self.dlg.name, self.name, event, argument, + condition, ordering)]) - def mapping(self, ev, attr): + def mapping(self, mapping, attribute): add_data(self.dlg.db, "EventMapping", - [(self.dlg.name, self.name, ev, attr)]) + [(self.dlg.name, self.name, event, attribute)]) def condition(self, action, condition): add_data(self.dlg.db, "ControlCondition", diff --git a/Lib/msilib/text.py b/Lib/msilib/text.py new file mode 100644 index 0000000..12fd2d7 --- /dev/null +++ b/Lib/msilib/text.py @@ -0,0 +1,129 @@ +import msilib,os;dirname=os.path.dirname(__file__) + +ActionText = [ +(u'InstallValidate', u'Validating install', None), +(u'InstallFiles', u'Copying new files', u'File: [1], Directory: [9], Size: [6]'), +(u'InstallAdminPackage', u'Copying network install files', u'File: [1], Directory: [9], Size: [6]'), +(u'FileCost', u'Computing space requirements', None), +(u'CostInitialize', u'Computing space requirements', None), +(u'CostFinalize', u'Computing space requirements', None), +(u'CreateShortcuts', u'Creating shortcuts', u'Shortcut: [1]'), +(u'PublishComponents', u'Publishing 
Qualified Components', u'Component ID: [1], Qualifier: [2]'), +(u'PublishFeatures', u'Publishing Product Features', u'Feature: [1]'), +(u'PublishProduct', u'Publishing product information', None), +(u'RegisterClassInfo', u'Registering Class servers', u'Class Id: [1]'), +(u'RegisterExtensionInfo', u'Registering extension servers', u'Extension: [1]'), +(u'RegisterMIMEInfo', u'Registering MIME info', u'MIME Content Type: [1], Extension: [2]'), +(u'RegisterProgIdInfo', u'Registering program identifiers', u'ProgId: [1]'), +(u'AllocateRegistrySpace', u'Allocating registry space', u'Free space: [1]'), +(u'AppSearch', u'Searching for installed applications', u'Property: [1], Signature: [2]'), +(u'BindImage', u'Binding executables', u'File: [1]'), +(u'CCPSearch', u'Searching for qualifying products', None), +(u'CreateFolders', u'Creating folders', u'Folder: [1]'), +(u'DeleteServices', u'Deleting services', u'Service: [1]'), +(u'DuplicateFiles', u'Creating duplicate files', u'File: [1], Directory: [9], Size: [6]'), +(u'FindRelatedProducts', u'Searching for related applications', u'Found application: [1]'), +(u'InstallODBC', u'Installing ODBC components', None), +(u'InstallServices', u'Installing new services', u'Service: [2]'), +(u'LaunchConditions', u'Evaluating launch conditions', None), +(u'MigrateFeatureStates', u'Migrating feature states from related applications', u'Application: [1]'), +(u'MoveFiles', u'Moving files', u'File: [1], Directory: [9], Size: [6]'), +(u'PatchFiles', u'Patching files', u'File: [1], Directory: [2], Size: [3]'), +(u'ProcessComponents', u'Updating component registration', None), +(u'RegisterComPlus', u'Registering COM+ Applications and Components', u'AppId: [1]{{, AppType: [2], Users: [3], RSN: [4]}}'), +(u'RegisterFonts', u'Registering fonts', u'Font: [1]'), +(u'RegisterProduct', u'Registering product', u'[1]'), +(u'RegisterTypeLibraries', u'Registering type libraries', u'LibID: [1]'), +(u'RegisterUser', u'Registering user', u'[1]'), +(u'RemoveDuplicateFiles', u'Removing duplicated files', u'File: [1], Directory: [9]'), +(u'RemoveEnvironmentStrings', u'Updating environment strings', u'Name: [1], Value: [2], Action [3]'), +(u'RemoveExistingProducts', u'Removing applications', u'Application: [1], Command line: [2]'), +(u'RemoveFiles', u'Removing files', u'File: [1], Directory: [9]'), +(u'RemoveFolders', u'Removing folders', u'Folder: [1]'), +(u'RemoveIniValues', u'Removing INI files entries', u'File: [1], Section: [2], Key: [3], Value: [4]'), +(u'RemoveODBC', u'Removing ODBC components', None), +(u'RemoveRegistryValues', u'Removing system registry values', u'Key: [1], Name: [2]'), +(u'RemoveShortcuts', u'Removing shortcuts', u'Shortcut: [1]'), +(u'RMCCPSearch', u'Searching for qualifying products', None), +(u'SelfRegModules', u'Registering modules', u'File: [1], Folder: [2]'), +(u'SelfUnregModules', u'Unregistering modules', u'File: [1], Folder: [2]'), +(u'SetODBCFolders', u'Initializing ODBC directories', None), +(u'StartServices', u'Starting services', u'Service: [1]'), +(u'StopServices', u'Stopping services', u'Service: [1]'), +(u'UnpublishComponents', u'Unpublishing Qualified Components', u'Component ID: [1], Qualifier: [2]'), +(u'UnpublishFeatures', u'Unpublishing Product Features', u'Feature: [1]'), +(u'UnregisterClassInfo', u'Unregister Class servers', u'Class Id: [1]'), +(u'UnregisterComPlus', u'Unregistering COM+ Applications and Components', u'AppId: [1]{{, AppType: [2]}}'), +(u'UnregisterExtensionInfo', u'Unregistering extension servers', u'Extension: [1]'), 
+(u'UnregisterFonts', u'Unregistering fonts', u'Font: [1]'), +(u'UnregisterMIMEInfo', u'Unregistering MIME info', u'MIME Content Type: [1], Extension: [2]'), +(u'UnregisterProgIdInfo', u'Unregistering program identifiers', u'ProgId: [1]'), +(u'UnregisterTypeLibraries', u'Unregistering type libraries', u'LibID: [1]'), +(u'WriteEnvironmentStrings', u'Updating environment strings', u'Name: [1], Value: [2], Action [3]'), +(u'WriteIniValues', u'Writing INI files values', u'File: [1], Section: [2], Key: [3], Value: [4]'), +(u'WriteRegistryValues', u'Writing system registry values', u'Key: [1], Name: [2], Value: [3]'), +(u'Advertise', u'Advertising application', None), +(u'GenerateScript', u'Generating script operations for action:', u'[1]'), +(u'InstallSFPCatalogFile', u'Installing system catalog', u'File: [1], Dependencies: [2]'), +(u'MsiPublishAssemblies', u'Publishing assembly information', u'Application Context:[1], Assembly Name:[2]'), +(u'MsiUnpublishAssemblies', u'Unpublishing assembly information', u'Application Context:[1], Assembly Name:[2]'), +(u'Rollback', u'Rolling back action:', u'[1]'), +(u'RollbackCleanup', u'Removing backup files', u'File: [1]'), +(u'UnmoveFiles', u'Removing moved files', u'File: [1], Directory: [9]'), +(u'UnpublishProduct', u'Unpublishing product information', None), +] + +UIText = [ +(u'AbsentPath', None), +(u'bytes', u'bytes'), +(u'GB', u'GB'), +(u'KB', u'KB'), +(u'MB', u'MB'), +(u'MenuAbsent', u'Entire feature will be unavailable'), +(u'MenuAdvertise', u'Feature will be installed when required'), +(u'MenuAllCD', u'Entire feature will be installed to run from CD'), +(u'MenuAllLocal', u'Entire feature will be installed on local hard drive'), +(u'MenuAllNetwork', u'Entire feature will be installed to run from network'), +(u'MenuCD', u'Will be installed to run from CD'), +(u'MenuLocal', u'Will be installed on local hard drive'), +(u'MenuNetwork', u'Will be installed to run from network'), +(u'ScriptInProgress', u'Gathering required information...'), +(u'SelAbsentAbsent', u'This feature will remain uninstalled'), +(u'SelAbsentAdvertise', u'This feature will be set to be installed when required'), +(u'SelAbsentCD', u'This feature will be installed to run from CD'), +(u'SelAbsentLocal', u'This feature will be installed on the local hard drive'), +(u'SelAbsentNetwork', u'This feature will be installed to run from the network'), +(u'SelAdvertiseAbsent', u'This feature will become unavailable'), +(u'SelAdvertiseAdvertise', u'Will be installed when required'), +(u'SelAdvertiseCD', u'This feature will be available to run from CD'), +(u'SelAdvertiseLocal', u'This feature will be installed on your local hard drive'), +(u'SelAdvertiseNetwork', u'This feature will be available to run from the network'), +(u'SelCDAbsent', u"This feature will be uninstalled completely, you won't be able to run it from CD"), +(u'SelCDAdvertise', u'This feature will change from run from CD state to set to be installed when required'), +(u'SelCDCD', u'This feature will remain to be run from CD'), +(u'SelCDLocal', u'This feature will change from run from CD state to be installed on the local hard drive'), +(u'SelChildCostNeg', u'This feature frees up [1] on your hard drive.'), +(u'SelChildCostPos', u'This feature requires [1] on your hard drive.'), +(u'SelCostPending', u'Compiling cost for this feature...'), +(u'SelLocalAbsent', u'This feature will be completely removed'), +(u'SelLocalAdvertise', u'This feature will be removed from your local hard drive, but will be set to be installed when 
required'), +(u'SelLocalCD', u'This feature will be removed from your local hard drive, but will be still available to run from CD'), +(u'SelLocalLocal', u'This feature will remain on you local hard drive'), +(u'SelLocalNetwork', u'This feature will be removed from your local hard drive, but will be still available to run from the network'), +(u'SelNetworkAbsent', u"This feature will be uninstalled completely, you won't be able to run it from the network"), +(u'SelNetworkAdvertise', u'This feature will change from run from network state to set to be installed when required'), +(u'SelNetworkLocal', u'This feature will change from run from network state to be installed on the local hard drive'), +(u'SelNetworkNetwork', u'This feature will remain to be run from the network'), +(u'SelParentCostNegNeg', u'This feature frees up [1] on your hard drive. It has [2] of [3] subfeatures selected. The subfeatures free up [4] on your hard drive.'), +(u'SelParentCostNegPos', u'This feature frees up [1] on your hard drive. It has [2] of [3] subfeatures selected. The subfeatures require [4] on your hard drive.'), +(u'SelParentCostPosNeg', u'This feature requires [1] on your hard drive. It has [2] of [3] subfeatures selected. The subfeatures free up [4] on your hard drive.'), +(u'SelParentCostPosPos', u'This feature requires [1] on your hard drive. It has [2] of [3] subfeatures selected. The subfeatures require [4] on your hard drive.'), +(u'TimeRemaining', u'Time remaining: {[1] minutes }{[2] seconds}'), +(u'VolumeCostAvailable', u'Available'), +(u'VolumeCostDifference', u'Difference'), +(u'VolumeCostRequired', u'Required'), +(u'VolumeCostSize', u'Disk Size'), +(u'VolumeCostVolume', u'Volume'), +] + +tables=['ActionText', 'UIText'] diff --git a/Lib/msilib/uisample.py b/Lib/msilib/uisample.py deleted file mode 100644 index d66c3cd..0000000 --- a/Lib/msilib/uisample.py +++ /dev/null @@ -1,1399 +0,0 @@ -import msilib,os;dirname=os.path.dirname(__file__) -AdminExecuteSequence = [ -(u'InstallValidate', None, 1400), -(u'InstallInitialize', None, 1500), -(u'InstallFinalize', None, 6600), -(u'InstallFiles', None, 4000), -(u'InstallAdminPackage', None, 3900), -(u'FileCost', None, 900), -(u'CostInitialize', None, 800), -(u'CostFinalize', None, 1000), -] - -AdminUISequence = [ -(u'AdminWelcomeDlg', None, 1230), -(u'FileCost', None, 900), -(u'CostInitialize', None, 800), -(u'CostFinalize', None, 1000), -(u'ExecuteAction', None, 1300), -(u'ExitDialog', None, -1), -(u'FatalError', None, -3), -(u'PrepareDlg', None, 140), -(u'ProgressDlg', None, 1280), -(u'UserExit', None, -2), -] - -AdvtExecuteSequence = [ -(u'InstallValidate', None, 1400), -(u'InstallInitialize', None, 1500), -(u'InstallFinalize', None, 6600), -(u'CostInitialize', None, 800), -(u'CostFinalize', None, 1000), -(u'CreateShortcuts', None, 4500), -(u'PublishComponents', None, 6200), -(u'PublishFeatures', None, 6300), -(u'PublishProduct', None, 6400), -(u'RegisterClassInfo', None, 4600), -(u'RegisterExtensionInfo', None, 4700), -(u'RegisterMIMEInfo', None, 4900), -(u'RegisterProgIdInfo', None, 4800), -] - -BBControl = [ -] - -Billboard = [ -] - -Binary = [ -(u'bannrbmp', msilib.Binary(os.path.join(dirname,"bannrbmp.bin"))), -(u'completi', msilib.Binary(os.path.join(dirname,"completi.bin"))), -(u'custicon', msilib.Binary(os.path.join(dirname,"custicon.bin"))), -(u'dlgbmp', msilib.Binary(os.path.join(dirname,"dlgbmp.bin"))), -(u'exclamic', msilib.Binary(os.path.join(dirname,"exclamic.bin"))), -(u'info', msilib.Binary(os.path.join(dirname,"info.bin"))), 
-(u'insticon', msilib.Binary(os.path.join(dirname,"insticon.bin"))), -(u'New', msilib.Binary(os.path.join(dirname,"New.bin"))), -(u'removico', msilib.Binary(os.path.join(dirname,"removico.bin"))), -(u'repairic', msilib.Binary(os.path.join(dirname,"repairic.bin"))), -(u'Up', msilib.Binary(os.path.join(dirname,"Up.bin"))), -] - -CheckBox = [ -] - -Property = [ -(u'BannerBitmap', u'bannrbmp'), -(u'IAgree', u'No'), -(u'ProductID', u'none'), -(u'ARPHELPLINK', u'http://www.microsoft.com/management'), -(u'ButtonText_Back', u'< &Back'), -(u'ButtonText_Browse', u'Br&owse'), -(u'ButtonText_Cancel', u'Cancel'), -(u'ButtonText_Exit', u'&Exit'), -(u'ButtonText_Finish', u'&Finish'), -(u'ButtonText_Ignore', u'&Ignore'), -(u'ButtonText_Install', u'&Install'), -(u'ButtonText_Next', u'&Next >'), -(u'ButtonText_No', u'&No'), -(u'ButtonText_OK', u'OK'), -(u'ButtonText_Remove', u'&Remove'), -(u'ButtonText_Repair', u'&Repair'), -(u'ButtonText_Reset', u'&Reset'), -(u'ButtonText_Resume', u'&Resume'), -(u'ButtonText_Retry', u'&Retry'), -(u'ButtonText_Return', u'&Return'), -(u'ButtonText_Yes', u'&Yes'), -(u'CompleteSetupIcon', u'completi'), -(u'ComponentDownload', u'ftp://anonymous@microsoft.com/components/'), -(u'CustomSetupIcon', u'custicon'), -(u'DefaultUIFont', u'DlgFont8'), -(u'DialogBitmap', u'dlgbmp'), -(u'DlgTitleFont', u'{&DlgFontBold8}'), -(u'ErrorDialog', u'ErrorDlg'), -(u'ExclamationIcon', u'exclamic'), -(u'InfoIcon', u'info'), -(u'InstallerIcon', u'insticon'), -(u'INSTALLLEVEL', u'3'), -(u'InstallMode', u'Typical'), -(u'PIDTemplate', u'12345<###-%%%%%%%>@@@@@'), -#(u'ProductLanguage', u'1033'), -(u'Progress1', u'Installing'), -(u'Progress2', u'installs'), -(u'PROMPTROLLBACKCOST', u'P'), -(u'RemoveIcon', u'removico'), -(u'RepairIcon', u'repairic'), -(u'Setup', u'Setup'), -(u'ShowUserRegistrationDlg', u'1'), -(u'Wizard', u'Setup Wizard'), -] - -ComboBox = [ -] - -Control = [ -(u'AdminWelcomeDlg', u'Bitmap', u'Bitmap', 0, 0, 370, 234, 1, None, u'[DialogBitmap]', u'Back', None), -(u'AdminWelcomeDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None), -(u'AdminWelcomeDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'Bitmap', None), -(u'AdminWelcomeDlg', u'Description', u'Text', 135, 70, 220, 30, 196611, None, u'The [Wizard] will create a server image of [ProductName], at a specified network location. 
Click Next to continue or Cancel to exit the [Wizard].', None, None), -(u'AdminWelcomeDlg', u'Title', u'Text', 135, 20, 220, 60, 196611, None, u'{\\VerdanaBold13}Welcome to the [ProductName] [Wizard]', None, None), -(u'AdminWelcomeDlg', u'Back', u'PushButton', 180, 243, 56, 17, 1, None, u'[ButtonText_Back]', u'Next', None), -(u'AdminWelcomeDlg', u'Next', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Next]', u'Cancel', None), -(u'ExitDialog', u'Bitmap', u'Bitmap', 0, 0, 370, 234, 1, None, u'[DialogBitmap]', u'Back', None), -(u'ExitDialog', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None), -(u'ExitDialog', u'Cancel', u'PushButton', 304, 243, 56, 17, 1, None, u'[ButtonText_Cancel]', u'Bitmap', None), -(u'ExitDialog', u'Description', u'Text', 135, 70, 220, 20, 196611, None, u'Click the Finish button to exit the [Wizard].', None, None), -(u'ExitDialog', u'Title', u'Text', 135, 20, 220, 60, 196611, None, u'{\\VerdanaBold13}Completing the [ProductName] [Wizard]', None, None), -(u'ExitDialog', u'Back', u'PushButton', 180, 243, 56, 17, 1, None, u'[ButtonText_Back]', u'Finish', None), -(u'ExitDialog', u'Finish', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Finish]', u'Cancel', None), -(u'FatalError', u'Bitmap', u'Bitmap', 0, 0, 370, 234, 1, None, u'[DialogBitmap]', u'Back', None), -(u'FatalError', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None), -(u'FatalError', u'Cancel', u'PushButton', 304, 243, 56, 17, 1, None, u'[ButtonText_Cancel]', u'Bitmap', None), -(u'FatalError', u'Title', u'Text', 135, 20, 220, 60, 196611, None, u'{\\VerdanaBold13}[ProductName] [Wizard] ended prematurely', None, None), -(u'FatalError', u'Back', u'PushButton', 180, 243, 56, 17, 1, None, u'[ButtonText_Back]', u'Finish', None), -(u'FatalError', u'Finish', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Finish]', u'Cancel', None), -(u'FatalError', u'Description1', u'Text', 135, 70, 220, 40, 196611, None, u'[ProductName] setup ended prematurely because of an error. Your system has not been modified. To install this program at a later time, please run the installation again.', None, None), -(u'FatalError', u'Description2', u'Text', 135, 115, 220, 20, 196611, None, u'Click the Finish button to exit the [Wizard].', None, None), -(u'PrepareDlg', u'Bitmap', u'Bitmap', 0, 0, 370, 234, 1, None, u'[DialogBitmap]', u'Cancel', None), -(u'PrepareDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None), -(u'PrepareDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'Bitmap', None), -(u'PrepareDlg', u'Description', u'Text', 135, 70, 220, 20, 196611, None, u'Please wait while the [Wizard] prepares to guide you through the installation.', None, None), -(u'PrepareDlg', u'Title', u'Text', 135, 20, 220, 60, 196611, None, u'{\\VerdanaBold13}Welcome to the [ProductName] [Wizard]', None, None), -(u'PrepareDlg', u'Back', u'PushButton', 180, 243, 56, 17, 1, None, u'[ButtonText_Back]', None, None), -(u'PrepareDlg', u'Next', u'PushButton', 236, 243, 56, 17, 1, None, u'[ButtonText_Next]', None, None), -(u'PrepareDlg', u'ActionData', u'Text', 135, 125, 220, 30, 196611, None, None, None, None), -(u'PrepareDlg', u'ActionText', u'Text', 135, 100, 220, 20, 196611, None, None, None, None), -(u'ProgressDlg', u'Text', u'Text', 35, 65, 300, 20, 3, None, u'Please wait while the [Wizard] [Progress2] [ProductName]. 
This may take several minutes.', None, None), -(u'ProgressDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'Back', None), -(u'ProgressDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None), -(u'ProgressDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None), -(u'ProgressDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None), -(u'ProgressDlg', u'Title', u'Text', 20, 15, 200, 15, 196611, None, u'[DlgTitleFont][Progress1] [ProductName]', None, None), -(u'ProgressDlg', u'Back', u'PushButton', 180, 243, 56, 17, 1, None, u'[ButtonText_Back]', u'Next', None), -(u'ProgressDlg', u'Next', u'PushButton', 236, 243, 56, 17, 1, None, u'[ButtonText_Next]', u'Cancel', None), -(u'ProgressDlg', u'ActionText', u'Text', 70, 100, 265, 10, 3, None, None, None, None), -(u'ProgressDlg', u'ProgressBar', u'ProgressBar', 35, 115, 300, 10, 65537, None, u'Progress done', None, None), -(u'ProgressDlg', u'StatusLabel', u'Text', 35, 100, 35, 10, 3, None, u'Status:', None, None), -(u'UserExit', u'Bitmap', u'Bitmap', 0, 0, 370, 234, 1, None, u'[DialogBitmap]', u'Back', None), -(u'UserExit', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None), -(u'UserExit', u'Cancel', u'PushButton', 304, 243, 56, 17, 1, None, u'[ButtonText_Cancel]', u'Bitmap', None), -(u'UserExit', u'Title', u'Text', 135, 20, 220, 60, 196611, None, u'{\\VerdanaBold13}[ProductName] [Wizard] was interrupted', None, None), -(u'UserExit', u'Back', u'PushButton', 180, 243, 56, 17, 1, None, u'[ButtonText_Back]', u'Finish', None), -(u'UserExit', u'Finish', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Finish]', u'Cancel', None), -(u'UserExit', u'Description1', u'Text', 135, 70, 220, 40, 196611, None, u'[ProductName] setup was interrupted. Your system has not been modified. 
To install this program at a later time, please run the installation again.', None, None), -(u'UserExit', u'Description2', u'Text', 135, 115, 220, 20, 196611, None, u'Click the Finish button to exit the [Wizard].', None, None), -(u'AdminBrowseDlg', u'Up', u'PushButton', 298, 55, 19, 19, 3670019, None, u'Up', u'NewFolder', u'Up One Level|'), -(u'AdminBrowseDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'PathEdit', None), -(u'AdminBrowseDlg', u'PathEdit', u'PathEdit', 84, 202, 261, 17, 3, u'TARGETDIR', None, u'OK', None), -(u'AdminBrowseDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None), -(u'AdminBrowseDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None), -(u'AdminBrowseDlg', u'Cancel', u'PushButton', 240, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'ComboLabel', None), -(u'AdminBrowseDlg', u'ComboLabel', u'Text', 25, 58, 44, 10, 3, None, u'&Look in:', u'DirectoryCombo', None), -(u'AdminBrowseDlg', u'DirectoryCombo', u'DirectoryCombo', 70, 55, 220, 80, 458755, u'TARGETDIR', None, u'Up', None), -(u'AdminBrowseDlg', u'Description', u'Text', 25, 23, 280, 15, 196611, None, u'Browse to the destination folder', None, None), -(u'AdminBrowseDlg', u'DirectoryList', u'DirectoryList', 25, 83, 320, 110, 7, u'TARGETDIR', None, u'PathLabel', None), -(u'AdminBrowseDlg', u'PathLabel', u'Text', 25, 205, 59, 10, 3, None, u'&Folder name:', u'BannerBitmap', None), -(u'AdminBrowseDlg', u'NewFolder', u'PushButton', 325, 55, 19, 19, 3670019, None, u'New', u'DirectoryList', u'Create A New Folder|'), -(u'AdminBrowseDlg', u'OK', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_OK]', u'Cancel', None), -(u'AdminBrowseDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Change current destination folder', None, None), -(u'AdminInstallPointDlg', u'Text', u'Text', 25, 80, 320, 10, 3, None, u'&Enter a new network location or click Browse to browse to one.', u'PathEdit', None), -(u'AdminInstallPointDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'Text', None), -(u'AdminInstallPointDlg', u'PathEdit', u'PathEdit', 25, 93, 320, 18, 3, u'TARGETDIR', None, u'Browse', None), -(u'AdminInstallPointDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None), -(u'AdminInstallPointDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None), -(u'AdminInstallPointDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None), -(u'AdminInstallPointDlg', u'Description', u'Text', 25, 20, 280, 20, 196611, None, u'Please specify a network location for the server image of [ProductName] product', None, None), -(u'AdminInstallPointDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Network Location', None, None), -(u'AdminInstallPointDlg', u'Back', u'PushButton', 180, 243, 56, 17, 3, None, u'[ButtonText_Back]', u'Next', None), -(u'AdminInstallPointDlg', u'Next', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Next]', u'Cancel', None), -(u'AdminInstallPointDlg', u'Browse', u'PushButton', 289, 119, 56, 17, 3, None, u'[ButtonText_Browse]', u'Back', None), -(u'AdminRegistrationDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'OrganizationLabel', None), -(u'AdminRegistrationDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None), -(u'AdminRegistrationDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None), -(u'AdminRegistrationDlg', u'Cancel', u'PushButton', 304, 243, 
56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None), -(u'AdminRegistrationDlg', u'Description', u'Text', 25, 23, 280, 15, 196611, None, u'Please enter your company information', None, None), -(u'AdminRegistrationDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Company Information', None, None), -(u'AdminRegistrationDlg', u'Back', u'PushButton', 180, 243, 56, 17, 65539, None, u'[ButtonText_Back]', u'Next', None), -(u'AdminRegistrationDlg', u'Next', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Next]', u'Cancel', None), -(u'AdminRegistrationDlg', u'OrganizationLabel', u'Text', 45, 71, 285, 30, 3, None, u'&Please enter the name of your organization in the box below. This will be used as default company name for subsequent installations of [ProductName]:', u'OrganizationEdit', None), -(u'AdminRegistrationDlg', u'CDKeyEdit', u'MaskedEdit', 45, 143, 250, 16, 3, u'PIDKEY', u'[PIDTemplate]', u'Back', None), -(u'AdminRegistrationDlg', u'CDKeyLabel', u'Text', 45, 130, 50, 10, 3, None, u'CD &Key:', u'CDKeyEdit', None), -(u'AdminRegistrationDlg', u'OrganizationEdit', u'Edit', 45, 105, 220, 18, 3, u'COMPANYNAME', u'{80}', u'CDKeyLabel', None), -(u'BrowseDlg', u'Up', u'PushButton', 298, 55, 19, 19, 3670019, None, u'Up', u'NewFolder', u'Up One Level|'), -(u'BrowseDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'PathEdit', None), -(u'BrowseDlg', u'PathEdit', u'PathEdit', 84, 202, 261, 18, 11, u'_BrowseProperty', None, u'OK', None), -(u'BrowseDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None), -(u'BrowseDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None), -(u'BrowseDlg', u'Cancel', u'PushButton', 240, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'ComboLabel', None), -(u'BrowseDlg', u'ComboLabel', u'Text', 25, 58, 44, 10, 3, None, u'&Look in:', u'DirectoryCombo', None), -(u'BrowseDlg', u'DirectoryCombo', u'DirectoryCombo', 70, 55, 220, 80, 393227, u'_BrowseProperty', None, u'Up', None), -(u'BrowseDlg', u'Description', u'Text', 25, 23, 280, 15, 196611, None, u'Browse to the destination folder', None, None), -(u'BrowseDlg', u'DirectoryList', u'DirectoryList', 25, 83, 320, 110, 15, u'_BrowseProperty', None, u'PathLabel', None), -(u'BrowseDlg', u'PathLabel', u'Text', 25, 205, 59, 10, 3, None, u'&Folder name:', u'BannerBitmap', None), -(u'BrowseDlg', u'NewFolder', u'PushButton', 325, 55, 19, 19, 3670019, None, u'New', u'DirectoryList', u'Create A New Folder|'), -(u'BrowseDlg', u'OK', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_OK]', u'Cancel', None), -(u'BrowseDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Change current destination folder', None, None), -(u'CancelDlg', u'Text', u'Text', 48, 15, 194, 30, 3, None, u'Are you sure you want to cancel [ProductName] installation?', None, None), -(u'CancelDlg', u'Icon', u'Icon', 15, 15, 24, 24, 5242881, None, u'[InfoIcon]', None, u'Information icon|'), -(u'CancelDlg', u'No', u'PushButton', 132, 57, 56, 17, 3, None, u'[ButtonText_No]', u'Yes', None), -(u'CancelDlg', u'Yes', u'PushButton', 72, 57, 56, 17, 3, None, u'[ButtonText_Yes]', u'No', None), -(u'CustomizeDlg', u'Text', u'Text', 25, 55, 320, 20, 3, None, u'Click on the icons in the tree below to change the way features will be installed.', None, None), -(u'CustomizeDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'Tree', None), -(u'CustomizeDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None), -(u'CustomizeDlg', 
u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None), -(u'CustomizeDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None), -(u'CustomizeDlg', u'Description', u'Text', 25, 23, 280, 15, 196611, None, u'Select the way you want features to be installed.', None, None), -(u'CustomizeDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Custom Setup', None, None), -(u'CustomizeDlg', u'Back', u'PushButton', 180, 243, 56, 17, 3, None, u'[ButtonText_Back]', u'Next', None), -(u'CustomizeDlg', u'Next', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Next]', u'Cancel', None), -(u'CustomizeDlg', u'Browse', u'PushButton', 304, 200, 56, 17, 3, None, u'[ButtonText_Browse]', u'Reset', None), -(u'CustomizeDlg', u'Tree', u'SelectionTree', 25, 85, 175, 95, 7, u'_BrowseProperty', u'Tree of selections', u'Browse', None), -(u'CustomizeDlg', u'Box', u'GroupBox', 210, 81, 140, 98, 1, None, None, None, None), -(u'CustomizeDlg', u'Reset', u'PushButton', 42, 243, 56, 17, 3, None, u'[ButtonText_Reset]', u'DiskCost', None), -(u'CustomizeDlg', u'DiskCost', u'PushButton', 111, 243, 56, 17, 3, None, u'Disk &Usage', u'Back', None), -(u'CustomizeDlg', u'ItemDescription', u'Text', 215, 90, 131, 30, 3, None, u'Multiline description of the currently selected item.', None, None), -(u'CustomizeDlg', u'ItemSize', u'Text', 215, 130, 131, 45, 3, None, u'The size of the currently selected item.', None, None), -(u'CustomizeDlg', u'Location', u'Text', 75, 200, 215, 20, 3, None, u"", None, None), -(u'CustomizeDlg', u'LocationLabel', u'Text', 25, 200, 50, 10, 3, None, u'Location:', None, None), -(u'DiskCostDlg', u'Text', u'Text', 20, 53, 330, 40, 3, None, u'The highlighted volumes (if any) do not have enough disk space available for the currently selected features. 
You can either remove some files from the highlighted volumes, or choose to install less features onto local drive(s), or select different destination drive(s).', None, None), -(u'DiskCostDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'OK', None), -(u'DiskCostDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None), -(u'DiskCostDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None), -(u'DiskCostDlg', u'Description', u'Text', 20, 20, 280, 20, 196611, None, u'The disk space required for the installation of the selected features.', None, None), -(u'DiskCostDlg', u'OK', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_OK]', u'BannerBitmap', None), -(u'DiskCostDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Disk Space Requirements', None, None), -(u'DiskCostDlg', u'VolumeList', u'VolumeCostList', 20, 100, 330, 120, 393223, None, u'{120}{70}{70}{70}{70}', None, None), -(u'ErrorDlg', u'Y', u'PushButton', 100, 80, 56, 17, 3, None, u'[ButtonText_Yes]', None, None), -(u'ErrorDlg', u'A', u'PushButton', 100, 80, 56, 17, 3, None, u'[ButtonText_Cancel]', None, None), -(u'ErrorDlg', u'C', u'PushButton', 100, 80, 56, 17, 3, None, u'[ButtonText_Cancel]', None, None), -(u'ErrorDlg', u'ErrorIcon', u'Icon', 15, 15, 24, 24, 5242881, None, u'[InfoIcon]', None, u'Information icon|'), -(u'ErrorDlg', u'ErrorText', u'Text', 48, 15, 205, 60, 3, None, u'Information text', None, None), -(u'ErrorDlg', u'I', u'PushButton', 100, 80, 56, 17, 3, None, u'[ButtonText_Ignore]', None, None), -(u'ErrorDlg', u'N', u'PushButton', 100, 80, 56, 17, 3, None, u'[ButtonText_No]', None, None), -(u'ErrorDlg', u'O', u'PushButton', 100, 80, 56, 17, 3, None, u'[ButtonText_OK]', None, None), -(u'ErrorDlg', u'R', u'PushButton', 100, 80, 56, 17, 3, None, u'[ButtonText_Retry]', None, None), -(u'FilesInUse', u'Text', u'Text', 20, 55, 330, 30, 3, None, u'The following applications are using files that need to be updated by this setup. 
Close these applications and then click Retry to continue the installation or Cancel to exit it.', None, None), -(u'FilesInUse', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'Retry', None), -(u'FilesInUse', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None), -(u'FilesInUse', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None), -(u'FilesInUse', u'Description', u'Text', 20, 23, 280, 20, 196611, None, u'Some files that need to be updated are currently in use.', None, None), -(u'FilesInUse', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Files in Use', None, None), -(u'FilesInUse', u'Retry', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Retry]', u'Ignore', None), -(u'FilesInUse', u'Exit', u'PushButton', 166, 243, 56, 17, 3, None, u'[ButtonText_Exit]', u'BannerBitmap', None), -(u'FilesInUse', u'Ignore', u'PushButton', 235, 243, 56, 17, 3, None, u'[ButtonText_Ignore]', u'Exit', None), -(u'FilesInUse', u'List', u'ListBox', 20, 87, 330, 130, 7, u'FileInUseProcess', None, None, None), -(u'LicenseAgreementDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'AgreementText', None), -(u'LicenseAgreementDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None), -(u'LicenseAgreementDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None), -(u'LicenseAgreementDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None), -(u'LicenseAgreementDlg', u'Description', u'Text', 25, 23, 280, 15, 196611, None, u'Please read the following license agreement carefully', None, None), -(u'LicenseAgreementDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]End-User License Agreement', None, None), -(u'LicenseAgreementDlg', u'Back', u'PushButton', 180, 243, 56, 17, 3, None, u'[ButtonText_Back]', u'Next', None), -(u'LicenseAgreementDlg', u'Next', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Next]', u'Cancel', None), -(u'LicenseAgreementDlg', u'AgreementText', u'ScrollableText', 20, 60, 330, 120, 7, None, u'{\\rtf1\\ansi\\ansicpg1252\\deff0\\deftab720{\\fonttbl{\\f0\\froman\\fprq2 Times New Roman;}}{\\colortbl\\red0\\green0\\blue0;} \\deflang1033\\horzdoc{\\*\\fchars }{\\*\\lchars }\\pard\\plain\\f0\\fs20 \\par }', u'Buttons', None), -(u'LicenseAgreementDlg', u'Buttons', u'RadioButtonGroup', 20, 187, 330, 40, 3, u'IAgree', None, u'Back', None), -(u'MaintenanceTypeDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'ChangeLabel', None), -(u'MaintenanceTypeDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None), -(u'MaintenanceTypeDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None), -(u'MaintenanceTypeDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None), -(u'MaintenanceTypeDlg', u'Description', u'Text', 25, 23, 280, 20, 196611, None, u'Select the operation you wish to perform.', None, None), -(u'MaintenanceTypeDlg', u'Title', u'Text', 15, 6, 240, 15, 196611, None, u'[DlgTitleFont]Modify, Repair or Remove installation', None, None), -(u'MaintenanceTypeDlg', u'Back', u'PushButton', 180, 243, 56, 17, 3, None, u'[ButtonText_Back]', u'Next', None), -(u'MaintenanceTypeDlg', u'Next', u'PushButton', 236, 243, 56, 17, 1, None, u'[ButtonText_Next]', u'Cancel', None), -(u'MaintenanceTypeDlg', u'ChangeLabel', u'Text', 105, 65, 100, 10, 3, None, u'[DlgTitleFont]&Modify', u'ChangeButton', None), 
-(u'MaintenanceTypeDlg', u'ChangeButton', u'PushButton', 50, 65, 38, 38, 5767171, None, u'[CustomSetupIcon]', u'RepairLabel', u'Modify Installation|'), -(u'MaintenanceTypeDlg', u'RepairLabel', u'Text', 105, 114, 100, 10, 3, None, u'[DlgTitleFont]Re&pair', u'RepairButton', None), -(u'MaintenanceTypeDlg', u'ChangeText', u'Text', 105, 78, 230, 20, 3, None, u'Allows users to change the way features are installed.', None, None), -(u'MaintenanceTypeDlg', u'RemoveButton', u'PushButton', 50, 163, 38, 38, 5767171, None, u'[RemoveIcon]', u'Back', u'Remove Installation|'), -(u'MaintenanceTypeDlg', u'RemoveLabel', u'Text', 105, 163, 100, 10, 3, None, u'[DlgTitleFont]&Remove', u'RemoveButton', None), -(u'MaintenanceTypeDlg', u'RemoveText', u'Text', 105, 176, 230, 20, 3, None, u'Removes [ProductName] from your computer.', None, None), -(u'MaintenanceTypeDlg', u'RepairButton', u'PushButton', 50, 114, 38, 38, 5767171, None, u'[RepairIcon]', u'RemoveLabel', u'Repair Installation|'), -(u'MaintenanceTypeDlg', u'RepairText', u'Text', 105, 127, 230, 30, 3, None, u'Repairs errors in the most recent installation state - fixes missing or corrupt files, shortcuts and registry entries.', None, None), -(u'MaintenanceWelcomeDlg', u'Bitmap', u'Bitmap', 0, 0, 370, 234, 1, None, u'[DialogBitmap]', u'Back', None), -(u'MaintenanceWelcomeDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None), -(u'MaintenanceWelcomeDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'Bitmap', None), -(u'MaintenanceWelcomeDlg', u'Description', u'Text', 135, 70, 220, 60, 196611, None, u'The [Wizard] will allow you to change the way [ProductName] features are installed on your computer or even to remove [ProductName] from your computer. Click Next to continue or Cancel to exit the [Wizard].', None, None), -(u'MaintenanceWelcomeDlg', u'Title', u'Text', 135, 20, 220, 60, 196611, None, u'{\\VerdanaBold13}Welcome to the [ProductName] [Wizard]', None, None), -(u'MaintenanceWelcomeDlg', u'Back', u'PushButton', 180, 243, 56, 17, 1, None, u'[ButtonText_Back]', u'Next', None), -(u'MaintenanceWelcomeDlg', u'Next', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Next]', u'Cancel', None), -(u'OutOfDiskDlg', u'Text', u'Text', 20, 53, 330, 40, 3, None, u'The highlighted volumes do not have enough disk space available for the currently selected features. You can either remove some files from the highlighted volumes, or choose to install less features onto local drive(s), or select different destination drive(s).', None, None), -(u'OutOfDiskDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'OK', None), -(u'OutOfDiskDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None), -(u'OutOfDiskDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None), -(u'OutOfDiskDlg', u'Description', u'Text', 20, 20, 280, 20, 196611, None, u'Disk space required for the installation exceeds available disk space.', None, None), -(u'OutOfDiskDlg', u'OK', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_OK]', u'BannerBitmap', None), -(u'OutOfDiskDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Out of Disk Space', None, None), -(u'OutOfDiskDlg', u'VolumeList', u'VolumeCostList', 20, 100, 330, 120, 393223, None, u'{120}{70}{70}{70}{70}', None, None), -(u'OutOfRbDiskDlg', u'Text', u'Text', 20, 53, 330, 40, 3, None, u'The highlighted volumes do not have enough disk space available for the currently selected features. 
You can either remove some files from the highlighted volumes, or choose to install less features onto local drive(s), or select different destination drive(s).', None, None), -(u'OutOfRbDiskDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'No', None), -(u'OutOfRbDiskDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None), -(u'OutOfRbDiskDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None), -(u'OutOfRbDiskDlg', u'Description', u'Text', 20, 20, 280, 20, 196611, None, u'Disk space required for the installation exceeds available disk space.', None, None), -(u'OutOfRbDiskDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Out of Disk Space', None, None), -(u'OutOfRbDiskDlg', u'No', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_No]', u'Yes', None), -(u'OutOfRbDiskDlg', u'Yes', u'PushButton', 240, 243, 56, 17, 3, None, u'[ButtonText_Yes]', u'BannerBitmap', None), -(u'OutOfRbDiskDlg', u'VolumeList', u'VolumeCostList', 20, 140, 330, 80, 4587527, None, u'{120}{70}{70}{70}{70}', None, None), -(u'OutOfRbDiskDlg', u'Text2', u'Text', 20, 94, 330, 40, 3, None, u"Alternatively, you may choose to disable the installer's rollback functionality. This allows the installer to restore your computer's original state should the installation be interrupted in any way. Click Yes if you wish to take the risk to disable rollback.", None, None), -(u'ResumeDlg', u'Bitmap', u'Bitmap', 0, 0, 370, 234, 1, None, u'[DialogBitmap]', u'Back', None), -(u'ResumeDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None), -(u'ResumeDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'Bitmap', None), -(u'ResumeDlg', u'Description', u'Text', 135, 70, 220, 30, 196611, None, u'The [Wizard] will complete the installation of [ProductName] on your computer. 
Click Install to continue or Cancel to exit the [Wizard].', None, None), -(u'ResumeDlg', u'Title', u'Text', 135, 20, 220, 60, 196611, None, u'{\\VerdanaBold13}Resuming the [ProductName] [Wizard]', None, None), -(u'ResumeDlg', u'Back', u'PushButton', 180, 243, 56, 17, 1, None, u'[ButtonText_Back]', u'Install', None), -(u'ResumeDlg', u'Install', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Install]', u'Cancel', None), -(u'SetupTypeDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'TypicalLabel', None), -(u'SetupTypeDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None), -(u'SetupTypeDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None), -(u'SetupTypeDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None), -(u'SetupTypeDlg', u'Description', u'Text', 25, 23, 280, 15, 196611, None, u'Choose the setup type that best suits your needs', None, None), -(u'SetupTypeDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Choose Setup Type', None, None), -(u'SetupTypeDlg', u'Back', u'PushButton', 180, 243, 56, 17, 3, None, u'[ButtonText_Back]', u'Next', None), -(u'SetupTypeDlg', u'Next', u'PushButton', 236, 243, 56, 17, 1, None, u'[ButtonText_Next]', u'Cancel', None), -(u'SetupTypeDlg', u'TypicalLabel', u'Text', 105, 65, 100, 10, 3, None, u'[DlgTitleFont]&Typical', u'TypicalButton', None), -(u'SetupTypeDlg', u'CompleteButton', u'PushButton', 50, 171, 38, 38, 5767171, None, u'[CompleteSetupIcon]', u'Back', u'Complete Installation|'), -(u'SetupTypeDlg', u'CompleteLabel', u'Text', 105, 171, 100, 10, 3, None, u'[DlgTitleFont]C&omplete', u'CompleteButton', None), -(u'SetupTypeDlg', u'CompleteText', u'Text', 105, 184, 230, 20, 3, None, u'All program features will be installed. (Requires most disk space)', None, None), -(u'SetupTypeDlg', u'CustomButton', u'PushButton', 50, 118, 38, 38, 5767171, None, u'[CustomSetupIcon]', u'CompleteLabel', u'Custom Installation|'), -(u'SetupTypeDlg', u'CustomLabel', u'Text', 105, 118, 100, 10, 3, None, u'[DlgTitleFont]C&ustom', u'CustomButton', None), -(u'SetupTypeDlg', u'CustomText', u'Text', 105, 131, 230, 30, 3, None, u'Allows users to choose which program features will be installed and where they will be installed. Recommended for advanced users.', None, None), -(u'SetupTypeDlg', u'TypicalButton', u'PushButton', 50, 65, 38, 38, 5767171, None, u'[InstallerIcon]', u'CustomLabel', u'Typical Installation|'), -(u'SetupTypeDlg', u'TypicalText', u'Text', 105, 78, 230, 20, 3, None, u'Installs the most common program features. 
Recommended for most users.', None, None), -(u'UserRegistrationDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'NameLabel', None), -(u'UserRegistrationDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None), -(u'UserRegistrationDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None), -(u'UserRegistrationDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None), -(u'UserRegistrationDlg', u'Description', u'Text', 25, 23, 280, 15, 196611, None, u'Please enter your customer information', None, None), -(u'UserRegistrationDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Customer Information', None, None), -(u'UserRegistrationDlg', u'Back', u'PushButton', 180, 243, 56, 17, 3, None, u'[ButtonText_Back]', u'Next', None), -(u'UserRegistrationDlg', u'Next', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Next]', u'Cancel', None), -(u'UserRegistrationDlg', u'OrganizationLabel', u'Text', 45, 110, 100, 15, 3, None, u'&Organization:', u'OrganizationEdit', None), -(u'UserRegistrationDlg', u'CDKeyEdit', u'MaskedEdit', 45, 159, 250, 16, 3, u'PIDKEY', u'[PIDTemplate]', u'Back', None), -(u'UserRegistrationDlg', u'CDKeyLabel', u'Text', 45, 147, 50, 10, 3, None, u'CD &Key:', u'CDKeyEdit', None), -(u'UserRegistrationDlg', u'OrganizationEdit', u'Edit', 45, 122, 220, 18, 3, u'COMPANYNAME', u'{80}', u'CDKeyLabel', None), -(u'UserRegistrationDlg', u'NameLabel', u'Text', 45, 73, 100, 15, 3, None, u'&User Name:', u'NameEdit', None), -(u'UserRegistrationDlg', u'NameEdit', u'Edit', 45, 85, 220, 18, 3, u'USERNAME', u'{80}', u'OrganizationLabel', None), -(u'VerifyReadyDlg', u'Text', u'Text', 25, 70, 320, 20, 3, None, u'Click Install to begin the installation. If you want to review or change any of your installation settings, click Back. Click Cancel to exit the wizard.', None, None), -(u'VerifyReadyDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'Back', None), -(u'VerifyReadyDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None), -(u'VerifyReadyDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None), -(u'VerifyReadyDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None), -(u'VerifyReadyDlg', u'Description', u'Text', 25, 23, 280, 15, 196611, None, u'The [Wizard] is ready to begin the [InstallMode] installation', None, None), -(u'VerifyReadyDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Ready to Install', None, None), -(u'VerifyReadyDlg', u'Back', u'PushButton', 180, 243, 56, 17, 3, None, u'[ButtonText_Back]', u'Install', None), -(u'VerifyReadyDlg', u'Install', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Install]', u'Cancel', None), -(u'VerifyRemoveDlg', u'Text', u'Text', 25, 70, 320, 30, 3, None, u'Click Remove to remove [ProductName] from your computer. If you want to review or change any of your installation settings, click Back. 
Click Cancel to exit the wizard.', None, None), -(u'VerifyRemoveDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'Back', None), -(u'VerifyRemoveDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None), -(u'VerifyRemoveDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None), -(u'VerifyRemoveDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None), -(u'VerifyRemoveDlg', u'Description', u'Text', 25, 23, 280, 15, 196611, None, u'You have chosen to remove the program from your computer.', None, None), -(u'VerifyRemoveDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Remove [ProductName]', None, None), -(u'VerifyRemoveDlg', u'Back', u'PushButton', 180, 243, 56, 17, 3, None, u'[ButtonText_Back]', u'Remove', None), -(u'VerifyRemoveDlg', u'Remove', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Remove]', u'Cancel', None), -(u'VerifyRepairDlg', u'Text', u'Text', 25, 70, 320, 30, 3, None, u'Click Repair to repair the installation of [ProductName]. If you want to review or change any of your installation settings, click Back. Click Cancel to exit the wizard.', None, None), -(u'VerifyRepairDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'Back', None), -(u'VerifyRepairDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None), -(u'VerifyRepairDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None), -(u'VerifyRepairDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None), -(u'VerifyRepairDlg', u'Description', u'Text', 25, 23, 280, 15, 196611, None, u'The [Wizard] is ready to begin the repair of [ProductName].', None, None), -(u'VerifyRepairDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Repair [ProductName]', None, None), -(u'VerifyRepairDlg', u'Back', u'PushButton', 180, 243, 56, 17, 3, None, u'[ButtonText_Back]', u'Repair', None), -(u'VerifyRepairDlg', u'Repair', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Repair]', u'Cancel', None), -(u'WaitForCostingDlg', u'Text', u'Text', 48, 15, 194, 30, 3, None, u'Please wait while the installer finishes determining your disk space requirements.', None, None), -(u'WaitForCostingDlg', u'Icon', u'Icon', 15, 15, 24, 24, 5242881, None, u'[ExclamationIcon]', None, u'Exclamation icon|'), -(u'WaitForCostingDlg', u'Return', u'PushButton', 102, 57, 56, 17, 3, None, u'[ButtonText_Return]', None, None), -(u'WelcomeDlg', u'Bitmap', u'Bitmap', 0, 0, 370, 234, 1, None, u'[DialogBitmap]', u'Back', None), -(u'WelcomeDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None), -(u'WelcomeDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'Bitmap', None), -(u'WelcomeDlg', u'Description', u'Text', 135, 70, 220, 30, 196611, None, u'The [Wizard] will install [ProductName] on your computer. 
Click Next to continue or Cancel to exit the [Wizard].', None, None), -(u'WelcomeDlg', u'Title', u'Text', 135, 20, 220, 60, 196611, None, u'{\\VerdanaBold13}Welcome to the [ProductName] [Wizard]', None, None), -(u'WelcomeDlg', u'Back', u'PushButton', 180, 243, 56, 17, 1, None, u'[ButtonText_Back]', u'Next', None), -(u'WelcomeDlg', u'Next', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Next]', u'Cancel', None), -] - -ListBox = [ -] - -ActionText = [ -(u'InstallValidate', u'Validating install', None), -(u'InstallFiles', u'Copying new files', u'File: [1], Directory: [9], Size: [6]'), -(u'InstallAdminPackage', u'Copying network install files', u'File: [1], Directory: [9], Size: [6]'), -(u'FileCost', u'Computing space requirements', None), -(u'CostInitialize', u'Computing space requirements', None), -(u'CostFinalize', u'Computing space requirements', None), -(u'CreateShortcuts', u'Creating shortcuts', u'Shortcut: [1]'), -(u'PublishComponents', u'Publishing Qualified Components', u'Component ID: [1], Qualifier: [2]'), -(u'PublishFeatures', u'Publishing Product Features', u'Feature: [1]'), -(u'PublishProduct', u'Publishing product information', None), -(u'RegisterClassInfo', u'Registering Class servers', u'Class Id: [1]'), -(u'RegisterExtensionInfo', u'Registering extension servers', u'Extension: [1]'), -(u'RegisterMIMEInfo', u'Registering MIME info', u'MIME Content Type: [1], Extension: [2]'), -(u'RegisterProgIdInfo', u'Registering program identifiers', u'ProgId: [1]'), -(u'AllocateRegistrySpace', u'Allocating registry space', u'Free space: [1]'), -(u'AppSearch', u'Searching for installed applications', u'Property: [1], Signature: [2]'), -(u'BindImage', u'Binding executables', u'File: [1]'), -(u'CCPSearch', u'Searching for qualifying products', None), -(u'CreateFolders', u'Creating folders', u'Folder: [1]'), -(u'DeleteServices', u'Deleting services', u'Service: [1]'), -(u'DuplicateFiles', u'Creating duplicate files', u'File: [1], Directory: [9], Size: [6]'), -(u'FindRelatedProducts', u'Searching for related applications', u'Found application: [1]'), -(u'InstallODBC', u'Installing ODBC components', None), -(u'InstallServices', u'Installing new services', u'Service: [2]'), -(u'LaunchConditions', u'Evaluating launch conditions', None), -(u'MigrateFeatureStates', u'Migrating feature states from related applications', u'Application: [1]'), -(u'MoveFiles', u'Moving files', u'File: [1], Directory: [9], Size: [6]'), -(u'PatchFiles', u'Patching files', u'File: [1], Directory: [2], Size: [3]'), -(u'ProcessComponents', u'Updating component registration', None), -(u'RegisterComPlus', u'Registering COM+ Applications and Components', u'AppId: [1]{{, AppType: [2], Users: [3], RSN: [4]}}'), -(u'RegisterFonts', u'Registering fonts', u'Font: [1]'), -(u'RegisterProduct', u'Registering product', u'[1]'), -(u'RegisterTypeLibraries', u'Registering type libraries', u'LibID: [1]'), -(u'RegisterUser', u'Registering user', u'[1]'), -(u'RemoveDuplicateFiles', u'Removing duplicated files', u'File: [1], Directory: [9]'), -(u'RemoveEnvironmentStrings', u'Updating environment strings', u'Name: [1], Value: [2], Action [3]'), -(u'RemoveExistingProducts', u'Removing applications', u'Application: [1], Command line: [2]'), -(u'RemoveFiles', u'Removing files', u'File: [1], Directory: [9]'), -(u'RemoveFolders', u'Removing folders', u'Folder: [1]'), -(u'RemoveIniValues', u'Removing INI files entries', u'File: [1], Section: [2], Key: [3], Value: [4]'), -(u'RemoveODBC', u'Removing ODBC components', None), 
-(u'RemoveRegistryValues', u'Removing system registry values', u'Key: [1], Name: [2]'), -(u'RemoveShortcuts', u'Removing shortcuts', u'Shortcut: [1]'), -(u'RMCCPSearch', u'Searching for qualifying products', None), -(u'SelfRegModules', u'Registering modules', u'File: [1], Folder: [2]'), -(u'SelfUnregModules', u'Unregistering modules', u'File: [1], Folder: [2]'), -(u'SetODBCFolders', u'Initializing ODBC directories', None), -(u'StartServices', u'Starting services', u'Service: [1]'), -(u'StopServices', u'Stopping services', u'Service: [1]'), -(u'UnpublishComponents', u'Unpublishing Qualified Components', u'Component ID: [1], Qualifier: [2]'), -(u'UnpublishFeatures', u'Unpublishing Product Features', u'Feature: [1]'), -(u'UnregisterClassInfo', u'Unregister Class servers', u'Class Id: [1]'), -(u'UnregisterComPlus', u'Unregistering COM+ Applications and Components', u'AppId: [1]{{, AppType: [2]}}'), -(u'UnregisterExtensionInfo', u'Unregistering extension servers', u'Extension: [1]'), -(u'UnregisterFonts', u'Unregistering fonts', u'Font: [1]'), -(u'UnregisterMIMEInfo', u'Unregistering MIME info', u'MIME Content Type: [1], Extension: [2]'), -(u'UnregisterProgIdInfo', u'Unregistering program identifiers', u'ProgId: [1]'), -(u'UnregisterTypeLibraries', u'Unregistering type libraries', u'LibID: [1]'), -(u'WriteEnvironmentStrings', u'Updating environment strings', u'Name: [1], Value: [2], Action [3]'), -(u'WriteIniValues', u'Writing INI files values', u'File: [1], Section: [2], Key: [3], Value: [4]'), -(u'WriteRegistryValues', u'Writing system registry values', u'Key: [1], Name: [2], Value: [3]'), -(u'Advertise', u'Advertising application', None), -(u'GenerateScript', u'Generating script operations for action:', u'[1]'), -(u'InstallSFPCatalogFile', u'Installing system catalog', u'File: [1], Dependencies: [2]'), -(u'MsiPublishAssemblies', u'Publishing assembly information', u'Application Context:[1], Assembly Name:[2]'), -(u'MsiUnpublishAssemblies', u'Unpublishing assembly information', u'Application Context:[1], Assembly Name:[2]'), -(u'Rollback', u'Rolling back action:', u'[1]'), -(u'RollbackCleanup', u'Removing backup files', u'File: [1]'), -(u'UnmoveFiles', u'Removing moved files', u'File: [1], Directory: [9]'), -(u'UnpublishProduct', u'Unpublishing product information', None), -] - -ControlCondition = [ -(u'CustomizeDlg', u'Browse', u'Hide', u'Installed'), -(u'CustomizeDlg', u'Location', u'Hide', u'Installed'), -(u'CustomizeDlg', u'LocationLabel', u'Hide', u'Installed'), -(u'LicenseAgreementDlg', u'Next', u'Disable', u'IAgree <> "Yes"'), -(u'LicenseAgreementDlg', u'Next', u'Enable', u'IAgree = "Yes"'), -] - -ControlEvent = [ -(u'AdminWelcomeDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None), -(u'AdminWelcomeDlg', u'Next', u'NewDialog', u'AdminRegistrationDlg', u'1', 2), -(u'AdminWelcomeDlg', u'Next', u'[InstallMode]', u'Server Image', u'1', 1), -(u'ExitDialog', u'Finish', u'EndDialog', u'Return', u'1', None), -(u'FatalError', u'Finish', u'EndDialog', u'Exit', u'1', None), -(u'PrepareDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None), -(u'ProgressDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None), -(u'UserExit', u'Finish', u'EndDialog', u'Exit', u'1', None), -(u'AdminBrowseDlg', u'Up', u'DirectoryListUp', u'0', u'1', None), -(u'AdminBrowseDlg', u'Cancel', u'Reset', u'0', u'1', 1), -(u'AdminBrowseDlg', u'Cancel', u'EndDialog', u'Return', u'1', 2), -(u'AdminBrowseDlg', u'NewFolder', u'DirectoryListNew', u'0', u'1', None), -(u'AdminBrowseDlg', u'OK', u'EndDialog', u'Return', 
u'1', 2), -(u'AdminBrowseDlg', u'OK', u'SetTargetPath', u'TARGETDIR', u'1', 1), -(u'AdminInstallPointDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None), -(u'AdminInstallPointDlg', u'Back', u'NewDialog', u'AdminRegistrationDlg', u'1', None), -(u'AdminInstallPointDlg', u'Next', u'SetTargetPath', u'TARGETDIR', u'1', 1), -(u'AdminInstallPointDlg', u'Next', u'NewDialog', u'VerifyReadyDlg', u'1', 2), -(u'AdminInstallPointDlg', u'Browse', u'SpawnDialog', u'AdminBrowseDlg', u'1', None), -(u'AdminRegistrationDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None), -(u'AdminRegistrationDlg', u'Back', u'NewDialog', u'AdminWelcomeDlg', u'1', None), -(u'AdminRegistrationDlg', u'Next', u'NewDialog', u'AdminInstallPointDlg', u'ProductID', 2), -(u'AdminRegistrationDlg', u'Next', u'ValidateProductID', u'0', u'0', 1), -(u'BrowseDlg', u'Up', u'DirectoryListUp', u'0', u'1', None), -(u'BrowseDlg', u'Cancel', u'Reset', u'0', u'1', 1), -(u'BrowseDlg', u'Cancel', u'EndDialog', u'Return', u'1', 2), -(u'BrowseDlg', u'NewFolder', u'DirectoryListNew', u'0', u'1', None), -(u'BrowseDlg', u'OK', u'EndDialog', u'Return', u'1', 2), -(u'BrowseDlg', u'OK', u'SetTargetPath', u'[_BrowseProperty]', u'1', 1), -(u'CancelDlg', u'No', u'EndDialog', u'Return', u'1', None), -(u'CancelDlg', u'Yes', u'EndDialog', u'Exit', u'1', None), -(u'CustomizeDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None), -(u'CustomizeDlg', u'Back', u'NewDialog', u'MaintenanceTypeDlg', u'InstallMode = "Change"', None), -(u'CustomizeDlg', u'Back', u'NewDialog', u'SetupTypeDlg', u'InstallMode = "Custom"', None), -(u'CustomizeDlg', u'Next', u'NewDialog', u'VerifyReadyDlg', u'1', None), -(u'CustomizeDlg', u'Browse', u'SelectionBrowse', u'BrowseDlg', u'1', None), -(u'CustomizeDlg', u'Reset', u'Reset', u'0', u'1', None), -(u'CustomizeDlg', u'DiskCost', u'SpawnDialog', u'DiskCostDlg', u'1', 2), -(u'DiskCostDlg', u'OK', u'EndDialog', u'Return', u'1', None), -(u'ErrorDlg', u'Y', u'EndDialog', u'ErrorYes', u'1', None), -(u'ErrorDlg', u'A', u'EndDialog', u'ErrorAbort', u'1', None), -(u'ErrorDlg', u'C', u'EndDialog', u'ErrorCancel', u'1', None), -(u'ErrorDlg', u'I', u'EndDialog', u'ErrorIgnore', u'1', None), -(u'ErrorDlg', u'N', u'EndDialog', u'ErrorNo', u'1', None), -(u'ErrorDlg', u'O', u'EndDialog', u'ErrorOk', u'1', None), -(u'ErrorDlg', u'R', u'EndDialog', u'ErrorRetry', u'1', None), -(u'FilesInUse', u'Retry', u'EndDialog', u'Retry', u'1', None), -(u'FilesInUse', u'Exit', u'EndDialog', u'Exit', u'1', None), -(u'FilesInUse', u'Ignore', u'EndDialog', u'Ignore', u'1', None), -(u'LicenseAgreementDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None), -(u'LicenseAgreementDlg', u'Back', u'NewDialog', u'WelcomeDlg', u'1', None), -(u'LicenseAgreementDlg', u'Next', u'NewDialog', u'SetupTypeDlg', u'IAgree = "Yes" AND ShowUserRegistrationDlg <> 1', 3), -(u'LicenseAgreementDlg', u'Next', u'NewDialog', u'UserRegistrationDlg', u'IAgree = "Yes" AND ShowUserRegistrationDlg = 1', 1), -(u'LicenseAgreementDlg', u'Next', u'SpawnWaitDialog', u'WaitForCostingDlg', u'CostingComplete = 1', 2), -(u'MaintenanceTypeDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None), -(u'MaintenanceTypeDlg', u'Back', u'NewDialog', u'MaintenanceWelcomeDlg', u'1', None), -(u'MaintenanceTypeDlg', u'ChangeButton', u'NewDialog', u'CustomizeDlg', u'1', 4), -(u'MaintenanceTypeDlg', u'ChangeButton', u'[InstallMode]', u'Change', u'1', 1), -(u'MaintenanceTypeDlg', u'ChangeButton', u'[Progress1]', u'Changing', u'1', 2), -(u'MaintenanceTypeDlg', u'ChangeButton', u'[Progress2]', u'changes', 
u'1', 3), -(u'MaintenanceTypeDlg', u'RemoveButton', u'NewDialog', u'VerifyRemoveDlg', u'1', 4), -(u'MaintenanceTypeDlg', u'RemoveButton', u'[InstallMode]', u'Remove', u'1', 1), -(u'MaintenanceTypeDlg', u'RemoveButton', u'[Progress1]', u'Removing', u'1', 2), -(u'MaintenanceTypeDlg', u'RemoveButton', u'[Progress2]', u'removes', u'1', 3), -(u'MaintenanceTypeDlg', u'RepairButton', u'NewDialog', u'VerifyRepairDlg', u'1', 4), -(u'MaintenanceTypeDlg', u'RepairButton', u'[InstallMode]', u'Repair', u'1', 1), -(u'MaintenanceTypeDlg', u'RepairButton', u'[Progress1]', u'Repairing', u'1', 2), -(u'MaintenanceTypeDlg', u'RepairButton', u'[Progress2]', u'repairs', u'1', 3), -(u'MaintenanceWelcomeDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None), -(u'MaintenanceWelcomeDlg', u'Next', u'NewDialog', u'MaintenanceTypeDlg', u'1', 2), -(u'MaintenanceWelcomeDlg', u'Next', u'SpawnWaitDialog', u'WaitForCostingDlg', u'CostingComplete = 1', 1), -(u'OutOfDiskDlg', u'OK', u'EndDialog', u'Return', u'1', None), -(u'OutOfRbDiskDlg', u'No', u'EndDialog', u'Return', u'1', None), -(u'OutOfRbDiskDlg', u'Yes', u'EndDialog', u'Return', u'1', 2), -(u'OutOfRbDiskDlg', u'Yes', u'EnableRollback', u'False', u'1', 1), -(u'ResumeDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None), -(u'ResumeDlg', u'Install', u'EndDialog', u'Return', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND PROMPTROLLBACKCOST="D"', 4), -(u'ResumeDlg', u'Install', u'EndDialog', u'Return', u'OutOfDiskSpace <> 1', 2), -(u'ResumeDlg', u'Install', u'SpawnDialog', u'OutOfDiskDlg', u'(OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 1) OR (OutOfDiskSpace = 1 AND PROMPTROLLBACKCOST="F")', 6), -(u'ResumeDlg', u'Install', u'SpawnDialog', u'OutOfRbDiskDlg', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND (PROMPTROLLBACKCOST="P" OR NOT PROMPTROLLBACKCOST)', 3), -(u'ResumeDlg', u'Install', u'SpawnWaitDialog', u'WaitForCostingDlg', u'CostingComplete = 1', 1), -(u'ResumeDlg', u'Install', u'EnableRollback', u'False', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND PROMPTROLLBACKCOST="D"', 5), -(u'SetupTypeDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None), -(u'SetupTypeDlg', u'Back', u'NewDialog', u'LicenseAgreementDlg', u'ShowUserRegistrationDlg <> 1', None), -(u'SetupTypeDlg', u'Back', u'NewDialog', u'UserRegistrationDlg', u'ShowUserRegistrationDlg = 1', None), -(u'SetupTypeDlg', u'CompleteButton', u'NewDialog', u'VerifyReadyDlg', u'1', 3), -(u'SetupTypeDlg', u'CompleteButton', u'[InstallMode]', u'Complete', u'1', 1), -(u'SetupTypeDlg', u'CompleteButton', u'SetInstallLevel', u'1000', u'1', 2), -(u'SetupTypeDlg', u'CustomButton', u'NewDialog', u'CustomizeDlg', u'1', 2), -(u'SetupTypeDlg', u'CustomButton', u'[InstallMode]', u'Custom', u'1', 1), -(u'SetupTypeDlg', u'TypicalButton', u'NewDialog', u'VerifyReadyDlg', u'1', 3), -(u'SetupTypeDlg', u'TypicalButton', u'[InstallMode]', u'Typical', u'1', 1), -(u'SetupTypeDlg', u'TypicalButton', u'SetInstallLevel', u'3', u'1', 2), -(u'UserRegistrationDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None), -(u'UserRegistrationDlg', u'Back', u'NewDialog', u'LicenseAgreementDlg', u'1', None), -(u'UserRegistrationDlg', u'Next', u'NewDialog', u'SetupTypeDlg', u'ProductID', 3), -(u'UserRegistrationDlg', u'Next', u'ValidateProductID', u'0', u'0', 1), -(u'UserRegistrationDlg', u'Next', u'SpawnWaitDialog', u'WaitForCostingDlg', u'CostingComplete = 1', 2), -(u'VerifyReadyDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None), -(u'VerifyReadyDlg', u'Back', u'NewDialog', u'AdminInstallPointDlg', u'InstallMode = 
"Server Image"', None), -(u'VerifyReadyDlg', u'Back', u'NewDialog', u'CustomizeDlg', u'InstallMode = "Custom" OR InstallMode = "Change"', None), -(u'VerifyReadyDlg', u'Back', u'NewDialog', u'MaintenanceTypeDlg', u'InstallMode = "Repair"', None), -(u'VerifyReadyDlg', u'Back', u'NewDialog', u'SetupTypeDlg', u'InstallMode = "Typical" OR InstallMode = "Complete"', None), -(u'VerifyReadyDlg', u'Install', u'EndDialog', u'Return', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND PROMPTROLLBACKCOST="D"', 3), -(u'VerifyReadyDlg', u'Install', u'EndDialog', u'Return', u'OutOfDiskSpace <> 1', 1), -(u'VerifyReadyDlg', u'Install', u'SpawnDialog', u'OutOfDiskDlg', u'(OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 1) OR (OutOfDiskSpace = 1 AND PROMPTROLLBACKCOST="F")', 5), -(u'VerifyReadyDlg', u'Install', u'SpawnDialog', u'OutOfRbDiskDlg', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND (PROMPTROLLBACKCOST="P" OR NOT PROMPTROLLBACKCOST)', 2), -(u'VerifyReadyDlg', u'Install', u'EnableRollback', u'False', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND PROMPTROLLBACKCOST="D"', 4), -(u'VerifyRemoveDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None), -(u'VerifyRemoveDlg', u'Back', u'NewDialog', u'MaintenanceTypeDlg', u'1', None), -(u'VerifyRemoveDlg', u'Remove', u'Remove', u'All', u'OutOfDiskSpace <> 1', 1), -(u'VerifyRemoveDlg', u'Remove', u'EndDialog', u'Return', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND PROMPTROLLBACKCOST="D"', 4), -(u'VerifyRemoveDlg', u'Remove', u'EndDialog', u'Return', u'OutOfDiskSpace <> 1', 2), -(u'VerifyRemoveDlg', u'Remove', u'SpawnDialog', u'OutOfDiskDlg', u'(OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 1) OR (OutOfDiskSpace = 1 AND PROMPTROLLBACKCOST="F")', 6), -(u'VerifyRemoveDlg', u'Remove', u'SpawnDialog', u'OutOfRbDiskDlg', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND (PROMPTROLLBACKCOST="P" OR NOT PROMPTROLLBACKCOST)', 3), -(u'VerifyRemoveDlg', u'Remove', u'EnableRollback', u'False', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND PROMPTROLLBACKCOST="D"', 5), -(u'VerifyRepairDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None), -(u'VerifyRepairDlg', u'Back', u'NewDialog', u'MaintenanceTypeDlg', u'1', None), -(u'VerifyRepairDlg', u'Repair', u'EndDialog', u'Return', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND PROMPTROLLBACKCOST="D"', 5), -(u'VerifyRepairDlg', u'Repair', u'EndDialog', u'Return', u'OutOfDiskSpace <> 1', 3), -(u'VerifyRepairDlg', u'Repair', u'SpawnDialog', u'OutOfDiskDlg', u'(OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 1) OR (OutOfDiskSpace = 1 AND PROMPTROLLBACKCOST="F")', 7), -(u'VerifyRepairDlg', u'Repair', u'SpawnDialog', u'OutOfRbDiskDlg', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND (PROMPTROLLBACKCOST="P" OR NOT PROMPTROLLBACKCOST)', 4), -(u'VerifyRepairDlg', u'Repair', u'EnableRollback', u'False', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND PROMPTROLLBACKCOST="D"', 6), -(u'VerifyRepairDlg', u'Repair', u'Reinstall', u'All', u'OutOfDiskSpace <> 1', 2), -(u'VerifyRepairDlg', u'Repair', u'ReinstallMode', u'ecmus', u'OutOfDiskSpace <> 1', 1), -(u'WaitForCostingDlg', u'Return', u'EndDialog', u'Exit', u'1', None), -(u'WelcomeDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None), -(u'WelcomeDlg', u'Next', u'NewDialog', u'LicenseAgreementDlg', u'1', None), -] - -Dialog = [ -(u'AdminWelcomeDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'Next', u'Next', u'Cancel'), -(u'ExitDialog', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'Finish', u'Finish', u'Finish'), -(u'FatalError', 50, 50, 370, 
270, 3, u'[ProductName] [Setup]', u'Finish', u'Finish', u'Finish'), -(u'PrepareDlg', 50, 50, 370, 270, 1, u'[ProductName] [Setup]', u'Cancel', u'Cancel', u'Cancel'), -(u'ProgressDlg', 50, 50, 370, 270, 1, u'[ProductName] [Setup]', u'Cancel', u'Cancel', u'Cancel'), -(u'UserExit', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'Finish', u'Finish', u'Finish'), -(u'AdminBrowseDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'PathEdit', u'OK', u'Cancel'), -(u'AdminInstallPointDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'Text', u'Next', u'Cancel'), -(u'AdminRegistrationDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'OrganizationLabel', u'Next', u'Cancel'), -(u'BrowseDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'PathEdit', u'OK', u'Cancel'), -(u'CancelDlg', 50, 10, 260, 85, 3, u'[ProductName] [Setup]', u'No', u'No', u'No'), -(u'CustomizeDlg', 50, 50, 370, 270, 35, u'[ProductName] [Setup]', u'Tree', u'Next', u'Cancel'), -(u'DiskCostDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'OK', u'OK', u'OK'), -(u'ErrorDlg', 50, 10, 270, 105, 65539, u'Installer Information', u'ErrorText', None, None), -(u'FilesInUse', 50, 50, 370, 270, 19, u'[ProductName] [Setup]', u'Retry', u'Retry', u'Retry'), -(u'LicenseAgreementDlg', 50, 50, 370, 270, 3, u'[ProductName] License Agreement', u'Buttons', u'Next', u'Cancel'), -(u'MaintenanceTypeDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'ChangeLabel', u'ChangeButton', u'Cancel'), -(u'MaintenanceWelcomeDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'Next', u'Next', u'Cancel'), -(u'OutOfDiskDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'OK', u'OK', u'OK'), -(u'OutOfRbDiskDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'No', u'No', u'No'), -(u'ResumeDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'Install', u'Install', u'Cancel'), -(u'SetupTypeDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'TypicalLabel', u'TypicalButton', u'Cancel'), -(u'UserRegistrationDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'NameLabel', u'Next', u'Cancel'), -(u'VerifyReadyDlg', 50, 50, 370, 270, 35, u'[ProductName] [Setup]', u'Install', u'Install', u'Cancel'), -(u'VerifyRemoveDlg', 50, 50, 370, 270, 35, u'[ProductName] [Setup]', u'Back', u'Back', u'Cancel'), -(u'VerifyRepairDlg', 50, 50, 370, 270, 35, u'[ProductName] [Setup]', u'Repair', u'Repair', u'Cancel'), -(u'WaitForCostingDlg', 50, 10, 260, 85, 3, u'[ProductName] [Setup]', u'Return', u'Return', u'Return'), -(u'WelcomeDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'Next', u'Next', u'Cancel'), -] - -EventMapping = [ -(u'PrepareDlg', u'ActionData', u'ActionData', u'Text'), -(u'PrepareDlg', u'ActionText', u'ActionText', u'Text'), -(u'ProgressDlg', u'ActionText', u'ActionText', u'Text'), -(u'ProgressDlg', u'ProgressBar', u'SetProgress', u'Progress'), -(u'AdminBrowseDlg', u'DirectoryCombo', u'IgnoreChange', u'IgnoreChange'), -(u'BrowseDlg', u'DirectoryCombo', u'IgnoreChange', u'IgnoreChange'), -(u'CustomizeDlg', u'Next', u'SelectionNoItems', u'Enabled'), -(u'CustomizeDlg', u'Reset', u'SelectionNoItems', u'Enabled'), -(u'CustomizeDlg', u'DiskCost', u'SelectionNoItems', u'Enabled'), -(u'CustomizeDlg', u'ItemDescription', u'SelectionDescription', u'Text'), -(u'CustomizeDlg', u'ItemSize', u'SelectionSize', u'Text'), -(u'CustomizeDlg', u'Location', u'SelectionPath', u'Text'), -(u'CustomizeDlg', u'Location', u'SelectionPathOn', u'Visible'), -(u'CustomizeDlg', u'LocationLabel', u'SelectionPathOn', u'Visible'), -] - -InstallExecuteSequence = [ 
-(u'InstallValidate', None, 1400), -(u'InstallInitialize', None, 1500), -(u'InstallFinalize', None, 6600), -(u'InstallFiles', None, 4000), -(u'FileCost', None, 900), -(u'CostInitialize', None, 800), -(u'CostFinalize', None, 1000), -(u'CreateShortcuts', None, 4500), -(u'PublishComponents', None, 6200), -(u'PublishFeatures', None, 6300), -(u'PublishProduct', None, 6400), -(u'RegisterClassInfo', None, 4600), -(u'RegisterExtensionInfo', None, 4700), -(u'RegisterMIMEInfo', None, 4900), -(u'RegisterProgIdInfo', None, 4800), -(u'ValidateProductID', None, 700), -(u'AllocateRegistrySpace', u'NOT Installed', 1550), -(u'AppSearch', None, 400), -(u'BindImage', None, 4300), -(u'CCPSearch', u'NOT Installed', 500), -(u'CreateFolders', None, 3700), -(u'DeleteServices', u'VersionNT', 2000), -(u'DuplicateFiles', None, 4210), -(u'FindRelatedProducts', None, 200), -(u'InstallODBC', None, 5400), -(u'InstallServices', u'VersionNT', 5800), -(u'LaunchConditions', None, 100), -(u'MigrateFeatureStates', None, 1200), -(u'MoveFiles', None, 3800), -(u'PatchFiles', None, 4090), -(u'ProcessComponents', None, 1600), -(u'RegisterComPlus', None, 5700), -(u'RegisterFonts', None, 5300), -(u'RegisterProduct', None, 6100), -(u'RegisterTypeLibraries', None, 5500), -(u'RegisterUser', None, 6000), -(u'RemoveDuplicateFiles', None, 3400), -(u'RemoveEnvironmentStrings', None, 3300), -(u'RemoveExistingProducts', None, 6700), -(u'RemoveFiles', None, 3500), -(u'RemoveFolders', None, 3600), -(u'RemoveIniValues', None, 3100), -(u'RemoveODBC', None, 2400), -(u'RemoveRegistryValues', None, 2600), -(u'RemoveShortcuts', None, 3200), -(u'RMCCPSearch', u'NOT Installed', 600), -(u'SelfRegModules', None, 5600), -(u'SelfUnregModules', None, 2200), -(u'SetODBCFolders', None, 1100), -(u'StartServices', u'VersionNT', 5900), -(u'StopServices', u'VersionNT', 1900), -(u'UnpublishComponents', None, 1700), -(u'UnpublishFeatures', None, 1800), -(u'UnregisterClassInfo', None, 2700), -(u'UnregisterComPlus', None, 2100), -(u'UnregisterExtensionInfo', None, 2800), -(u'UnregisterFonts', None, 2500), -(u'UnregisterMIMEInfo', None, 3000), -(u'UnregisterProgIdInfo', None, 2900), -(u'UnregisterTypeLibraries', None, 2300), -(u'WriteEnvironmentStrings', None, 5200), -(u'WriteIniValues', None, 5100), -(u'WriteRegistryValues', None, 5000), -] - -InstallUISequence = [ -#(u'FileCost', None, 900), -#(u'CostInitialize', None, 800), -#(u'CostFinalize', None, 1000), -#(u'ExecuteAction', None, 1300), -#(u'ExitDialog', None, -1), -#(u'FatalError', None, -3), -(u'PrepareDlg', None, 140), -(u'ProgressDlg', None, 1280), -#(u'UserExit', None, -2), -(u'MaintenanceWelcomeDlg', u'Installed AND NOT RESUME AND NOT Preselected', 1250), -(u'ResumeDlg', u'Installed AND (RESUME OR Preselected)', 1240), -(u'WelcomeDlg', u'NOT Installed', 1230), -#(u'AppSearch', None, 400), -#(u'CCPSearch', u'NOT Installed', 500), -#(u'FindRelatedProducts', None, 200), -#(u'LaunchConditions', None, 100), -#(u'MigrateFeatureStates', None, 1200), -#(u'RMCCPSearch', u'NOT Installed', 600), -] - -ListView = [ -] - -RadioButton = [ -(u'IAgree', 1, u'Yes', 5, 0, 250, 15, u'{\\DlgFont8}I &accept the terms in the License Agreement', None), -(u'IAgree', 2, u'No', 5, 20, 250, 15, u'{\\DlgFont8}I &do not accept the terms in the License Agreement', None), -] - -TextStyle = [ -(u'DlgFont8', u'Tahoma', 8, None, 0), -(u'DlgFontBold8', u'Tahoma', 8, None, 1), -(u'VerdanaBold13', u'Verdana', 13, None, 1), -] - -UIText = [ -(u'AbsentPath', None), -(u'bytes', u'bytes'), -(u'GB', u'GB'), -(u'KB', u'KB'), -(u'MB', u'MB'), 
-(u'MenuAbsent', u'Entire feature will be unavailable'), -(u'MenuAdvertise', u'Feature will be installed when required'), -(u'MenuAllCD', u'Entire feature will be installed to run from CD'), -(u'MenuAllLocal', u'Entire feature will be installed on local hard drive'), -(u'MenuAllNetwork', u'Entire feature will be installed to run from network'), -(u'MenuCD', u'Will be installed to run from CD'), -(u'MenuLocal', u'Will be installed on local hard drive'), -(u'MenuNetwork', u'Will be installed to run from network'), -(u'ScriptInProgress', u'Gathering required information...'), -(u'SelAbsentAbsent', u'This feature will remain uninstalled'), -(u'SelAbsentAdvertise', u'This feature will be set to be installed when required'), -(u'SelAbsentCD', u'This feature will be installed to run from CD'), -(u'SelAbsentLocal', u'This feature will be installed on the local hard drive'), -(u'SelAbsentNetwork', u'This feature will be installed to run from the network'), -(u'SelAdvertiseAbsent', u'This feature will become unavailable'), -(u'SelAdvertiseAdvertise', u'Will be installed when required'), -(u'SelAdvertiseCD', u'This feature will be available to run from CD'), -(u'SelAdvertiseLocal', u'This feature will be installed on your local hard drive'), -(u'SelAdvertiseNetwork', u'This feature will be available to run from the network'), -(u'SelCDAbsent', u"This feature will be uninstalled completely, you won't be able to run it from CD"), -(u'SelCDAdvertise', u'This feature will change from run from CD state to set to be installed when required'), -(u'SelCDCD', u'This feature will remain to be run from CD'), -(u'SelCDLocal', u'This feature will change from run from CD state to be installed on the local hard drive'), -(u'SelChildCostNeg', u'This feature frees up [1] on your hard drive.'), -(u'SelChildCostPos', u'This feature requires [1] on your hard drive.'), -(u'SelCostPending', u'Compiling cost for this feature...'), -(u'SelLocalAbsent', u'This feature will be completely removed'), -(u'SelLocalAdvertise', u'This feature will be removed from your local hard drive, but will be set to be installed when required'), -(u'SelLocalCD', u'This feature will be removed from your local hard drive, but will be still available to run from CD'), -(u'SelLocalLocal', u'This feature will remain on you local hard drive'), -(u'SelLocalNetwork', u'This feature will be removed from your local hard drive, but will be still available to run from the network'), -(u'SelNetworkAbsent', u"This feature will be uninstalled completely, you won't be able to run it from the network"), -(u'SelNetworkAdvertise', u'This feature will change from run from network state to set to be installed when required'), -(u'SelNetworkLocal', u'This feature will change from run from network state to be installed on the local hard drive'), -(u'SelNetworkNetwork', u'This feature will remain to be run from the network'), -(u'SelParentCostNegNeg', u'This feature frees up [1] on your hard drive. It has [2] of [3] subfeatures selected. The subfeatures free up [4] on your hard drive.'), -(u'SelParentCostNegPos', u'This feature frees up [1] on your hard drive. It has [2] of [3] subfeatures selected. The subfeatures require [4] on your hard drive.'), -(u'SelParentCostPosNeg', u'This feature requires [1] on your hard drive. It has [2] of [3] subfeatures selected. The subfeatures free up [4] on your hard drive.'), -(u'SelParentCostPosPos', u'This feature requires [1] on your hard drive. It has [2] of [3] subfeatures selected. 
The subfeatures require [4] on your hard drive.'), -(u'TimeRemaining', u'Time remaining: {[1] minutes }{[2] seconds}'), -(u'VolumeCostAvailable', u'Available'), -(u'VolumeCostDifference', u'Difference'), -(u'VolumeCostRequired', u'Required'), -(u'VolumeCostSize', u'Disk Size'), -(u'VolumeCostVolume', u'Volume'), -] - -_Validation = [ -(u'AdminExecuteSequence', u'Action', u'N', None, None, None, None, u'Identifier', None, u'Name of action to invoke, either in the engine or the handler DLL.'), -(u'AdminExecuteSequence', u'Sequence', u'Y', -4, 32767, None, None, None, None, u'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.'), -(u'AdminExecuteSequence', u'Condition', u'Y', None, None, None, None, u'Condition', None, u'Optional expression which skips the action if evaluates to expFalse.If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.'), -(u'AdminUISequence', u'Action', u'N', None, None, None, None, u'Identifier', None, u'Name of action to invoke, either in the engine or the handler DLL.'), -(u'AdminUISequence', u'Sequence', u'Y', -4, 32767, None, None, None, None, u'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.'), -(u'AdminUISequence', u'Condition', u'Y', None, None, None, None, u'Condition', None, u'Optional expression which skips the action if evaluates to expFalse.If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.'), -(u'Condition', u'Condition', u'Y', None, None, None, None, u'Condition', None, u'Expression evaluated to determine if Level in the Feature table is to change.'), -(u'Condition', u'Feature_', u'N', None, None, u'Feature', 1, u'Identifier', None, u'Reference to a Feature entry in Feature table.'), -(u'Condition', u'Level', u'N', 0, 32767, None, None, None, None, u'New selection Level to set in Feature table if Condition evaluates to TRUE.'), -(u'AdvtExecuteSequence', u'Action', u'N', None, None, None, None, u'Identifier', None, u'Name of action to invoke, either in the engine or the handler DLL.'), -(u'AdvtExecuteSequence', u'Sequence', u'Y', -4, 32767, None, None, None, None, u'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.'), -(u'AdvtExecuteSequence', u'Condition', u'Y', None, None, None, None, u'Condition', None, u'Optional expression which skips the action if evaluates to expFalse.If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.'), -(u'BBControl', u'Type', u'N', None, None, None, None, u'Identifier', None, u'The type of the control.'), -(u'BBControl', u'BBControl', u'N', None, None, None, None, u'Identifier', None, u'Name of the control. 
This name must be unique within a billboard, but can repeat on different billboard.'), -(u'BBControl', u'Billboard_', u'N', None, None, u'Billboard', 1, u'Identifier', None, u'External key to the Billboard table, name of the billboard.'), -(u'BBControl', u'X', u'N', 0, 32767, None, None, None, None, u'Horizontal coordinate of the upper left corner of the bounding rectangle of the control.'), -(u'BBControl', u'Y', u'N', 0, 32767, None, None, None, None, u'Vertical coordinate of the upper left corner of the bounding rectangle of the control.'), -(u'BBControl', u'Width', u'N', 0, 32767, None, None, None, None, u'Width of the bounding rectangle of the control.'), -(u'BBControl', u'Height', u'N', 0, 32767, None, None, None, None, u'Height of the bounding rectangle of the control.'), -(u'BBControl', u'Attributes', u'Y', 0, 2147483647, None, None, None, None, u'A 32-bit word that specifies the attribute flags to be applied to this control.'), -(u'BBControl', u'Text', u'Y', None, None, None, None, u'Text', None, u'A string used to set the initial text contained within a control (if appropriate).'), -(u'Billboard', u'Action', u'Y', None, None, None, None, u'Identifier', None, u'The name of an action. The billboard is displayed during the progress messages received from this action.'), -(u'Billboard', u'Billboard', u'N', None, None, None, None, u'Identifier', None, u'Name of the billboard.'), -(u'Billboard', u'Feature_', u'N', None, None, u'Feature', 1, u'Identifier', None, u'An external key to the Feature Table. The billboard is shown only if this feature is being installed.'), -(u'Billboard', u'Ordering', u'Y', 0, 32767, None, None, None, None, u'A positive integer. If there is more than one billboard corresponding to an action they will be shown in the order defined by this column.'), -(u'Binary', u'Name', u'N', None, None, None, None, u'Identifier', None, u'Unique key identifying the binary data.'), -(u'Binary', u'Data', u'N', None, None, None, None, u'Binary', None, u'The unformatted binary data.'), -(u'CheckBox', u'Property', u'N', None, None, None, None, u'Identifier', None, u'A named property to be tied to the item.'), -(u'CheckBox', u'Value', u'Y', None, None, None, None, u'Formatted', None, u'The value string associated with the item.'), -(u'Property', u'Property', u'N', None, None, None, None, u'Identifier', None, u'Name of property, uppercase if settable by launcher or loader.'), -(u'Property', u'Value', u'N', None, None, None, None, u'Text', None, u'String value for property. Never null or empty.'), -(u'ComboBox', u'Text', u'Y', None, None, None, None, u'Formatted', None, u'The visible text to be assigned to the item. Optional. If this entry or the entire column is missing, the text is the same as the value.'), -(u'ComboBox', u'Property', u'N', None, None, None, None, u'Identifier', None, u'A named property to be tied to this item. All the items tied to the same property become part of the same combobox.'), -(u'ComboBox', u'Value', u'N', None, None, None, None, u'Formatted', None, u'The value string associated with this item. 
Selecting the line will set the associated property to this value.'), -(u'ComboBox', u'Order', u'N', 1, 32767, None, None, None, None, u'A positive integer used to determine the ordering of the items within one list.\tThe integers do not have to be consecutive.'), -(u'Control', u'Type', u'N', None, None, None, None, u'Identifier', None, u'The type of the control.'), -(u'Control', u'X', u'N', 0, 32767, None, None, None, None, u'Horizontal coordinate of the upper left corner of the bounding rectangle of the control.'), -(u'Control', u'Y', u'N', 0, 32767, None, None, None, None, u'Vertical coordinate of the upper left corner of the bounding rectangle of the control.'), -(u'Control', u'Width', u'N', 0, 32767, None, None, None, None, u'Width of the bounding rectangle of the control.'), -(u'Control', u'Height', u'N', 0, 32767, None, None, None, None, u'Height of the bounding rectangle of the control.'), -(u'Control', u'Attributes', u'Y', 0, 2147483647, None, None, None, None, u'A 32-bit word that specifies the attribute flags to be applied to this control.'), -(u'Control', u'Text', u'Y', None, None, None, None, u'Formatted', None, u'A string used to set the initial text contained within a control (if appropriate).'), -(u'Control', u'Property', u'Y', None, None, None, None, u'Identifier', None, u'The name of a defined property to be linked to this control. '), -(u'Control', u'Control', u'N', None, None, None, None, u'Identifier', None, u'Name of the control. This name must be unique within a dialog, but can repeat on different dialogs. '), -(u'Control', u'Dialog_', u'N', None, None, u'Dialog', 1, u'Identifier', None, u'External key to the Dialog table, name of the dialog.'), -(u'Control', u'Control_Next', u'Y', None, None, u'Control', 2, u'Identifier', None, u'The name of an other control on the same dialog. This link defines the tab order of the controls. The links have to form one or more cycles!'), -(u'Control', u'Help', u'Y', None, None, None, None, u'Text', None, u'The help strings used with the button. The text is optional. '), -(u'Icon', u'Name', u'N', None, None, None, None, u'Identifier', None, u'Primary key. Name of the icon file.'), -(u'Icon', u'Data', u'N', None, None, None, None, u'Binary', None, u'Binary stream. The binary icon data in PE (.DLL or .EXE) or icon (.ICO) format.'), -(u'ListBox', u'Text', u'Y', None, None, None, None, u'Text', None, u'The visible text to be assigned to the item. Optional. If this entry or the entire column is missing, the text is the same as the value.'), -(u'ListBox', u'Property', u'N', None, None, None, None, u'Identifier', None, u'A named property to be tied to this item. All the items tied to the same property become part of the same listbox.'), -(u'ListBox', u'Value', u'N', None, None, None, None, u'Formatted', None, u'The value string associated with this item. 
Selecting the line will set the associated property to this value.'), -(u'ListBox', u'Order', u'N', 1, 32767, None, None, None, None, u'A positive integer used to determine the ordering of the items within one list..The integers do not have to be consecutive.'), -(u'ActionText', u'Action', u'N', None, None, None, None, u'Identifier', None, u'Name of action to be described.'), -(u'ActionText', u'Description', u'Y', None, None, None, None, u'Text', None, u'Localized description displayed in progress dialog and log when action is executing.'), -(u'ActionText', u'Template', u'Y', None, None, None, None, u'Template', None, u'Optional localized format template used to format action data records for display during action execution.'), -(u'ControlCondition', u'Action', u'N', None, None, None, None, None, u'Default;Disable;Enable;Hide;Show', u'The desired action to be taken on the specified control.'), -(u'ControlCondition', u'Condition', u'N', None, None, None, None, u'Condition', None, u'A standard conditional statement that specifies under which conditions the action should be triggered.'), -(u'ControlCondition', u'Dialog_', u'N', None, None, u'Dialog', 1, u'Identifier', None, u'A foreign key to the Dialog table, name of the dialog.'), -(u'ControlCondition', u'Control_', u'N', None, None, u'Control', 2, u'Identifier', None, u'A foreign key to the Control table, name of the control.'), -(u'ControlEvent', u'Condition', u'Y', None, None, None, None, u'Condition', None, u'A standard conditional statement that specifies under which conditions an event should be triggered.'), -(u'ControlEvent', u'Ordering', u'Y', 0, 2147483647, None, None, None, None, u'An integer used to order several events tied to the same control. Can be left blank.'), -(u'ControlEvent', u'Dialog_', u'N', None, None, u'Dialog', 1, u'Identifier', None, u'A foreign key to the Dialog table, name of the dialog.'), -(u'ControlEvent', u'Control_', u'N', None, None, u'Control', 2, u'Identifier', None, u'A foreign key to the Control table, name of the control'), -(u'ControlEvent', u'Event', u'N', None, None, None, None, u'Formatted', None, u'An identifier that specifies the type of the event that should take place when the user interacts with control specified by the first two entries.'), -(u'ControlEvent', u'Argument', u'N', None, None, None, None, u'Formatted', None, u'A value to be used as a modifier when triggering a particular event.'), -(u'Dialog', u'Width', u'N', 0, 32767, None, None, None, None, u'Width of the bounding rectangle of the dialog.'), -(u'Dialog', u'Height', u'N', 0, 32767, None, None, None, None, u'Height of the bounding rectangle of the dialog.'), -(u'Dialog', u'Attributes', u'Y', 0, 2147483647, None, None, None, None, u'A 32-bit word that specifies the attribute flags to be applied to this dialog.'), -(u'Dialog', u'Title', u'Y', None, None, None, None, u'Formatted', None, u"A text string specifying the title to be displayed in the title bar of the dialog's window."), -(u'Dialog', u'Dialog', u'N', None, None, None, None, u'Identifier', None, u'Name of the dialog.'), -(u'Dialog', u'HCentering', u'N', 0, 100, None, None, None, None, u'Horizontal position of the dialog on a 0-100 scale. 0 means left end, 100 means right end of the screen, 50 center.'), -(u'Dialog', u'VCentering', u'N', 0, 100, None, None, None, None, u'Vertical position of the dialog on a 0-100 scale. 
0 means top end, 100 means bottom end of the screen, 50 center.'), -(u'Dialog', u'Control_First', u'N', None, None, u'Control', 2, u'Identifier', None, u'Defines the control that has the focus when the dialog is created.'), -(u'Dialog', u'Control_Default', u'Y', None, None, u'Control', 2, u'Identifier', None, u'Defines the default control. Hitting return is equivalent to pushing this button.'), -(u'Dialog', u'Control_Cancel', u'Y', None, None, u'Control', 2, u'Identifier', None, u'Defines the cancel control. Hitting escape or clicking on the close icon on the dialog is equivalent to pushing this button.'), -(u'EventMapping', u'Dialog_', u'N', None, None, u'Dialog', 1, u'Identifier', None, u'A foreign key to the Dialog table, name of the Dialog.'), -(u'EventMapping', u'Control_', u'N', None, None, u'Control', 2, u'Identifier', None, u'A foreign key to the Control table, name of the control.'), -(u'EventMapping', u'Event', u'N', None, None, None, None, u'Identifier', None, u'An identifier that specifies the type of the event that the control subscribes to.'), -(u'EventMapping', u'Attribute', u'N', None, None, None, None, u'Identifier', None, u'The name of the control attribute, that is set when this event is received.'), -(u'InstallExecuteSequence', u'Action', u'N', None, None, None, None, u'Identifier', None, u'Name of action to invoke, either in the engine or the handler DLL.'), -(u'InstallExecuteSequence', u'Sequence', u'Y', -4, 32767, None, None, None, None, u'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.'), -(u'InstallExecuteSequence', u'Condition', u'Y', None, None, None, None, u'Condition', None, u'Optional expression which skips the action if evaluates to expFalse.If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.'), -(u'AppSearch', u'Property', u'N', None, None, None, None, u'Identifier', None, u'The property associated with a Signature'), -(u'AppSearch', u'Signature_', u'N', None, None, u'Signature;RegLocator;IniLocator;DrLocator;CompLocator', 1, u'Identifier', None, u'The Signature_ represents a unique file signature and is also the foreign key in the Signature, RegLocator, IniLocator, CompLocator and the DrLocator tables.'), -(u'BindImage', u'File_', u'N', None, None, u'File', 1, u'Identifier', None, u'The index into the File table. This must be an executable file.'), -(u'BindImage', u'Path', u'Y', None, None, None, None, u'Paths', None, u'A list of ; delimited paths that represent the paths to be searched for the import DLLS. The list is usually a list of properties each enclosed within square brackets [] .'), -(u'CCPSearch', u'Signature_', u'N', None, None, u'Signature;RegLocator;IniLocator;DrLocator;CompLocator', 1, u'Identifier', None, u'The Signature_ represents a unique file signature and is also the foreign key in the Signature, RegLocator, IniLocator, CompLocator and the DrLocator tables.'), -(u'InstallUISequence', u'Action', u'N', None, None, None, None, u'Identifier', None, u'Name of action to invoke, either in the engine or the handler DLL.'), -(u'InstallUISequence', u'Sequence', u'Y', -4, 32767, None, None, None, None, u'Number that determines the sort order in which the actions are to be executed. 
Leave blank to suppress action.'), -(u'InstallUISequence', u'Condition', u'Y', None, None, None, None, u'Condition', None, u'Optional expression which skips the action if evaluates to expFalse.If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.'), -(u'ListView', u'Text', u'Y', None, None, None, None, u'Text', None, u'The visible text to be assigned to the item. Optional. If this entry or the entire column is missing, the text is the same as the value.'), -(u'ListView', u'Property', u'N', None, None, None, None, u'Identifier', None, u'A named property to be tied to this item. All the items tied to the same property become part of the same listview.'), -(u'ListView', u'Value', u'N', None, None, None, None, u'Identifier', None, u'The value string associated with this item. Selecting the line will set the associated property to this value.'), -(u'ListView', u'Order', u'N', 1, 32767, None, None, None, None, u'A positive integer used to determine the ordering of the items within one list..The integers do not have to be consecutive.'), -(u'ListView', u'Binary_', u'Y', None, None, u'Binary', 1, u'Identifier', None, u'The name of the icon to be displayed with the icon. The binary information is looked up from the Binary Table.'), -(u'RadioButton', u'X', u'N', 0, 32767, None, None, None, None, u'The horizontal coordinate of the upper left corner of the bounding rectangle of the radio button.'), -(u'RadioButton', u'Y', u'N', 0, 32767, None, None, None, None, u'The vertical coordinate of the upper left corner of the bounding rectangle of the radio button.'), -(u'RadioButton', u'Width', u'N', 0, 32767, None, None, None, None, u'The width of the button.'), -(u'RadioButton', u'Height', u'N', 0, 32767, None, None, None, None, u'The height of the button.'), -(u'RadioButton', u'Text', u'Y', None, None, None, None, u'Text', None, u'The visible title to be assigned to the radio button.'), -(u'RadioButton', u'Property', u'N', None, None, None, None, u'Identifier', None, u'A named property to be tied to this radio button. All the buttons tied to the same property become part of the same group.'), -(u'RadioButton', u'Value', u'N', None, None, None, None, u'Formatted', None, u'The value string associated with this button. Selecting the button will set the associated property to this value.'), -(u'RadioButton', u'Order', u'N', 1, 32767, None, None, None, None, u'A positive integer used to determine the ordering of the items within one list..The integers do not have to be consecutive.'), -(u'RadioButton', u'Help', u'Y', None, None, None, None, u'Text', None, u'The help strings used with the button. The text is optional.'), -(u'TextStyle', u'TextStyle', u'N', None, None, None, None, u'Identifier', None, u'Name of the style. The primary key of this table. This name is embedded in the texts to indicate a style change.'), -(u'TextStyle', u'FaceName', u'N', None, None, None, None, u'Text', None, u'A string indicating the name of the font used. Required. The string must be at most 31 characters long.'), -(u'TextStyle', u'Size', u'N', 0, 32767, None, None, None, None, u'The size of the font used. This size is given in our units (1/12 of the system font height). 
Assuming that the system font is set to 12 point size, this is equivalent to the point size.'), -(u'TextStyle', u'Color', u'Y', 0, 16777215, None, None, None, None, u'A long integer indicating the color of the string in the RGB format (Red, Green, Blue each 0-255, RGB = R + 256*G + 256^2*B).'), -(u'TextStyle', u'StyleBits', u'Y', 0, 15, None, None, None, None, u'A combination of style bits.'), -(u'UIText', u'Text', u'Y', None, None, None, None, u'Text', None, u'The localized version of the string.'), -(u'UIText', u'Key', u'N', None, None, None, None, u'Identifier', None, u'A unique key that identifies the particular string.'), -(u'_Validation', u'Table', u'N', None, None, None, None, u'Identifier', None, u'Name of table'), -(u'_Validation', u'Description', u'Y', None, None, None, None, u'Text', None, u'Description of column'), -(u'_Validation', u'Column', u'N', None, None, None, None, u'Identifier', None, u'Name of column'), -(u'_Validation', u'Nullable', u'N', None, None, None, None, None, u'Y;N;@', u'Whether the column is nullable'), -(u'_Validation', u'MinValue', u'Y', -2147483647, 2147483647, None, None, None, None, u'Minimum value allowed'), -(u'_Validation', u'MaxValue', u'Y', -2147483647, 2147483647, None, None, None, None, u'Maximum value allowed'), -(u'_Validation', u'KeyTable', u'Y', None, None, None, None, u'Identifier', None, u'For foreign key, Name of table to which data must link'), -(u'_Validation', u'KeyColumn', u'Y', 1, 32, None, None, None, None, u'Column to which foreign key connects'), -(u'_Validation', u'Category', u'Y', None, None, None, None, None, u'Text;Formatted;Template;Condition;Guid;Path;Version;Language;Identifier;Binary;UpperCase;LowerCase;Filename;Paths;AnyPath;WildCardFilename;RegPath;KeyFormatted;CustomSource;Property;Cabinet;Shortcut;URL', u'String category'), -(u'_Validation', u'Set', u'Y', None, None, None, None, u'Text', None, u'Set of values that are permitted'), -(u'AdvtUISequence', u'Action', u'N', None, None, None, None, u'Identifier', None, u'Name of action to invoke, either in the engine or the handler DLL.'), -(u'AdvtUISequence', u'Sequence', u'Y', -4, 32767, None, None, None, None, u'Number that determines the sort order in which the actions are to be executed. 
Leave blank to suppress action.'), -(u'AdvtUISequence', u'Condition', u'Y', None, None, None, None, u'Condition', None, u'Optional expression which skips the action if evaluates to expFalse.If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.'), -(u'AppId', u'AppId', u'N', None, None, None, None, u'Guid', None, None), -(u'AppId', u'ActivateAtStorage', u'Y', 0, 1, None, None, None, None, None), -(u'AppId', u'DllSurrogate', u'Y', None, None, None, None, u'Text', None, None), -(u'AppId', u'LocalService', u'Y', None, None, None, None, u'Text', None, None), -(u'AppId', u'RemoteServerName', u'Y', None, None, None, None, u'Formatted', None, None), -(u'AppId', u'RunAsInteractiveUser', u'Y', 0, 1, None, None, None, None, None), -(u'AppId', u'ServiceParameters', u'Y', None, None, None, None, u'Text', None, None), -(u'Feature', u'Attributes', u'N', None, None, None, None, None, u'0;1;2;4;5;6;8;9;10;16;17;18;20;21;22;24;25;26;32;33;34;36;37;38;48;49;50;52;53;54', u'Feature attributes'), -(u'Feature', u'Description', u'Y', None, None, None, None, u'Text', None, u'Longer descriptive text describing a visible feature item.'), -(u'Feature', u'Title', u'Y', None, None, None, None, u'Text', None, u'Short text identifying a visible feature item.'), -(u'Feature', u'Feature', u'N', None, None, None, None, u'Identifier', None, u'Primary key used to identify a particular feature record.'), -(u'Feature', u'Directory_', u'Y', None, None, u'Directory', 1, u'UpperCase', None, u'The name of the Directory that can be configured by the UI. A non-null value will enable the browse button.'), -(u'Feature', u'Level', u'N', 0, 32767, None, None, None, None, u'The install level at which record will be initially selected. An install level of 0 will disable an item and prevent its display.'), -(u'Feature', u'Display', u'Y', 0, 32767, None, None, None, None, u'Numeric sort order, used to force a specific display ordering.'), -(u'Feature', u'Feature_Parent', u'Y', None, None, u'Feature', 1, u'Identifier', None, u'Optional key of a parent record in the same table. If the parent is not selected, then the record will not be installed. Null indicates a root item.'), -(u'File', u'Sequence', u'N', 1, 32767, None, None, None, None, u'Sequence with respect to the media images; order must track cabinet order.'), -(u'File', u'Attributes', u'Y', 0, 32767, None, None, None, None, u'Integer containing bit flags representing file attributes (with the decimal value of each bit position in parentheses)'), -(u'File', u'File', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized token, must match identifier in cabinet. For uncompressed files, this field is ignored.'), -(u'File', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key referencing Component that controls the file.'), -(u'File', u'FileName', u'N', None, None, None, None, u'Filename', None, u'File name used for installation, may be localized. 
This may contain a "short name|long name" pair.'), -(u'File', u'FileSize', u'N', 0, 2147483647, None, None, None, None, u'Size of file in bytes (long integer).'), -(u'File', u'Language', u'Y', None, None, None, None, u'Language', None, u'List of decimal language Ids, comma-separated if more than one.'), -(u'File', u'Version', u'Y', None, None, u'File', 1, u'Version', None, u'Version string for versioned files; Blank for unversioned files.'), -(u'Class', u'Attributes', u'Y', None, 32767, None, None, None, None, u'Class registration attributes.'), -(u'Class', u'Feature_', u'N', None, None, u'Feature', 1, u'Identifier', None, u'Required foreign key into the Feature Table, specifying the feature to validate or install in order for the CLSID factory to be operational.'), -(u'Class', u'Description', u'Y', None, None, None, None, u'Text', None, u'Localized description for the Class.'), -(u'Class', u'Argument', u'Y', None, None, None, None, u'Formatted', None, u'optional argument for LocalServers.'), -(u'Class', u'AppId_', u'Y', None, None, u'AppId', 1, u'Guid', None, u'Optional AppID containing DCOM information for associated application (string GUID).'), -(u'Class', u'CLSID', u'N', None, None, None, None, u'Guid', None, u'The CLSID of an OLE factory.'), -(u'Class', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Required foreign key into the Component Table, specifying the component for which to return a path when called through LocateComponent.'), -(u'Class', u'Context', u'N', None, None, None, None, u'Identifier', None, u'The numeric server context for this server. CLSCTX_xxxx'), -(u'Class', u'DefInprocHandler', u'Y', None, None, None, None, u'Filename', u'1;2;3', u'Optional default inproc handler. Only optionally provided if Context=CLSCTX_LOCAL_SERVER. Typically "ole32.dll" or "mapi32.dll"'), -(u'Class', u'FileTypeMask', u'Y', None, None, None, None, u'Text', None, u'Optional string containing information for the HKCRthis CLSID) key. If multiple patterns exist, they must be delimited by a semicolon, and numeric subkeys will be generated: 0,1,2...'), -(u'Class', u'Icon_', u'Y', None, None, u'Icon', 1, u'Identifier', None, u'Optional foreign key into the Icon Table, specifying the icon file associated with this CLSID. Will be written under the DefaultIcon key.'), -(u'Class', u'IconIndex', u'Y', -32767, 32767, None, None, None, None, u'Optional icon index.'), -(u'Class', u'ProgId_Default', u'Y', None, None, u'ProgId', 1, u'Text', None, u'Optional ProgId associated with this CLSID.'), -(u'Component', u'Condition', u'Y', None, None, None, None, u'Condition', None, u"A conditional statement that will disable this component if the specified condition evaluates to the 'True' state. If a component is disabled, it will not be installed, regardless of the 'Action' state associated with the component."), -(u'Component', u'Attributes', u'N', None, None, None, None, None, None, u'Remote execution option, one of irsEnum'), -(u'Component', u'Component', u'N', None, None, None, None, u'Identifier', None, u'Primary key used to identify a particular component record.'), -(u'Component', u'ComponentId', u'Y', None, None, None, None, u'Guid', None, u'A string GUID unique to this component, version, and language.'), -(u'Component', u'Directory_', u'N', None, None, u'Directory', 1, u'Identifier', None, u'Required key of a Directory table record. 
This is actually a property name whose value contains the actual path, set either by the AppSearch action or with the default setting obtained from the Directory table.'), -(u'Component', u'KeyPath', u'Y', None, None, u'File;Registry;ODBCDataSource', 1, u'Identifier', None, u'Either the primary key into the File table, Registry table, or ODBCDataSource table. This extract path is stored when the component is installed, and is used to detect the presence of the component and to return the path to it.'), -(u'ProgId', u'Description', u'Y', None, None, None, None, u'Text', None, u'Localized description for the Program identifier.'), -(u'ProgId', u'Icon_', u'Y', None, None, u'Icon', 1, u'Identifier', None, u'Optional foreign key into the Icon Table, specifying the icon file associated with this ProgId. Will be written under the DefaultIcon key.'), -(u'ProgId', u'IconIndex', u'Y', -32767, 32767, None, None, None, None, u'Optional icon index.'), -(u'ProgId', u'ProgId', u'N', None, None, None, None, u'Text', None, u'The Program Identifier. Primary key.'), -(u'ProgId', u'Class_', u'Y', None, None, u'Class', 1, u'Guid', None, u'The CLSID of an OLE factory corresponding to the ProgId.'), -(u'ProgId', u'ProgId_Parent', u'Y', None, None, u'ProgId', 1, u'Text', None, u'The Parent Program Identifier. If specified, the ProgId column becomes a version independent prog id.'), -(u'CompLocator', u'Type', u'Y', 0, 1, None, None, None, None, u'A boolean value that determines if the registry value is a filename or a directory location.'), -(u'CompLocator', u'Signature_', u'N', None, None, None, None, u'Identifier', None, u'The table key. The Signature_ represents a unique file signature and is also the foreign key in the Signature table.'), -(u'CompLocator', u'ComponentId', u'N', None, None, None, None, u'Guid', None, u'A string GUID unique to this component, version, and language.'), -(u'Complus', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key referencing Component that controls the ComPlus component.'), -(u'Complus', u'ExpType', u'Y', 0, 32767, None, None, None, None, u'ComPlus component attributes.'), -(u'Directory', u'Directory', u'N', None, None, None, None, u'Identifier', None, u'Unique identifier for directory entry, primary key. If a property by this name is defined, it contains the full path to the directory.'), -(u'Directory', u'DefaultDir', u'N', None, None, None, None, u'DefaultDir', None, u"The default sub-path under parent's path."), -(u'Directory', u'Directory_Parent', u'Y', None, None, u'Directory', 1, u'Identifier', None, u'Reference to the entry in this table specifying the default parent directory. 
A record parented to itself or with a Null parent represents a root of the install tree.'), -(u'CreateFolder', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into the Component table.'), -(u'CreateFolder', u'Directory_', u'N', None, None, u'Directory', 1, u'Identifier', None, u'Primary key, could be foreign key into the Directory table.'), -(u'CustomAction', u'Type', u'N', 1, 16383, None, None, None, None, u'The numeric custom action type, consisting of source location, code type, entry, option flags.'), -(u'CustomAction', u'Action', u'N', None, None, None, None, u'Identifier', None, u'Primary key, name of action, normally appears in sequence table unless private use.'), -(u'CustomAction', u'Source', u'Y', None, None, None, None, u'CustomSource', None, u'The table reference of the source of the code.'), -(u'CustomAction', u'Target', u'Y', None, None, None, None, u'Formatted', None, u'Execution parameter, depends on the type of custom action'), -(u'DrLocator', u'Signature_', u'N', None, None, None, None, u'Identifier', None, u'The Signature_ represents a unique file signature and is also the foreign key in the Signature table.'), -(u'DrLocator', u'Path', u'Y', None, None, None, None, u'AnyPath', None, u'The path on the user system. This is either a subpath below the value of the Parent or a full path. The path may contain properties enclosed within [ ] that will be expanded.'), -(u'DrLocator', u'Depth', u'Y', 0, 32767, None, None, None, None, u'The depth below the path to which the Signature_ is recursively searched. If absent, the depth is assumed to be 0.'), -(u'DrLocator', u'Parent', u'Y', None, None, None, None, u'Identifier', None, u'The parent file signature. It is also a foreign key in the Signature table. If null and the Path column does not expand to a full path, then all the fixed drives of the user system are searched using the Path.'), -(u'DuplicateFile', u'File_', u'N', None, None, u'File', 1, u'Identifier', None, u'Foreign key referencing the source file to be duplicated.'), -(u'DuplicateFile', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key referencing Component that controls the duplicate file.'), -(u'DuplicateFile', u'DestFolder', u'Y', None, None, None, None, u'Identifier', None, u'Name of a property whose value is assumed to resolve to the full pathname to a destination folder.'), -(u'DuplicateFile', u'DestName', u'Y', None, None, None, None, u'Filename', None, u'Filename to be given to the duplicate file.'), -(u'DuplicateFile', u'FileKey', u'N', None, None, None, None, u'Identifier', None, u'Primary key used to identify a particular file entry'), -(u'Environment', u'Name', u'N', None, None, None, None, u'Text', None, u'The name of the environmental value.'), -(u'Environment', u'Value', u'Y', None, None, None, None, u'Formatted', None, u'The value to set in the environmental settings.'), -(u'Environment', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into the Component table referencing component that controls the installing of the environmental value.'), -(u'Environment', u'Environment', u'N', None, None, None, None, u'Identifier', None, u'Unique identifier for the environmental variable setting'), -(u'Error', u'Error', u'N', 0, 32767, None, None, None, None, u'Integer error number, obtained from header file IError(...) macros.'), -(u'Error', u'Message', u'Y', None, None, None, None, u'Template', None, u'Error formatting template, obtained from user ed.
or localizers.'), -(u'Extension', u'Feature_', u'N', None, None, u'Feature', 1, u'Identifier', None, u'Required foreign key into the Feature Table, specifying the feature to validate or install in order for the CLSID factory to be operational.'), -(u'Extension', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Required foreign key into the Component Table, specifying the component for which to return a path when called through LocateComponent.'), -(u'Extension', u'Extension', u'N', None, None, None, None, u'Text', None, u'The extension associated with the table row.'), -(u'Extension', u'MIME_', u'Y', None, None, u'MIME', 1, u'Text', None, u'Optional Context identifier, typically "type/format" associated with the extension'), -(u'Extension', u'ProgId_', u'Y', None, None, u'ProgId', 1, u'Text', None, u'Optional ProgId associated with this extension.'), -(u'MIME', u'CLSID', u'Y', None, None, None, None, u'Guid', None, u'Optional associated CLSID.'), -(u'MIME', u'ContentType', u'N', None, None, None, None, u'Text', None, u'Primary key. Context identifier, typically "type/format".'), -(u'MIME', u'Extension_', u'N', None, None, u'Extension', 1, u'Text', None, u'Optional associated extension (without dot)'), -(u'FeatureComponents', u'Feature_', u'N', None, None, u'Feature', 1, u'Identifier', None, u'Foreign key into Feature table.'), -(u'FeatureComponents', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into Component table.'), -(u'FileSFPCatalog', u'File_', u'N', None, None, u'File', 1, u'Identifier', None, u'File associated with the catalog'), -(u'FileSFPCatalog', u'SFPCatalog_', u'N', None, None, u'SFPCatalog', 1, u'Filename', None, u'Catalog associated with the file'), -(u'SFPCatalog', u'SFPCatalog', u'N', None, None, None, None, u'Filename', None, u'File name for the catalog.'), -(u'SFPCatalog', u'Catalog', u'N', None, None, None, None, u'Binary', None, u'SFP Catalog'), -(u'SFPCatalog', u'Dependency', u'Y', None, None, None, None, u'Formatted', None, u'Parent catalog - only used by SFP'), -(u'Font', u'File_', u'N', None, None, u'File', 1, u'Identifier', None, u'Primary key, foreign key into File table referencing font file.'), -(u'Font', u'FontTitle', u'Y', None, None, None, None, u'Text', None, u'Font name.'), -(u'IniFile', u'Action', u'N', None, None, None, None, None, u'0;1;3', u'The type of modification to be made, one of iifEnum'), -(u'IniFile', u'Value', u'N', None, None, None, None, u'Formatted', None, u'The value to be written.'), -(u'IniFile', u'Key', u'N', None, None, None, None, u'Formatted', None, u'The .INI file key below Section.'), -(u'IniFile', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into the Component table referencing component that controls the installing of the .INI value.'), -(u'IniFile', u'FileName', u'N', None, None, None, None, u'Filename', None, u'The .INI file name in which to write the information'), -(u'IniFile', u'IniFile', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized token.'), -(u'IniFile', u'DirProperty', u'Y', None, None, None, None, u'Identifier', None, u'Foreign key into the Directory table denoting the directory where the .INI file is.'), -(u'IniFile', u'Section', u'N', None, None, None, None, u'Formatted', None, u'The .INI file Section.'), -(u'IniLocator', u'Type', u'Y', 0, 2, None, None, None, None, u'An integer value that determines if the .INI value read is a filename or a directory location or to be used as is 
w/o interpretation.'), -(u'IniLocator', u'Key', u'N', None, None, None, None, u'Text', None, u'Key value (followed by an equals sign in INI file).'), -(u'IniLocator', u'Signature_', u'N', None, None, None, None, u'Identifier', None, u'The table key. The Signature_ represents a unique file signature and is also the foreign key in the Signature table.'), -(u'IniLocator', u'FileName', u'N', None, None, None, None, u'Filename', None, u'The .INI file name.'), -(u'IniLocator', u'Section', u'N', None, None, None, None, u'Text', None, u'Section name within in file (within square brackets in INI file).'), -(u'IniLocator', u'Field', u'Y', 0, 32767, None, None, None, None, u'The field in the .INI line. If Field is null or 0 the entire line is read.'), -(u'IsolatedComponent', u'Component_Application', u'N', None, None, u'Component', 1, u'Identifier', None, u'Key to Component table item for application'), -(u'IsolatedComponent', u'Component_Shared', u'N', None, None, u'Component', 1, u'Identifier', None, u'Key to Component table item to be isolated'), -(u'LaunchCondition', u'Condition', u'N', None, None, None, None, u'Condition', None, u'Expression which must evaluate to TRUE in order for install to commence.'), -(u'LaunchCondition', u'Description', u'N', None, None, None, None, u'Formatted', None, u'Localizable text to display when condition fails and install must abort.'), -(u'LockPermissions', u'Table', u'N', None, None, None, None, u'Identifier', u'Directory;File;Registry', u'Reference to another table name'), -(u'LockPermissions', u'Domain', u'Y', None, None, None, None, u'Formatted', None, u'Domain name for user whose permissions are being set. (usually a property)'), -(u'LockPermissions', u'LockObject', u'N', None, None, None, None, u'Identifier', None, u'Foreign key into Registry or File table'), -(u'LockPermissions', u'Permission', u'Y', -2147483647, 2147483647, None, None, None, None, u'Permission Access mask. Full Control = 268435456 (GENERIC_ALL = 0x10000000)'), -(u'LockPermissions', u'User', u'N', None, None, None, None, u'Formatted', None, u'User for permissions to be set. (usually a property)'), -(u'Media', u'Source', u'Y', None, None, None, None, u'Property', None, u'The property defining the location of the cabinet file.'), -(u'Media', u'Cabinet', u'Y', None, None, None, None, u'Cabinet', None, u'If some or all of the files stored on the media are compressed in a cabinet, the name of that cabinet.'), -(u'Media', u'DiskId', u'N', 1, 32767, None, None, None, None, u'Primary key, integer to determine sort order for table.'), -(u'Media', u'DiskPrompt', u'Y', None, None, None, None, u'Text', None, u'Disk name: the visible text actually printed on the disk. 
This will be used to prompt the user when this disk needs to be inserted.'), -(u'Media', u'LastSequence', u'N', 0, 32767, None, None, None, None, u'File sequence number for the last file for this media.'), -(u'Media', u'VolumeLabel', u'Y', None, None, None, None, u'Text', None, u'The label attributed to the volume.'), -(u'ModuleComponents', u'Component', u'N', None, None, u'Component', 1, u'Identifier', None, u'Component contained in the module.'), -(u'ModuleComponents', u'Language', u'N', None, None, u'ModuleSignature', 2, None, None, u'Default language ID for module (may be changed by transform).'), -(u'ModuleComponents', u'ModuleID', u'N', None, None, u'ModuleSignature', 1, u'Identifier', None, u'Module containing the component.'), -(u'ModuleSignature', u'Language', u'N', None, None, None, None, None, None, u'Default decimal language of module.'), -(u'ModuleSignature', u'Version', u'N', None, None, None, None, u'Version', None, u'Version of the module.'), -(u'ModuleSignature', u'ModuleID', u'N', None, None, None, None, u'Identifier', None, u'Module identifier (String.GUID).'), -(u'ModuleDependency', u'ModuleID', u'N', None, None, u'ModuleSignature', 1, u'Identifier', None, u'Module requiring the dependency.'), -(u'ModuleDependency', u'ModuleLanguage', u'N', None, None, u'ModuleSignature', 2, None, None, u'Language of module requiring the dependency.'), -(u'ModuleDependency', u'RequiredID', u'N', None, None, None, None, None, None, u'String.GUID of required module.'), -(u'ModuleDependency', u'RequiredLanguage', u'N', None, None, None, None, None, None, u'LanguageID of the required module.'), -(u'ModuleDependency', u'RequiredVersion', u'Y', None, None, None, None, u'Version', None, u'Version of the required version.'), -(u'ModuleExclusion', u'ModuleID', u'N', None, None, u'ModuleSignature', 1, u'Identifier', None, u'String.GUID of module with exclusion requirement.'), -(u'ModuleExclusion', u'ModuleLanguage', u'N', None, None, u'ModuleSignature', 2, None, None, u'LanguageID of module with exclusion requirement.'), -(u'ModuleExclusion', u'ExcludedID', u'N', None, None, None, None, None, None, u'String.GUID of excluded module.'), -(u'ModuleExclusion', u'ExcludedLanguage', u'N', None, None, None, None, None, None, u'Language of excluded module.'), -(u'ModuleExclusion', u'ExcludedMaxVersion', u'Y', None, None, None, None, u'Version', None, u'Maximum version of excluded module.'), -(u'ModuleExclusion', u'ExcludedMinVersion', u'Y', None, None, None, None, u'Version', None, u'Minimum version of excluded module.'), -(u'MoveFile', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'If this component is not "selected" for installation or removal, no action will be taken on the associated MoveFile entry'), -(u'MoveFile', u'DestFolder', u'N', None, None, None, None, u'Identifier', None, u'Name of a property whose value is assumed to resolve to the full path to the destination directory'), -(u'MoveFile', u'DestName', u'Y', None, None, None, None, u'Filename', None, u'Name to be given to the original file after it is moved or copied. 
If blank, the destination file will be given the same name as the source file'), -(u'MoveFile', u'FileKey', u'N', None, None, None, None, u'Identifier', None, u'Primary key that uniquely identifies a particular MoveFile record'), -(u'MoveFile', u'Options', u'N', 0, 1, None, None, None, None, u'Integer value specifying the MoveFile operating mode, one of imfoEnum'), -(u'MoveFile', u'SourceFolder', u'Y', None, None, None, None, u'Identifier', None, u'Name of a property whose value is assumed to resolve to the full path to the source directory'), -(u'MoveFile', u'SourceName', u'Y', None, None, None, None, u'Text', None, u"Name of the source file(s) to be moved or copied. Can contain the '*' or '?' wildcards."), -(u'MsiAssembly', u'Attributes', u'Y', None, None, None, None, None, None, u'Assembly attributes'), -(u'MsiAssembly', u'Feature_', u'N', None, None, u'Feature', 1, u'Identifier', None, u'Foreign key into Feature table.'), -(u'MsiAssembly', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into Component table.'), -(u'MsiAssembly', u'File_Application', u'Y', None, None, u'File', 1, u'Identifier', None, u'Foreign key into File table, denoting the application context for private assemblies. Null for global assemblies.'), -(u'MsiAssembly', u'File_Manifest', u'Y', None, None, u'File', 1, u'Identifier', None, u'Foreign key into the File table denoting the manifest file for the assembly.'), -(u'MsiAssemblyName', u'Name', u'N', None, None, None, None, u'Text', None, u'The name part of the name-value pairs for the assembly name.'), -(u'MsiAssemblyName', u'Value', u'N', None, None, None, None, u'Text', None, u'The value part of the name-value pairs for the assembly name.'), -(u'MsiAssemblyName', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into Component table.'), -(u'MsiDigitalCertificate', u'CertData', u'N', None, None, None, None, u'Binary', None, u'A certificate context blob for a signer certificate'), -(u'MsiDigitalCertificate', u'DigitalCertificate', u'N', None, None, None, None, u'Identifier', None, u'A unique identifier for the row'), -(u'MsiDigitalSignature', u'Table', u'N', None, None, None, None, None, u'Media', u'Reference to another table name (only Media table is supported)'), -(u'MsiDigitalSignature', u'DigitalCertificate_', u'N', None, None, u'MsiDigitalCertificate', 1, u'Identifier', None, u'Foreign key to MsiDigitalCertificate table identifying the signer certificate'), -(u'MsiDigitalSignature', u'Hash', u'Y', None, None, None, None, u'Binary', None, u'The encoded hash blob from the digital signature'), -(u'MsiDigitalSignature', u'SignObject', u'N', None, None, None, None, u'Text', None, u'Foreign key to Media table'), -(u'MsiFileHash', u'File_', u'N', None, None, u'File', 1, u'Identifier', None, u'Primary key, foreign key into File table referencing file with this hash'), -(u'MsiFileHash', u'Options', u'N', 0, 32767, None, None, None, None, u'Various options and attributes for this hash.'), -(u'MsiFileHash', u'HashPart1', u'N', None, None, None, None, None, None, u'Size of file in bytes (long integer).'), -(u'MsiFileHash', u'HashPart2', u'N', None, None, None, None, None, None, u'Size of file in bytes (long integer).'), -(u'MsiFileHash', u'HashPart3', u'N', None, None, None, None, None, None, u'Size of file in bytes (long integer).'), -(u'MsiFileHash', u'HashPart4', u'N', None, None, None, None, None, None, u'Size of file in bytes (long integer).'), -(u'MsiPatchHeaders', u'StreamRef', u'N', None, None, 
None, None, u'Identifier', None, u'Primary key. A unique identifier for the row.'), -(u'MsiPatchHeaders', u'Header', u'N', None, None, None, None, u'Binary', None, u'Binary stream. The patch header, used for patch validation.'), -(u'ODBCAttribute', u'Value', u'Y', None, None, None, None, u'Text', None, u'Value for ODBC driver attribute'), -(u'ODBCAttribute', u'Attribute', u'N', None, None, None, None, u'Text', None, u'Name of ODBC driver attribute'), -(u'ODBCAttribute', u'Driver_', u'N', None, None, u'ODBCDriver', 1, u'Identifier', None, u'Reference to ODBC driver in ODBCDriver table'), -(u'ODBCDriver', u'Description', u'N', None, None, None, None, u'Text', None, u'Text used as registered name for driver, non-localized'), -(u'ODBCDriver', u'File_', u'N', None, None, u'File', 1, u'Identifier', None, u'Reference to key driver file'), -(u'ODBCDriver', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Reference to associated component'), -(u'ODBCDriver', u'Driver', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized.internal token for driver'), -(u'ODBCDriver', u'File_Setup', u'Y', None, None, u'File', 1, u'Identifier', None, u'Optional reference to key driver setup DLL'), -(u'ODBCDataSource', u'Description', u'N', None, None, None, None, u'Text', None, u'Text used as registered name for data source'), -(u'ODBCDataSource', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Reference to associated component'), -(u'ODBCDataSource', u'DataSource', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized.internal token for data source'), -(u'ODBCDataSource', u'DriverDescription', u'N', None, None, None, None, u'Text', None, u'Reference to driver description, may be existing driver'), -(u'ODBCDataSource', u'Registration', u'N', 0, 1, None, None, None, None, u'Registration option: 0=machine, 1=user, others t.b.d.'), -(u'ODBCSourceAttribute', u'Value', u'Y', None, None, None, None, u'Text', None, u'Value for ODBC data source attribute'), -(u'ODBCSourceAttribute', u'Attribute', u'N', None, None, None, None, u'Text', None, u'Name of ODBC data source attribute'), -(u'ODBCSourceAttribute', u'DataSource_', u'N', None, None, u'ODBCDataSource', 1, u'Identifier', None, u'Reference to ODBC data source in ODBCDataSource table'), -(u'ODBCTranslator', u'Description', u'N', None, None, None, None, u'Text', None, u'Text used as registered name for translator'), -(u'ODBCTranslator', u'File_', u'N', None, None, u'File', 1, u'Identifier', None, u'Reference to key translator file'), -(u'ODBCTranslator', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Reference to associated component'), -(u'ODBCTranslator', u'File_Setup', u'Y', None, None, u'File', 1, u'Identifier', None, u'Optional reference to key translator setup DLL'), -(u'ODBCTranslator', u'Translator', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized.internal token for translator'), -(u'Patch', u'Sequence', u'N', 0, 32767, None, None, None, None, u'Primary key, sequence with respect to the media images; order must track cabinet order.'), -(u'Patch', u'Attributes', u'N', 0, 32767, None, None, None, None, u'Integer containing bit flags representing patch attributes'), -(u'Patch', u'File_', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized token, foreign key to File table, must match identifier in cabinet.'), -(u'Patch', u'Header', u'Y', None, None, None, None, u'Binary', None, u'Binary 
stream. The patch header, used for patch validation.'), -(u'Patch', u'PatchSize', u'N', 0, 2147483647, None, None, None, None, u'Size of patch in bytes (long integer).'), -(u'Patch', u'StreamRef_', u'Y', None, None, None, None, u'Identifier', None, u'Identifier. Foreign key to the StreamRef column of the MsiPatchHeaders table.'), -(u'PatchPackage', u'Media_', u'N', 0, 32767, None, None, None, None, u'Foreign key to DiskId column of Media table. Indicates the disk containing the patch package.'), -(u'PatchPackage', u'PatchId', u'N', None, None, None, None, u'Guid', None, u'A unique string GUID representing this patch.'), -(u'PublishComponent', u'Feature_', u'N', None, None, u'Feature', 1, u'Identifier', None, u'Foreign key into the Feature table.'), -(u'PublishComponent', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into the Component table.'), -(u'PublishComponent', u'ComponentId', u'N', None, None, None, None, u'Guid', None, u'A string GUID that represents the component id that will be requested by the alien product.'), -(u'PublishComponent', u'AppData', u'Y', None, None, None, None, u'Text', None, u'This is localisable Application specific data that can be associated with a Qualified Component.'), -(u'PublishComponent', u'Qualifier', u'N', None, None, None, None, u'Text', None, u'This is defined only when the ComponentId column is an Qualified Component Id. This is the Qualifier for ProvideComponentIndirect.'), -(u'Registry', u'Name', u'Y', None, None, None, None, u'Formatted', None, u'The registry value name.'), -(u'Registry', u'Value', u'Y', None, None, None, None, u'Formatted', None, u'The registry value.'), -(u'Registry', u'Key', u'N', None, None, None, None, u'RegPath', None, u'The key for the registry value.'), -(u'Registry', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into the Component table referencing component that controls the installing of the registry value.'), -(u'Registry', u'Registry', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized token.'), -(u'Registry', u'Root', u'N', -1, 3, None, None, None, None, u'The predefined root key for the registry value, one of rrkEnum.'), -(u'RegLocator', u'Name', u'Y', None, None, None, None, u'Formatted', None, u'The registry value name.'), -(u'RegLocator', u'Type', u'Y', 0, 18, None, None, None, None, u'An integer value that determines if the registry value is a filename or a directory location or to be used as is w/o interpretation.'), -(u'RegLocator', u'Key', u'N', None, None, None, None, u'RegPath', None, u'The key for the registry value.'), -(u'RegLocator', u'Signature_', u'N', None, None, None, None, u'Identifier', None, u'The table key. The Signature_ represents a unique file signature and is also the foreign key in the Signature table. 
If the type is 0, the registry values refers a directory, and _Signature is not a foreign key.'), -(u'RegLocator', u'Root', u'N', 0, 3, None, None, None, None, u'The predefined root key for the registry value, one of rrkEnum.'), -(u'RemoveFile', u'InstallMode', u'N', None, None, None, None, None, u'1;2;3', u'Installation option, one of iimEnum.'), -(u'RemoveFile', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key referencing Component that controls the file to be removed.'), -(u'RemoveFile', u'FileKey', u'N', None, None, None, None, u'Identifier', None, u'Primary key used to identify a particular file entry'), -(u'RemoveFile', u'FileName', u'Y', None, None, None, None, u'WildCardFilename', None, u'Name of the file to be removed.'), -(u'RemoveFile', u'DirProperty', u'N', None, None, None, None, u'Identifier', None, u'Name of a property whose value is assumed to resolve to the full pathname to the folder of the file to be removed.'), -(u'RemoveIniFile', u'Action', u'N', None, None, None, None, None, u'2;4', u'The type of modification to be made, one of iifEnum.'), -(u'RemoveIniFile', u'Value', u'Y', None, None, None, None, u'Formatted', None, u'The value to be deleted. The value is required when Action is iifIniRemoveTag'), -(u'RemoveIniFile', u'Key', u'N', None, None, None, None, u'Formatted', None, u'The .INI file key below Section.'), -(u'RemoveIniFile', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into the Component table referencing component that controls the deletion of the .INI value.'), -(u'RemoveIniFile', u'FileName', u'N', None, None, None, None, u'Filename', None, u'The .INI file name in which to delete the information'), -(u'RemoveIniFile', u'DirProperty', u'Y', None, None, None, None, u'Identifier', None, u'Foreign key into the Directory table denoting the directory where the .INI file is.'), -(u'RemoveIniFile', u'Section', u'N', None, None, None, None, u'Formatted', None, u'The .INI file Section.'), -(u'RemoveIniFile', u'RemoveIniFile', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized token.'), -(u'RemoveRegistry', u'Name', u'Y', None, None, None, None, u'Formatted', None, u'The registry value name.'), -(u'RemoveRegistry', u'Key', u'N', None, None, None, None, u'RegPath', None, u'The key for the registry value.'), -(u'RemoveRegistry', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into the Component table referencing component that controls the deletion of the registry value.'), -(u'RemoveRegistry', u'Root', u'N', -1, 3, None, None, None, None, u'The predefined root key for the registry value, one of rrkEnum'), -(u'RemoveRegistry', u'RemoveRegistry', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized token.'), -(u'ReserveCost', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Reserve a specified amount of space if this component is to be installed.'), -(u'ReserveCost', u'ReserveFolder', u'Y', None, None, None, None, u'Identifier', None, u'Name of a property whose value is assumed to resolve to the full path to the destination directory'), -(u'ReserveCost', u'ReserveKey', u'N', None, None, None, None, u'Identifier', None, u'Primary key that uniquely identifies a particular ReserveCost record'), -(u'ReserveCost', u'ReserveLocal', u'N', 0, 2147483647, None, None, None, None, u'Disk space to reserve if linked component is installed locally.'), -(u'ReserveCost', u'ReserveSource', u'N', 0, 
2147483647, None, None, None, None, u'Disk space to reserve if linked component is installed to run from the source location.'), -(u'SelfReg', u'File_', u'N', None, None, u'File', 1, u'Identifier', None, u'Foreign key into the File table denoting the module that needs to be registered.'), -(u'SelfReg', u'Cost', u'Y', 0, 32767, None, None, None, None, u'The cost of registering the module.'), -(u'ServiceControl', u'Name', u'N', None, None, None, None, u'Formatted', None, u'Name of a service. /, \\, comma and space are invalid'), -(u'ServiceControl', u'Event', u'N', 0, 187, None, None, None, None, u'Bit field: Install: 0x1 = Start, 0x2 = Stop, 0x8 = Delete, Uninstall: 0x10 = Start, 0x20 = Stop, 0x80 = Delete'), -(u'ServiceControl', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Required foreign key into the Component Table that controls the startup of the service'), -(u'ServiceControl', u'ServiceControl', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized token.'), -(u'ServiceControl', u'Arguments', u'Y', None, None, None, None, u'Formatted', None, u'Arguments for the service. Separate by [~].'), -(u'ServiceControl', u'Wait', u'Y', 0, 1, None, None, None, None, u'Boolean for whether to wait for the service to fully start'), -(u'ServiceInstall', u'Name', u'N', None, None, None, None, u'Formatted', None, u'Internal Name of the Service'), -(u'ServiceInstall', u'Description', u'Y', None, None, None, None, u'Text', None, u'Description of service.'), -(u'ServiceInstall', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Required foreign key into the Component Table that controls the startup of the service'), -(u'ServiceInstall', u'Arguments', u'Y', None, None, None, None, u'Formatted', None, u'Arguments to include in every start of the service, passed to WinMain'), -(u'ServiceInstall', u'ServiceInstall', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized token.'), -(u'ServiceInstall', u'Dependencies', u'Y', None, None, None, None, u'Formatted', None, u'Other services this depends on to start. Separate by [~], and end with [~][~]'), -(u'ServiceInstall', u'DisplayName', u'Y', None, None, None, None, u'Formatted', None, u'External Name of the Service'), -(u'ServiceInstall', u'ErrorControl', u'N', -2147483647, 2147483647, None, None, None, None, u'Severity of error if service fails to start'), -(u'ServiceInstall', u'LoadOrderGroup', u'Y', None, None, None, None, u'Formatted', None, u'LoadOrderGroup'), -(u'ServiceInstall', u'Password', u'Y', None, None, None, None, u'Formatted', None, u'password to run service with. 
(with StartName)'), -(u'ServiceInstall', u'ServiceType', u'N', -2147483647, 2147483647, None, None, None, None, u'Type of the service'), -(u'ServiceInstall', u'StartName', u'Y', None, None, None, None, u'Formatted', None, u'User or object name to run service as'), -(u'ServiceInstall', u'StartType', u'N', 0, 4, None, None, None, None, u'Type of the service'), -(u'Shortcut', u'Name', u'N', None, None, None, None, u'Filename', None, u'The name of the shortcut to be created.'), -(u'Shortcut', u'Description', u'Y', None, None, None, None, u'Text', None, u'The description for the shortcut.'), -(u'Shortcut', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into the Component table denoting the component whose selection gates the shortcut creation/deletion.'), -(u'Shortcut', u'Icon_', u'Y', None, None, u'Icon', 1, u'Identifier', None, u'Foreign key into the File table denoting the external icon file for the shortcut.'), -(u'Shortcut', u'IconIndex', u'Y', -32767, 32767, None, None, None, None, u'The icon index for the shortcut.'), -(u'Shortcut', u'Directory_', u'N', None, None, u'Directory', 1, u'Identifier', None, u'Foreign key into the Directory table denoting the directory where the shortcut file is created.'), -(u'Shortcut', u'Target', u'N', None, None, None, None, u'Shortcut', None, u'The shortcut target. This is usually a property that is expanded to a file or a folder that the shortcut points to.'), -(u'Shortcut', u'Arguments', u'Y', None, None, None, None, u'Formatted', None, u'The command-line arguments for the shortcut.'), -(u'Shortcut', u'Shortcut', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized token.'), -(u'Shortcut', u'Hotkey', u'Y', 0, 32767, None, None, None, None, u'The hotkey for the shortcut. It has the virtual-key code for the key in the low-order byte, and the modifier flags in the high-order byte. '), -(u'Shortcut', u'ShowCmd', u'Y', None, None, None, None, None, u'1;3;7', u'The show command for the application window. The following values may be used.'), -(u'Shortcut', u'WkDir', u'Y', None, None, None, None, u'Identifier', None, u'Name of property defining location of working directory.'), -(u'Signature', u'FileName', u'N', None, None, None, None, u'Filename', None, u'The name of the file. This may contain a "short name|long name" pair.'), -(u'Signature', u'Signature', u'N', None, None, None, None, u'Identifier', None, u'The table key. The Signature represents a unique file signature.'), -(u'Signature', u'Languages', u'Y', None, None, None, None, u'Language', None, u'The languages supported by the file.'), -(u'Signature', u'MaxDate', u'Y', 0, 2147483647, None, None, None, None, u'The maximum creation date of the file.'), -(u'Signature', u'MaxSize', u'Y', 0, 2147483647, None, None, None, None, u'The maximum size of the file. 
'), -(u'Signature', u'MaxVersion', u'Y', None, None, None, None, u'Text', None, u'The maximum version of the file.'), -(u'Signature', u'MinDate', u'Y', 0, 2147483647, None, None, None, None, u'The minimum creation date of the file.'), -(u'Signature', u'MinSize', u'Y', 0, 2147483647, None, None, None, None, u'The minimum size of the file.'), -(u'Signature', u'MinVersion', u'Y', None, None, None, None, u'Text', None, u'The minimum version of the file.'), -(u'TypeLib', u'Feature_', u'N', None, None, u'Feature', 1, u'Identifier', None, u'Required foreign key into the Feature Table, specifying the feature to validate or install in order for the type library to be operational.'), -(u'TypeLib', u'Description', u'Y', None, None, None, None, u'Text', None, None), -(u'TypeLib', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Required foreign key into the Component Table, specifying the component for which to return a path when called through LocateComponent.'), -(u'TypeLib', u'Directory_', u'Y', None, None, u'Directory', 1, u'Identifier', None, u'Optional. The foreign key into the Directory table denoting the path to the help file for the type library.'), -(u'TypeLib', u'Language', u'N', 0, 32767, None, None, None, None, u'The language of the library.'), -(u'TypeLib', u'Version', u'Y', 0, 16777215, None, None, None, None, u'The version of the library. The minor version is in the lower 8 bits of the integer. The major version is in the next 16 bits. '), -(u'TypeLib', u'Cost', u'Y', 0, 2147483647, None, None, None, None, u'The cost associated with the registration of the typelib. This column is currently optional.'), -(u'TypeLib', u'LibID', u'N', None, None, None, None, u'Guid', None, u'The GUID that represents the library.'), -(u'Upgrade', u'Attributes', u'N', 0, 2147483647, None, None, None, None, u'The attributes of this product set.'), -(u'Upgrade', u'Remove', u'Y', None, None, None, None, u'Formatted', None, u'The list of features to remove when uninstalling a product from this set. The default is "ALL".'), -(u'Upgrade', u'Language', u'Y', None, None, None, None, u'Language', None, u'A comma-separated list of languages for either products in this set or products not in this set.'), -(u'Upgrade', u'ActionProperty', u'N', None, None, None, None, u'UpperCase', None, u'The property to set when a product in this set is found.'), -(u'Upgrade', u'UpgradeCode', u'N', None, None, None, None, u'Guid', None, u'The UpgradeCode GUID belonging to the products in this set.'), -(u'Upgrade', u'VersionMax', u'Y', None, None, None, None, u'Text', None, u'The maximum ProductVersion of the products in this set. The set may or may not include products with this particular version.'), -(u'Upgrade', u'VersionMin', u'Y', None, None, None, None, u'Text', None, u'The minimum ProductVersion of the products in this set. The set may or may not include products with this particular version.'), -(u'Verb', u'Sequence', u'Y', 0, 32767, None, None, None, None, u'Order within the verbs for a particular extension. 
Also used simply to specify the default verb.'), -(u'Verb', u'Argument', u'Y', None, None, None, None, u'Formatted', None, u'Optional value for the command arguments.'), -(u'Verb', u'Extension_', u'N', None, None, u'Extension', 1, u'Text', None, u'The extension associated with the table row.'), -(u'Verb', u'Verb', u'N', None, None, None, None, u'Text', None, u'The verb for the command.'), -(u'Verb', u'Command', u'Y', None, None, None, None, u'Formatted', None, u'The command text.'), -] - -Error = [ -(0, u'{{Fatal error: }}'), -(1, u'{{Error [1]. }}'), -(2, u'Warning [1]. '), -(3, None), -(4, u'Info [1]. '), -(5, u'The installer has encountered an unexpected error installing this package. This may indicate a problem with this package. The error code is [1]. {{The arguments are: [2], [3], [4]}}'), -(6, None), -(7, u'{{Disk full: }}'), -(8, u'Action [Time]: [1]. [2]'), -(9, u'[ProductName]'), -(10, u'{[2]}{, [3]}{, [4]}'), -(11, u'Message type: [1], Argument: [2]'), -(12, u'=== Logging started: [Date] [Time] ==='), -(13, u'=== Logging stopped: [Date] [Time] ==='), -(14, u'Action start [Time]: [1].'), -(15, u'Action ended [Time]: [1]. Return value [2].'), -(16, u'Time remaining: {[1] minutes }{[2] seconds}'), -(17, u'Out of memory. Shut down other applications before retrying.'), -(18, u'Installer is no longer responding.'), -(19, u'Installer stopped prematurely.'), -(20, u'Please wait while Windows configures [ProductName]'), -(21, u'Gathering required information...'), -(22, u'Removing older versions of this application...'), -(23, u'Preparing to remove older versions of this application...'), -(32, u'{[ProductName] }Setup completed successfully.'), -(33, u'{[ProductName] }Setup failed.'), -(1101, u'Error reading from file: [2]. {{ System error [3].}} Verify that the file exists and that you can access it.'), -(1301, u"Cannot create the file '[2]'. A directory with this name already exists. Cancel the install and try installing to a different location."), -(1302, u'Please insert the disk: [2]'), -(1303, u'The installer has insufficient privileges to access this directory: [2]. The installation cannot continue. Log on as administrator or contact your system administrator.'), -(1304, u'Error writing to file: [2]. Verify that you have access to that directory.'), -(1305, u'Error reading from file [2]. {{ System error [3].}} Verify that the file exists and that you can access it.'), -(1306, u"Another application has exclusive access to the file '[2]'. Please shut down all other applications, then click Retry."), -(1307, u'There is not enough disk space to install this file: [2]. Free some disk space and click Retry, or click Cancel to exit.'), -(1308, u'Source file not found: [2]. Verify that the file exists and that you can access it.'), -(1309, u'Error reading from file: [3]. {{ System error [2].}} Verify that the file exists and that you can access it.'), -(1310, u'Error writing to file: [3]. {{ System error [2].}} Verify that you have access to that directory.'), -(1311, u'Source file not found{{(cabinet)}}: [2]. Verify that the file exists and that you can access it.'), -(1312, u"Cannot create the directory '[2]'. A file with this name already exists. Please rename or remove the file and click retry, or click Cancel to exit."), -(1313, u'The volume [2] is currently unavailable. 
Please select another.'), -(1314, u"The specified path '[2]' is unavailable."), -(1315, u'Unable to write to the specified folder: [2].'), -(1316, u'A network error occurred while attempting to read from the file: [2]'), -(1317, u'An error occurred while attempting to create the directory: [2]'), -(1318, u'A network error occurred while attempting to create the directory: [2]'), -(1319, u'A network error occurred while attempting to open the source file cabinet: [2]'), -(1320, u'The specified path is too long: [2]'), -(1321, u'The Installer has insufficient privileges to modify this file: [2].'), -(1322, u"A portion of the folder path '[2]' is invalid. It is either empty or exceeds the length allowed by the system."), -(1323, u"The folder path '[2]' contains words that are not valid in folder paths."), -(1324, u"The folder path '[2]' contains an invalid character."), -(1325, u"'[2]' is not a valid short file name."), -(1326, u'Error getting file security: [3] GetLastError: [2]'), -(1327, u'Invalid Drive: [2]'), -(1328, u'Error applying patch to file [2]. It has probably been updated by other means, and can no longer be modified by this patch. For more information contact your patch vendor. {{System Error: [3]}}'), -(1329, u'A file that is required cannot be installed because the cabinet file [2] is not digitally signed. This may indicate that the cabinet file is corrupt.'), -(1330, u'A file that is required cannot be installed because the cabinet file [2] has an invalid digital signature. This may indicate that the cabinet file is corrupt.{{ Error [3] was returned by WinVerifyTrust.}}'), -(1331, u'Failed to correctly copy [2] file: CRC error.'), -(1332, u'Failed to correctly move [2] file: CRC error.'), -(1333, u'Failed to correctly patch [2] file: CRC error.'), -(1334, u"The file '[2]' cannot be installed because the file cannot be found in cabinet file '[3]'. This could indicate a network error, an error reading from the CD-ROM, or a problem with this package."), -(1335, u"The cabinet file '[2]' required for this installation is corrupt and cannot be used. This could indicate a network error, an error reading from the CD-ROM, or a problem with this package."), -(1336, u'There was an error creating a temporary file that is needed to complete this installation.{{ Folder: [3]. System error code: [2]}}'), -(1401, u'Could not create key: [2]. {{ System error [3].}} Verify that you have sufficient access to that key, or contact your support personnel. '), -(1402, u'Could not open key: [2]. {{ System error [3].}} Verify that you have sufficient access to that key, or contact your support personnel. '), -(1403, u'Could not delete value [2] from key [3]. {{ System error [4].}} Verify that you have sufficient access to that key, or contact your support personnel. '), -(1404, u'Could not delete key [2]. {{ System error [3].}} Verify that you have sufficient access to that key, or contact your support personnel. '), -(1405, u'Could not read value [2] from key [3]. {{ System error [4].}} Verify that you have sufficient access to that key, or contact your support personnel. '), -(1406, u'Could not write value [2] to key [3]. {{ System error [4].}} Verify that you have sufficient access to that key, or contact your support personnel.'), -(1407, u'Could not get value names for key [2]. {{ System error [3].}} Verify that you have sufficient access to that key, or contact your support personnel.'), -(1408, u'Could not get sub key names for key [2]. 
{{ System error [3].}} Verify that you have sufficient access to that key, or contact your support personnel.'), -(1409, u'Could not read security information for key [2]. {{ System error [3].}} Verify that you have sufficient access to that key, or contact your support personnel.'), -(1410, u'Could not increase the available registry space. [2] KB of free registry space is required for the installation of this application.'), -(1500, u'Another installation is in progress. You must complete that installation before continuing this one.'), -(1501, u'Error accessing secured data. Please make sure the Windows Installer is configured properly and try the install again.'), -(1502, u"User '[2]' has previously initiated an install for product '[3]'. That user will need to run that install again before they can use that product. Your current install will now continue."), -(1503, u"User '[2]' has previously initiated an install for product '[3]'. That user will need to run that install again before they can use that product."), -(1601, u"Out of disk space -- Volume: '[2]'; required space: [3] KB; available space: [4] KB. Free some disk space and retry."), -(1602, u'Are you sure you want to cancel?'), -(1603, u"The file [2][3] is being held in use{ by the following process: Name: [4], Id: [5], Window Title: '[6]'}. Close that application and retry."), -(1604, u"The product '[2]' is already installed, preventing the installation of this product. The two products are incompatible."), -(1605, u"There is not enough disk space on the volume '[2]' to continue the install with recovery enabled. [3] KB are required, but only [4] KB are available. Click Ignore to continue the install without saving recovery information, click Retry to check for available space again, or click Cancel to quit the installation."), -(1606, u'Could not access network location [2].'), -(1607, u'The following applications should be closed before continuing the install:'), -(1608, u'Could not find any previously installed compliant products on the machine for installing this product.'), -(1609, u"An error occurred while applying security settings. [2] is not a valid user or group. This could be a problem with the package, or a problem connecting to a domain controller on the network. Check your network connection and click Retry, or Cancel to end the install. {{Unable to locate the user's SID, system error [3]}}"), -(1701, u'The key [2] is not valid. Verify that you entered the correct key.'), -(1702, u'The installer must restart your system before configuration of [2] can continue. Click Yes to restart now or No if you plan to manually restart later.'), -(1703, u'You must restart your system for the configuration changes made to [2] to take effect. Click Yes to restart now or No if you plan to manually restart later.'), -(1704, u'An installation for [2] is currently suspended. You must undo the changes made by that installation to continue. Do you want to undo those changes?'), -(1705, u'A previous installation for this product is in progress. You must undo the changes made by that installation to continue. Do you want to undo those changes?'), -(1706, u"An installation package for the product [2] cannot be found. Try the installation again using a valid copy of the installation package '[3]'."), -(1707, u'Installation completed successfully.'), -(1708, u'Installation failed.'), -(1709, u'Product: [2] -- [3]'), -(1710, u'You may either restore your computer to its previous state or continue the install later. 
Would you like to restore?'), -(1711, u'An error occurred while writing installation information to disk. Check to make sure enough disk space is available, and click Retry, or Cancel to end the install.'), -(1712, u'One or more of the files required to restore your computer to its previous state could not be found. Restoration will not be possible.'), -(1713, u'[2] cannot install one of its required products. Contact your technical support group. {{System Error: [3].}}'), -(1714, u'The older version of [2] cannot be removed. Contact your technical support group. {{System Error [3].}}'), -(1715, u'Installed [2]'), -(1716, u'Configured [2]'), -(1717, u'Removed [2]'), -(1718, u'File [2] was rejected by digital signature policy.'), -(1719, u'The Windows Installer Service could not be accessed. This can occur if you are running Windows in safe mode, or if the Windows Installer is not correctly installed. Contact your support personnel for assistance.'), -(1720, u'There is a problem with this Windows Installer package. A script required for this install to complete could not be run. Contact your support personnel or package vendor. {{Custom action [2] script error [3], [4]: [5] Line [6], Column [7], [8] }}'), -(1721, u'There is a problem with this Windows Installer package. A program required for this install to complete could not be run. Contact your support personnel or package vendor. {{Action: [2], location: [3], command: [4] }}'), -(1722, u'There is a problem with this Windows Installer package. A program run as part of the setup did not finish as expected. Contact your support personnel or package vendor. {{Action [2], location: [3], command: [4] }}'), -(1723, u'There is a problem with this Windows Installer package. A DLL required for this install to complete could not be run. Contact your support personnel or package vendor. {{Action [2], entry: [3], library: [4] }}'), -(1724, u'Removal completed successfully.'), -(1725, u'Removal failed.'), -(1726, u'Advertisement completed successfully.'), -(1727, u'Advertisement failed.'), -(1728, u'Configuration completed successfully.'), -(1729, u'Configuration failed.'), -(1730, u'You must be an Administrator to remove this application. To remove this application, you can log on as an Administrator, or contact your technical support group for assistance.'), -(1801, u'The path [2] is not valid. Please specify a valid path.'), -(1802, u'Out of memory. Shut down other applications before retrying.'), -(1803, u'There is no disk in drive [2]. Please insert one and click Retry, or click Cancel to go back to the previously selected volume.'), -(1804, u'There is no disk in drive [2]. Please insert one and click Retry, or click Cancel to return to the browse dialog and select a different volume.'), -(1805, u'The folder [2] does not exist. Please enter a path to an existing folder.'), -(1806, u'You have insufficient privileges to read this folder.'), -(1807, u'A valid destination folder for the install could not be determined.'), -(1901, u'Error attempting to read from the source install database: [2].'), -(1902, u'Scheduling reboot operation: Renaming file [2] to [3]. Must reboot to complete operation.'), -(1903, u'Scheduling reboot operation: Deleting file [2]. Must reboot to complete operation.'), -(1904, u'Module [2] failed to register. HRESULT [3]. Contact your support personnel.'), -(1905, u'Module [2] failed to unregister. HRESULT [3]. Contact your support personnel.'), -(1906, u'Failed to cache package [2]. Error: [3]. 
Contact your support personnel.'), -(1907, u'Could not register font [2]. Verify that you have sufficient permissions to install fonts, and that the system supports this font.'), -(1908, u'Could not unregister font [2]. Verify that you that you have sufficient permissions to remove fonts.'), -(1909, u'Could not create Shortcut [2]. Verify that the destination folder exists and that you can access it.'), -(1910, u'Could not remove Shortcut [2]. Verify that the shortcut file exists and that you can access it.'), -(1911, u'Could not register type library for file [2]. Contact your support personnel.'), -(1912, u'Could not unregister type library for file [2]. Contact your support personnel.'), -(1913, u'Could not update the ini file [2][3]. Verify that the file exists and that you can access it.'), -(1914, u'Could not schedule file [2] to replace file [3] on reboot. Verify that you have write permissions to file [3].'), -(1915, u'Error removing ODBC driver manager, ODBC error [2]: [3]. Contact your support personnel.'), -(1916, u'Error installing ODBC driver manager, ODBC error [2]: [3]. Contact your support personnel.'), -(1917, u'Error removing ODBC driver: [4], ODBC error [2]: [3]. Verify that you have sufficient privileges to remove ODBC drivers.'), -(1918, u'Error installing ODBC driver: [4], ODBC error [2]: [3]. Verify that the file [4] exists and that you can access it.'), -(1919, u'Error configuring ODBC data source: [4], ODBC error [2]: [3]. Verify that the file [4] exists and that you can access it.'), -(1920, u"Service '[2]' ([3]) failed to start. Verify that you have sufficient privileges to start system services."), -(1921, u"Service '[2]' ([3]) could not be stopped. Verify that you have sufficient privileges to stop system services."), -(1922, u"Service '[2]' ([3]) could not be deleted. Verify that you have sufficient privileges to remove system services."), -(1923, u"Service '[2]' ([3]) could not be installed. Verify that you have sufficient privileges to install system services."), -(1924, u"Could not update environment variable '[2]'. Verify that you have sufficient privileges to modify environment variables."), -(1925, u'You do not have sufficient privileges to complete this installation for all users of the machine. Log on as administrator and then retry this installation.'), -(1926, u"Could not set file security for file '[3]'. Error: [2]. Verify that you have sufficient privileges to modify the security permissions for this file."), -(1927, u'Component Services (COM+ 1.0) are not installed on this computer. This installation requires Component Services in order to complete successfully. Component Services are available on Windows 2000.'), -(1928, u'Error registering COM+ Application. Contact your support personnel for more information.'), -(1929, u'Error unregistering COM+ Application. Contact your support personnel for more information.'), -(1930, u"The description for service '[2]' ([3]) could not be changed."), -(1931, u'The Windows Installer service cannot update the system file [2] because the file is protected by Windows. You may need to update your operating system for this program to work correctly. {{Package version: [3], OS Protected version: [4]}}'), -(1932, u'The Windows Installer service cannot update the protected Windows file [2]. {{Package version: [3], OS Protected version: [4], SFP Error: [5]}}'), -(1933, u'The Windows Installer service cannot update one or more protected Windows files. {{SFP Error: [2]. 
List of protected files:\\r\\n[3]}}'), -(1934, u'User installations are disabled via policy on the machine.'), -(1935, u'An error occured during the installation of assembly component [2]. HRESULT: [3]. {{assembly interface: [4], function: [5], assembly name: [6]}}'), -] - -tables=['AdminExecuteSequence', 'AdminUISequence', 'AdvtExecuteSequence', 'BBControl', 'Billboard', 'Binary', 'CheckBox', 'Property', 'ComboBox', 'Control', 'ListBox', 'ActionText', 'ControlCondition', 'ControlEvent', 'Dialog', 'EventMapping', 'InstallExecuteSequence', 'InstallUISequence', 'ListView', 'RadioButton', 'TextStyle', 'UIText', '_Validation', 'Error'] diff --git a/Lib/ntpath.py b/Lib/ntpath.py index 5dd5f1a..7a79b53 100644 --- a/Lib/ntpath.py +++ b/Lib/ntpath.py @@ -481,27 +481,28 @@ def normpath(path): # Return an absolute path. -def abspath(path): - """Return the absolute version of a path""" - try: - from nt import _getfullpathname - except ImportError: # Not running on Windows - mock up something sensible. - global abspath - def _abspath(path): - if not isabs(path): - path = join(os.getcwd(), path) - return normpath(path) - abspath = _abspath - return _abspath(path) - - if path: # Empty path must return current working directory. - try: - path = _getfullpathname(path) - except WindowsError: - pass # Bad path - return unchanged. - else: - path = os.getcwd() - return normpath(path) +try: + from nt import _getfullpathname + +except ImportError: # not running on Windows - mock up something sensible + def abspath(path): + """Return the absolute version of a path.""" + if not isabs(path): + path = join(os.getcwd(), path) + return normpath(path) + +else: # use native Windows method on Windows + def abspath(path): + """Return the absolute version of a path.""" + + if path: # Empty path must return current working directory. + try: + path = _getfullpathname(path) + except WindowsError: + pass # Bad path - return unchanged. + else: + path = os.getcwd() + return normpath(path) # realpath is a no-op on systems without islink support realpath = abspath diff --git a/Lib/optparse.py b/Lib/optparse.py index ae3d00d..9ac987e 100644 --- a/Lib/optparse.py +++ b/Lib/optparse.py @@ -16,7 +16,7 @@ For support, use the optik-users@lists.sourceforge.net mailing list # Python developers: please do not make changes to this file, since # it is automatically generated from the Optik source code. -__version__ = "1.5a2" +__version__ = "1.5.1" __all__ = ['Option', 'SUPPRESS_HELP', @@ -35,8 +35,8 @@ __all__ = ['Option', 'BadOptionError'] __copyright__ = """ -Copyright (c) 2001-2004 Gregory P. Ward. All rights reserved. -Copyright (c) 2002-2004 Python Software Foundation. All rights reserved. +Copyright (c) 2001-2006 Gregory P. Ward. All rights reserved. +Copyright (c) 2002-2006 Python Software Foundation. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are @@ -67,21 +67,26 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
""" import sys, os +import types import textwrap -try: - from gettext import gettext as _ -except ImportError: - _ = lambda arg: arg def _repr(self): return "<%s at 0x%x: %s>" % (self.__class__.__name__, id(self), self) # This file was generated from: -# Id: option_parser.py 421 2004-10-26 00:45:16Z greg -# Id: option.py 422 2004-10-26 00:53:47Z greg -# Id: help.py 367 2004-07-24 23:21:21Z gward -# Id: errors.py 367 2004-07-24 23:21:21Z gward +# Id: option_parser.py 509 2006-04-20 00:58:24Z gward +# Id: option.py 509 2006-04-20 00:58:24Z gward +# Id: help.py 509 2006-04-20 00:58:24Z gward +# Id: errors.py 509 2006-04-20 00:58:24Z gward + +try: + from gettext import gettext +except ImportError: + def gettext(message): + return message +_ = gettext + class OptParseError (Exception): def __init__(self, msg): @@ -120,8 +125,25 @@ class OptionValueError (OptParseError): class BadOptionError (OptParseError): """ - Raised if an invalid or ambiguous option is seen on the command-line. + Raised if an invalid option is seen on the command line. + """ + def __init__(self, opt_str): + self.opt_str = opt_str + + def __str__(self): + return _("no such option: %s") % self.opt_str + +class AmbiguousOptionError (BadOptionError): + """ + Raised if an ambiguous option is seen on the command line. """ + def __init__(self, opt_str, possibilities): + BadOptionError.__init__(self, opt_str) + self.possibilities = possibilities + + def __str__(self): + return (_("ambiguous option: %s (%s?)") + % (self.opt_str, ", ".join(self.possibilities))) class HelpFormatter: @@ -223,15 +245,30 @@ class HelpFormatter: def format_heading(self, heading): raise NotImplementedError, "subclasses must implement" - def format_description(self, description): - if not description: - return "" - desc_width = self.width - self.current_indent + def _format_text(self, text): + """ + Format a paragraph of free-form text for inclusion in the + help output at the current indentation level. 
+ """ + text_width = self.width - self.current_indent indent = " "*self.current_indent - return textwrap.fill(description, - desc_width, + return textwrap.fill(text, + text_width, initial_indent=indent, - subsequent_indent=indent) + "\n" + subsequent_indent=indent) + + def format_description(self, description): + if description: + return self._format_text(description) + "\n" + else: + return "" + + def format_epilog(self, epilog): + if epilog: + return "\n" + self._format_text(epilog) + "\n" + else: + return "" + def expand_default(self, option): if self.parser is None or not self.default_tag: @@ -328,7 +365,7 @@ class IndentedHelpFormatter (HelpFormatter): self, indent_increment, max_help_position, width, short_first) def format_usage(self, usage): - return _("usage: %s\n") % usage + return _("Usage: %s\n") % usage def format_heading(self, heading): return "%*s%s:\n" % (self.current_indent, "", heading) @@ -353,8 +390,27 @@ class TitledHelpFormatter (HelpFormatter): return "%s\n%s\n" % (heading, "=-"[self.level] * len(heading)) -_builtin_cvt = { "int" : (int, _("integer")), - "long" : (long, _("long integer")), +def _parse_num(val, type): + if val[:2].lower() == "0x": # hexadecimal + radix = 16 + elif val[:2].lower() == "0b": # binary + radix = 2 + val = val[2:] or "0" # have to remove "0b" prefix + elif val[:1] == "0": # octal + radix = 8 + else: # decimal + radix = 10 + + return type(val, radix) + +def _parse_int(val): + return _parse_num(val, int) + +def _parse_long(val): + return _parse_num(val, long) + +_builtin_cvt = { "int" : (_parse_int, _("integer")), + "long" : (_parse_long, _("long integer")), "float" : (float, _("floating-point")), "complex" : (complex, _("complex")) } @@ -422,6 +478,7 @@ class Option: "store_true", "store_false", "append", + "append_const", "count", "callback", "help", @@ -435,6 +492,7 @@ class Option: "store_true", "store_false", "append", + "append_const", "count") # The set of actions for which it makes sense to supply a value @@ -448,6 +506,10 @@ class Option: ALWAYS_TYPED_ACTIONS = ("store", "append") + # The set of actions which take a 'const' attribute. + CONST_ACTIONS = ("store_const", + "append_const") + # The set of known types for option parsers. Again, listed here for # constructor argument validation. TYPES = ("string", "int", "long", "float", "complex", "choice") @@ -572,9 +634,17 @@ class Option: # No type given? "string" is the most sensible default. self.type = "string" else: - # Allow type objects as an alternative to their names. - if type(self.type) is type: + # Allow type objects or builtin type conversion functions + # (int, str, etc.) as an alternative to their names. (The + # complicated check of __builtin__ is only necessary for + # Python 2.1 and earlier, and is short-circuited by the + # first check on modern Pythons.) 
+ import __builtin__ + if ( type(self.type) is types.TypeType or + (hasattr(self.type, "__name__") and + getattr(__builtin__, self.type.__name__, None) is self.type) ): self.type = self.type.__name__ + if self.type == "str": self.type = "string" @@ -589,7 +659,7 @@ class Option: if self.choices is None: raise OptionError( "must supply a list of choices for type 'choice'", self) - elif type(self.choices) not in (tuple, list): + elif type(self.choices) not in (types.TupleType, types.ListType): raise OptionError( "choices must be a list of strings ('%s' supplied)" % str(type(self.choices)).split("'")[1], self) @@ -613,7 +683,7 @@ class Option: self.dest = self._short_opts[0][1] def _check_const(self): - if self.action != "store_const" and self.const is not None: + if self.action not in self.CONST_ACTIONS and self.const is not None: raise OptionError( "'const' must not be supplied for action %r" % self.action, self) @@ -633,12 +703,12 @@ class Option: raise OptionError( "callback not callable: %r" % self.callback, self) if (self.callback_args is not None and - type(self.callback_args) is not tuple): + type(self.callback_args) is not types.TupleType): raise OptionError( "callback_args, if supplied, must be a tuple: not %r" % self.callback_args, self) if (self.callback_kwargs is not None and - type(self.callback_kwargs) is not dict): + type(self.callback_kwargs) is not types.DictType): raise OptionError( "callback_kwargs, if supplied, must be a dict: not %r" % self.callback_kwargs, self) @@ -720,6 +790,8 @@ class Option: setattr(values, dest, False) elif action == "append": values.ensure_value(dest, []).append(value) + elif action == "append_const": + values.ensure_value(dest, []).append(self.const) elif action == "count": setattr(values, dest, values.ensure_value(dest, 0) + 1) elif action == "callback": @@ -748,11 +820,9 @@ try: True, False except NameError: (True, False) = (1, 0) -try: - basestring -except NameError: - basestring = (str, unicode) +def isbasestring(x): + return isinstance(x, types.StringType) or isinstance(x, types.UnicodeType) class Values: @@ -766,16 +836,13 @@ class Values: __repr__ = _repr - def __eq__(self, other): + def __cmp__(self, other): if isinstance(other, Values): - return self.__dict__ == other.__dict__ - elif isinstance(other, dict): - return self.__dict__ == other + return cmp(self.__dict__, other.__dict__) + elif isinstance(other, types.DictType): + return cmp(self.__dict__, other) else: - return False - - def __ne__(self, other): - return not (self == other) + return -1 def _update_careful(self, dict): """ @@ -893,6 +960,13 @@ class OptionContainer: return self.description + def destroy(self): + """see OptionParser.destroy().""" + del self._short_opt + del self._long_opt + del self.defaults + + # -- Option-adding methods ----------------------------------------- def _check_conflict(self, option): @@ -926,7 +1000,7 @@ class OptionContainer: """add_option(Option) add_option(opt_str, ..., kwarg=val, ...) 
""" - if type(args[0]) is str: + if type(args[0]) is types.StringType: option = self.option_class(*args, **kwargs) elif len(args) == 1 and not kwargs: option = args[0] @@ -1018,6 +1092,11 @@ class OptionGroup (OptionContainer): def set_title(self, title): self.title = title + def destroy(self): + """see OptionParser.destroy().""" + OptionContainer.destroy(self) + del self.option_list + # -- Help-formatting methods --------------------------------------- def format_help(self, formatter): @@ -1044,6 +1123,8 @@ class OptionParser (OptionContainer): prog : string the name of the current program (to override os.path.basename(sys.argv[0])). + epilog : string + paragraph of help text to print after option help option_groups : [OptionGroup] list of option groups in this parser (option groups are @@ -1102,7 +1183,8 @@ class OptionParser (OptionContainer): description=None, formatter=None, add_help_option=True, - prog=None): + prog=None, + epilog=None): OptionContainer.__init__( self, option_class, conflict_handler, description) self.set_usage(usage) @@ -1114,6 +1196,7 @@ class OptionParser (OptionContainer): formatter = IndentedHelpFormatter() self.formatter = formatter self.formatter.set_parser(self) + self.epilog = epilog # Populate the option list; initial sources are the # standard_option_list class attribute, the 'option_list' @@ -1124,6 +1207,22 @@ class OptionParser (OptionContainer): self._init_parsing_state() + + def destroy(self): + """ + Declare that you are done with this OptionParser. This cleans up + reference cycles so the OptionParser (and all objects referenced by + it) can be garbage-collected promptly. After calling destroy(), the + OptionParser is unusable. + """ + OptionContainer.destroy(self) + for group in self.option_groups: + group.destroy() + del self.option_list + del self.option_groups + del self.formatter + + # -- Private methods ----------------------------------------------- # (used by our or OptionContainer's constructor) @@ -1167,7 +1266,7 @@ class OptionParser (OptionContainer): elif usage is SUPPRESS_USAGE: self.usage = None # For backwards compatibility with Optik 1.3 and earlier. - elif usage.startswith("usage:" + " "): + elif usage.lower().startswith("usage: "): self.usage = usage[7:] else: self.usage = usage @@ -1201,7 +1300,7 @@ class OptionParser (OptionContainer): defaults = self.defaults.copy() for option in self._get_all_options(): default = defaults.get(option.dest) - if isinstance(default, basestring): + if isbasestring(default): opt_str = option.get_opt_string() defaults[option.dest] = option.check_value(opt_str, default) @@ -1212,7 +1311,7 @@ class OptionParser (OptionContainer): def add_option_group(self, *args, **kwargs): # XXX lots of overlap with OptionContainer.add_option() - if type(args[0]) is str: + if type(args[0]) is types.StringType: group = OptionGroup(self, *args, **kwargs) elif len(args) == 1 and not kwargs: group = args[0] @@ -1276,7 +1375,7 @@ class OptionParser (OptionContainer): try: stop = self._process_args(largs, rargs, values) except (BadOptionError, OptionValueError), err: - self.error(err.msg) + self.error(str(err)) args = largs + rargs return self.check_values(values, args) @@ -1401,7 +1500,7 @@ class OptionParser (OptionContainer): i += 1 # we have consumed a character if not option: - self.error(_("no such option: %s") % opt) + raise BadOptionError(opt) if option.takes_value(): # Any characters left in arg? Pretend they're the # next arg, and stop consuming characters of arg. 
@@ -1501,7 +1600,7 @@ class OptionParser (OptionContainer): formatter = self.formatter formatter.store_option_strings(self) result = [] - result.append(formatter.format_heading(_("options"))) + result.append(formatter.format_heading(_("Options"))) formatter.indent() if self.option_list: result.append(OptionContainer.format_option_help(self, formatter)) @@ -1513,6 +1612,9 @@ class OptionParser (OptionContainer): # Drop the last "\n", or the header if no options or option groups: return "".join(result[:-1]) + def format_epilog(self, formatter): + return formatter.format_epilog(self.epilog) + def format_help(self, formatter=None): if formatter is None: formatter = self.formatter @@ -1522,6 +1624,7 @@ class OptionParser (OptionContainer): if self.description: result.append(self.format_description(formatter) + "\n") result.append(self.format_option_help(formatter)) + result.append(self.format_epilog(formatter)) return "".join(result) def print_help(self, file=None): @@ -1555,11 +1658,10 @@ def _match_abbrev(s, wordmap): if len(possibilities) == 1: return possibilities[0] elif not possibilities: - raise BadOptionError(_("no such option: %s") % s) + raise BadOptionError(s) else: # More than one possible completion: ambiguous prefix. - raise BadOptionError(_("ambiguous option: %s (%s?)") - % (s, ", ".join(possibilities))) + raise AmbiguousOptionError(s, possibilities) # Some day, there might be many Option classes. As of Optik 1.3, the diff --git a/Lib/pdb.py b/Lib/pdb.py index 5b7ea99..94f61f7 100755 --- a/Lib/pdb.py +++ b/Lib/pdb.py @@ -57,9 +57,11 @@ line_prefix = '\n-> ' # Probably a better default class Pdb(bdb.Bdb, cmd.Cmd): - def __init__(self): + def __init__(self, completekey='tab', stdin=None, stdout=None): bdb.Bdb.__init__(self) - cmd.Cmd.__init__(self) + cmd.Cmd.__init__(self, completekey, stdin, stdout) + if stdout: + self.use_rawinput = 0 self.prompt = '(Pdb) ' self.aliases = {} self.mainpyfile = '' @@ -133,7 +135,7 @@ class Pdb(bdb.Bdb, cmd.Cmd): if self._wait_for_mainpyfile: return if self.stop_here(frame): - print '--Call--' + print >>self.stdout, '--Call--' self.interaction(frame, None) def user_line(self, frame): @@ -169,7 +171,7 @@ class Pdb(bdb.Bdb, cmd.Cmd): def user_return(self, frame, return_value): """This function is called when a return trap is set here.""" frame.f_locals['__return__'] = return_value - print '--Return--' + print >>self.stdout, '--Return--' self.interaction(frame, None) def user_exception(self, frame, (exc_type, exc_value, exc_traceback)): @@ -179,7 +181,7 @@ class Pdb(bdb.Bdb, cmd.Cmd): if type(exc_type) == type(''): exc_type_name = exc_type else: exc_type_name = exc_type.__name__ - print exc_type_name + ':', _saferepr(exc_value) + print >>self.stdout, exc_type_name + ':', _saferepr(exc_value) self.interaction(frame, exc_traceback) # General interaction function @@ -202,7 +204,7 @@ class Pdb(bdb.Bdb, cmd.Cmd): if type(t) == type(''): exc_type_name = t else: exc_type_name = t.__name__ - print '***', exc_type_name + ':', v + print >>self.stdout, '***', exc_type_name + ':', v def precmd(self, line): """Handle alias expansion and ';;' separator.""" @@ -280,7 +282,7 @@ class Pdb(bdb.Bdb, cmd.Cmd): try: bnum = int(arg) except: - print "Usage : commands [bnum]\n ...\n end" + print >>self.stdout, "Usage : commands [bnum]\n ...\n end" return self.commands_bnum = bnum self.commands[bnum] = [] @@ -297,10 +299,10 @@ class Pdb(bdb.Bdb, cmd.Cmd): # break [ ([filename:]lineno | function) [, "condition"] ] if not arg: if self.breaks: # There's at least one - print "Num 
Type Disp Enb Where" + print >>self.stdout, "Num Type Disp Enb Where" for bp in bdb.Breakpoint.bpbynumber: if bp: - bp.bpprint() + bp.bpprint(self.stdout) return # parse arguments; comma has lowest precedence # and cannot occur in filename @@ -319,8 +321,8 @@ class Pdb(bdb.Bdb, cmd.Cmd): filename = arg[:colon].rstrip() f = self.lookupmodule(filename) if not f: - print '*** ', repr(filename), - print 'not found from sys.path' + print >>self.stdout, '*** ', repr(filename), + print >>self.stdout, 'not found from sys.path' return else: filename = f @@ -328,7 +330,7 @@ class Pdb(bdb.Bdb, cmd.Cmd): try: lineno = int(arg) except ValueError, msg: - print '*** Bad lineno:', arg + print >>self.stdout, '*** Bad lineno:', arg return else: # no colon; can be lineno or function @@ -354,11 +356,10 @@ class Pdb(bdb.Bdb, cmd.Cmd): # last thing to try (ok, filename, ln) = self.lineinfo(arg) if not ok: - print '*** The specified object', - print repr(arg), - print 'is not a function' - print ('or was not found ' - 'along sys.path.') + print >>self.stdout, '*** The specified object', + print >>self.stdout, repr(arg), + print >>self.stdout, 'is not a function' + print >>self.stdout, 'or was not found along sys.path.' return funcname = ok # ok contains a function name lineno = int(ln) @@ -369,12 +370,12 @@ class Pdb(bdb.Bdb, cmd.Cmd): if line: # now set the break point err = self.set_break(filename, line, temporary, cond, funcname) - if err: print '***', err + if err: print >>self.stdout, '***', err else: bp = self.get_breaks(filename, line)[-1] - print "Breakpoint %d at %s:%d" % (bp.number, - bp.file, - bp.line) + print >>self.stdout, "Breakpoint %d at %s:%d" % (bp.number, + bp.file, + bp.line) # To be overridden in derived debuggers def defaultFile(self): @@ -430,13 +431,13 @@ class Pdb(bdb.Bdb, cmd.Cmd): """ line = linecache.getline(filename, lineno) if not line: - print 'End of file' + print >>self.stdout, 'End of file' return 0 line = line.strip() # Don't allow setting breakpoint at a blank line if (not line or (line[0] == '#') or (line[:3] == '"""') or line[:3] == "'''"): - print '*** Blank or comment' + print >>self.stdout, '*** Blank or comment' return 0 return lineno @@ -446,11 +447,11 @@ class Pdb(bdb.Bdb, cmd.Cmd): try: i = int(i) except ValueError: - print 'Breakpoint index %r is not a number' % i + print >>self.stdout, 'Breakpoint index %r is not a number' % i continue if not (0 <= i < len(bdb.Breakpoint.bpbynumber)): - print 'No breakpoint numbered', i + print >>self.stdout, 'No breakpoint numbered', i continue bp = bdb.Breakpoint.bpbynumber[i] @@ -463,11 +464,11 @@ class Pdb(bdb.Bdb, cmd.Cmd): try: i = int(i) except ValueError: - print 'Breakpoint index %r is not a number' % i + print >>self.stdout, 'Breakpoint index %r is not a number' % i continue if not (0 <= i < len(bdb.Breakpoint.bpbynumber)): - print 'No breakpoint numbered', i + print >>self.stdout, 'No breakpoint numbered', i continue bp = bdb.Breakpoint.bpbynumber[i] @@ -486,8 +487,8 @@ class Pdb(bdb.Bdb, cmd.Cmd): if bp: bp.cond = cond if not cond: - print 'Breakpoint', bpnum, - print 'is now unconditional.' + print >>self.stdout, 'Breakpoint', bpnum, + print >>self.stdout, 'is now unconditional.' def do_ignore(self,arg): """arg is bp number followed by ignore count.""" @@ -506,10 +507,10 @@ class Pdb(bdb.Bdb, cmd.Cmd): reply = reply + '%d crossings' % count else: reply = reply + '1 crossing' - print reply + ' of breakpoint %d.' % bpnum + print >>self.stdout, reply + ' of breakpoint %d.' 
% bpnum else: - print 'Will stop next time breakpoint', - print bpnum, 'is reached.' + print >>self.stdout, 'Will stop next time breakpoint', + print >>self.stdout, bpnum, 'is reached.' def do_clear(self, arg): """Three possibilities, tried in this order: @@ -532,22 +533,28 @@ class Pdb(bdb.Bdb, cmd.Cmd): arg = arg[i+1:] try: lineno = int(arg) - except: + except ValueError: err = "Invalid line number (%s)" % arg else: err = self.clear_break(filename, lineno) - if err: print '***', err + if err: print >>self.stdout, '***', err return numberlist = arg.split() for i in numberlist: + try: + i = int(i) + except ValueError: + print >>self.stdout, 'Breakpoint index %r is not a number' % i + continue + if not (0 <= i < len(bdb.Breakpoint.bpbynumber)): - print 'No breakpoint numbered', i + print >>self.stdout, 'No breakpoint numbered', i continue err = self.clear_bpbynumber(i) if err: - print '***', err + print >>self.stdout, '***', err else: - print 'Deleted breakpoint', i + print >>self.stdout, 'Deleted breakpoint', i do_cl = do_clear # 'c' is already an abbreviation for 'continue' def do_where(self, arg): @@ -557,7 +564,7 @@ class Pdb(bdb.Bdb, cmd.Cmd): def do_up(self, arg): if self.curindex == 0: - print '*** Oldest frame' + print >>self.stdout, '*** Oldest frame' else: self.curindex = self.curindex - 1 self.curframe = self.stack[self.curindex][0] @@ -567,7 +574,7 @@ class Pdb(bdb.Bdb, cmd.Cmd): def do_down(self, arg): if self.curindex + 1 == len(self.stack): - print '*** Newest frame' + print >>self.stdout, '*** Newest frame' else: self.curindex = self.curindex + 1 self.curframe = self.stack[self.curindex][0] @@ -597,12 +604,12 @@ class Pdb(bdb.Bdb, cmd.Cmd): def do_jump(self, arg): if self.curindex + 1 != len(self.stack): - print "*** You can only jump within the bottom frame" + print >>self.stdout, "*** You can only jump within the bottom frame" return try: arg = int(arg) except ValueError: - print "*** The 'jump' command requires a line number." + print >>self.stdout, "*** The 'jump' command requires a line number." 
else: try: # Do the jump, fix up our copy of the stack, and display the @@ -611,7 +618,7 @@ class Pdb(bdb.Bdb, cmd.Cmd): self.stack[self.curindex] = self.stack[self.curindex][0], arg self.print_stack_entry(self.stack[self.curindex]) except ValueError, e: - print '*** Jump failed:', e + print >>self.stdout, '*** Jump failed:', e do_j = do_jump def do_debug(self, arg): @@ -620,9 +627,9 @@ class Pdb(bdb.Bdb, cmd.Cmd): locals = self.curframe.f_locals p = Pdb() p.prompt = "(%s) " % self.prompt.strip() - print "ENTERING RECURSIVE DEBUGGER" + print >>self.stdout, "ENTERING RECURSIVE DEBUGGER" sys.call_tracing(p.run, (arg, globals, locals)) - print "LEAVING RECURSIVE DEBUGGER" + print >>self.stdout, "LEAVING RECURSIVE DEBUGGER" sys.settrace(self.trace_dispatch) self.lastcmd = p.lastcmd @@ -635,7 +642,7 @@ class Pdb(bdb.Bdb, cmd.Cmd): do_exit = do_quit def do_EOF(self, arg): - print + print >>self.stdout self._user_requested_quit = 1 self.set_quit() return 1 @@ -649,16 +656,16 @@ class Pdb(bdb.Bdb, cmd.Cmd): if co.co_flags & 8: n = n+1 for i in range(n): name = co.co_varnames[i] - print name, '=', - if name in dict: print dict[name] - else: print "*** undefined ***" + print >>self.stdout, name, '=', + if name in dict: print >>self.stdout, dict[name] + else: print >>self.stdout, "*** undefined ***" do_a = do_args def do_retval(self, arg): if '__return__' in self.curframe.f_locals: - print self.curframe.f_locals['__return__'] + print >>self.stdout, self.curframe.f_locals['__return__'] else: - print '*** Not yet returned!' + print >>self.stdout, '*** Not yet returned!' do_rv = do_retval def _getval(self, arg): @@ -670,18 +677,18 @@ class Pdb(bdb.Bdb, cmd.Cmd): if isinstance(t, str): exc_type_name = t else: exc_type_name = t.__name__ - print '***', exc_type_name + ':', repr(v) + print >>self.stdout, '***', exc_type_name + ':', repr(v) raise def do_p(self, arg): try: - print repr(self._getval(arg)) + print >>self.stdout, repr(self._getval(arg)) except: pass def do_pp(self, arg): try: - pprint.pprint(self._getval(arg)) + pprint.pprint(self._getval(arg), self.stdout) except: pass @@ -701,7 +708,7 @@ class Pdb(bdb.Bdb, cmd.Cmd): else: first = max(1, int(x) - 5) except: - print '*** Error in argument:', repr(arg) + print >>self.stdout, '*** Error in argument:', repr(arg) return elif self.lineno is None: first = max(1, self.curframe.f_lineno - 5) @@ -715,7 +722,7 @@ class Pdb(bdb.Bdb, cmd.Cmd): for lineno in range(first, last+1): line = linecache.getline(filename, lineno) if not line: - print '[EOF]' + print >>self.stdout, '[EOF]' break else: s = repr(lineno).rjust(3) @@ -724,7 +731,7 @@ class Pdb(bdb.Bdb, cmd.Cmd): else: s = s + ' ' if lineno == self.curframe.f_lineno: s = s + '->' - print s + '\t' + line, + print >>self.stdout, s + '\t' + line, self.lineno = lineno except KeyboardInterrupt: pass @@ -739,23 +746,23 @@ class Pdb(bdb.Bdb, cmd.Cmd): if type(t) == type(''): exc_type_name = t else: exc_type_name = t.__name__ - print '***', exc_type_name + ':', repr(v) + print >>self.stdout, '***', exc_type_name + ':', repr(v) return code = None # Is it a function? try: code = value.func_code except: pass if code: - print 'Function', code.co_name + print >>self.stdout, 'Function', code.co_name return # Is it an instance method? try: code = value.im_func.func_code except: pass if code: - print 'Method', code.co_name + print >>self.stdout, 'Method', code.co_name return # None of the above... 
- print type(value) + print >>self.stdout, type(value) def do_alias(self, arg): args = arg.split() @@ -763,10 +770,10 @@ class Pdb(bdb.Bdb, cmd.Cmd): keys = self.aliases.keys() keys.sort() for alias in keys: - print "%s = %s" % (alias, self.aliases[alias]) + print >>self.stdout, "%s = %s" % (alias, self.aliases[alias]) return if args[0] in self.aliases and len(args) == 1: - print "%s = %s" % (args[0], self.aliases[args[0]]) + print >>self.stdout, "%s = %s" % (args[0], self.aliases[args[0]]) else: self.aliases[args[0]] = ' '.join(args[1:]) @@ -777,7 +784,8 @@ class Pdb(bdb.Bdb, cmd.Cmd): del self.aliases[args[0]] #list of all the commands making the program resume execution. - commands_resuming = ['do_continue', 'do_step', 'do_next', 'do_return', 'do_quit', 'do_jump'] + commands_resuming = ['do_continue', 'do_step', 'do_next', 'do_return', + 'do_quit', 'do_jump'] # Print a traceback starting at the top stack frame. # The most recently entered frame is printed last; @@ -797,10 +805,11 @@ class Pdb(bdb.Bdb, cmd.Cmd): def print_stack_entry(self, frame_lineno, prompt_prefix=line_prefix): frame, lineno = frame_lineno if frame is self.curframe: - print '>', + print >>self.stdout, '>', else: - print ' ', - print self.format_stack_entry(frame_lineno, prompt_prefix) + print >>self.stdout, ' ', + print >>self.stdout, self.format_stack_entry(frame_lineno, + prompt_prefix) # Help methods (derived from pdb.doc) @@ -809,7 +818,7 @@ class Pdb(bdb.Bdb, cmd.Cmd): self.help_h() def help_h(self): - print """h(elp) + print >>self.stdout, """h(elp) Without argument, print the list of available commands. With a command name as argument, print help about that command "help pdb" pipes the full documentation file to the $PAGER @@ -819,7 +828,7 @@ With a command name as argument, print help about that command self.help_w() def help_w(self): - print """w(here) + print >>self.stdout, """w(here) Print a stack trace, with the most recent frame at the bottom. An arrow indicates the "current frame", which determines the context of most commands. 'bt' is an alias for this command.""" @@ -830,7 +839,7 @@ context of most commands. 'bt' is an alias for this command.""" self.help_d() def help_d(self): - print """d(own) + print >>self.stdout, """d(own) Move the current frame one level down in the stack trace (to a newer frame).""" @@ -838,7 +847,7 @@ Move the current frame one level down in the stack trace self.help_u() def help_u(self): - print """u(p) + print >>self.stdout, """u(p) Move the current frame one level up in the stack trace (to an older frame).""" @@ -846,7 +855,7 @@ Move the current frame one level up in the stack trace self.help_b() def help_b(self): - print """b(reak) ([file:]lineno | function) [, condition] + print >>self.stdout, """b(reak) ([file:]lineno | function) [, condition] With a line number argument, set a break there in the current file. With a function name, set a break at first executable line of that function. Without argument, list all breaks. If a second @@ -862,8 +871,8 @@ the .py suffix may be omitted.""" self.help_cl() def help_cl(self): - print "cl(ear) filename:lineno" - print """cl(ear) [bpnumber [bpnumber...]] + print >>self.stdout, "cl(ear) filename:lineno" + print >>self.stdout, """cl(ear) [bpnumber [bpnumber...]] With a space separated list of breakpoint numbers, clear those breakpoints. Without argument, clear all breaks (but first ask confirmation). 
With a filename:lineno argument, @@ -875,21 +884,21 @@ a linenumber was used instead of either filename:lineno or breakpoint numbers.""" def help_tbreak(self): - print """tbreak same arguments as break, but breakpoint is + print >>self.stdout, """tbreak same arguments as break, but breakpoint is removed when first hit.""" def help_enable(self): - print """enable bpnumber [bpnumber ...] + print >>self.stdout, """enable bpnumber [bpnumber ...] Enables the breakpoints given as a space separated list of bp numbers.""" def help_disable(self): - print """disable bpnumber [bpnumber ...] + print >>self.stdout, """disable bpnumber [bpnumber ...] Disables the breakpoints given as a space separated list of bp numbers.""" def help_ignore(self): - print """ignore bpnumber count + print >>self.stdout, """ignore bpnumber count Sets the ignore count for the given breakpoint number. A breakpoint becomes active when the ignore count is zero. When non-zero, the count is decremented each time the breakpoint is reached and the @@ -897,7 +906,7 @@ breakpoint is not disabled and any associated condition evaluates to true.""" def help_condition(self): - print """condition bpnumber str_condition + print >>self.stdout, """condition bpnumber str_condition str_condition is a string specifying an expression which must evaluate to true before the breakpoint is honored. If str_condition is absent, any existing condition is removed; @@ -907,7 +916,7 @@ i.e., the breakpoint is made unconditional.""" self.help_s() def help_s(self): - print """s(tep) + print >>self.stdout, """s(tep) Execute the current line, stop at the first possible occasion (either in a function that is called or in the current function).""" @@ -915,7 +924,7 @@ Execute the current line, stop at the first possible occasion self.help_n() def help_n(self): - print """n(ext) + print >>self.stdout, """n(ext) Continue execution until the next line in the current function is reached or it returns.""" @@ -923,7 +932,7 @@ is reached or it returns.""" self.help_r() def help_r(self): - print """r(eturn) + print >>self.stdout, """r(eturn) Continue execution until the current function returns.""" def help_continue(self): @@ -933,18 +942,18 @@ Continue execution until the current function returns.""" self.help_c() def help_c(self): - print """c(ont(inue)) + print >>self.stdout, """c(ont(inue)) Continue execution, only stop when a breakpoint is encountered.""" def help_jump(self): self.help_j() def help_j(self): - print """j(ump) lineno + print >>self.stdout, """j(ump) lineno Set the next line that will be executed.""" def help_debug(self): - print """debug code + print >>self.stdout, """debug code Enter a recursive debugger that steps through the code argument (which is an arbitrary expression or statement to be executed in the current environment).""" @@ -953,7 +962,7 @@ in the current environment).""" self.help_l() def help_l(self): - print """l(ist) [first [,last]] + print >>self.stdout, """l(ist) [first [,last]] List source code for the current file. Without arguments, list 11 lines around the current line or continue the previous listing. 
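A rough sketch of the new Pdb constructor arguments used by the pdb.py changes above, capturing debugger output in a StringIO buffer instead of writing to sys.stdout; "help break" is chosen only because it needs no active frame:

    import pdb
    from StringIO import StringIO

    buf = StringIO()
    debugger = pdb.Pdb(stdout=buf)   # passing stdout also sets use_rawinput = 0
    debugger.onecmd("help break")    # help_break() -> help_b() writes to self.stdout
    print buf.getvalue()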
@@ -965,19 +974,19 @@ if the second argument is less than the first, it is a count.""" self.help_a() def help_a(self): - print """a(rgs) + print >>self.stdout, """a(rgs) Print the arguments of the current function.""" def help_p(self): - print """p expression + print >>self.stdout, """p expression Print the value of the expression.""" def help_pp(self): - print """pp expression + print >>self.stdout, """pp expression Pretty-print the value of the expression.""" def help_exec(self): - print """(!) statement + print >>self.stdout, """(!) statement Execute the (one-line) statement in the context of the current stack frame. The exclamation point can be omitted unless the first word @@ -991,21 +1000,21 @@ command with a 'global' command, e.g.: self.help_q() def help_q(self): - print """q(uit) or exit - Quit from the debugger. + print >>self.stdout, """q(uit) or exit - Quit from the debugger. The program being executed is aborted.""" help_exit = help_q def help_whatis(self): - print """whatis arg + print >>self.stdout, """whatis arg Prints the type of the argument.""" def help_EOF(self): - print """EOF + print >>self.stdout, """EOF Handles the receipt of EOF as a command.""" def help_alias(self): - print """alias [name [command [parameter parameter ...] ]] + print >>self.stdout, """alias [name [command [parameter parameter ...] ]] Creates an alias called 'name' the executes 'command'. The command must *not* be enclosed in quotes. Replaceable parameters are indicated by %1, %2, and so on, while %* is replaced by all the @@ -1029,11 +1038,11 @@ alias ps pi self """ def help_unalias(self): - print """unalias name + print >>self.stdout, """unalias name Deletes the specified alias.""" def help_commands(self): - print """commands [bpnumber] + print >>self.stdout, """commands [bpnumber] (com) ... (com) end (Pdb) diff --git a/Lib/pkg_resources.py b/Lib/pkg_resources.py deleted file mode 100644 index db6cc90..0000000 --- a/Lib/pkg_resources.py +++ /dev/null @@ -1,2377 +0,0 @@ -"""Package resource API --------------------- - -A resource is a logical file contained within a package, or a logical -subdirectory thereof. The package resource API expects resource names -to have their path parts separated with ``/``, *not* whatever the local -path separator is. Do not use os.path operations to manipulate resource -names being passed into the API. - -The package resource API is designed to work with normal filesystem packages, -.egg files, and unpacked .egg files. It can also work in a limited way with -.zip files and with custom PEP 302 loaders that support the ``get_data()`` -method. -""" - -import sys, os, zipimport, time, re, imp, new, pkgutil # XXX -from sets import ImmutableSet -from os import utime, rename, unlink # capture these to bypass sandboxing -from os import open as os_open - -def get_supported_platform(): - """Return this platform's maximum compatible version. - - distutils.util.get_platform() normally reports the minimum version - of Mac OS X that would be required to *use* extensions produced by - distutils. But what we want when checking compatibility is to know the - version of Mac OS X that we are *running*. To allow usage of packages that - explicitly require a newer version of Mac OS X, we must also know the - current version of the OS. - - If this condition occurs for any other platform with a version in its - platform strings, this function should be extended accordingly. 
- """ - plat = get_build_platform(); m = macosVersionString.match(plat) - if m is not None and sys.platform == "darwin": - try: - plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3)) - except ValueError: - pass # not Mac OS X - return plat - -__all__ = [ - # Basic resource access and distribution/entry point discovery - 'require', 'run_script', 'get_provider', 'get_distribution', - 'load_entry_point', 'get_entry_map', 'get_entry_info', 'iter_entry_points', - 'resource_string', 'resource_stream', 'resource_filename', - 'resource_listdir', 'resource_exists', 'resource_isdir', - - # Environmental control - 'declare_namespace', 'working_set', 'add_activation_listener', - 'find_distributions', 'set_extraction_path', 'cleanup_resources', - 'get_default_cache', - - # Primary implementation classes - 'Environment', 'WorkingSet', 'ResourceManager', - 'Distribution', 'Requirement', 'EntryPoint', - - # Exceptions - 'ResolutionError','VersionConflict','DistributionNotFound','UnknownExtra', - 'ExtractionError', - - # Parsing functions and string utilities - 'parse_requirements', 'parse_version', 'safe_name', 'safe_version', - 'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections', - 'safe_extra', 'to_filename', - - # filesystem utilities - 'ensure_directory', 'normalize_path', - - # Distribution "precedence" constants - 'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST', - - # "Provider" interfaces, implementations, and registration/lookup APIs - 'IMetadataProvider', 'IResourceProvider', 'FileMetadata', - 'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider', - 'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider', - 'register_finder', 'register_namespace_handler', 'register_loader_type', - 'fixup_namespace_packages', 'get_importer', - - # Deprecated/backward compatibility only - 'run_main', 'AvailableDistributions', -] -class ResolutionError(Exception): - """Abstract base for dependency resolution errors""" - def __repr__(self): - return self.__class__.__name__+repr(self.args) - -class VersionConflict(ResolutionError): - """An already-installed version conflicts with the requested version""" - -class DistributionNotFound(ResolutionError): - """A requested distribution was not found""" - -class UnknownExtra(ResolutionError): - """Distribution doesn't have an "extra feature" of the given name""" - -_provider_factories = {} -PY_MAJOR = sys.version[:3] -EGG_DIST = 3 -BINARY_DIST = 2 -SOURCE_DIST = 1 -CHECKOUT_DIST = 0 -DEVELOP_DIST = -1 - -def register_loader_type(loader_type, provider_factory): - """Register `provider_factory` to make providers for `loader_type` - - `loader_type` is the type or class of a PEP 302 ``module.__loader__``, - and `provider_factory` is a function that, passed a *module* object, - returns an ``IResourceProvider`` for that module. 
- """ - _provider_factories[loader_type] = provider_factory - -def get_provider(moduleOrReq): - """Return an IResourceProvider for the named module or requirement""" - if isinstance(moduleOrReq,Requirement): - return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0] - try: - module = sys.modules[moduleOrReq] - except KeyError: - __import__(moduleOrReq) - module = sys.modules[moduleOrReq] - loader = getattr(module, '__loader__', None) - return _find_adapter(_provider_factories, loader)(module) - -def _macosx_vers(_cache=[]): - if not _cache: - info = os.popen('/usr/bin/sw_vers').read().splitlines() - for line in info: - key, value = line.split(None, 1) - if key == 'ProductVersion:': - _cache.append(value.strip().split(".")) - break - else: - raise ValueError, "What?!" - return _cache[0] - -def _macosx_arch(machine): - return {'PowerPC':'ppc', 'Power_Macintosh':'ppc'}.get(machine,machine) - -def get_build_platform(): - """Return this platform's string for platform-specific distributions - - XXX Currently this is the same as ``distutils.util.get_platform()``, but it - needs some hacks for Linux and Mac OS X. - """ - from distutils.util import get_platform - plat = get_platform() - if sys.platform == "darwin" and not plat.startswith('macosx-'): - try: - version = _macosx_vers() - machine = os.uname()[4].replace(" ", "_") - return "macosx-%d.%d-%s" % (int(version[0]), int(version[1]), - _macosx_arch(machine)) - except ValueError: - # if someone is running a non-Mac darwin system, this will fall - # through to the default implementation - pass - return plat - -macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)") -darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)") -get_platform = get_build_platform # XXX backward compat - -def compatible_platforms(provided,required): - """Can code for the `provided` platform run on the `required` platform? - - Returns true if either platform is ``None``, or the platforms are equal. - - XXX Needs compatibility checks for Linux and other unixy OSes. - """ - if provided is None or required is None or provided==required: - return True # easy case - - # Mac OS X special cases - reqMac = macosVersionString.match(required) - if reqMac: - provMac = macosVersionString.match(provided) - - # is this a Mac package? - if not provMac: - # this is backwards compatibility for packages built before - # setuptools 0.6. All packages built after this point will - # use the new macosx designation. - provDarwin = darwinVersionString.match(provided) - if provDarwin: - dversion = int(provDarwin.group(1)) - macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2)) - if dversion == 7 and macosversion >= "10.3" or \ - dversion == 8 and macosversion >= "10.4": - - #import warnings - #warnings.warn("Mac eggs should be rebuilt to " - # "use the macosx designation instead of darwin.", - # category=DeprecationWarning) - return True - return False # egg isn't macosx or legacy darwin - - # are they the same major version and machine type? - if provMac.group(1) != reqMac.group(1) or \ - provMac.group(3) != reqMac.group(3): - return False - - - - # is the required OS major update >= the provided one? 
- if int(provMac.group(2)) > int(reqMac.group(2)): - return False - - return True - - # XXX Linux and other platforms' special cases should go here - return False - - -def run_script(dist_spec, script_name): - """Locate distribution `dist_spec` and run its `script_name` script""" - ns = sys._getframe(1).f_globals - name = ns['__name__'] - ns.clear() - ns['__name__'] = name - require(dist_spec)[0].run_script(script_name, ns) - -run_main = run_script # backward compatibility - -def get_distribution(dist): - """Return a current distribution object for a Requirement or string""" - if isinstance(dist,basestring): dist = Requirement.parse(dist) - if isinstance(dist,Requirement): dist = get_provider(dist) - if not isinstance(dist,Distribution): - raise TypeError("Expected string, Requirement, or Distribution", dist) - return dist - -def load_entry_point(dist, group, name): - """Return `name` entry point of `group` for `dist` or raise ImportError""" - return get_distribution(dist).load_entry_point(group, name) - -def get_entry_map(dist, group=None): - """Return the entry point map for `group`, or the full entry map""" - return get_distribution(dist).get_entry_map(group) - -def get_entry_info(dist, group, name): - """Return the EntryPoint object for `group`+`name`, or ``None``""" - return get_distribution(dist).get_entry_info(group, name) - - -try: - from pkgutil import get_importer -except ImportError: - import _pkgutil as pkgutil - get_importer = pkgutil.get_importer -else: - import pkgutil - - -class IMetadataProvider: - - def has_metadata(name): - """Does the package's distribution contain the named metadata?""" - - def get_metadata(name): - """The named metadata resource as a string""" - - def get_metadata_lines(name): - """Yield named metadata resource as list of non-blank non-comment lines - - Leading and trailing whitespace is stripped from each line, and lines - with ``#`` as the first non-blank character are omitted.""" - - def metadata_isdir(name): - """Is the named metadata a directory? (like ``os.path.isdir()``)""" - - def metadata_listdir(name): - """List of metadata names in the directory (like ``os.listdir()``)""" - - def run_script(script_name, namespace): - """Execute the named script in the supplied namespace dictionary""" - - - - - - - - - - -class IResourceProvider(IMetadataProvider): - """An object that provides access to package resources""" - - def get_resource_filename(manager, resource_name): - """Return a true filesystem path for `resource_name` - - `manager` must be an ``IResourceManager``""" - - def get_resource_stream(manager, resource_name): - """Return a readable file-like object for `resource_name` - - `manager` must be an ``IResourceManager``""" - - def get_resource_string(manager, resource_name): - """Return a string containing the contents of `resource_name` - - `manager` must be an ``IResourceManager``""" - - def has_resource(resource_name): - """Does the package contain the named resource?""" - - def resource_isdir(resource_name): - """Is the named resource a directory? 
(like ``os.path.isdir()``)""" - - def resource_listdir(resource_name): - """List of resource names in the directory (like ``os.listdir()``)""" - - - - - - - - - - - - - - - -class WorkingSet(object): - """A collection of active distributions on sys.path (or a similar list)""" - - def __init__(self, entries=None): - """Create working set from list of path entries (default=sys.path)""" - self.entries = [] - self.entry_keys = {} - self.by_key = {} - self.callbacks = [] - - if entries is None: - entries = sys.path - - for entry in entries: - self.add_entry(entry) - - - def add_entry(self, entry): - """Add a path item to ``.entries``, finding any distributions on it - - ``find_distributions(entry,False)`` is used to find distributions - corresponding to the path entry, and they are added. `entry` is - always appended to ``.entries``, even if it is already present. - (This is because ``sys.path`` can contain the same value more than - once, and the ``.entries`` of the ``sys.path`` WorkingSet should always - equal ``sys.path``.) - """ - self.entry_keys.setdefault(entry, []) - self.entries.append(entry) - for dist in find_distributions(entry, True): - self.add(dist, entry, False) - - - def __contains__(self,dist): - """True if `dist` is the active distribution for its project""" - return self.by_key.get(dist.key) == dist - - - - - - def find(self, req): - """Find a distribution matching requirement `req` - - If there is an active distribution for the requested project, this - returns it as long as it meets the version requirement specified by - `req`. But, if there is an active distribution for the project and it - does *not* meet the `req` requirement, ``VersionConflict`` is raised. - If there is no active distribution for the requested project, ``None`` - is returned. - """ - dist = self.by_key.get(req.key) - if dist is not None and dist not in req: - raise VersionConflict(dist,req) # XXX add more info - else: - return dist - - def iter_entry_points(self, group, name=None): - """Yield entry point objects from `group` matching `name` - - If `name` is None, yields all entry points in `group` from all - distributions in the working set, otherwise only ones matching - both `group` and `name` are yielded (in distribution order). - """ - for dist in self: - entries = dist.get_entry_map(group) - if name is None: - for ep in entries.values(): - yield ep - elif name in entries: - yield entries[name] - - def run_script(self, requires, script_name): - """Locate distribution for `requires` and run `script_name` script""" - ns = sys._getframe(1).f_globals - name = ns['__name__'] - ns.clear() - ns['__name__'] = name - self.require(requires)[0].run_script(script_name, ns) - - - - def __iter__(self): - """Yield distributions for non-duplicate projects in the working set - - The yield order is the order in which the items' path entries were - added to the working set. - """ - seen = {} - for item in self.entries: - for key in self.entry_keys[item]: - if key not in seen: - seen[key]=1 - yield self.by_key[key] - - def add(self, dist, entry=None, insert=True): - """Add `dist` to working set, associated with `entry` - - If `entry` is unspecified, it defaults to the ``.location`` of `dist`. - On exit from this routine, `entry` is added to the end of the working - set's ``.entries`` (if it wasn't already present). - - `dist` is only added to the working set if it's for a project that - doesn't already have a distribution in the set. 
If it's added, any - callbacks registered with the ``subscribe()`` method will be called. - """ - if insert: - dist.insert_on(self.entries, entry) - - if entry is None: - entry = dist.location - keys = self.entry_keys.setdefault(entry,[]) - - if dist.key in self.by_key: - return # ignore hidden distros - - self.by_key[dist.key] = dist - if dist.key not in keys: - keys.append(dist.key) - - self._added_new(dist) - - - def resolve(self, requirements, env=None, installer=None): - """List all distributions needed to (recursively) meet `requirements` - - `requirements` must be a sequence of ``Requirement`` objects. `env`, - if supplied, should be an ``Environment`` instance. If - not supplied, it defaults to all distributions available within any - entry or distribution in the working set. `installer`, if supplied, - will be invoked with each requirement that cannot be met by an - already-installed distribution; it should return a ``Distribution`` or - ``None``. - """ - - requirements = list(requirements)[::-1] # set up the stack - processed = {} # set of processed requirements - best = {} # key -> dist - to_activate = [] - - while requirements: - req = requirements.pop(0) # process dependencies breadth-first - if req in processed: - # Ignore cyclic or redundant dependencies - continue - dist = best.get(req.key) - if dist is None: - # Find the best distribution and add it to the map - dist = self.by_key.get(req.key) - if dist is None: - if env is None: - env = Environment(self.entries) - dist = best[req.key] = env.best_match(req, self, installer) - if dist is None: - raise DistributionNotFound(req) # XXX put more info here - to_activate.append(dist) - if dist not in req: - # Oops, the "best" so far conflicts with a dependency - raise VersionConflict(dist,req) # XXX put more info here - requirements.extend(dist.requires(req.extras)[::-1]) - processed[req] = True - - return to_activate # return list of distros to activate - - def find_plugins(self, - plugin_env, full_env=None, installer=None, fallback=True - ): - """Find all activatable distributions in `plugin_env` - - Example usage:: - - distributions, errors = working_set.find_plugins( - Environment(plugin_dirlist) - ) - map(working_set.add, distributions) # add plugins+libs to sys.path - print "Couldn't load", errors # display errors - - The `plugin_env` should be an ``Environment`` instance that contains - only distributions that are in the project's "plugin directory" or - directories. The `full_env`, if supplied, should be an ``Environment`` that - contains all currently-available distributions. If `full_env` is not - supplied, one is created automatically from the ``WorkingSet`` this - method is called on, which will typically mean that every directory on - ``sys.path`` will be scanned for distributions. - - `installer` is a standard installer callback as used by the - ``resolve()`` method. The `fallback` flag indicates whether we should - attempt to resolve older versions of a plugin if the newest version - cannot be resolved. - - This method returns a 2-tuple: (`distributions`, `error_info`), where - `distributions` is a list of the distributions found in `plugin_env` - that were loadable, along with any other distributions that are needed - to resolve their dependencies. `error_info` is a dictionary mapping - unloadable plugin distributions to an exception instance describing the - error that occurred. Usually this will be a ``DistributionNotFound`` or - ``VersionConflict`` instance.
- """ - - plugin_projects = list(plugin_env) - plugin_projects.sort() # scan project names in alphabetic order - - error_info = {} - distributions = {} - - if full_env is None: - env = Environment(self.entries) - env += plugin_env - else: - env = full_env + plugin_env - - shadow_set = self.__class__([]) - map(shadow_set.add, self) # put all our entries in shadow_set - - for project_name in plugin_projects: - - for dist in plugin_env[project_name]: - - req = [dist.as_requirement()] - - try: - resolvees = shadow_set.resolve(req, env, installer) - - except ResolutionError,v: - error_info[dist] = v # save error info - if fallback: - continue # try the next older version of project - else: - break # give up on this project, keep going - - else: - map(shadow_set.add, resolvees) - distributions.update(dict.fromkeys(resolvees)) - - # success, no need to try any more versions of this project - break - - distributions = list(distributions) - distributions.sort() - - return distributions, error_info - - - - - - def require(self, *requirements): - """Ensure that distributions matching `requirements` are activated - - `requirements` must be a string or a (possibly-nested) sequence - thereof, specifying the distributions and versions required. The - return value is a sequence of the distributions that needed to be - activated to fulfill the requirements; all relevant distributions are - included, even if they were already activated in this working set. - """ - - needed = self.resolve(parse_requirements(requirements)) - - for dist in needed: - self.add(dist) - - return needed - - - def subscribe(self, callback): - """Invoke `callback` for all distributions (including existing ones)""" - if callback in self.callbacks: - return - self.callbacks.append(callback) - for dist in self: - callback(dist) - - - def _added_new(self, dist): - for callback in self.callbacks: - callback(dist) - - - - - - - - - - - -class Environment(object): - """Searchable snapshot of distributions on a search path""" - - def __init__(self, search_path=None, platform=get_supported_platform(), python=PY_MAJOR): - """Snapshot distributions available on a search path - - Any distributions found on `search_path` are added to the environment. - `search_path` should be a sequence of ``sys.path`` items. If not - supplied, ``sys.path`` is used. - - `platform` is an optional string specifying the name of the platform - that platform-specific distributions must be compatible with. If - unspecified, it defaults to the current platform. `python` is an - optional string naming the desired version of Python (e.g. ``'2.4'``); - it defaults to the current version. - - You may explicitly set `platform` (and/or `python`) to ``None`` if you - wish to map *all* distributions, not just those compatible with the - running platform or Python version. - """ - self._distmap = {} - self._cache = {} - self.platform = platform - self.python = python - self.scan(search_path) - - def can_add(self, dist): - """Is distribution `dist` acceptable for this environment? - - The distribution must match the platform and python version - requirements specified when this environment was created, or False - is returned. 
- """ - return (self.python is None or dist.py_version is None - or dist.py_version==self.python) \ - and compatible_platforms(dist.platform,self.platform) - - def remove(self, dist): - """Remove `dist` from the environment""" - self._distmap[dist.key].remove(dist) - - def scan(self, search_path=None): - """Scan `search_path` for distributions usable in this environment - - Any distributions found are added to the environment. - `search_path` should be a sequence of ``sys.path`` items. If not - supplied, ``sys.path`` is used. Only distributions conforming to - the platform/python version defined at initialization are added. - """ - if search_path is None: - search_path = sys.path - - for item in search_path: - for dist in find_distributions(item): - self.add(dist) - - def __getitem__(self,project_name): - """Return a newest-to-oldest list of distributions for `project_name` - """ - try: - return self._cache[project_name] - except KeyError: - project_name = project_name.lower() - if project_name not in self._distmap: - return [] - - if project_name not in self._cache: - dists = self._cache[project_name] = self._distmap[project_name] - _sort_dists(dists) - - return self._cache[project_name] - - def add(self,dist): - """Add `dist` if we ``can_add()`` it and it isn't already added""" - if self.can_add(dist) and dist.has_version(): - dists = self._distmap.setdefault(dist.key,[]) - if dist not in dists: - dists.append(dist) - if dist.key in self._cache: - _sort_dists(self._cache[dist.key]) - - - def best_match(self, req, working_set, installer=None): - """Find distribution best matching `req` and usable on `working_set` - - This calls the ``find(req)`` method of the `working_set` to see if a - suitable distribution is already active. (This may raise - ``VersionConflict`` if an unsuitable version of the project is already - active in the specified `working_set`.) If a suitable distribution - isn't active, this method returns the newest distribution in the - environment that meets the ``Requirement`` in `req`. If no suitable - distribution is found, and `installer` is supplied, then the result of - calling the environment's ``obtain(req, installer)`` method will be - returned. - """ - dist = working_set.find(req) - if dist is not None: - return dist - for dist in self[req.key]: - if dist in req: - return dist - return self.obtain(req, installer) # try and download/install - - def obtain(self, requirement, installer=None): - """Obtain a distribution matching `requirement` (e.g. via download) - - Obtain a distro that matches requirement (e.g. via download). In the - base ``Environment`` class, this routine just returns - ``installer(requirement)``, unless `installer` is None, in which case - None is returned instead. 
This method is a hook that allows subclasses - to attempt other ways of obtaining a distribution before falling back - to the `installer` argument.""" - if installer is not None: - return installer(requirement) - - def __iter__(self): - """Yield the unique project names of the available distributions""" - for key in self._distmap.keys(): - if self[key]: yield key - - - - - def __iadd__(self, other): - """In-place addition of a distribution or environment""" - if isinstance(other,Distribution): - self.add(other) - elif isinstance(other,Environment): - for project in other: - for dist in other[project]: - self.add(dist) - else: - raise TypeError("Can't add %r to environment" % (other,)) - return self - - def __add__(self, other): - """Add an environment or distribution to an environment""" - new = self.__class__([], platform=None, python=None) - for env in self, other: - new += env - return new - - -AvailableDistributions = Environment # XXX backward compatibility - - -class ExtractionError(RuntimeError): - """An error occurred extracting a resource - - The following attributes are available from instances of this exception: - - manager - The resource manager that raised this exception - - cache_path - The base directory for resource extraction - - original_error - The exception instance that caused extraction to fail - """ - - - - -class ResourceManager: - """Manage resource extraction and packages""" - extraction_path = None - - def __init__(self): - self.cached_files = {} - - def resource_exists(self, package_or_requirement, resource_name): - """Does the named resource exist?""" - return get_provider(package_or_requirement).has_resource(resource_name) - - def resource_isdir(self, package_or_requirement, resource_name): - """Is the named resource an existing directory?""" - return get_provider(package_or_requirement).resource_isdir( - resource_name - ) - - def resource_filename(self, package_or_requirement, resource_name): - """Return a true filesystem path for specified resource""" - return get_provider(package_or_requirement).get_resource_filename( - self, resource_name - ) - - def resource_stream(self, package_or_requirement, resource_name): - """Return a readable file-like object for specified resource""" - return get_provider(package_or_requirement).get_resource_stream( - self, resource_name - ) - - def resource_string(self, package_or_requirement, resource_name): - """Return specified resource as a string""" - return get_provider(package_or_requirement).get_resource_string( - self, resource_name - ) - - def resource_listdir(self, package_or_requirement, resource_name): - """List the contents of the named resource directory""" - return get_provider(package_or_requirement).resource_listdir( - resource_name - ) - - def extraction_error(self): - """Give an error message for problems extracting file(s)""" - - old_exc = sys.exc_info()[1] - cache_path = self.extraction_path or get_default_cache() - - err = ExtractionError("""Can't extract file(s) to egg cache - -The following error occurred while trying to extract file(s) to the Python egg -cache: - - %s - -The Python egg cache directory is currently set to: - - %s - -Perhaps your account does not have write access to this directory? You can -change the cache directory by setting the PYTHON_EGG_CACHE environment -variable to point to an accessible directory. 
-""" % (old_exc, cache_path) - ) - err.manager = self - err.cache_path = cache_path - err.original_error = old_exc - raise err - - - - - - - - - - - - - - - - def get_cache_path(self, archive_name, names=()): - """Return absolute location in cache for `archive_name` and `names` - - The parent directory of the resulting path will be created if it does - not already exist. `archive_name` should be the base filename of the - enclosing egg (which may not be the name of the enclosing zipfile!), - including its ".egg" extension. `names`, if provided, should be a - sequence of path name parts "under" the egg's extraction location. - - This method should only be called by resource providers that need to - obtain an extraction location, and only for names they intend to - extract, as it tracks the generated names for possible cleanup later. - """ - extract_path = self.extraction_path or get_default_cache() - target_path = os.path.join(extract_path, archive_name+'-tmp', *names) - try: - ensure_directory(target_path) - except: - self.extraction_error() - - self.cached_files[target_path] = 1 - return target_path - - - def postprocess(self, tempname, filename): - """Perform any platform-specific postprocessing of `tempname` - - This is where Mac header rewrites should be done; other platforms don't - have anything special they should do. - - Resource providers should call this method ONLY after successfully - extracting a compressed resource. They must NOT call it on resources - that are already in the filesystem. - - `tempname` is the current (temporary) name of the file, and `filename` - is the name it will be renamed to by the caller after this routine - returns. - """ - # XXX - - - def set_extraction_path(self, path): - """Set the base path where resources will be extracted to, if needed. - - If you do not call this routine before any extractions take place, the - path defaults to the return value of ``get_default_cache()``. (Which - is based on the ``PYTHON_EGG_CACHE`` environment variable, with various - platform-specific fallbacks. See that routine's documentation for more - details.) - - Resources are extracted to subdirectories of this path based upon - information given by the ``IResourceProvider``. You may set this to a - temporary directory, but then you must call ``cleanup_resources()`` to - delete the extracted files when done. There is no guarantee that - ``cleanup_resources()`` will be able to remove all extracted files. - - (Note: you may not change the extraction path for a given resource - manager once resources have been extracted, unless you first call - ``cleanup_resources()``.) - """ - if self.cached_files: - raise ValueError( - "Can't change extraction path, files already extracted" - ) - - self.extraction_path = path - - def cleanup_resources(self, force=False): - """ - Delete all extracted resource files and directories, returning a list - of the file and directory names that could not be successfully removed. - This function does not have any concurrency protection, so it should - generally only be called when the extraction path is a temporary - directory exclusive to a single process. This method is not - automatically called; you must call it explicitly or register it as an - ``atexit`` function if you wish to ensure cleanup of a temporary - directory used for extractions. - """ - # XXX - - - -def get_default_cache(): - """Determine the default cache location - - This returns the ``PYTHON_EGG_CACHE`` environment variable, if set. 
- Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of the - "Application Data" directory. On all other systems, it's "~/.python-eggs". - """ - try: - return os.environ['PYTHON_EGG_CACHE'] - except KeyError: - pass - - if os.name!='nt': - return os.path.expanduser('~/.python-eggs') - - app_data = 'Application Data' # XXX this may be locale-specific! - app_homes = [ - (('APPDATA',), None), # best option, should be locale-safe - (('USERPROFILE',), app_data), - (('HOMEDRIVE','HOMEPATH'), app_data), - (('HOMEPATH',), app_data), - (('HOME',), None), - (('WINDIR',), app_data), # 95/98/ME - ] - - for keys, subdir in app_homes: - dirname = '' - for key in keys: - if key in os.environ: - dirname = os.path.join(os.environ[key]) - else: - break - else: - if subdir: - dirname = os.path.join(dirname,subdir) - return os.path.join(dirname, 'Python-Eggs') - else: - raise RuntimeError( - "Please set the PYTHON_EGG_CACHE environment variable" - ) - -def safe_name(name): - """Convert an arbitrary string to a standard distribution name - - Any runs of non-alphanumeric/. characters are replaced with a single '-'. - """ - return re.sub('[^A-Za-z0-9.]+', '-', name) - - -def safe_version(version): - """Convert an arbitrary string to a standard version string - - Spaces become dots, and all other non-alphanumeric characters become - dashes, with runs of multiple dashes condensed to a single dash. - """ - version = version.replace(' ','.') - return re.sub('[^A-Za-z0-9.]+', '-', version) - - -def safe_extra(extra): - """Convert an arbitrary string to a standard 'extra' name - - Any runs of non-alphanumeric characters are replaced with a single '_', - and the result is always lowercased. - """ - return re.sub('[^A-Za-z0-9.]+', '_', extra).lower() - - -def to_filename(name): - """Convert a project or version name to its filename-escaped form - - Any '-' characters are currently replaced with '_'.
- """ - return name.replace('-','_') - - - - - - - - -class NullProvider: - """Try to implement resources and metadata for arbitrary PEP 302 loaders""" - - egg_name = None - egg_info = None - loader = None - - def __init__(self, module): - self.loader = getattr(module, '__loader__', None) - self.module_path = os.path.dirname(getattr(module, '__file__', '')) - - def get_resource_filename(self, manager, resource_name): - return self._fn(self.module_path, resource_name) - - def get_resource_stream(self, manager, resource_name): - return StringIO(self.get_resource_string(manager, resource_name)) - - def get_resource_string(self, manager, resource_name): - return self._get(self._fn(self.module_path, resource_name)) - - def has_resource(self, resource_name): - return self._has(self._fn(self.module_path, resource_name)) - - def has_metadata(self, name): - return self.egg_info and self._has(self._fn(self.egg_info,name)) - - def get_metadata(self, name): - if not self.egg_info: - return "" - return self._get(self._fn(self.egg_info,name)) - - def get_metadata_lines(self, name): - return yield_lines(self.get_metadata(name)) - - def resource_isdir(self,resource_name): - return self._isdir(self._fn(self.module_path, resource_name)) - - def metadata_isdir(self,name): - return self.egg_info and self._isdir(self._fn(self.egg_info,name)) - - - def resource_listdir(self,resource_name): - return self._listdir(self._fn(self.module_path,resource_name)) - - def metadata_listdir(self,name): - if self.egg_info: - return self._listdir(self._fn(self.egg_info,name)) - return [] - - def run_script(self,script_name,namespace): - script = 'scripts/'+script_name - if not self.has_metadata(script): - raise ResolutionError("No script named %r" % script_name) - script_text = self.get_metadata(script).replace('\r\n','\n') - script_text = script_text.replace('\r','\n') - script_filename = self._fn(self.egg_info,script) - namespace['__file__'] = script_filename - if os.path.exists(script_filename): - execfile(script_filename, namespace, namespace) - else: - from linecache import cache - cache[script_filename] = ( - len(script_text), 0, script_text.split('\n'), script_filename - ) - script_code = compile(script_text,script_filename,'exec') - exec script_code in namespace, namespace - - def _has(self, path): - raise NotImplementedError( - "Can't perform this operation for unregistered loader type" - ) - - def _isdir(self, path): - raise NotImplementedError( - "Can't perform this operation for unregistered loader type" - ) - - def _listdir(self, path): - raise NotImplementedError( - "Can't perform this operation for unregistered loader type" - ) - - def _fn(self, base, resource_name): - return os.path.join(base, *resource_name.split('/')) - - def _get(self, path): - if hasattr(self.loader, 'get_data'): - return self.loader.get_data(path) - raise NotImplementedError( - "Can't perform this operation for loaders without 'get_data()'" - ) - -register_loader_type(object, NullProvider) - - -class EggProvider(NullProvider): - """Provider based on a virtual filesystem""" - - def __init__(self,module): - NullProvider.__init__(self,module) - self._setup_prefix() - - def _setup_prefix(self): - # we assume here that our metadata may be nested inside a "basket" - # of multiple eggs; that's why we use module_path instead of .archive - path = self.module_path - old = None - while path!=old: - if path.lower().endswith('.egg'): - self.egg_name = os.path.basename(path) - self.egg_info = os.path.join(path, 'EGG-INFO') - self.egg_root = path - 
break - old = path - path, base = os.path.split(path) - - - - - - - - -class DefaultProvider(EggProvider): - """Provides access to package resources in the filesystem""" - - def _has(self, path): - return os.path.exists(path) - - def _isdir(self,path): - return os.path.isdir(path) - - def _listdir(self,path): - return os.listdir(path) - - def get_resource_stream(self, manager, resource_name): - return open(self._fn(self.module_path, resource_name), 'rb') - - def _get(self, path): - stream = open(path, 'rb') - try: - return stream.read() - finally: - stream.close() - -register_loader_type(type(None), DefaultProvider) - - -class EmptyProvider(NullProvider): - """Provider that returns nothing for all requests""" - - _isdir = _has = lambda self,path: False - _get = lambda self,path: '' - _listdir = lambda self,path: [] - module_path = None - - def __init__(self): - pass - -empty_provider = EmptyProvider() - - - - -class ZipProvider(EggProvider): - """Resource support for zips and eggs""" - - eagers = None - - def __init__(self, module): - EggProvider.__init__(self,module) - self.zipinfo = zipimport._zip_directory_cache[self.loader.archive] - self.zip_pre = self.loader.archive+os.sep - - def _zipinfo_name(self, fspath): - # Convert a virtual filename (full path to file) into a zipfile subpath - # usable with the zipimport directory cache for our target archive - if fspath.startswith(self.zip_pre): - return fspath[len(self.zip_pre):] - raise AssertionError( - "%s is not a subpath of %s" % (fspath,self.zip_pre) - ) - - def _parts(self,zip_path): - # Convert a zipfile subpath into an egg-relative path part list - fspath = self.zip_pre+zip_path # pseudo-fs path - if fspath.startswith(self.egg_root+os.sep): - return fspath[len(self.egg_root)+1:].split(os.sep) - raise AssertionError( - "%s is not a subpath of %s" % (fspath,self.egg_root) - ) - - def get_resource_filename(self, manager, resource_name): - if not self.egg_name: - raise NotImplementedError( - "resource_filename() only supported for .egg, not .zip" - ) - # no need to lock for extraction, since we use temp names - zip_path = self._resource_to_zip(resource_name) - eagers = self._get_eager_resources() - if '/'.join(self._parts(zip_path)) in eagers: - for name in eagers: - self._extract_resource(manager, self._eager_to_zip(name)) - return self._extract_resource(manager, zip_path) - - def _extract_resource(self, manager, zip_path): - - if zip_path in self._index(): - for name in self._index()[zip_path]: - last = self._extract_resource( - manager, os.path.join(zip_path, name) - ) - return os.path.dirname(last) # return the extracted directory name - - zip_stat = self.zipinfo[zip_path] - t,d,size = zip_stat[5], zip_stat[6], zip_stat[3] - date_time = ( - (d>>9)+1980, (d>>5)&0xF, d&0x1F, # ymd - (t&0xFFFF)>>11, (t>>5)&0x3F, (t&0x1F) * 2, 0, 0, -1 # hms, etc. 
- ) - timestamp = time.mktime(date_time) - - try: - real_path = manager.get_cache_path( - self.egg_name, self._parts(zip_path) - ) - - if os.path.isfile(real_path): - stat = os.stat(real_path) - if stat.st_size==size and stat.st_mtime==timestamp: - # size and stamp match, don't bother extracting - return real_path - - outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path)) - os.write(outf, self.loader.get_data(zip_path)) - os.close(outf) - utime(tmpnam, (timestamp,timestamp)) - manager.postprocess(tmpnam, real_path) - - try: - rename(tmpnam, real_path) - - except os.error: - if os.path.isfile(real_path): - stat = os.stat(real_path) - - if stat.st_size==size and stat.st_mtime==timestamp: - # size and stamp match, somebody did it just ahead of - # us, so we're done - return real_path - elif os.name=='nt': # Windows, del old file and retry - unlink(real_path) - rename(tmpnam, real_path) - return real_path - raise - - except os.error: - manager.extraction_error() # report a user-friendly error - - return real_path - - def _get_eager_resources(self): - if self.eagers is None: - eagers = [] - for name in ('native_libs.txt', 'eager_resources.txt'): - if self.has_metadata(name): - eagers.extend(self.get_metadata_lines(name)) - self.eagers = eagers - return self.eagers - - def _index(self): - try: - return self._dirindex - except AttributeError: - ind = {} - for path in self.zipinfo: - parts = path.split(os.sep) - while parts: - parent = os.sep.join(parts[:-1]) - if parent in ind: - ind[parent].append(parts[-1]) - break - else: - ind[parent] = [parts.pop()] - self._dirindex = ind - return ind - - def _has(self, fspath): - zip_path = self._zipinfo_name(fspath) - return zip_path in self.zipinfo or zip_path in self._index() - - def _isdir(self,fspath): - return self._zipinfo_name(fspath) in self._index() - - def _listdir(self,fspath): - return list(self._index().get(self._zipinfo_name(fspath), ())) - - def _eager_to_zip(self,resource_name): - return self._zipinfo_name(self._fn(self.egg_root,resource_name)) - - def _resource_to_zip(self,resource_name): - return self._zipinfo_name(self._fn(self.module_path,resource_name)) - -register_loader_type(zipimport.zipimporter, ZipProvider) - - - - - - - - - - - - - - - - - - - - - - - - -class FileMetadata(EmptyProvider): - """Metadata handler for standalone PKG-INFO files - - Usage:: - - metadata = FileMetadata("/path/to/PKG-INFO") - - This provider rejects all data and metadata requests except for PKG-INFO, - which is treated as existing, and will be the contents of the file at - the provided location. 
- """ - - def __init__(self,path): - self.path = path - - def has_metadata(self,name): - return name=='PKG-INFO' - - def get_metadata(self,name): - if name=='PKG-INFO': - return open(self.path,'rU').read() - raise KeyError("No metadata except PKG-INFO is available") - - def get_metadata_lines(self,name): - return yield_lines(self.get_metadata(name)) - - - - - - - - - - - - - - - - -class PathMetadata(DefaultProvider): - """Metadata provider for egg directories - - Usage:: - - # Development eggs: - - egg_info = "/path/to/PackageName.egg-info" - base_dir = os.path.dirname(egg_info) - metadata = PathMetadata(base_dir, egg_info) - dist_name = os.path.splitext(os.path.basename(egg_info))[0] - dist = Distribution(basedir,project_name=dist_name,metadata=metadata) - - # Unpacked egg directories: - - egg_path = "/path/to/PackageName-ver-pyver-etc.egg" - metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO')) - dist = Distribution.from_filename(egg_path, metadata=metadata) - """ - def __init__(self, path, egg_info): - self.module_path = path - self.egg_info = egg_info - - -class EggMetadata(ZipProvider): - """Metadata provider for .egg files""" - - def __init__(self, importer): - """Create a metadata provider from a zipimporter""" - - self.zipinfo = zipimport._zip_directory_cache[importer.archive] - self.zip_pre = importer.archive+os.sep - self.loader = importer - if importer.prefix: - self.module_path = os.path.join(importer.archive, importer.prefix) - else: - self.module_path = importer.archive - self._setup_prefix() - - - -_distribution_finders = {} - -def register_finder(importer_type, distribution_finder): - """Register `distribution_finder` to find distributions in sys.path items - - `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item - handler), and `distribution_finder` is a callable that, passed a path - item and the importer instance, yields ``Distribution`` instances found on - that path item. 
See ``pkg_resources.find_on_path`` for an example.""" - _distribution_finders[importer_type] = distribution_finder - - -def find_distributions(path_item, only=False): - """Yield distributions accessible via `path_item`""" - importer = get_importer(path_item) - finder = _find_adapter(_distribution_finders, importer) - return finder(importer, path_item, only) - -def find_in_zip(importer, path_item, only=False): - metadata = EggMetadata(importer) - if metadata.has_metadata('PKG-INFO'): - yield Distribution.from_filename(path_item, metadata=metadata) - if only: - return # don't yield nested distros - for subitem in metadata.resource_listdir('/'): - if subitem.endswith('.egg'): - subpath = os.path.join(path_item, subitem) - for dist in find_in_zip(zipimport.zipimporter(subpath), subpath): - yield dist - -register_finder(zipimport.zipimporter, find_in_zip) - -def StringIO(*args, **kw): - """Thunk to load the real StringIO on demand""" - global StringIO - try: - from cStringIO import StringIO - except ImportError: - from StringIO import StringIO - return StringIO(*args,**kw) - -def find_nothing(importer, path_item, only=False): - return () -register_finder(object,find_nothing) - -def find_on_path(importer, path_item, only=False): - """Yield distributions accessible on a sys.path directory""" - path_item = _normalize_cached(path_item) - - if os.path.isdir(path_item): - if path_item.lower().endswith('.egg'): - # unpacked egg - yield Distribution.from_filename( - path_item, metadata=PathMetadata( - path_item, os.path.join(path_item,'EGG-INFO') - ) - ) - else: - # scan for .egg and .egg-info in directory - for entry in os.listdir(path_item): - lower = entry.lower() - if lower.endswith('.egg-info'): - fullpath = os.path.join(path_item, entry) - if os.path.isdir(fullpath): - # egg-info directory, allow getting metadata - metadata = PathMetadata(path_item, fullpath) - else: - metadata = FileMetadata(fullpath) - yield Distribution.from_location( - path_item,entry,metadata,precedence=DEVELOP_DIST - ) - elif not only and lower.endswith('.egg'): - for dist in find_distributions(os.path.join(path_item, entry)): - yield dist - elif not only and lower.endswith('.egg-link'): - for line in file(os.path.join(path_item, entry)): - if not line.strip(): continue - for item in find_distributions(line.rstrip()): - yield item - -register_finder(pkgutil.ImpImporter, find_on_path) - -_namespace_handlers = {} -_namespace_packages = {} - -def register_namespace_handler(importer_type, namespace_handler): - """Register `namespace_handler` to declare namespace packages - - `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item - handler), and `namespace_handler` is a callable like this:: - - def namespace_handler(importer,path_entry,moduleName,module): - # return a path_entry to use for child packages - - Namespace handlers are only called if the importer object has already - agreed that it can handle the relevant path item, and they should only - return a subpath if the module __path__ does not already contain an - equivalent subpath. For an example namespace handler, see - ``pkg_resources.file_ns_handler``. 
- """ - _namespace_handlers[importer_type] = namespace_handler - -def _handle_ns(packageName, path_item): - """Ensure that named package includes a subpath of path_item (if needed)""" - importer = get_importer(path_item) - if importer is None: - return None - loader = importer.find_module(packageName) - if loader is None: - return None - module = sys.modules.get(packageName) - if module is None: - module = sys.modules[packageName] = new.module(packageName) - module.__path__ = []; _set_parent_ns(packageName) - elif not hasattr(module,'__path__'): - raise TypeError("Not a package:", packageName) - handler = _find_adapter(_namespace_handlers, importer) - subpath = handler(importer,path_item,packageName,module) - if subpath is not None: - path = module.__path__; path.append(subpath) - loader.load_module(packageName); module.__path__ = path - return subpath - -def declare_namespace(packageName): - """Declare that package 'packageName' is a namespace package""" - - imp.acquire_lock() - try: - if packageName in _namespace_packages: - return - - path, parent = sys.path, None - if '.' in packageName: - parent = '.'.join(packageName.split('.')[:-1]) - declare_namespace(parent) - __import__(parent) - try: - path = sys.modules[parent].__path__ - except AttributeError: - raise TypeError("Not a package:", parent) - - # Track what packages are namespaces, so when new path items are added, - # they can be updated - _namespace_packages.setdefault(parent,[]).append(packageName) - _namespace_packages.setdefault(packageName,[]) - - for path_item in path: - # Ensure all the parent's path items are reflected in the child, - # if they apply - _handle_ns(packageName, path_item) - - finally: - imp.release_lock() - -def fixup_namespace_packages(path_item, parent=None): - """Ensure that previously-declared namespace packages include path_item""" - imp.acquire_lock() - try: - for package in _namespace_packages.get(parent,()): - subpath = _handle_ns(package, path_item) - if subpath: fixup_namespace_packages(subpath,package) - finally: - imp.release_lock() - -def file_ns_handler(importer, path_item, packageName, module): - """Compute an ns-package subpath for a filesystem or zipfile importer""" - - subpath = os.path.join(path_item, packageName.split('.')[-1]) - normalized = _normalize_cached(subpath) - for item in module.__path__: - if _normalize_cached(item)==normalized: - break - else: - # Only return the path if it's not already there - return subpath - -register_namespace_handler(pkgutil.ImpImporter, file_ns_handler) -register_namespace_handler(zipimport.zipimporter, file_ns_handler) - - -def null_ns_handler(importer, path_item, packageName, module): - return None - -register_namespace_handler(object,null_ns_handler) - - -def normalize_path(filename): - """Normalize a file/dir name for comparison purposes""" - return os.path.normcase(os.path.realpath(filename)) - -def _normalize_cached(filename,_cache={}): - try: - return _cache[filename] - except KeyError: - _cache[filename] = result = normalize_path(filename) - return result - -def _set_parent_ns(packageName): - parts = packageName.split('.') - name = parts.pop() - if parts: - parent = '.'.join(parts) - setattr(sys.modules[parent], name, sys.modules[packageName]) - - -def yield_lines(strs): - """Yield non-empty/non-comment lines of a ``basestring`` or sequence""" - if isinstance(strs,basestring): - for s in strs.splitlines(): - s = s.strip() - if s and not s.startswith('#'): # skip blank lines/comments - yield s - else: - for ss in strs: - for s in 
yield_lines(ss): - yield s - -LINE_END = re.compile(r"\s*(#.*)?$").match # whitespace and comment -CONTINUE = re.compile(r"\s*\\\s*(#.*)?$").match # line continuation -DISTRO = re.compile(r"\s*((\w|[-.])+)").match # Distribution or extra -VERSION = re.compile(r"\s*(<=?|>=?|==|!=)\s*((\w|[-.])+)").match # ver. info -COMMA = re.compile(r"\s*,").match # comma between items -OBRACKET = re.compile(r"\s*\[").match -CBRACKET = re.compile(r"\s*\]").match -MODULE = re.compile(r"\w+(\.\w+)*$").match -EGG_NAME = re.compile( - r"(?P<name>[^-]+)" - r"( -(?P<ver>[^-]+) (-py(?P<pyver>[^-]+) (-(?P<plat>.+))? )? )?", - re.VERBOSE | re.IGNORECASE -).match - -component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE) -replace = {'pre':'c', 'preview':'c','-':'final-','rc':'c'}.get - -def _parse_version_parts(s): - for part in component_re.split(s): - part = replace(part,part) - if not part or part=='.': - continue - if part[:1] in '0123456789': - yield part.zfill(8) # pad for numeric comparison - else: - yield '*'+part - - yield '*final' # ensure that alpha/beta/candidate are before final - -def parse_version(s): - """Convert a version string to a chronologically-sortable key - - This is a rough cross between distutils' StrictVersion and LooseVersion; - if you give it versions that would work with StrictVersion, then it behaves - the same; otherwise it acts like a slightly-smarter LooseVersion. It is - *possible* to create pathological version coding schemes that will fool - this parser, but they should be very rare in practice. - - The returned value will be a tuple of strings. Numeric portions of the - version are padded to 8 digits so they will compare numerically, but - without relying on how numbers compare relative to strings. Dots are - dropped, but dashes are retained. Trailing zeros between alpha segments - or dashes are suppressed, so that e.g. "2.4.0" is considered the same as - "2.4". Alphanumeric parts are lower-cased. - - The algorithm assumes that strings like "-" and any alpha string that - alphabetically follows "final" represent a "patch level". So, "2.4-1" - is assumed to be a branch or patch of "2.4", and therefore "2.4.1" is - considered newer than "2.4-1", which in turn is newer than "2.4". - - Strings like "a", "b", "c", "alpha", "beta", "candidate" and so on (that - come before "final" alphabetically) are assumed to be pre-release versions, - so that the version "2.4" is considered newer than "2.4a1". - - Finally, to handle miscellaneous cases, the strings "pre", "preview", and - "rc" are treated as if they were "c", i.e. as though they were release - candidates, and therefore are not as new as a version string that does not - contain them.
- """ - parts = [] - for part in _parse_version_parts(s.lower()): - if part.startswith('*'): - if part<'*final': # remove '-' before a prerelease tag - while parts and parts[-1]=='*final-': parts.pop() - # remove trailing zeros from each series of numeric parts - while parts and parts[-1]=='00000000': - parts.pop() - parts.append(part) - return tuple(parts) - -class EntryPoint(object): - """Object representing an advertised importable object""" - - def __init__(self, name, module_name, attrs=(), extras=(), dist=None): - if not MODULE(module_name): - raise ValueError("Invalid module name", module_name) - self.name = name - self.module_name = module_name - self.attrs = tuple(attrs) - self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extras - self.dist = dist - - def __str__(self): - s = "%s = %s" % (self.name, self.module_name) - if self.attrs: - s += ':' + '.'.join(self.attrs) - if self.extras: - s += ' [%s]' % ','.join(self.extras) - return s - - def __repr__(self): - return "EntryPoint.parse(%r)" % str(self) - - def load(self, require=True, env=None, installer=None): - if require: self.require(env, installer) - entry = __import__(self.module_name, globals(),globals(), ['__name__']) - for attr in self.attrs: - try: - entry = getattr(entry,attr) - except AttributeError: - raise ImportError("%r has no %r attribute" % (entry,attr)) - return entry - - def require(self, env=None, installer=None): - if self.extras and not self.dist: - raise UnknownExtra("Can't require() without a distribution", self) - map(working_set.add, - working_set.resolve(self.dist.requires(self.extras),env,installer)) - - - - #@classmethod - def parse(cls, src, dist=None): - """Parse a single entry point from string `src` - - Entry point syntax follows the form:: - - name = some.module:some.attr [extra1,extra2] - - The entry name and module name are required, but the ``:attrs`` and - ``[extras]`` parts are optional - """ - try: - attrs = extras = () - name,value = src.split('=',1) - if '[' in value: - value,extras = value.split('[',1) - req = Requirement.parse("x["+extras) - if req.specs: raise ValueError - extras = req.extras - if ':' in value: - value,attrs = value.split(':',1) - if not MODULE(attrs.rstrip()): - raise ValueError - attrs = attrs.rstrip().split('.') - except ValueError: - raise ValueError( - "EntryPoint must be in 'name=module:attrs [extras]' format", - src - ) - else: - return cls(name.strip(), value.lstrip(), attrs, extras, dist) - - parse = classmethod(parse) - - - - - - - - - #@classmethod - def parse_group(cls, group, lines, dist=None): - """Parse an entry point group""" - if not MODULE(group): - raise ValueError("Invalid group name", group) - this = {} - for line in yield_lines(lines): - ep = cls.parse(line, dist) - if ep.name in this: - raise ValueError("Duplicate entry point", group, ep.name) - this[ep.name]=ep - return this - - parse_group = classmethod(parse_group) - - #@classmethod - def parse_map(cls, data, dist=None): - """Parse a map of entry point groups""" - if isinstance(data,dict): - data = data.items() - else: - data = split_sections(data) - maps = {} - for group, lines in data: - if group is None: - if not lines: - continue - raise ValueError("Entry points must be listed in groups") - group = group.strip() - if group in maps: - raise ValueError("Duplicate group name", group) - maps[group] = cls.parse_group(group, lines, dist) - return maps - - parse_map = classmethod(parse_map) - - - - - - -class Distribution(object): - """Wrap an actual or potential sys.path entry 
w/metadata""" - def __init__(self, - location=None, metadata=None, project_name=None, version=None, - py_version=PY_MAJOR, platform=None, precedence = EGG_DIST - ): - self.project_name = safe_name(project_name or 'Unknown') - if version is not None: - self._version = safe_version(version) - self.py_version = py_version - self.platform = platform - self.location = location - self.precedence = precedence - self._provider = metadata or empty_provider - - #@classmethod - def from_location(cls,location,basename,metadata=None,**kw): - project_name, version, py_version, platform = [None]*4 - basename, ext = os.path.splitext(basename) - if ext.lower() in (".egg",".egg-info"): - match = EGG_NAME(basename) - if match: - project_name, version, py_version, platform = match.group( - 'name','ver','pyver','plat' - ) - return cls( - location, metadata, project_name=project_name, version=version, - py_version=py_version, platform=platform, **kw - ) - from_location = classmethod(from_location) - - hashcmp = property( - lambda self: ( - getattr(self,'parsed_version',()), self.precedence, self.key, - -len(self.location or ''), self.location, self.py_version, - self.platform - ) - ) - def __cmp__(self, other): return cmp(self.hashcmp, other) - def __hash__(self): return hash(self.hashcmp) - - # These properties have to be lazy so that we don't have to load any - # metadata until/unless it's actually needed. (i.e., some distributions - # may not know their name or version without loading PKG-INFO) - - #@property - def key(self): - try: - return self._key - except AttributeError: - self._key = key = self.project_name.lower() - return key - key = property(key) - - #@property - def parsed_version(self): - try: - return self._parsed_version - except AttributeError: - self._parsed_version = pv = parse_version(self.version) - return pv - - parsed_version = property(parsed_version) - - #@property - def version(self): - try: - return self._version - except AttributeError: - for line in self._get_metadata('PKG-INFO'): - if line.lower().startswith('version:'): - self._version = safe_version(line.split(':',1)[1].strip()) - return self._version - else: - raise ValueError( - "Missing 'Version:' header and/or PKG-INFO file", self - ) - version = property(version) - - - - - #@property - def _dep_map(self): - try: - return self.__dep_map - except AttributeError: - dm = self.__dep_map = {None: []} - for name in 'requires.txt', 'depends.txt': - for extra,reqs in split_sections(self._get_metadata(name)): - if extra: extra = safe_extra(extra) - dm.setdefault(extra,[]).extend(parse_requirements(reqs)) - return dm - _dep_map = property(_dep_map) - - def requires(self,extras=()): - """List of Requirements needed for this distro if `extras` are used""" - dm = self._dep_map - deps = [] - deps.extend(dm.get(None,())) - for ext in extras: - try: - deps.extend(dm[safe_extra(ext)]) - except KeyError: - raise UnknownExtra( - "%s has no such extra feature %r" % (self, ext) - ) - return deps - - def _get_metadata(self,name): - if self.has_metadata(name): - for line in self.get_metadata_lines(name): - yield line - - def activate(self,path=None): - """Ensure distribution is importable on `path` (default=sys.path)""" - if path is None: path = sys.path - self.insert_on(path) - if path is sys.path: - fixup_namespace_packages(self.location) - for pkg in self._get_metadata('namespace_packages.txt'): - if pkg in sys.modules: declare_namespace(pkg) - - def egg_name(self): - """Return what this distribution's standard .egg filename should be""" - 
filename = "%s-%s-py%s" % ( - to_filename(self.project_name), to_filename(self.version), - self.py_version or PY_MAJOR - ) - - if self.platform: - filename += '-'+self.platform - return filename - - def __repr__(self): - if self.location: - return "%s (%s)" % (self,self.location) - else: - return str(self) - - def __str__(self): - try: version = getattr(self,'version',None) - except ValueError: version = None - version = version or "[unknown version]" - return "%s %s" % (self.project_name,version) - - def __getattr__(self,attr): - """Delegate all unrecognized public attributes to .metadata provider""" - if attr.startswith('_'): - raise AttributeError,attr - return getattr(self._provider, attr) - - #@classmethod - def from_filename(cls,filename,metadata=None, **kw): - return cls.from_location( - _normalize_cached(filename), os.path.basename(filename), metadata, - **kw - ) - from_filename = classmethod(from_filename) - - def as_requirement(self): - """Return a ``Requirement`` that matches this distribution exactly""" - return Requirement.parse('%s==%s' % (self.project_name, self.version)) - - def load_entry_point(self, group, name): - """Return the `name` entry point of `group` or raise ImportError""" - ep = self.get_entry_info(group,name) - if ep is None: - raise ImportError("Entry point %r not found" % ((group,name),)) - return ep.load() - - def get_entry_map(self, group=None): - """Return the entry point map for `group`, or the full entry map""" - try: - ep_map = self._ep_map - except AttributeError: - ep_map = self._ep_map = EntryPoint.parse_map( - self._get_metadata('entry_points.txt'), self - ) - if group is not None: - return ep_map.get(group,{}) - return ep_map - - def get_entry_info(self, group, name): - """Return the EntryPoint object for `group`+`name`, or ``None``""" - return self.get_entry_map(group).get(name) - - def insert_on(self, path, loc = None): - """Insert self.location in path before its nearest parent directory""" - loc = loc or self.location - if not loc: return - if path is sys.path: - self.check_version_conflict() - best, pos = 0, -1 - for p,item in enumerate(path): - item = _normalize_cached(item) - if loc.startswith(item) and len(item)>best and loc<>item: - best, pos = len(item), p - if pos==-1: - if loc not in path: path.append(loc) - elif loc not in path[:pos+1]: - while loc in path: path.remove(loc) - path.insert(pos,loc) - - - def check_version_conflict(self): - if self.key=='setuptools': - return # ignore the inevitable setuptools self-conflicts :( - - nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt')) - loc = normalize_path(self.location) - for modname in self._get_metadata('top_level.txt'): - if (modname not in sys.modules or modname in nsp - or modname in _namespace_packages - ): - continue - - fn = getattr(sys.modules[modname], '__file__', None) - if fn and normalize_path(fn).startswith(loc): - continue - issue_warning( - "Module %s was already imported from %s, but %s is being added" - " to sys.path" % (modname, fn, self.location), - ) - - def has_version(self): - try: - self.version - except ValueError: - issue_warning("Unbuilt egg for "+repr(self)) - return False - return True - - def clone(self,**kw): - """Copy this distribution, substituting in any changed keyword args""" - for attr in ( - 'project_name', 'version', 'py_version', 'platform', 'location', - 'precedence' - ): - kw.setdefault(attr, getattr(self,attr,None)) - kw.setdefault('metadata', self._provider) - return self.__class__(**kw) - - - - - #@property - def extras(self): - 
return [dep for dep in self._dep_map if dep] - extras = property(extras) - - -def issue_warning(*args,**kw): - level = 1 - g = globals() - try: - # find the first stack frame that is *not* code in - # the pkg_resources module, to use for the warning - while sys._getframe(level).f_globals is g: - level += 1 - except ValueError: - pass - from warnings import warn - warn(stacklevel = level+1, *args, **kw) - - - - - - - - - - - - - - - - - - - - - - - -def parse_requirements(strs): - """Yield ``Requirement`` objects for each specification in `strs` - - `strs` must be an instance of ``basestring``, or a (possibly-nested) - iterable thereof. - """ - # create a steppable iterator, so we can handle \-continuations - lines = iter(yield_lines(strs)) - - def scan_list(ITEM,TERMINATOR,line,p,groups,item_name): - - items = [] - - while not TERMINATOR(line,p): - if CONTINUE(line,p): - try: - line = lines.next(); p = 0 - except StopIteration: - raise ValueError( - "\\ must not appear on the last nonblank line" - ) - - match = ITEM(line,p) - if not match: - raise ValueError("Expected "+item_name+" in",line,"at",line[p:]) - - items.append(match.group(*groups)) - p = match.end() - - match = COMMA(line,p) - if match: - p = match.end() # skip the comma - elif not TERMINATOR(line,p): - raise ValueError( - "Expected ',' or end-of-list in",line,"at",line[p:] - ) - - match = TERMINATOR(line,p) - if match: p = match.end() # skip the terminator, if any - return line, p, items - - for line in lines: - match = DISTRO(line) - if not match: - raise ValueError("Missing distribution spec", line) - project_name = match.group(1) - p = match.end() - extras = [] - - match = OBRACKET(line,p) - if match: - p = match.end() - line, p, extras = scan_list( - DISTRO, CBRACKET, line, p, (1,), "'extra' name" - ) - - line, p, specs = scan_list(VERSION,LINE_END,line,p,(1,2),"version spec") - specs = [(op,safe_version(val)) for op,val in specs] - yield Requirement(project_name, specs, extras) - - -def _sort_dists(dists): - tmp = [(dist.hashcmp,dist) for dist in dists] - tmp.sort() - dists[::-1] = [d for hc,d in tmp] - - - - - - - - - - - - - - - - - -class Requirement: - def __init__(self, project_name, specs, extras): - """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!""" - self.unsafe_name, project_name = project_name, safe_name(project_name) - self.project_name, self.key = project_name, project_name.lower() - index = [(parse_version(v),state_machine[op],op,v) for op,v in specs] - index.sort() - self.specs = [(op,ver) for parsed,trans,op,ver in index] - self.index, self.extras = index, tuple(map(safe_extra,extras)) - self.hashCmp = ( - self.key, tuple([(op,parsed) for parsed,trans,op,ver in index]), - ImmutableSet(self.extras) - ) - self.__hash = hash(self.hashCmp) - - def __str__(self): - specs = ','.join([''.join(s) for s in self.specs]) - extras = ','.join(self.extras) - if extras: extras = '[%s]' % extras - return '%s%s%s' % (self.project_name, extras, specs) - - def __eq__(self,other): - return isinstance(other,Requirement) and self.hashCmp==other.hashCmp - - def __contains__(self,item): - if isinstance(item,Distribution): - if item.key <> self.key: return False - if self.index: item = item.parsed_version # only get if we need it - elif isinstance(item,basestring): - item = parse_version(item) - last = None - for parsed,trans,op,ver in self.index: - action = trans[cmp(item,parsed)] - if action=='F': return False - elif action=='T': return True - elif action=='+': last = True - elif action=='-' or last is None: 
last = False - if last is None: last = True # no rules encountered - return last - - - def __hash__(self): - return self.__hash - - def __repr__(self): return "Requirement.parse(%r)" % str(self) - - #@staticmethod - def parse(s): - reqs = list(parse_requirements(s)) - if reqs: - if len(reqs)==1: - return reqs[0] - raise ValueError("Expected only one requirement", s) - raise ValueError("No requirements found", s) - - parse = staticmethod(parse) - -state_machine = { - # =>< - '<' : '--T', - '<=': 'T-T', - '>' : 'F+F', - '>=': 'T+F', - '==': 'T..', - '!=': 'F++', -} - - -def _get_mro(cls): - """Get an mro for a type or classic class""" - if not isinstance(cls,type): - class cls(cls,object): pass - return cls.__mro__[1:] - return cls.__mro__ - -def _find_adapter(registry, ob): - """Return an adapter factory for `ob` from `registry`""" - for t in _get_mro(getattr(ob, '__class__', type(ob))): - if t in registry: - return registry[t] - - -def ensure_directory(path): - """Ensure that the parent directory of `path` exists""" - dirname = os.path.dirname(path) - if not os.path.isdir(dirname): - os.makedirs(dirname) - -def split_sections(s): - """Split a string or iterable thereof into (section,content) pairs - - Each ``section`` is a stripped version of the section header ("[section]") - and each ``content`` is a list of stripped lines excluding blank lines and - comment-only lines. If there are any such lines before the first section - header, they're returned in a first ``section`` of ``None``. - """ - section = None - content = [] - for line in yield_lines(s): - if line.startswith("["): - if line.endswith("]"): - if section or content: - yield section, content - section = line[1:-1].strip() - content = [] - else: - raise ValueError("Invalid section heading", line) - else: - content.append(line) - - # wrap up last segment - yield section, content - -def _mkstemp(*args,**kw): - from tempfile import mkstemp - old_open = os.open - try: - os.open = os_open # temporarily bypass sandboxing - return mkstemp(*args,**kw) - finally: - os.open = old_open # and then put it back - - -# Set up global resource manager -_manager = ResourceManager() -def _initialize(g): - for name in dir(_manager): - if not name.startswith('_'): - g[name] = getattr(_manager, name) -_initialize(globals()) - -# Prepare the master working set and make the ``require()`` API available -working_set = WorkingSet() -try: - # Does the main program list any requirements? - from __main__ import __requires__ -except ImportError: - pass # No: just use the default working set based on sys.path -else: - # Yes: ensure the requirements are met, by prefixing sys.path if necessary - try: - working_set.require(__requires__) - except VersionConflict: # try it without defaults already on sys.path - working_set = WorkingSet([]) # by starting with an empty path - for dist in working_set.resolve( - parse_requirements(__requires__), Environment() - ): - working_set.add(dist) - for entry in sys.path: # add any missing entries from sys.path - if entry not in working_set.entries: - working_set.add_entry(entry) - sys.path[:] = working_set.entries # then copy back to sys.path - -require = working_set.require -iter_entry_points = working_set.iter_entry_points -add_activation_listener = working_set.subscribe -run_script = working_set.run_script -run_main = run_script # backward compatibility -# Activate all distributions already on sys.path, and ensure that -# all distributions added to the working set in the future (e.g. 
by -# calling ``require()``) will get activated as well. -add_activation_listener(lambda dist: dist.activate()) -working_set.entries=[]; map(working_set.add_entry,sys.path) # match order diff --git a/Lib/pkgutil.py b/Lib/pkgutil.py index 7316892..26c797f 100644 --- a/Lib/pkgutil.py +++ b/Lib/pkgutil.py @@ -31,7 +31,7 @@ def read_code(stream): def simplegeneric(func): """Make a trivial single-dispatch generic function""" registry = {} - def wrapper(*args,**kw): + def wrapper(*args, **kw): ob = args[0] try: cls = ob.__class__ @@ -41,18 +41,19 @@ def simplegeneric(func): mro = cls.__mro__ except AttributeError: try: - class cls(cls,object): pass + class cls(cls, object): + pass mro = cls.__mro__[1:] except TypeError: mro = object, # must be an ExtensionClass or some such :( for t in mro: if t in registry: - return registry[t](*args,**kw) + return registry[t](*args, **kw) else: - return func(*args,**kw) + return func(*args, **kw) try: wrapper.__name__ = func.__name__ - except (TypeError,AttributeError): + except (TypeError, AttributeError): pass # Python 2.3 doesn't allow functions to be renamed def register(typ, func=None): @@ -70,8 +71,9 @@ def simplegeneric(func): def walk_packages(path=None, prefix='', onerror=None): """Yield submodule names+loaders recursively, for path or sys.path""" - def seen(p,m={}): - if p in m: return True + def seen(p, m={}): + if p in m: + return True m[p] = True for importer, name, ispkg in iter_modules(path, prefix): @@ -110,7 +112,7 @@ def iter_modules(path=None, prefix=''): #@simplegeneric def iter_importer_modules(importer, prefix=''): - if not hasattr(importer,'iter_modules'): + if not hasattr(importer, 'iter_modules'): return [] return importer.iter_modules(prefix) @@ -336,13 +338,15 @@ def get_importer(path_item): pass else: importer = None - sys.path_importer_cache.setdefault(path_item,importer) + sys.path_importer_cache.setdefault(path_item, importer) - if importer is None: + # The boolean values are used for caching valid and invalid + # file paths for the built-in import machinery + if importer in (None, True, False): try: importer = ImpImporter(path_item) except ImportError: - pass + importer = None return importer @@ -377,7 +381,7 @@ def iter_importers(fullname=""): pkg = '.'.join(fullname.split('.')[:-1]) if pkg not in sys.modules: __import__(pkg) - path = getattr(sys.modules[pkg],'__path__',None) or [] + path = getattr(sys.modules[pkg], '__path__', None) or [] else: for importer in sys.meta_path: yield importer @@ -404,7 +408,7 @@ def get_loader(module_or_name): module_or_name = sys.modules[module_or_name] if isinstance(module_or_name, ModuleType): module = module_or_name - loader = getattr(module,'__loader__',None) + loader = getattr(module, '__loader__', None) if loader is not None: return loader fullname = module.__name__ diff --git a/Lib/plat-mac/bundlebuilder.py b/Lib/plat-mac/bundlebuilder.py index 03d8c81..266b845 100755 --- a/Lib/plat-mac/bundlebuilder.py +++ b/Lib/plat-mac/bundlebuilder.py @@ -145,11 +145,24 @@ class BundleBuilder(Defaults): self.message("Building %s" % repr(self.bundlepath), 1) if os.path.exists(self.bundlepath): shutil.rmtree(self.bundlepath) - os.mkdir(self.bundlepath) - self.preProcess() - self._copyFiles() - self._addMetaFiles() - self.postProcess() + if os.path.exists(self.bundlepath + '~'): + shutil.rmtree(self.bundlepath + '~') + bp = self.bundlepath + + # Create the app bundle in a temporary location and then + # rename the completed bundle. 
This way the Finder will + # never see an incomplete bundle (where it might pick up + # and cache the wrong meta data) + self.bundlepath = bp + '~' + try: + os.mkdir(self.bundlepath) + self.preProcess() + self._copyFiles() + self._addMetaFiles() + self.postProcess() + os.rename(self.bundlepath, bp) + finally: + self.bundlepath = bp self.message("Done.", 1) def preProcess(self): diff --git a/Lib/plat-mac/pimp.py b/Lib/plat-mac/pimp.py index 21923ec..456427c 100644 --- a/Lib/plat-mac/pimp.py +++ b/Lib/plat-mac/pimp.py @@ -21,7 +21,7 @@ import urlparse import plistlib import distutils.util import distutils.sysconfig -import md5 +import hashlib import tarfile import tempfile import shutil @@ -693,7 +693,7 @@ class PimpPackage: sys.stderr.write("Warning: no MD5Sum for %s\n" % self.fullname()) return 1 data = open(self.archiveFilename, 'rb').read() - checksum = md5.new(data).hexdigest() + checksum = hashlib.md5(data).hexdigest() return checksum == self._dict['MD5Sum'] def unpackPackageOnly(self, output=None): diff --git a/Lib/popen2.py b/Lib/popen2.py index 67ebd26..b966d4c 100644 --- a/Lib/popen2.py +++ b/Lib/popen2.py @@ -72,8 +72,9 @@ class Popen3: # In case the child hasn't been waited on, check if it's done. self.poll(_deadstate=sys.maxint) if self.sts < 0: - # Child is still running, keep us alive until we can wait on it. - _active.append(self) + if _active: + # Child is still running, keep us alive until we can wait on it. + _active.append(self) def _run_child(self, cmd): if isinstance(cmd, basestring): diff --git a/Lib/poplib.py b/Lib/poplib.py index 202c6e0..1cf114a 100644 --- a/Lib/poplib.py +++ b/Lib/poplib.py @@ -295,8 +295,8 @@ class POP3: m = self.timestamp.match(self.welcome) if not m: raise error_proto('-ERR APOP not supported by server') - import md5 - digest = md5.new(m.group(1)+secret).digest() + import hashlib + digest = hashlib.md5(m.group(1)+secret).digest() digest = ''.join(map(lambda x:'%02x'%ord(x), digest)) return self._shortcmd('APOP %s %s' % (user, digest)) diff --git a/Lib/rfc822.py b/Lib/rfc822.py index 871a049..d6d5e47 100644 --- a/Lib/rfc822.py +++ b/Lib/rfc822.py @@ -700,6 +700,7 @@ class AddrlistClass: break elif allowcomments and self.field[self.pos] == '(': slist.append(self.getcomment()) + continue # have already advanced pos from getcomment elif self.field[self.pos] == '\\': quote = 1 else: diff --git a/Lib/rlcompleter.py b/Lib/rlcompleter.py index 6eb77f9..4d11aec 100644 --- a/Lib/rlcompleter.py +++ b/Lib/rlcompleter.py @@ -33,7 +33,6 @@ used, and this module (and the readline module) are silently inactive. """ -import readline import __builtin__ import __main__ @@ -141,4 +140,9 @@ def get_class_members(klass): ret = ret + get_class_members(base) return ret -readline.set_completer(Completer().complete) +try: + import readline +except ImportError: + pass +else: + readline.set_completer(Completer().complete) diff --git a/Lib/setuptools.egg-info/PKG-INFO b/Lib/setuptools.egg-info/PKG-INFO deleted file mode 100644 index ff5c1a1..0000000 --- a/Lib/setuptools.egg-info/PKG-INFO +++ /dev/null @@ -1,89 +0,0 @@ -Metadata-Version: 1.0 -Name: setuptools -Version: 0.7a1dev-r45536 -Summary: Download, build, install, upgrade, and uninstall Python packages -- easily! -Home-page: http://peak.telecommunity.com/DevCenter/setuptools -Author: Phillip J. 
Eby -Author-email: peak@eby-sarna.com -License: PSF or ZPL -Description: ``setuptools`` is a collection of enhancements to the Python ``distutils`` - (for Python 2.3.5 and up on most platforms; 64-bit platforms require a minimum - of Python 2.4) that allow you to more easily build and distribute Python - packages, especially ones that have dependencies on other packages. - - Packages built and distributed using ``setuptools`` look to the user like - ordinary Python packages based on the ``distutils``. Your users don't need to - install or even know about setuptools in order to use them, and you don't - have to include the entire setuptools package in your distributions. By - including just a single `bootstrap module`_ (an 8K .py file), your package will - automatically download and install ``setuptools`` if the user is building your - package from source and doesn't have a suitable version already installed. - - .. _bootstrap module: http://peak.telecommunity.com/dist/ez_setup.py - - Feature Highlights: - - * Automatically find/download/install/upgrade dependencies at build time using - the `EasyInstall tool `_, - which supports downloading via HTTP, FTP, Subversion, and SourceForge, and - automatically scans web pages linked from PyPI to find download links. (It's - the closest thing to CPAN currently available for Python.) - - * Create `Python Eggs `_ - - a single-file importable distribution format - - * Include data files inside your package directories, where your code can - actually use them. (Python 2.4 distutils also supports this feature, but - setuptools provides the feature for Python 2.3 packages also, and supports - accessing data files in zipped packages too.) - - * Automatically include all packages in your source tree, without listing them - individually in setup.py - - * Automatically include all relevant files in your source distributions, - without needing to create a ``MANIFEST.in`` file, and without having to force - regeneration of the ``MANIFEST`` file when your source tree changes. - - * Automatically generate wrapper scripts or Windows (console and GUI) .exe - files for any number of "main" functions in your project. (Note: this is not - a py2exe replacement; the .exe files rely on the local Python installation.) - - * Transparent Pyrex support, so that your setup.py can list ``.pyx`` files and - still work even when the end-user doesn't have Pyrex installed (as long as - you include the Pyrex-generated C in your source distribution) - - * Command aliases - create project-specific, per-user, or site-wide shortcut - names for commonly used commands and options - - * PyPI upload support - upload your source distributions and eggs to PyPI - - * Deploy your project in "development mode", such that it's available on - ``sys.path``, yet can still be edited directly from its source checkout. - - * Easily extend the distutils with new commands or ``setup()`` arguments, and - distribute/reuse your extensions for multiple projects, without copying code. - - * Create extensible applications and frameworks that automatically discover - extensions, using simple "entry points" declared in a project's setup script. - - In addition to the PyPI downloads, the development version of ``setuptools`` - is available from the `Python SVN sandbox`_, and in-development versions of the - `0.6 branch`_ are available as well. - - .. _0.6 branch: http://svn.python.org/projects/sandbox/branches/setuptools-0.6/#egg=setuptools-dev06 - - .. 
_Python SVN sandbox: http://svn.python.org/projects/sandbox/trunk/setuptools/#egg=setuptools-dev - - -Keywords: CPAN PyPI distutils eggs package management -Platform: UNKNOWN -Classifier: Development Status :: 3 - Alpha -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: Python Software Foundation License -Classifier: License :: OSI Approved :: Zope Public License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: System :: Archiving :: Packaging -Classifier: Topic :: System :: Systems Administration -Classifier: Topic :: Utilities diff --git a/Lib/setuptools.egg-info/entry_points.txt b/Lib/setuptools.egg-info/entry_points.txt deleted file mode 100755 index 0afe2cb..0000000 --- a/Lib/setuptools.egg-info/entry_points.txt +++ /dev/null @@ -1,51 +0,0 @@ -[distutils.setup_keywords] -dependency_links = setuptools.dist:assert_string_list -entry_points = setuptools.dist:check_entry_points -extras_require = setuptools.dist:check_extras -package_data = setuptools.dist:check_package_data -install_requires = setuptools.dist:check_requirements -include_package_data = setuptools.dist:assert_bool -exclude_package_data = setuptools.dist:check_package_data -namespace_packages = setuptools.dist:check_nsp -test_suite = setuptools.dist:check_test_suite -eager_resources = setuptools.dist:assert_string_list -zip_safe = setuptools.dist:assert_bool -test_loader = setuptools.dist:check_importable -tests_require = setuptools.dist:check_requirements - -[setuptools.file_finders] -svn_cvs = setuptools.command.sdist:_default_revctrl - -[egg_info.writers] -dependency_links.txt = setuptools.command.egg_info:overwrite_arg -requires.txt = setuptools.command.egg_info:write_requirements -PKG-INFO = setuptools.command.egg_info:write_pkg_info -eager_resources.txt = setuptools.command.egg_info:overwrite_arg -top_level.txt = setuptools.command.egg_info:write_toplevel_names -namespace_packages.txt = setuptools.command.egg_info:overwrite_arg -entry_points.txt = setuptools.command.egg_info:write_entries -depends.txt = setuptools.command.egg_info:warn_depends_obsolete - -[console_scripts] -easy_install = setuptools.command.easy_install:main -easy_install-2.5 = setuptools.command.easy_install:main - -[distutils.commands] -bdist_rpm = setuptools.command.bdist_rpm:bdist_rpm -rotate = setuptools.command.rotate:rotate -develop = setuptools.command.develop:develop -setopt = setuptools.command.setopt:setopt -build_py = setuptools.command.build_py:build_py -saveopts = setuptools.command.saveopts:saveopts -egg_info = setuptools.command.egg_info:egg_info -install_egg_info = setuptools.command.install_egg_info:install_egg_info -alias = setuptools.command.alias:alias -easy_install = setuptools.command.easy_install:easy_install -install_scripts = setuptools.command.install_scripts:install_scripts -bdist_egg = setuptools.command.bdist_egg:bdist_egg -install = setuptools.command.install:install -test = setuptools.command.test:test -install_lib = setuptools.command.install_lib:install_lib -build_ext = setuptools.command.build_ext:build_ext -sdist = setuptools.command.sdist:sdist - diff --git a/Lib/setuptools.egg-info/top_level.txt b/Lib/setuptools.egg-info/top_level.txt deleted file mode 100644 index 4577c6a..0000000 --- a/Lib/setuptools.egg-info/top_level.txt +++ /dev/null @@ -1,3 +0,0 @@ -easy_install -pkg_resources -setuptools diff --git a/Lib/setuptools.egg-info/zip-safe 
b/Lib/setuptools.egg-info/zip-safe deleted file mode 100644 index e69de29..0000000 diff --git a/Lib/setuptools/__init__.py b/Lib/setuptools/__init__.py deleted file mode 100644 index 3921ce2..0000000 --- a/Lib/setuptools/__init__.py +++ /dev/null @@ -1,64 +0,0 @@ -"""Extensions to the 'distutils' for large or complex distributions""" -from setuptools.extension import Extension, Library -from setuptools.dist import Distribution, Feature, _get_unpatched -import distutils.core, setuptools.command -from setuptools.depends import Require -from distutils.core import Command as _Command -from distutils.util import convert_path -import os.path - -__version__ = '0.7a1' -__all__ = [ - 'setup', 'Distribution', 'Feature', 'Command', 'Extension', 'Require', - 'find_packages' -] - -bootstrap_install_from = None - -def find_packages(where='.', exclude=()): - """Return a list all Python packages found within directory 'where' - - 'where' should be supplied as a "cross-platform" (i.e. URL-style) path; it - will be converted to the appropriate local path syntax. 'exclude' is a - sequence of package names to exclude; '*' can be used as a wildcard in the - names, such that 'foo.*' will exclude all subpackages of 'foo' (but not - 'foo' itself). - """ - out = [] - stack=[(convert_path(where), '')] - while stack: - where,prefix = stack.pop(0) - for name in os.listdir(where): - fn = os.path.join(where,name) - if (os.path.isdir(fn) and - os.path.isfile(os.path.join(fn,'__init__.py')) - ): - out.append(prefix+name); stack.append((fn,prefix+name+'.')) - for pat in exclude: - from fnmatch import fnmatchcase - out = [item for item in out if not fnmatchcase(item,pat)] - return out - -setup = distutils.core.setup - -_Command = _get_unpatched(_Command) - -class Command(_Command): - __doc__ = _Command.__doc__ - - command_consumes_arguments = False - - def __init__(self, dist, **kw): - # Add support for keyword arguments - _Command.__init__(self,dist) - for k,v in kw.items(): - setattr(self,k,v) - - def reinitialize_command(self, command, reinit_subcommands=0, **kw): - cmd = _Command.reinitialize_command(self, command, reinit_subcommands) - for k,v in kw.items(): - setattr(cmd,k,v) # update command with keywords - return cmd - -import distutils.core -distutils.core.Command = Command # we can't patch distutils.cmd, alas diff --git a/Lib/setuptools/archive_util.py b/Lib/setuptools/archive_util.py deleted file mode 100755 index dd9c684..0000000 --- a/Lib/setuptools/archive_util.py +++ /dev/null @@ -1,200 +0,0 @@ -"""Utilities for extracting common archive formats""" - - -__all__ = [ - "unpack_archive", "unpack_zipfile", "unpack_tarfile", "default_filter", - "UnrecognizedFormat", "extraction_drivers", "unpack_directory", -] - -import zipfile, tarfile, os, shutil -from pkg_resources import ensure_directory -from distutils.errors import DistutilsError - -class UnrecognizedFormat(DistutilsError): - """Couldn't recognize the archive type""" - -def default_filter(src,dst): - """The default progress/filter callback; returns True for all files""" - return dst - - - - - - - - - - - - - - - - - - - - - - - -def unpack_archive(filename, extract_dir, progress_filter=default_filter, - drivers=None -): - """Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat`` - - `progress_filter` is a function taking two arguments: a source path - internal to the archive ('/'-separated), and a filesystem path where it - will be extracted. 
The callback must return the desired extract path - (which may be the same as the one passed in), or else ``None`` to skip - that file or directory. The callback can thus be used to report on the - progress of the extraction, as well as to filter the items extracted or - alter their extraction paths. - - `drivers`, if supplied, must be a non-empty sequence of functions with the - same signature as this function (minus the `drivers` argument), that raise - ``UnrecognizedFormat`` if they do not support extracting the designated - archive type. The `drivers` are tried in sequence until one is found that - does not raise an error, or until all are exhausted (in which case - ``UnrecognizedFormat`` is raised). If you do not supply a sequence of - drivers, the module's ``extraction_drivers`` constant will be used, which - means that ``unpack_zipfile`` and ``unpack_tarfile`` will be tried, in that - order. - """ - for driver in drivers or extraction_drivers: - try: - driver(filename, extract_dir, progress_filter) - except UnrecognizedFormat: - continue - else: - return - else: - raise UnrecognizedFormat( - "Not a recognized archive type: %s" % filename - ) - - - - - - - -def unpack_directory(filename, extract_dir, progress_filter=default_filter): - """"Unpack" a directory, using the same interface as for archives - - Raises ``UnrecognizedFormat`` if `filename` is not a directory - """ - if not os.path.isdir(filename): - raise UnrecognizedFormat("%s is not a directory" % (filename,)) - - paths = {filename:('',extract_dir)} - for base, dirs, files in os.walk(filename): - src,dst = paths[base] - for d in dirs: - paths[os.path.join(base,d)] = src+d+'/', os.path.join(dst,d) - for f in files: - name = src+f - target = os.path.join(dst,f) - target = progress_filter(src+f, target) - if not target: - continue # skip non-files - ensure_directory(target) - f = os.path.join(base,f) - shutil.copyfile(f, target) - shutil.copystat(f, target) - - - - - - - - - - - - - - - - - - -def unpack_zipfile(filename, extract_dir, progress_filter=default_filter): - """Unpack zip `filename` to `extract_dir` - - Raises ``UnrecognizedFormat`` if `filename` is not a zipfile (as determined - by ``zipfile.is_zipfile()``). See ``unpack_archive()`` for an explanation - of the `progress_filter` argument. - """ - - if not zipfile.is_zipfile(filename): - raise UnrecognizedFormat("%s is not a zip file" % (filename,)) - - z = zipfile.ZipFile(filename) - try: - for info in z.infolist(): - name = info.filename - - # don't extract absolute paths or ones with .. in them - if name.startswith('/') or '..' in name: - continue - - target = os.path.join(extract_dir, *name.split('/')) - target = progress_filter(name, target) - if not target: - continue - if name.endswith('/'): - # directory - ensure_directory(target) - else: - # file - ensure_directory(target) - data = z.read(info.filename) - f = open(target,'wb') - try: - f.write(data) - finally: - f.close() - del data - finally: - z.close() - - -def unpack_tarfile(filename, extract_dir, progress_filter=default_filter): - """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir` - - Raises ``UnrecognizedFormat`` if `filename` is not a tarfile (as determined - by ``tarfile.open()``). See ``unpack_archive()`` for an explanation - of the `progress_filter` argument. 
- """ - - try: - tarobj = tarfile.open(filename) - except tarfile.TarError: - raise UnrecognizedFormat( - "%s is not a compressed or uncompressed tar file" % (filename,) - ) - - try: - tarobj.chown = lambda *args: None # don't do any chowning! - for member in tarobj: - if member.isfile() or member.isdir(): - name = member.name - # don't extract absolute paths or ones with .. in them - if not name.startswith('/') and '..' not in name: - dst = os.path.join(extract_dir, *name.split('/')) - dst = progress_filter(name, dst) - if dst: - if dst.endswith(os.sep): - dst = dst[:-1] - tarobj._extract_member(member,dst) # XXX Ugh - return True - finally: - tarobj.close() - - - - -extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile diff --git a/Lib/setuptools/cli.exe b/Lib/setuptools/cli.exe deleted file mode 100755 index fc83339..0000000 Binary files a/Lib/setuptools/cli.exe and /dev/null differ diff --git a/Lib/setuptools/command/__init__.py b/Lib/setuptools/command/__init__.py deleted file mode 100644 index bff53e7..0000000 --- a/Lib/setuptools/command/__init__.py +++ /dev/null @@ -1,19 +0,0 @@ -__all__ = [ - 'alias', 'bdist_egg', 'bdist_rpm', 'build_ext', 'build_py', 'develop', - 'easy_install', 'egg_info', 'install', 'install_lib', 'rotate', 'saveopts', - 'sdist', 'setopt', 'test', 'upload', 'install_egg_info', 'install_scripts', -] - -import sys -if sys.version>='2.5': - # In Python 2.5 and above, distutils includes its own upload command - __all__.remove('upload') - - -from distutils.command.bdist import bdist - -if 'egg' not in bdist.format_commands: - bdist.format_command['egg'] = ('bdist_egg', "Python .egg file") - bdist.format_commands.append('egg') - -del bdist, sys diff --git a/Lib/setuptools/command/alias.py b/Lib/setuptools/command/alias.py deleted file mode 100755 index 1df474a..0000000 --- a/Lib/setuptools/command/alias.py +++ /dev/null @@ -1,79 +0,0 @@ -import distutils, os -from setuptools import Command -from distutils.util import convert_path -from distutils import log -from distutils.errors import * -from setuptools.command.setopt import edit_config, option_base, config_file - -def shquote(arg): - """Quote an argument for later parsing by shlex.split()""" - for c in '"', "'", "\\", "#": - if c in arg: return repr(arg) - if arg.split()<>[arg]: - return repr(arg) - return arg - - -class alias(option_base): - """Define a shortcut that invokes one or more commands""" - - description = "define a shortcut to invoke one or more commands" - command_consumes_arguments = True - - user_options = [ - ('remove', 'r', 'remove (unset) the alias'), - ] + option_base.user_options - - boolean_options = option_base.boolean_options + ['remove'] - - def initialize_options(self): - option_base.initialize_options(self) - self.args = None - self.remove = None - - def finalize_options(self): - option_base.finalize_options(self) - if self.remove and len(self.args)<>1: - raise DistutilsOptionError( - "Must specify exactly one argument (the alias name) when " - "using --remove" - ) - - def run(self): - aliases = self.distribution.get_option_dict('aliases') - - if not self.args: - print "Command Aliases" - print "---------------" - for alias in aliases: - print "setup.py alias", format_alias(alias, aliases) - return - - elif len(self.args)==1: - alias, = self.args - if self.remove: - command = None - elif alias in aliases: - print "setup.py alias", format_alias(alias, aliases) - return - else: - print "No alias definition found for %r" % alias - return - else: - alias = self.args[0] - 
command = ' '.join(map(shquote,self.args[1:])) - - edit_config(self.filename, {'aliases': {alias:command}}, self.dry_run) - - -def format_alias(name, aliases): - source, command = aliases[name] - if source == config_file('global'): - source = '--global-config ' - elif source == config_file('user'): - source = '--user-config ' - elif source == config_file('local'): - source = '' - else: - source = '--filename=%r' % source - return source+name+' '+command diff --git a/Lib/setuptools/command/bdist_egg.py b/Lib/setuptools/command/bdist_egg.py deleted file mode 100644 index 617d88d..0000000 --- a/Lib/setuptools/command/bdist_egg.py +++ /dev/null @@ -1,449 +0,0 @@ -"""setuptools.command.bdist_egg - -Build .egg distributions""" - -# This module should be kept compatible with Python 2.3 -import sys, os, marshal -from setuptools import Command -from distutils.dir_util import remove_tree, mkpath -from distutils.sysconfig import get_python_version, get_python_lib -from distutils import log -from pkg_resources import get_build_platform, Distribution -from types import CodeType -from setuptools.extension import Library - -def write_stub(resource, pyfile): - f = open(pyfile,'w') - f.write('\n'.join([ - "def __bootstrap__():", - " global __bootstrap__, __loader__, __file__", - " import sys, pkg_resources, imp", - " __file__ = pkg_resources.resource_filename(__name__,%r)" - % resource, - " del __bootstrap__, __loader__", - " imp.load_dynamic(__name__,__file__)", - "__bootstrap__()", - "" # terminal \n - ])) - f.close() - -# stub __init__.py for packages distributed without one -NS_PKG_STUB = '__import__("pkg_resources").declare_namespace(__name__)' - - - - - - - - - - -class bdist_egg(Command): - - description = "create an \"egg\" distribution" - - user_options = [ - ('bdist-dir=', 'b', - "temporary directory for creating the distribution"), - ('plat-name=', 'p', - "platform name to embed in generated filenames " - "(default: %s)" % get_build_platform()), - ('exclude-source-files', None, - "remove all .py files from the generated egg"), - ('keep-temp', 'k', - "keep the pseudo-installation tree around after " + - "creating the distribution archive"), - ('dist-dir=', 'd', - "directory to put final built distributions in"), - ('skip-build', None, - "skip rebuilding everything (for testing/debugging)"), - ] - - boolean_options = [ - 'keep-temp', 'skip-build', 'exclude-source-files' - ] - - - - - - - - - - - - - - - - - - def initialize_options (self): - self.bdist_dir = None - self.plat_name = None - self.keep_temp = 0 - self.dist_dir = None - self.skip_build = 0 - self.egg_output = None - self.exclude_source_files = None - - - def finalize_options(self): - ei_cmd = self.get_finalized_command("egg_info") - self.egg_info = ei_cmd.egg_info - - if self.bdist_dir is None: - bdist_base = self.get_finalized_command('bdist').bdist_base - self.bdist_dir = os.path.join(bdist_base, 'egg') - - if self.plat_name is None: - self.plat_name = get_build_platform() - - self.set_undefined_options('bdist',('dist_dir', 'dist_dir')) - - if self.egg_output is None: - - # Compute filename of the output egg - basename = Distribution( - None, None, ei_cmd.egg_name, ei_cmd.egg_version, - get_python_version(), - self.distribution.has_ext_modules() and self.plat_name - ).egg_name() - - self.egg_output = os.path.join(self.dist_dir, basename+'.egg') - - - - - - - - - def do_install_data(self): - # Hack for packages that install data to install's --install-lib - self.get_finalized_command('install').install_lib = self.bdist_dir - - 
site_packages = os.path.normcase(os.path.realpath(get_python_lib())) - old, self.distribution.data_files = self.distribution.data_files,[] - - for item in old: - if isinstance(item,tuple) and len(item)==2: - if os.path.isabs(item[0]): - realpath = os.path.realpath(item[0]) - normalized = os.path.normcase(realpath) - if normalized==site_packages or normalized.startswith( - site_packages+os.sep - ): - item = realpath[len(site_packages)+1:], item[1] - # XXX else: raise ??? - self.distribution.data_files.append(item) - - try: - log.info("installing package data to %s" % self.bdist_dir) - self.call_command('install_data', force=0, root=None) - finally: - self.distribution.data_files = old - - - def get_outputs(self): - return [self.egg_output] - - - def call_command(self,cmdname,**kw): - """Invoke reinitialized command `cmdname` with keyword args""" - for dirname in INSTALL_DIRECTORY_ATTRS: - kw.setdefault(dirname,self.bdist_dir) - kw.setdefault('skip_build',self.skip_build) - kw.setdefault('dry_run', self.dry_run) - cmd = self.reinitialize_command(cmdname, **kw) - self.run_command(cmdname) - return cmd - - - def run(self): - # Generate metadata first - self.run_command("egg_info") - - # We run install_lib before install_data, because some data hacks - # pull their data path from the install_lib command. - log.info("installing library code to %s" % self.bdist_dir) - instcmd = self.get_finalized_command('install') - old_root = instcmd.root; instcmd.root = None - cmd = self.call_command('install_lib', warn_dir=0) - instcmd.root = old_root - - all_outputs, ext_outputs = self.get_ext_outputs() - self.stubs = [] - to_compile = [] - for (p,ext_name) in enumerate(ext_outputs): - filename,ext = os.path.splitext(ext_name) - pyfile = os.path.join(self.bdist_dir, filename + '.py') - self.stubs.append(pyfile) - log.info("creating stub loader for %s" % ext_name) - if not self.dry_run: - write_stub(os.path.basename(ext_name), pyfile) - to_compile.append(pyfile) - ext_outputs[p] = ext_name.replace(os.sep,'/') - - to_compile.extend(self.make_init_files()) - if to_compile: - cmd.byte_compile(to_compile) - - if self.distribution.data_files: - self.do_install_data() - - # Make the EGG-INFO directory - archive_root = self.bdist_dir - egg_info = os.path.join(archive_root,'EGG-INFO') - self.mkpath(egg_info) - if self.distribution.scripts: - script_dir = os.path.join(egg_info, 'scripts') - log.info("installing scripts to %s" % script_dir) - self.call_command('install_scripts',install_dir=script_dir,no_ep=1) - - native_libs = os.path.join(self.egg_info,"native_libs.txt") - if all_outputs: - log.info("writing %s" % native_libs) - if not self.dry_run: - libs_file = open(native_libs, 'wt') - libs_file.write('\n'.join(all_outputs)) - libs_file.write('\n') - libs_file.close() - elif os.path.isfile(native_libs): - log.info("removing %s" % native_libs) - if not self.dry_run: - os.unlink(native_libs) - - for filename in os.listdir(self.egg_info): - path = os.path.join(self.egg_info,filename) - if os.path.isfile(path): - self.copy_file(path,os.path.join(egg_info,filename)) - - write_safety_flag( - os.path.join(archive_root,'EGG-INFO'), self.zip_safe() - ) - - if os.path.exists(os.path.join(self.egg_info,'depends.txt')): - log.warn( - "WARNING: 'depends.txt' will not be used by setuptools 0.6!\n" - "Use the install_requires/extras_require setup() args instead." 
- ) - - if self.exclude_source_files: - self.zap_pyfiles() - - # Make the archive - make_zipfile(self.egg_output, archive_root, verbose=self.verbose, - dry_run=self.dry_run) - if not self.keep_temp: - remove_tree(self.bdist_dir, dry_run=self.dry_run) - - # Add to 'Distribution.dist_files' so that the "upload" command works - getattr(self.distribution,'dist_files',[]).append( - ('bdist_egg',get_python_version(),self.egg_output)) - - def zap_pyfiles(self): - log.info("Removing .py files from temporary directory") - for base,dirs,files in walk_egg(self.bdist_dir): - for name in files: - if name.endswith('.py'): - path = os.path.join(base,name) - log.debug("Deleting %s", path) - os.unlink(path) - - def zip_safe(self): - safe = getattr(self.distribution,'zip_safe',None) - if safe is not None: - return safe - log.warn("zip_safe flag not set; analyzing archive contents...") - return analyze_egg(self.bdist_dir, self.stubs) - - def make_init_files(self): - """Create missing package __init__ files""" - init_files = [] - for base,dirs,files in walk_egg(self.bdist_dir): - if base==self.bdist_dir: - # don't put an __init__ in the root - continue - for name in files: - if name.endswith('.py'): - if '__init__.py' not in files: - pkg = base[len(self.bdist_dir)+1:].replace(os.sep,'.') - if self.distribution.has_contents_for(pkg): - log.warn("Creating missing __init__.py for %s",pkg) - filename = os.path.join(base,'__init__.py') - if not self.dry_run: - f = open(filename,'w'); f.write(NS_PKG_STUB) - f.close() - init_files.append(filename) - break - else: - # not a package, don't traverse to subdirectories - dirs[:] = [] - - return init_files - - def get_ext_outputs(self): - """Get a list of relative paths to C extensions in the output distro""" - - all_outputs = [] - ext_outputs = [] - - paths = {self.bdist_dir:''} - for base, dirs, files in os.walk(self.bdist_dir): - for filename in files: - if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS: - all_outputs.append(paths[base]+filename) - for filename in dirs: - paths[os.path.join(base,filename)] = paths[base]+filename+'/' - - if self.distribution.has_ext_modules(): - build_cmd = self.get_finalized_command('build_ext') - for ext in build_cmd.extensions: - if isinstance(ext,Library): - continue - fullname = build_cmd.get_ext_fullname(ext.name) - filename = build_cmd.get_ext_filename(fullname) - if not os.path.basename(filename).startswith('dl-'): - if os.path.exists(os.path.join(self.bdist_dir,filename)): - ext_outputs.append(filename) - - return all_outputs, ext_outputs - - -NATIVE_EXTENSIONS = dict.fromkeys('.dll .so .dylib .pyd'.split()) - - - - - - - - - - - - -def walk_egg(egg_dir): - """Walk an unpacked egg's contents, skipping the metadata directory""" - walker = os.walk(egg_dir) - base,dirs,files = walker.next() - if 'EGG-INFO' in dirs: - dirs.remove('EGG-INFO') - yield base,dirs,files - for bdf in walker: - yield bdf - -def analyze_egg(egg_dir, stubs): - # check for existing flag in EGG-INFO - for flag,fn in safety_flags.items(): - if os.path.exists(os.path.join(egg_dir,'EGG-INFO',fn)): - return flag - - safe = True - for base, dirs, files in walk_egg(egg_dir): - for name in files: - if name.endswith('.py') or name.endswith('.pyw'): - continue - elif name.endswith('.pyc') or name.endswith('.pyo'): - # always scan, even if we already know we're not safe - safe = scan_module(egg_dir, base, name, stubs) and safe - return safe - -def write_safety_flag(egg_dir, safe): - # Write or remove zip safety flag file(s) - for flag,fn in 
safety_flags.items(): - fn = os.path.join(egg_dir, fn) - if os.path.exists(fn): - if safe is None or bool(safe)<>flag: - os.unlink(fn) - elif safe is not None and bool(safe)==flag: - open(fn,'w').close() - -safety_flags = { - True: 'zip-safe', - False: 'not-zip-safe', -} - -def scan_module(egg_dir, base, name, stubs): - """Check whether module possibly uses unsafe-for-zipfile stuff""" - - filename = os.path.join(base,name) - if filename[:-1] in stubs: - return True # Extension module - pkg = base[len(egg_dir)+1:].replace(os.sep,'.') - module = pkg+(pkg and '.' or '')+os.path.splitext(name)[0] - f = open(filename,'rb'); f.read(8) # skip magic & date - code = marshal.load(f); f.close() - safe = True - symbols = dict.fromkeys(iter_symbols(code)) - for bad in ['__file__', '__path__']: - if bad in symbols: - log.warn("%s: module references %s", module, bad) - safe = False - if 'inspect' in symbols: - for bad in [ - 'getsource', 'getabsfile', 'getsourcefile', 'getfile' - 'getsourcelines', 'findsource', 'getcomments', 'getframeinfo', - 'getinnerframes', 'getouterframes', 'stack', 'trace' - ]: - if bad in symbols: - log.warn("%s: module MAY be using inspect.%s", module, bad) - safe = False - if '__name__' in symbols and '__main__' in symbols and '.' not in module: - if get_python_version()>="2.4": - log.warn("%s: top-level module may be 'python -m' script", module) - safe = False - return safe - -def iter_symbols(code): - """Yield names and strings used by `code` and its nested code objects""" - for name in code.co_names: yield name - for const in code.co_consts: - if isinstance(const,basestring): - yield const - elif isinstance(const,CodeType): - for name in iter_symbols(const): - yield name - -# Attribute names of options for commands that might need to be convinced to -# install to the egg build directory - -INSTALL_DIRECTORY_ATTRS = [ - 'install_lib', 'install_dir', 'install_data', 'install_base' -] - -def make_zipfile (zip_filename, base_dir, verbose=0, dry_run=0, compress=None): - """Create a zip file from all the files under 'base_dir'. The output - zip file will be named 'base_dir' + ".zip". Uses either the "zipfile" - Python module (if available) or the InfoZIP "zip" utility (if installed - and found on the default search path). If neither tool is available, - raises DistutilsExecError. Returns the name of the output zip file. 
- """ - import zipfile - mkpath(os.path.dirname(zip_filename), dry_run=dry_run) - log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir) - - def visit (z, dirname, names): - for name in names: - path = os.path.normpath(os.path.join(dirname, name)) - if os.path.isfile(path): - p = path[len(base_dir)+1:] - if not dry_run: - z.write(path, p) - log.debug("adding '%s'" % p) - - if compress is None: - compress = (sys.version>="2.4") # avoid 2.3 zipimport bug when 64 bits - - compression = [zipfile.ZIP_STORED, zipfile.ZIP_DEFLATED][bool(compress)] - if not dry_run: - z = zipfile.ZipFile(zip_filename, "w", compression=compression) - os.path.walk(base_dir, visit, z) - z.close() - else: - os.path.walk(base_dir, visit, None) - - return zip_filename diff --git a/Lib/setuptools/command/bdist_rpm.py b/Lib/setuptools/command/bdist_rpm.py deleted file mode 100755 index 00e07ac..0000000 --- a/Lib/setuptools/command/bdist_rpm.py +++ /dev/null @@ -1,37 +0,0 @@ -# This is just a kludge so that bdist_rpm doesn't guess wrong about the -# distribution name and version, if the egg_info command is going to alter -# them, and another kludge to allow you to build old-style non-egg RPMs - -from distutils.command.bdist_rpm import bdist_rpm as _bdist_rpm - -class bdist_rpm(_bdist_rpm): - - def initialize_options(self): - _bdist_rpm.initialize_options(self) - self.no_egg = None - - def run(self): - self.run_command('egg_info') # ensure distro name is up-to-date - _bdist_rpm.run(self) - - def _make_spec_file(self): - version = self.distribution.get_version() - rpmversion = version.replace('-','_') - spec = _bdist_rpm._make_spec_file(self) - line23 = '%define version '+version - line24 = '%define version '+rpmversion - spec = [ - line.replace( - "Source0: %{name}-%{version}.tar", - "Source0: %{name}-%{unmangled_version}.tar" - ).replace( - "setup.py install ", - "setup.py install --single-version-externally-managed " - ).replace( - "%setup", - "%setup -n %{name}-%{unmangled_version}" - ).replace(line23,line24) - for line in spec - ] - spec.insert(spec.index(line24)+1, "%define unmangled_version "+version) - return spec diff --git a/Lib/setuptools/command/build_ext.py b/Lib/setuptools/command/build_ext.py deleted file mode 100644 index f8551fb..0000000 --- a/Lib/setuptools/command/build_ext.py +++ /dev/null @@ -1,285 +0,0 @@ -from distutils.command.build_ext import build_ext as _du_build_ext -try: - # Attempt to use Pyrex for building extensions, if available - from Pyrex.Distutils.build_ext import build_ext as _build_ext -except ImportError: - _build_ext = _du_build_ext - -import os, sys -from distutils.file_util import copy_file -from setuptools.extension import Library -from distutils.ccompiler import new_compiler -from distutils.sysconfig import customize_compiler, get_config_var -get_config_var("LDSHARED") # make sure _config_vars is initialized -from distutils.sysconfig import _config_vars -from distutils import log -from distutils.errors import * - -have_rtld = False -use_stubs = False -libtype = 'shared' - -if sys.platform == "darwin": - use_stubs = True -elif os.name != 'nt': - try: - from dl import RTLD_NOW - have_rtld = True - use_stubs = True - except ImportError: - pass - -def if_dl(s): - if have_rtld: - return s - return '' - - - - - - -class build_ext(_build_ext): - def run(self): - """Build extensions in build directory, then copy if --inplace""" - old_inplace, self.inplace = self.inplace, 0 - _build_ext.run(self) - self.inplace = old_inplace - if old_inplace: - 
self.copy_extensions_to_source() - - def copy_extensions_to_source(self): - build_py = self.get_finalized_command('build_py') - for ext in self.extensions: - fullname = self.get_ext_fullname(ext.name) - filename = self.get_ext_filename(fullname) - modpath = fullname.split('.') - package = '.'.join(modpath[:-1]) - package_dir = build_py.get_package_dir(package) - dest_filename = os.path.join(package_dir,os.path.basename(filename)) - src_filename = os.path.join(self.build_lib,filename) - - # Always copy, even if source is older than destination, to ensure - # that the right extensions for the current Python/platform are - # used. - copy_file( - src_filename, dest_filename, verbose=self.verbose, - dry_run=self.dry_run - ) - if ext._needs_stub: - self.write_stub(package_dir or os.curdir, ext, True) - - - if _build_ext is not _du_build_ext: - # Workaround for problems using some Pyrex versions w/SWIG and/or 2.4 - def swig_sources(self, sources, *otherargs): - # first do any Pyrex processing - sources = _build_ext.swig_sources(self, sources) or sources - # Then do any actual SWIG stuff on the remainder - return _du_build_ext.swig_sources(self, sources, *otherargs) - - - - def get_ext_filename(self, fullname): - filename = _build_ext.get_ext_filename(self,fullname) - ext = self.ext_map[fullname] - if isinstance(ext,Library): - fn, ext = os.path.splitext(filename) - return self.shlib_compiler.library_filename(fn,libtype) - elif use_stubs and ext._links_to_dynamic: - d,fn = os.path.split(filename) - return os.path.join(d,'dl-'+fn) - else: - return filename - - def initialize_options(self): - _build_ext.initialize_options(self) - self.shlib_compiler = None - self.shlibs = [] - self.ext_map = {} - - def finalize_options(self): - _build_ext.finalize_options(self) - self.extensions = self.extensions or [] - self.check_extensions_list(self.extensions) - self.shlibs = [ext for ext in self.extensions - if isinstance(ext,Library)] - if self.shlibs: - self.setup_shlib_compiler() - for ext in self.extensions: - fullname = ext._full_name = self.get_ext_fullname(ext.name) - self.ext_map[fullname] = ext - ltd = ext._links_to_dynamic = \ - self.shlibs and self.links_to_dynamic(ext) or False - ext._needs_stub = ltd and use_stubs and not isinstance(ext,Library) - filename = ext._file_name = self.get_ext_filename(fullname) - libdir = os.path.dirname(os.path.join(self.build_lib,filename)) - if ltd and libdir not in ext.library_dirs: - ext.library_dirs.append(libdir) - if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs: - ext.runtime_library_dirs.append(os.curdir) - - - - def setup_shlib_compiler(self): - compiler = self.shlib_compiler = new_compiler( - compiler=self.compiler, dry_run=self.dry_run, force=self.force - ) - if sys.platform == "darwin": - tmp = _config_vars.copy() - try: - # XXX Help! I don't have any idea whether these are right... 
- _config_vars['LDSHARED'] = "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup" - _config_vars['CCSHARED'] = " -dynamiclib" - _config_vars['SO'] = ".dylib" - customize_compiler(compiler) - finally: - _config_vars.clear() - _config_vars.update(tmp) - else: - customize_compiler(compiler) - - if self.include_dirs is not None: - compiler.set_include_dirs(self.include_dirs) - if self.define is not None: - # 'define' option is a list of (name,value) tuples - for (name,value) in self.define: - compiler.define_macro(name, value) - if self.undef is not None: - for macro in self.undef: - compiler.undefine_macro(macro) - if self.libraries is not None: - compiler.set_libraries(self.libraries) - if self.library_dirs is not None: - compiler.set_library_dirs(self.library_dirs) - if self.rpath is not None: - compiler.set_runtime_library_dirs(self.rpath) - if self.link_objects is not None: - compiler.set_link_objects(self.link_objects) - - # hack so distutils' build_extension() builds a library instead - compiler.link_shared_object = link_shared_object.__get__(compiler) - - - - def get_export_symbols(self, ext): - if isinstance(ext,Library): - return ext.export_symbols - return _build_ext.get_export_symbols(self,ext) - - def build_extension(self, ext): - _compiler = self.compiler - try: - if isinstance(ext,Library): - self.compiler = self.shlib_compiler - _build_ext.build_extension(self,ext) - if ext._needs_stub: - self.write_stub( - self.get_finalized_command('build_py').build_lib, ext - ) - finally: - self.compiler = _compiler - - def links_to_dynamic(self, ext): - """Return true if 'ext' links to a dynamic lib in the same package""" - # XXX this should check to ensure the lib is actually being built - # XXX as dynamic, and not just using a locally-found version or a - # XXX static-compiled version - libnames = dict.fromkeys([lib._full_name for lib in self.shlibs]) - pkg = '.'.join(ext._full_name.split('.')[:-1]+['']) - for libname in ext.libraries: - if pkg+libname in libnames: return True - return False - - def get_outputs(self): - outputs = _build_ext.get_outputs(self) - optimize = self.get_finalized_command('build_py').optimize - for ext in self.extensions: - if ext._needs_stub: - base = os.path.join(self.build_lib, *ext._full_name.split('.')) - outputs.append(base+'.py') - outputs.append(base+'.pyc') - if optimize: - outputs.append(base+'.pyo') - return outputs - - def write_stub(self, output_dir, ext, compile=False): - log.info("writing stub loader for %s to %s",ext._full_name, output_dir) - stub_file = os.path.join(output_dir, *ext._full_name.split('.'))+'.py' - if compile and os.path.exists(stub_file): - raise DistutilsError(stub_file+" already exists! 
Please delete.") - if not self.dry_run: - f = open(stub_file,'w') - f.write('\n'.join([ - "def __bootstrap__():", - " global __bootstrap__, __file__, __loader__", - " import sys, os, pkg_resources, imp"+if_dl(", dl"), - " __file__ = pkg_resources.resource_filename(__name__,%r)" - % os.path.basename(ext._file_name), - " del __bootstrap__", - " if '__loader__' in globals():", - " del __loader__", - if_dl(" old_flags = sys.getdlopenflags()"), - " old_dir = os.getcwd()", - " try:", - " os.chdir(os.path.dirname(__file__))", - if_dl(" sys.setdlopenflags(dl.RTLD_NOW)"), - " imp.load_dynamic(__name__,__file__)", - " finally:", - if_dl(" sys.setdlopenflags(old_flags)"), - " os.chdir(old_dir)", - "__bootstrap__()", - "" # terminal \n - ])) - f.close() - if compile: - from distutils.util import byte_compile - byte_compile([stub_file], optimize=0, - force=True, dry_run=self.dry_run) - optimize = self.get_finalized_command('install_lib').optimize - if optimize > 0: - byte_compile([stub_file], optimize=optimize, - force=True, dry_run=self.dry_run) - if os.path.exists(stub_file) and not self.dry_run: - os.unlink(stub_file) - - -if use_stubs or os.name=='nt': - # Build shared libraries - # - def link_shared_object(self, objects, output_libname, output_dir=None, - libraries=None, library_dirs=None, runtime_library_dirs=None, - export_symbols=None, debug=0, extra_preargs=None, - extra_postargs=None, build_temp=None, target_lang=None - ): self.link( - self.SHARED_LIBRARY, objects, output_libname, - output_dir, libraries, library_dirs, runtime_library_dirs, - export_symbols, debug, extra_preargs, extra_postargs, - build_temp, target_lang - ) -else: - # Build static libraries everywhere else - libtype = 'static' - - def link_shared_object(self, objects, output_libname, output_dir=None, - libraries=None, library_dirs=None, runtime_library_dirs=None, - export_symbols=None, debug=0, extra_preargs=None, - extra_postargs=None, build_temp=None, target_lang=None - ): - # XXX we need to either disallow these attrs on Library instances, - # or warn/abort here if set, or something... - #libraries=None, library_dirs=None, runtime_library_dirs=None, - #export_symbols=None, extra_preargs=None, extra_postargs=None, - #build_temp=None - - assert output_dir is None # distutils build_ext doesn't pass this - output_dir,filename = os.path.split(output_libname) - basename, ext = os.path.splitext(filename) - if self.library_filename("x").startswith('lib'): - # strip 'lib' prefix; this is kludgy if some platform uses - # a different prefix - basename = basename[3:] - - self.create_static_lib( - objects, basename, output_dir, debug, target_lang - ) diff --git a/Lib/setuptools/command/build_py.py b/Lib/setuptools/command/build_py.py deleted file mode 100644 index 77a9b23..0000000 --- a/Lib/setuptools/command/build_py.py +++ /dev/null @@ -1,192 +0,0 @@ -import os.path, sys, fnmatch -from distutils.command.build_py import build_py as _build_py -from distutils.util import convert_path -from glob import glob - -class build_py(_build_py): - """Enhanced 'build_py' command that includes data files with packages - - The data files are specified via a 'package_data' argument to 'setup()'. - See 'setuptools.dist.Distribution' for more details. - - Also, this version of the 'build_py' command allows you to specify both - 'py_modules' and 'packages' in the same setup operation. 
- """ - def finalize_options(self): - _build_py.finalize_options(self) - self.package_data = self.distribution.package_data - self.exclude_package_data = self.distribution.exclude_package_data or {} - if 'data_files' in self.__dict__: del self.__dict__['data_files'] - - def run(self): - """Build modules, packages, and copy data files to build directory""" - if not self.py_modules and not self.packages: - return - - if self.py_modules: - self.build_modules() - - if self.packages: - self.build_packages() - self.build_package_data() - - # Only compile actual .py files, using our base class' idea of what our - # output files are. - self.byte_compile(_build_py.get_outputs(self, include_bytecode=0)) - - def __getattr__(self,attr): - if attr=='data_files': # lazily compute data files - self.data_files = files = self._get_data_files(); return files - return _build_py.__getattr__(self,attr) - - def _get_data_files(self): - """Generate list of '(package,src_dir,build_dir,filenames)' tuples""" - self.analyze_manifest() - data = [] - for package in self.packages or (): - # Locate package source directory - src_dir = self.get_package_dir(package) - - # Compute package build directory - build_dir = os.path.join(*([self.build_lib] + package.split('.'))) - - # Length of path to strip from found files - plen = len(src_dir)+1 - - # Strip directory from globbed filenames - filenames = [ - file[plen:] for file in self.find_data_files(package, src_dir) - ] - data.append( (package, src_dir, build_dir, filenames) ) - return data - - def find_data_files(self, package, src_dir): - """Return filenames for package's data files in 'src_dir'""" - globs = (self.package_data.get('', []) - + self.package_data.get(package, [])) - files = self.manifest_files.get(package, [])[:] - for pattern in globs: - # Each pattern has to be converted to a platform-specific path - files.extend(glob(os.path.join(src_dir, convert_path(pattern)))) - return self.exclude_data_files(package, src_dir, files) - - def build_package_data(self): - """Copy data files into build directory""" - lastdir = None - for package, src_dir, build_dir, filenames in self.data_files: - for filename in filenames: - target = os.path.join(build_dir, filename) - self.mkpath(os.path.dirname(target)) - self.copy_file(os.path.join(src_dir, filename), target) - - - def analyze_manifest(self): - self.manifest_files = mf = {} - if not self.distribution.include_package_data: - return - src_dirs = {} - for package in self.packages or (): - # Locate package source directory - src_dirs[assert_relative(self.get_package_dir(package))] = package - - self.run_command('egg_info') - ei_cmd = self.get_finalized_command('egg_info') - for path in ei_cmd.filelist.files: - if path.endswith('.py'): - continue - d,f = os.path.split(assert_relative(path)) - prev = None - while d and d!=prev and d not in src_dirs: - prev = d - d, df = os.path.split(d) - f = os.path.join(df, f) - if d in src_dirs: - mf.setdefault(src_dirs[d],[]).append(path) - - - def get_data_files(self): pass # kludge 2.4 for lazy computation - - if sys.version<"2.4": # Python 2.4 already has this code - def get_outputs(self, include_bytecode=1): - """Return complete list of files copied to the build directory - - This includes both '.py' files and data files, as well as '.pyc' - and '.pyo' files if 'include_bytecode' is true. (This method is - needed for the 'install_lib' command to do its job properly, and to - generate a correct installation manifest.) 
- """ - return _build_py.get_outputs(self, include_bytecode) + [ - os.path.join(build_dir, filename) - for package, src_dir, build_dir,filenames in self.data_files - for filename in filenames - ] - - def check_package(self, package, package_dir): - """Check namespace packages' __init__ for declare_namespace""" - try: - return self.packages_checked[package] - except KeyError: - pass - - init_py = _build_py.check_package(self, package, package_dir) - self.packages_checked[package] = init_py - - if not init_py or not self.distribution.namespace_packages: - return init_py - - for pkg in self.distribution.namespace_packages: - if pkg==package or pkg.startswith(package+'.'): - break - else: - return init_py - - f = open(init_py,'rU') - if 'declare_namespace' not in f.read(): - from distutils.errors import DistutilsError - raise DistutilsError( - "Namespace package problem: %s is a namespace package, but its\n" - "__init__.py does not call declare_namespace()! Please fix it.\n" - '(See the setuptools manual under "Namespace Packages" for ' - "details.)\n" % (package,) - ) - f.close() - return init_py - - def initialize_options(self): - self.packages_checked={} - _build_py.initialize_options(self) - - - - - - - - def exclude_data_files(self, package, src_dir, files): - """Filter filenames for package's data files in 'src_dir'""" - globs = (self.exclude_package_data.get('', []) - + self.exclude_package_data.get(package, [])) - bad = [] - for pattern in globs: - bad.extend( - fnmatch.filter( - files, os.path.join(src_dir, convert_path(pattern)) - ) - ) - bad = dict.fromkeys(bad) - return [f for f in files if f not in bad] - - -def assert_relative(path): - if not os.path.isabs(path): - return path - from distutils.errors import DistutilsSetupError - raise DistutilsSetupError( -"""Error: setup script specifies an absolute path: - - %s - -setup() arguments must *always* be /-separated paths relative to the -setup.py directory, *never* absolute paths. 
-""" % path - ) diff --git a/Lib/setuptools/command/develop.py b/Lib/setuptools/command/develop.py deleted file mode 100755 index 7ab5b23..0000000 --- a/Lib/setuptools/command/develop.py +++ /dev/null @@ -1,116 +0,0 @@ -from setuptools.command.easy_install import easy_install -from distutils.util import convert_path -from pkg_resources import Distribution, PathMetadata, normalize_path -from distutils import log -from distutils.errors import * -import sys, os - -class develop(easy_install): - """Set up package for development""" - - description = "install package in 'development mode'" - - user_options = easy_install.user_options + [ - ("uninstall", "u", "Uninstall this source package"), - ] - - boolean_options = easy_install.boolean_options + ['uninstall'] - - command_consumes_arguments = False # override base - - def run(self): - if self.uninstall: - self.multi_version = True - self.uninstall_link() - else: - self.install_for_development() - self.warn_deprecated_options() - - def initialize_options(self): - self.uninstall = None - easy_install.initialize_options(self) - - - - - - - - - - - def finalize_options(self): - ei = self.get_finalized_command("egg_info") - if ei.broken_egg_info: - raise DistutilsError( - "Please rename %r to %r before using 'develop'" - % (ei.egg_info, ei.broken_egg_info) - ) - self.args = [ei.egg_name] - easy_install.finalize_options(self) - self.egg_link = os.path.join(self.install_dir, ei.egg_name+'.egg-link') - self.egg_base = ei.egg_base - self.egg_path = os.path.abspath(ei.egg_base) - - # Make a distribution for the package's source - self.dist = Distribution( - normalize_path(self.egg_path), - PathMetadata(self.egg_path, os.path.abspath(ei.egg_info)), - project_name = ei.egg_name - ) - - def install_for_development(self): - # Ensure metadata is up-to-date - self.run_command('egg_info') - - # Build extensions in-place - self.reinitialize_command('build_ext', inplace=1) - self.run_command('build_ext') - - self.install_site_py() # ensure that target dir is site-safe - - # create an .egg-link in the installation dir, pointing to our egg - log.info("Creating %s (link to %s)", self.egg_link, self.egg_base) - if not self.dry_run: - f = open(self.egg_link,"w") - f.write(self.egg_path) - f.close() - - # postprocess the installed distro, fixing up .pth, installing scripts, - # and handling requirements - self.process_distribution(None, self.dist) - - def uninstall_link(self): - if os.path.exists(self.egg_link): - log.info("Removing %s (link to %s)", self.egg_link, self.egg_base) - contents = [line.rstrip() for line in file(self.egg_link)] - if contents != [self.egg_path]: - log.warn("Link points to %s: uninstall aborted", contents) - return - if not self.dry_run: - os.unlink(self.egg_link) - if not self.dry_run: - self.update_pth(self.dist) # remove any .pth link to us - if self.distribution.scripts: - # XXX should also check for entry point scripts! - log.warn("Note: you must uninstall or replace scripts manually!") - - - def install_egg_scripts(self, dist): - if dist is not self.dist: - # Installing a dependency, so fall back to normal behavior - return easy_install.install_egg_scripts(self,dist) - - # create wrapper scripts in the script dir, pointing to dist.scripts - - # new-style... 
- self.install_wrapper_scripts(dist) - - # ...and old-style - for script_name in self.distribution.scripts or []: - script_path = os.path.abspath(convert_path(script_name)) - script_name = os.path.basename(script_path) - f = open(script_path,'rU') - script_text = f.read() - f.close() - self.install_script(dist, script_name, script_text, script_path) diff --git a/Lib/setuptools/command/easy_install.py b/Lib/setuptools/command/easy_install.py deleted file mode 100755 index 3ddcec4..0000000 --- a/Lib/setuptools/command/easy_install.py +++ /dev/null @@ -1,1555 +0,0 @@ -#!python -"""\ -Easy Install ------------- - -A tool for doing automatic download/extract/build of distutils-based Python -packages. For detailed documentation, see the accompanying EasyInstall.txt -file, or visit the `EasyInstall home page`__. - -__ http://peak.telecommunity.com/DevCenter/EasyInstall -""" -import sys, os.path, zipimport, shutil, tempfile, zipfile, re, stat, random -from glob import glob -from setuptools import Command -from setuptools.sandbox import run_setup -from distutils import log, dir_util -from distutils.sysconfig import get_python_lib -from distutils.errors import DistutilsArgError, DistutilsOptionError, \ - DistutilsError -from setuptools.archive_util import unpack_archive -from setuptools.package_index import PackageIndex, parse_bdist_wininst -from setuptools.package_index import URL_SCHEME -from setuptools.command import bdist_egg, egg_info -from pkg_resources import * -sys_executable = os.path.normpath(sys.executable) - -__all__ = [ - 'samefile', 'easy_install', 'PthDistributions', 'extract_wininst_cfg', - 'main', 'get_exe_prefixes', -] - -def samefile(p1,p2): - if hasattr(os.path,'samefile') and ( - os.path.exists(p1) and os.path.exists(p2) - ): - return os.path.samefile(p1,p2) - return ( - os.path.normpath(os.path.normcase(p1)) == - os.path.normpath(os.path.normcase(p2)) - ) - -class easy_install(Command): - """Manage a download/build/install process""" - description = "Find/get/install Python packages" - command_consumes_arguments = True - - user_options = [ - ('prefix=', None, "installation prefix"), - ("zip-ok", "z", "install package as a zipfile"), - ("multi-version", "m", "make apps have to require() a version"), - ("upgrade", "U", "force upgrade (searches PyPI for latest versions)"), - ("install-dir=", "d", "install package to DIR"), - ("script-dir=", "s", "install scripts to DIR"), - ("exclude-scripts", "x", "Don't install scripts"), - ("always-copy", "a", "Copy all needed packages to install dir"), - ("index-url=", "i", "base URL of Python Package Index"), - ("find-links=", "f", "additional URL(s) to search for packages"), - ("delete-conflicting", "D", "no longer needed; don't use this"), - ("ignore-conflicts-at-my-risk", None, - "no longer needed; don't use this"), - ("build-directory=", "b", - "download/extract/build in DIR; keep the results"), - ('optimize=', 'O', - "also compile with optimization: -O1 for \"python -O\", " - "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"), - ('record=', None, - "filename in which to record list of installed files"), - ('always-unzip', 'Z', "don't install as a zipfile, no matter what"), - ('site-dirs=','S',"list of directories where .pth files work"), - ('editable', 'e', "Install specified packages in editable form"), - ('no-deps', 'N', "don't install dependencies"), - ('allow-hosts=', 'H', "pattern(s) that hostnames must match"), - ] - boolean_options = [ - 'zip-ok', 'multi-version', 'exclude-scripts', 'upgrade', 'always-copy', - 
'delete-conflicting', 'ignore-conflicts-at-my-risk', 'editable', - 'no-deps', - ] - negative_opt = {'always-unzip': 'zip-ok'} - create_index = PackageIndex - - - def initialize_options(self): - self.zip_ok = None - self.install_dir = self.script_dir = self.exclude_scripts = None - self.index_url = None - self.find_links = None - self.build_directory = None - self.args = None - self.optimize = self.record = None - self.upgrade = self.always_copy = self.multi_version = None - self.editable = self.no_deps = self.allow_hosts = None - self.root = self.prefix = self.no_report = None - - # Options not specifiable via command line - self.package_index = None - self.pth_file = None - self.delete_conflicting = None - self.ignore_conflicts_at_my_risk = None - self.site_dirs = None - self.installed_projects = {} - self.sitepy_installed = False - # Always read easy_install options, even if we are subclassed, or have - # an independent instance created. This ensures that defaults will - # always come from the standard configuration file(s)' "easy_install" - # section, even if this is a "develop" or "install" command, or some - # other embedding. - self._dry_run = None - self.verbose = self.distribution.verbose - self.distribution._set_command_options( - self, self.distribution.get_option_dict('easy_install') - ) - - def delete_blockers(self, blockers): - for filename in blockers: - if os.path.exists(filename) or os.path.islink(filename): - log.info("Deleting %s", filename) - if not self.dry_run: - if os.path.isdir(filename) and not os.path.islink(filename): - rmtree(filename) - else: - os.unlink(filename) - - def finalize_options(self): - self._expand('install_dir','script_dir','build_directory','site_dirs') - # If a non-default installation directory was specified, default the - # script directory to match it. - if self.script_dir is None: - self.script_dir = self.install_dir - - # Let install_dir get set by install_lib command, which in turn - # gets its info from the install command, and takes into account - # --prefix and --home and all that other crud. 
- self.set_undefined_options('install_lib', - ('install_dir','install_dir') - ) - # Likewise, set default script_dir from 'install_scripts.install_dir' - self.set_undefined_options('install_scripts', - ('install_dir', 'script_dir') - ) - # default --record from the install command - self.set_undefined_options('install', ('record', 'record')) - normpath = map(normalize_path, sys.path) - self.all_site_dirs = get_site_dirs() - if self.site_dirs is not None: - site_dirs = [ - os.path.expanduser(s.strip()) for s in self.site_dirs.split(',') - ] - for d in site_dirs: - if not os.path.isdir(d): - log.warn("%s (in --site-dirs) does not exist", d) - elif normalize_path(d) not in normpath: - raise DistutilsOptionError( - d+" (in --site-dirs) is not on sys.path" - ) - else: - self.all_site_dirs.append(normalize_path(d)) - self.check_site_dir() - self.index_url = self.index_url or "http://www.python.org/pypi" - self.shadow_path = self.all_site_dirs[:] - for path_item in self.install_dir, normalize_path(self.script_dir): - if path_item not in self.shadow_path: - self.shadow_path.insert(0, path_item) - - if self.allow_hosts is not None: - hosts = [s.strip() for s in self.allow_hosts.split(',')] - else: - hosts = ['*'] - - if self.package_index is None: - self.package_index = self.create_index( - self.index_url, search_path = self.shadow_path, hosts=hosts - ) - self.local_index = Environment(self.shadow_path) - - if self.find_links is not None: - if isinstance(self.find_links, basestring): - self.find_links = self.find_links.split() - else: - self.find_links = [] - - self.package_index.add_find_links(self.find_links) - self.set_undefined_options('install_lib', ('optimize','optimize')) - if not isinstance(self.optimize,int): - try: - self.optimize = int(self.optimize) - if not (0 <= self.optimize <= 2): raise ValueError - except ValueError: - raise DistutilsOptionError("--optimize must be 0, 1, or 2") - - if self.delete_conflicting and self.ignore_conflicts_at_my_risk: - raise DistutilsOptionError( - "Can't use both --delete-conflicting and " - "--ignore-conflicts-at-my-risk at the same time" - ) - if self.editable and not self.build_directory: - raise DistutilsArgError( - "Must specify a build directory (-b) when using --editable" - ) - if not self.args: - raise DistutilsArgError( - "No urls, filenames, or requirements specified (see --help)") - - self.outputs = [] - - def run(self): - if self.verbose<>self.distribution.verbose: - log.set_verbosity(self.verbose) - try: - for spec in self.args: - self.easy_install(spec, not self.no_deps) - if self.record: - outputs = self.outputs - if self.root: # strip any package prefix - root_len = len(self.root) - for counter in xrange(len(outputs)): - outputs[counter] = outputs[counter][root_len:] - from distutils import file_util - self.execute( - file_util.write_file, (self.record, outputs), - "writing list of installed files to '%s'" % - self.record - ) - self.warn_deprecated_options() - finally: - log.set_verbosity(self.distribution.verbose) - - def pseudo_tempname(self): - """Return a pseudo-tempname base in the install directory. - This code is intentionally naive; if a malicious party can write to - the target directory you're already in deep doodoo. 
- """ - try: - pid = os.getpid() - except: - pid = random.randint(0,sys.maxint) - return os.path.join(self.install_dir, "test-easy-install-%s" % pid) - - def warn_deprecated_options(self): - if self.delete_conflicting or self.ignore_conflicts_at_my_risk: - log.warn( - "Note: The -D, --delete-conflicting and" - " --ignore-conflicts-at-my-risk no longer have any purpose" - " and should not be used." - ) - - def check_site_dir(self): - """Verify that self.install_dir is .pth-capable dir, if needed""" - - instdir = normalize_path(self.install_dir) - pth_file = os.path.join(instdir,'easy-install.pth') - - # Is it a configured, PYTHONPATH, implicit, or explicit site dir? - is_site_dir = instdir in self.all_site_dirs - - if not is_site_dir: - # No? Then directly test whether it does .pth file processing - is_site_dir = self.check_pth_processing() - else: - # make sure we can write to target dir - testfile = self.pseudo_tempname()+'.write-test' - test_exists = os.path.exists(testfile) - try: - if test_exists: os.unlink(testfile) - open(testfile,'w').close() - os.unlink(testfile) - except (OSError,IOError): - self.cant_write_to_target() - - if not is_site_dir and not self.multi_version: - # Can't install non-multi to non-site dir - raise DistutilsError(self.no_default_version_msg()) - - if is_site_dir: - if self.pth_file is None: - self.pth_file = PthDistributions(pth_file) - else: - self.pth_file = None - - PYTHONPATH = os.environ.get('PYTHONPATH','').split(os.pathsep) - if instdir not in map(normalize_path, filter(None,PYTHONPATH)): - # only PYTHONPATH dirs need a site.py, so pretend it's there - self.sitepy_installed = True - - self.install_dir = instdir - - - def cant_write_to_target(self): - msg = """can't create or remove files in install directory - -The following error occurred while trying to add or remove files in the -installation directory: - - %s - -The installation directory you specified (via --install-dir, --prefix, or -the distutils default setting) was: - - %s -""" % (sys.exc_info()[1], self.install_dir,) - - if not os.path.exists(self.install_dir): - msg += """ -This directory does not currently exist. Please create it and try again, or -choose a different installation directory (using the -d or --install-dir -option). -""" - else: - msg += """ -Perhaps your account does not have write access to this directory? If the -installation directory is a system-owned directory, you may need to sign in -as the administrator or "root" account. If you do not have administrative -access to this machine, you may wish to choose a different installation -directory, preferably one that is listed in your PYTHONPATH environment -variable. - -For information on other options, you may wish to consult the -documentation at: - - http://peak.telecommunity.com/EasyInstall.html - -Please make the appropriate changes for your system and try again. -""" - raise DistutilsError(msg) - - - - - def check_pth_processing(self): - """Empirically verify whether .pth files are supported in inst. 
dir""" - instdir = self.install_dir - log.info("Checking .pth file support in %s", instdir) - pth_file = self.pseudo_tempname()+".pth" - ok_file = pth_file+'.ok' - ok_exists = os.path.exists(ok_file) - try: - if ok_exists: os.unlink(ok_file) - f = open(pth_file,'w') - except (OSError,IOError): - self.cant_write_to_target() - else: - try: - f.write("import os;open(%r,'w').write('OK')\n" % (ok_file,)) - f.close(); f=None - executable = sys.executable - if os.name=='nt': - dirname,basename = os.path.split(executable) - alt = os.path.join(dirname,'pythonw.exe') - if basename.lower()=='python.exe' and os.path.exists(alt): - # use pythonw.exe to avoid opening a console window - executable = alt - if ' ' in executable: executable='"%s"' % executable - from distutils.spawn import spawn - spawn([executable,'-E','-c','pass'],0) - - if os.path.exists(ok_file): - log.info( - "TEST PASSED: %s appears to support .pth files", - instdir - ) - return True - finally: - if f: f.close() - if os.path.exists(ok_file): os.unlink(ok_file) - if os.path.exists(pth_file): os.unlink(pth_file) - if not self.multi_version: - log.warn("TEST FAILED: %s does NOT support .pth files", instdir) - return False - - def install_egg_scripts(self, dist): - """Write all the scripts for `dist`, unless scripts are excluded""" - - self.install_wrapper_scripts(dist) - if self.exclude_scripts or not dist.metadata_isdir('scripts'): - return - - for script_name in dist.metadata_listdir('scripts'): - self.install_script( - dist, script_name, - dist.get_metadata('scripts/'+script_name).replace('\r','\n') - ) - - def add_output(self, path): - if os.path.isdir(path): - for base, dirs, files in os.walk(path): - for filename in files: - self.outputs.append(os.path.join(base,filename)) - else: - self.outputs.append(path) - - def not_editable(self, spec): - if self.editable: - raise DistutilsArgError( - "Invalid argument %r: you can't use filenames or URLs " - "with --editable (except via the --find-links option)." 
- % (spec,) - ) - - def check_editable(self,spec): - if not self.editable: - return - - if os.path.exists(os.path.join(self.build_directory, spec.key)): - raise DistutilsArgError( - "%r already exists in %s; can't do a checkout there" % - (spec.key, self.build_directory) - ) - - - - def easy_install(self, spec, deps=False): - tmpdir = tempfile.mkdtemp(prefix="easy_install-") - download = None - self.install_site_py() - - try: - if not isinstance(spec,Requirement): - if URL_SCHEME(spec): - # It's a url, download it to tmpdir and process - self.not_editable(spec) - download = self.package_index.download(spec, tmpdir) - return self.install_item(None, download, tmpdir, deps, True) - - elif os.path.exists(spec): - # Existing file or directory, just process it directly - self.not_editable(spec) - return self.install_item(None, spec, tmpdir, deps, True) - else: - spec = parse_requirement_arg(spec) - - self.check_editable(spec) - dist = self.package_index.fetch_distribution( - spec, tmpdir, self.upgrade, self.editable, not self.always_copy - ) - - if dist is None: - msg = "Could not find suitable distribution for %r" % spec - if self.always_copy: - msg+=" (--always-copy skips system and development eggs)" - raise DistutilsError(msg) - elif dist.precedence==DEVELOP_DIST: - # .egg-info dists don't need installing, just process deps - self.process_distribution(spec, dist, deps, "Using") - return dist - else: - return self.install_item(spec, dist.location, tmpdir, deps) - - finally: - if os.path.exists(tmpdir): - rmtree(tmpdir) - - def install_item(self, spec, download, tmpdir, deps, install_needed=False): - - # Installation is also needed if file in tmpdir or is not an egg - install_needed = install_needed or os.path.dirname(download) == tmpdir - install_needed = install_needed or not download.endswith('.egg') - - log.info("Processing %s", os.path.basename(download)) - - if install_needed or self.always_copy: - dists = self.install_eggs(spec, download, tmpdir) - for dist in dists: - self.process_distribution(spec, dist, deps) - else: - dists = [self.check_conflicts(self.egg_distribution(download))] - self.process_distribution(spec, dists[0], deps, "Using") - - if spec is not None: - for dist in dists: - if dist in spec: - return dist - - - - - - - - - - - - - - - - - - - - - - def process_distribution(self, requirement, dist, deps=True, *info): - self.update_pth(dist) - self.package_index.add(dist) - self.local_index.add(dist) - self.install_egg_scripts(dist) - self.installed_projects[dist.key] = dist - log.warn(self.installation_report(requirement, dist, *info)) - if not deps and not self.always_copy: - return - elif requirement is not None and dist.key != requirement.key: - log.warn("Skipping dependencies for %s", dist) - return # XXX this is not the distribution we were looking for - elif requirement is None or dist not in requirement: - # if we wound up with a different version, resolve what we've got - distreq = dist.as_requirement() - requirement = requirement or distreq - requirement = Requirement( - distreq.project_name, distreq.specs, requirement.extras - ) - if dist.has_metadata('dependency_links.txt'): - self.package_index.add_find_links( - dist.get_metadata_lines('dependency_links.txt') - ) - log.info("Processing dependencies for %s", requirement) - try: - distros = WorkingSet([]).resolve( - [requirement], self.local_index, self.easy_install - ) - except DistributionNotFound, e: - raise DistutilsError( - "Could not find required distribution %s" % e.args - ) - except VersionConflict, e: - 
raise DistutilsError( - "Installed distribution %s conflicts with requirement %s" - % e.args - ) - if self.always_copy: - # Force all the relevant distros to be copied or activated - for dist in distros: - if dist.key not in self.installed_projects: - self.easy_install(dist.as_requirement()) - - def should_unzip(self, dist): - if self.zip_ok is not None: - return not self.zip_ok - if dist.has_metadata('not-zip-safe'): - return True - if not dist.has_metadata('zip-safe'): - return True - return False - - def maybe_move(self, spec, dist_filename, setup_base): - dst = os.path.join(self.build_directory, spec.key) - if os.path.exists(dst): - log.warn( - "%r already exists in %s; build directory %s will not be kept", - spec.key, self.build_directory, setup_base - ) - return setup_base - if os.path.isdir(dist_filename): - setup_base = dist_filename - else: - if os.path.dirname(dist_filename)==setup_base: - os.unlink(dist_filename) # get it out of the tmp dir - contents = os.listdir(setup_base) - if len(contents)==1: - dist_filename = os.path.join(setup_base,contents[0]) - if os.path.isdir(dist_filename): - # if the only thing there is a directory, move it instead - setup_base = dist_filename - ensure_directory(dst); shutil.move(setup_base, dst) - return dst - - def install_wrapper_scripts(self, dist): - if not self.exclude_scripts: - for args in get_script_args(dist): - self.write_script(*args) - - - - - - - def install_script(self, dist, script_name, script_text, dev_path=None): - """Generate a legacy script wrapper and install it""" - spec = str(dist.as_requirement()) - - if dev_path: - script_text = get_script_header(script_text) + ( - "# EASY-INSTALL-DEV-SCRIPT: %(spec)r,%(script_name)r\n" - "__requires__ = %(spec)r\n" - "from pkg_resources import require; require(%(spec)r)\n" - "del require\n" - "__file__ = %(dev_path)r\n" - "execfile(__file__)\n" - ) % locals() - else: - script_text = get_script_header(script_text) + ( - "# EASY-INSTALL-SCRIPT: %(spec)r,%(script_name)r\n" - "__requires__ = %(spec)r\n" - "import pkg_resources\n" - "pkg_resources.run_script(%(spec)r, %(script_name)r)\n" - ) % locals() - - self.write_script(script_name, script_text) - - def write_script(self, script_name, contents, mode="t", blockers=()): - """Write an executable file to the scripts directory""" - self.delete_blockers( # clean up old .py/.pyw w/o a script - [os.path.join(self.script_dir,x) for x in blockers]) - log.info("Installing %s script to %s", script_name, self.script_dir) - target = os.path.join(self.script_dir, script_name) - self.add_output(target) - - if not self.dry_run: - ensure_directory(target) - f = open(target,"w"+mode) - f.write(contents) - f.close() - try: - os.chmod(target,0755) - except (AttributeError, os.error): - pass - - def install_eggs(self, spec, dist_filename, tmpdir): - # .egg dirs or files are already built, so just return them - if dist_filename.lower().endswith('.egg'): - return [self.install_egg(dist_filename, tmpdir)] - elif dist_filename.lower().endswith('.exe'): - return [self.install_exe(dist_filename, tmpdir)] - - # Anything else, try to extract and build - setup_base = tmpdir - if os.path.isfile(dist_filename) and not dist_filename.endswith('.py'): - unpack_archive(dist_filename, tmpdir, self.unpack_progress) - elif os.path.isdir(dist_filename): - setup_base = os.path.abspath(dist_filename) - - if (setup_base.startswith(tmpdir) # something we downloaded - and self.build_directory and spec is not None - ): - setup_base = self.maybe_move(spec, dist_filename, setup_base) - - 
# Find the setup.py file - setup_script = os.path.join(setup_base, 'setup.py') - - if not os.path.exists(setup_script): - setups = glob(os.path.join(setup_base, '*', 'setup.py')) - if not setups: - raise DistutilsError( - "Couldn't find a setup script in %s" % dist_filename - ) - if len(setups)>1: - raise DistutilsError( - "Multiple setup scripts in %s" % dist_filename - ) - setup_script = setups[0] - - # Now run it, and return the result - if self.editable: - log.warn(self.report_editable(spec, setup_script)) - return [] - else: - return self.build_and_install(setup_script, setup_base) - - def egg_distribution(self, egg_path): - if os.path.isdir(egg_path): - metadata = PathMetadata(egg_path,os.path.join(egg_path,'EGG-INFO')) - else: - metadata = EggMetadata(zipimport.zipimporter(egg_path)) - return Distribution.from_filename(egg_path,metadata=metadata) - - def install_egg(self, egg_path, tmpdir): - destination = os.path.join(self.install_dir,os.path.basename(egg_path)) - destination = os.path.abspath(destination) - if not self.dry_run: - ensure_directory(destination) - - dist = self.egg_distribution(egg_path) - self.check_conflicts(dist) - if not samefile(egg_path, destination): - if os.path.isdir(destination) and not os.path.islink(destination): - dir_util.remove_tree(destination, dry_run=self.dry_run) - elif os.path.exists(destination): - self.execute(os.unlink,(destination,),"Removing "+destination) - uncache_zipdir(destination) - if os.path.isdir(egg_path): - if egg_path.startswith(tmpdir): - f,m = shutil.move, "Moving" - else: - f,m = shutil.copytree, "Copying" - elif self.should_unzip(dist): - self.mkpath(destination) - f,m = self.unpack_and_compile, "Extracting" - elif egg_path.startswith(tmpdir): - f,m = shutil.move, "Moving" - else: - f,m = shutil.copy2, "Copying" - - self.execute(f, (egg_path, destination), - (m+" %s to %s") % - (os.path.basename(egg_path),os.path.dirname(destination))) - - self.add_output(destination) - return self.egg_distribution(destination) - - def install_exe(self, dist_filename, tmpdir): - # See if it's valid, get data - cfg = extract_wininst_cfg(dist_filename) - if cfg is None: - raise DistutilsError( - "%s is not a valid distutils Windows .exe" % dist_filename - ) - # Create a dummy distribution object until we build the real distro - dist = Distribution(None, - project_name=cfg.get('metadata','name'), - version=cfg.get('metadata','version'), platform="win32" - ) - - # Convert the .exe to an unpacked egg - egg_path = dist.location = os.path.join(tmpdir, dist.egg_name()+'.egg') - egg_tmp = egg_path+'.tmp' - egg_info = os.path.join(egg_tmp, 'EGG-INFO') - pkg_inf = os.path.join(egg_info, 'PKG-INFO') - ensure_directory(pkg_inf) # make sure EGG-INFO dir exists - dist._provider = PathMetadata(egg_tmp, egg_info) # XXX - self.exe_to_egg(dist_filename, egg_tmp) - - # Write EGG-INFO/PKG-INFO - if not os.path.exists(pkg_inf): - f = open(pkg_inf,'w') - f.write('Metadata-Version: 1.0\n') - for k,v in cfg.items('metadata'): - if k<>'target_version': - f.write('%s: %s\n' % (k.replace('_','-').title(), v)) - f.close() - script_dir = os.path.join(egg_info,'scripts') - self.delete_blockers( # delete entry-point scripts to avoid duping - [os.path.join(script_dir,args[0]) for args in get_script_args(dist)] - ) - # Build .egg file from tmpdir - bdist_egg.make_zipfile( - egg_path, egg_tmp, verbose=self.verbose, dry_run=self.dry_run - ) - # install the .egg - return self.install_egg(egg_path, tmpdir) - - def exe_to_egg(self, dist_filename, egg_tmp): - """Extract a 
bdist_wininst to the directories an egg would use""" - # Check for .pth file and set up prefix translations - prefixes = get_exe_prefixes(dist_filename) - to_compile = [] - native_libs = [] - top_level = {} - - def process(src,dst): - for old,new in prefixes: - if src.startswith(old): - src = new+src[len(old):] - parts = src.split('/') - dst = os.path.join(egg_tmp, *parts) - dl = dst.lower() - if dl.endswith('.pyd') or dl.endswith('.dll'): - top_level[os.path.splitext(parts[0])[0]] = 1 - native_libs.append(src) - elif dl.endswith('.py') and old!='SCRIPTS/': - top_level[os.path.splitext(parts[0])[0]] = 1 - to_compile.append(dst) - return dst - if not src.endswith('.pth'): - log.warn("WARNING: can't process %s", src) - return None - - # extract, tracking .pyd/.dll->native_libs and .py -> to_compile - unpack_archive(dist_filename, egg_tmp, process) - stubs = [] - for res in native_libs: - if res.lower().endswith('.pyd'): # create stubs for .pyd's - parts = res.split('/') - resource, parts[-1] = parts[-1], parts[-1][:-1] - pyfile = os.path.join(egg_tmp, *parts) - to_compile.append(pyfile); stubs.append(pyfile) - bdist_egg.write_stub(resource, pyfile) - - self.byte_compile(to_compile) # compile .py's - bdist_egg.write_safety_flag(os.path.join(egg_tmp,'EGG-INFO'), - bdist_egg.analyze_egg(egg_tmp, stubs)) # write zip-safety flag - - for name in 'top_level','native_libs': - if locals()[name]: - txt = os.path.join(egg_tmp, 'EGG-INFO', name+'.txt') - if not os.path.exists(txt): - open(txt,'w').write('\n'.join(locals()[name])+'\n') - - def check_conflicts(self, dist): - """Verify that there are no conflicting "old-style" packages""" - - return dist # XXX temporarily disable until new strategy is stable - from imp import find_module, get_suffixes - from glob import glob - - blockers = [] - names = dict.fromkeys(dist._get_metadata('top_level.txt')) # XXX private attr - - exts = {'.pyc':1, '.pyo':1} # get_suffixes() might leave one out - for ext,mode,typ in get_suffixes(): - exts[ext] = 1 - - for path,files in expand_paths([self.install_dir]+self.all_site_dirs): - for filename in files: - base,ext = os.path.splitext(filename) - if base in names: - if not ext: - # no extension, check for package - try: - f, filename, descr = find_module(base, [path]) - except ImportError: - continue - else: - if f: f.close() - if filename not in blockers: - blockers.append(filename) - elif ext in exts and base!='site': # XXX ugh - blockers.append(os.path.join(path,filename)) - if blockers: - self.found_conflicts(dist, blockers) - - return dist - - def found_conflicts(self, dist, blockers): - if self.delete_conflicting: - log.warn("Attempting to delete conflicting packages:") - return self.delete_blockers(blockers) - - msg = """\ -------------------------------------------------------------------------- -CONFLICT WARNING: - -The following modules or packages have the same names as modules or -packages being installed, and will be *before* the installed packages in -Python's search path. You MUST remove all of the relevant files and -directories before you will be able to use the package(s) you are -installing: - - %s - -""" % '\n '.join(blockers) - - if self.ignore_conflicts_at_my_risk: - msg += """\ -(Note: you can run EasyInstall on '%s' with the ---delete-conflicting option to attempt deletion of the above files -and/or directories.) 
-""" % dist.project_name - else: - msg += """\ -Note: you can attempt this installation again with EasyInstall, and use -either the --delete-conflicting (-D) option or the ---ignore-conflicts-at-my-risk option, to either delete the above files -and directories, or to ignore the conflicts, respectively. Note that if -you ignore the conflicts, the installed package(s) may not work. -""" - msg += """\ -------------------------------------------------------------------------- -""" - sys.stderr.write(msg) - sys.stderr.flush() - if not self.ignore_conflicts_at_my_risk: - raise DistutilsError("Installation aborted due to conflicts") - - def installation_report(self, req, dist, what="Installed"): - """Helpful installation message for display to package users""" - msg = "\n%(what)s %(eggloc)s%(extras)s" - if self.multi_version and not self.no_report: - msg += """ - -Because this distribution was installed --multi-version or --install-dir, -before you can import modules from this package in an application, you -will need to 'import pkg_resources' and then use a 'require()' call -similar to one of these examples, in order to select the desired version: - - pkg_resources.require("%(name)s") # latest installed version - pkg_resources.require("%(name)s==%(version)s") # this exact version - pkg_resources.require("%(name)s>=%(version)s") # this version or higher -""" - if self.install_dir not in map(normalize_path,sys.path): - msg += """ - -Note also that the installation directory must be on sys.path at runtime for -this to work. (e.g. by being the application's script directory, by being on -PYTHONPATH, or by being added to sys.path by your code.) -""" - eggloc = dist.location - name = dist.project_name - version = dist.version - extras = '' # TODO: self.report_extras(req, dist) - return msg % locals() - - def report_editable(self, spec, setup_script): - dirname = os.path.dirname(setup_script) - python = sys.executable - return """\nExtracted editable version of %(spec)s to %(dirname)s - -If it uses setuptools in its setup script, you can activate it in -"development" mode by going to that directory and running:: - - %(python)s setup.py develop - -See the setuptools documentation for the "develop" command for more info. 
-""" % locals() - - def run_setup(self, setup_script, setup_base, args): - sys.modules.setdefault('distutils.command.bdist_egg', bdist_egg) - sys.modules.setdefault('distutils.command.egg_info', egg_info) - - args = list(args) - if self.verbose>2: - v = 'v' * (self.verbose - 1) - args.insert(0,'-'+v) - elif self.verbose<2: - args.insert(0,'-q') - if self.dry_run: - args.insert(0,'-n') - log.info( - "Running %s %s", setup_script[len(setup_base)+1:], ' '.join(args) - ) - try: - run_setup(setup_script, args) - except SystemExit, v: - raise DistutilsError("Setup script exited with %s" % (v.args[0],)) - - def build_and_install(self, setup_script, setup_base): - args = ['bdist_egg', '--dist-dir'] - dist_dir = tempfile.mkdtemp( - prefix='egg-dist-tmp-', dir=os.path.dirname(setup_script) - ) - try: - args.append(dist_dir) - self.run_setup(setup_script, setup_base, args) - all_eggs = Environment([dist_dir]) - eggs = [] - for key in all_eggs: - for dist in all_eggs[key]: - eggs.append(self.install_egg(dist.location, setup_base)) - if not eggs and not self.dry_run: - log.warn("No eggs found in %s (setup script problem?)", - dist_dir) - return eggs - finally: - rmtree(dist_dir) - log.set_verbosity(self.verbose) # restore our log verbosity - - def update_pth(self,dist): - if self.pth_file is None: - return - - for d in self.pth_file[dist.key]: # drop old entries - if self.multi_version or d.location != dist.location: - log.info("Removing %s from easy-install.pth file", d) - self.pth_file.remove(d) - if d.location in self.shadow_path: - self.shadow_path.remove(d.location) - - if not self.multi_version: - if dist.location in self.pth_file.paths: - log.info( - "%s is already the active version in easy-install.pth", - dist - ) - else: - log.info("Adding %s to easy-install.pth file", dist) - self.pth_file.add(dist) # add new entry - if dist.location not in self.shadow_path: - self.shadow_path.append(dist.location) - - if not self.dry_run: - - self.pth_file.save() - - if dist.key=='setuptools': - # Ensure that setuptools itself never becomes unavailable! - # XXX should this check for latest version? - filename = os.path.join(self.install_dir,'setuptools.pth') - if os.path.islink(filename): os.unlink(filename) - f = open(filename, 'wt') - f.write(self.pth_file.make_relative(dist.location)+'\n') - f.close() - - def unpack_progress(self, src, dst): - # Progress filter for unpacking - log.debug("Unpacking %s to %s", src, dst) - return dst # only unpack-and-compile skips files for dry run - - def unpack_and_compile(self, egg_path, destination): - to_compile = [] - - def pf(src,dst): - if dst.endswith('.py') and not src.startswith('EGG-INFO/'): - to_compile.append(dst) - self.unpack_progress(src,dst) - return not self.dry_run and dst or None - - unpack_archive(egg_path, destination, pf) - self.byte_compile(to_compile) - - - def byte_compile(self, to_compile): - from distutils.util import byte_compile - try: - # try to make the byte compile messages quieter - log.set_verbosity(self.verbose - 1) - - byte_compile(to_compile, optimize=0, force=1, dry_run=self.dry_run) - if self.optimize: - byte_compile( - to_compile, optimize=self.optimize, force=1, - dry_run=self.dry_run - ) - finally: - log.set_verbosity(self.verbose) # restore original verbosity - - - - - - - - - - - - - - - def no_default_version_msg(self): - return """bad install directory or PYTHONPATH - -You are attempting to install a package to a directory that is not -on PYTHONPATH and which Python does not read ".pth" files from. 
The -installation directory you specified (via --install-dir, --prefix, or -the distutils default setting) was: - - %s - -and your PYTHONPATH environment variable currently contains: - - %r - -Here are some of your options for correcting the problem: - -* You can choose a different installation directory, i.e., one that is - on PYTHONPATH or supports .pth files - -* You can add the installation directory to the PYTHONPATH environment - variable. (It must then also be on PYTHONPATH whenever you run - Python and want to use the package(s) you are installing.) - -* You can set up the installation directory to support ".pth" files by - using one of the approaches described here: - - http://peak.telecommunity.com/EasyInstall.html#custom-installation-locations - -Please make the appropriate changes for your system and try again.""" % ( - self.install_dir, os.environ.get('PYTHONPATH','') - ) - - - - - - - - - - - def install_site_py(self): - """Make sure there's a site.py in the target dir, if needed""" - - if self.sitepy_installed: - return # already did it, or don't need to - - sitepy = os.path.join(self.install_dir, "site.py") - source = resource_string("setuptools", "site-patch.py") - current = "" - - if os.path.exists(sitepy): - log.debug("Checking existing site.py in %s", self.install_dir) - current = open(sitepy,'rb').read() - if not current.startswith('def __boot():'): - raise DistutilsError( - "%s is not a setuptools-generated site.py; please" - " remove it." % sitepy - ) - - if current != source: - log.info("Creating %s", sitepy) - if not self.dry_run: - ensure_directory(sitepy) - f = open(sitepy,'wb') - f.write(source) - f.close() - self.byte_compile([sitepy]) - - self.sitepy_installed = True - - - - - - - - - - - - - INSTALL_SCHEMES = dict( - posix = dict( - install_dir = '$base/lib/python$py_version_short/site-packages', - script_dir = '$base/bin', - ), - ) - - DEFAULT_SCHEME = dict( - install_dir = '$base/Lib/site-packages', - script_dir = '$base/Scripts', - ) - - def _expand(self, *attrs): - config_vars = self.get_finalized_command('install').config_vars - - if self.prefix: - # Set default install_dir/scripts from --prefix - config_vars = config_vars.copy() - config_vars['base'] = self.prefix - scheme = self.INSTALL_SCHEMES.get(os.name,self.DEFAULT_SCHEME) - for attr,val in scheme.items(): - if getattr(self,attr,None) is None: - setattr(self,attr,val) - - from distutils.util import subst_vars - for attr in attrs: - val = getattr(self, attr) - if val is not None: - val = subst_vars(val, config_vars) - if os.name == 'posix': - val = os.path.expanduser(val) - setattr(self, attr, val) - - - - - - - - - -def get_site_dirs(): - # return a list of 'site' dirs - sitedirs = filter(None,os.environ.get('PYTHONPATH','').split(os.pathsep)) - prefixes = [sys.prefix] - if sys.exec_prefix != sys.prefix: - prefixes.append(sys.exec_prefix) - for prefix in prefixes: - if prefix: - if sys.platform in ('os2emx', 'riscos'): - sitedirs.append(os.path.join(prefix, "Lib", "site-packages")) - elif os.sep == '/': - sitedirs.extend([os.path.join(prefix, - "lib", - "python" + sys.version[:3], - "site-packages"), - os.path.join(prefix, "lib", "site-python")]) - else: - sitedirs.extend( - [prefix, os.path.join(prefix, "lib", "site-packages")] - ) - if sys.platform == 'darwin': - # for framework builds *only* we add the standard Apple - # locations. 
Currently only per-user, but /Library and - # /Network/Library could be added too - if 'Python.framework' in prefix: - home = os.environ.get('HOME') - if home: - sitedirs.append( - os.path.join(home, - 'Library', - 'Python', - sys.version[:3], - 'site-packages')) - for plat_specific in (0,1): - site_lib = get_python_lib(plat_specific) - if site_lib not in sitedirs: sitedirs.append(site_lib) - - sitedirs = map(normalize_path, sitedirs) - return sitedirs - - -def expand_paths(inputs): - """Yield sys.path directories that might contain "old-style" packages""" - - seen = {} - - for dirname in inputs: - dirname = normalize_path(dirname) - if dirname in seen: - continue - - seen[dirname] = 1 - if not os.path.isdir(dirname): - continue - - files = os.listdir(dirname) - yield dirname, files - - for name in files: - if not name.endswith('.pth'): - # We only care about the .pth files - continue - if name in ('easy-install.pth','setuptools.pth'): - # Ignore .pth files that we control - continue - - # Read the .pth file - f = open(os.path.join(dirname,name)) - lines = list(yield_lines(f)) - f.close() - - # Yield existing non-dupe, non-import directory lines from it - for line in lines: - if not line.startswith("import"): - line = normalize_path(line.rstrip()) - if line not in seen: - seen[line] = 1 - if not os.path.isdir(line): - continue - yield line, os.listdir(line) - - -def extract_wininst_cfg(dist_filename): - """Extract configuration data from a bdist_wininst .exe - - Returns a ConfigParser.RawConfigParser, or None - """ - f = open(dist_filename,'rb') - try: - endrec = zipfile._EndRecData(f) - if endrec is None: - return None - - prepended = (endrec[9] - endrec[5]) - endrec[6] - if prepended < 12: # no wininst data here - return None - f.seek(prepended-12) - - import struct, StringIO, ConfigParser - tag, cfglen, bmlen = struct.unpack("egg path translations for a given .exe file""" - - prefixes = [ - ('PURELIB/', ''), - ('PLATLIB/', ''), - ('SCRIPTS/', 'EGG-INFO/scripts/') - ] - z = zipfile.ZipFile(exe_filename) - try: - for info in z.infolist(): - name = info.filename - parts = name.split('/') - if len(parts)==3 and parts[2]=='PKG-INFO': - if parts[1].endswith('.egg-info'): - prefixes.insert(0,('/'.join(parts[:2]), 'EGG-INFO/')) - break - if len(parts)<>2 or not name.endswith('.pth'): - continue - if name.endswith('-nspkg.pth'): - continue - if parts[0] in ('PURELIB','PLATLIB'): - for pth in yield_lines(z.read(name)): - pth = pth.strip().replace('\\','/') - if not pth.startswith('import'): - prefixes.append((('%s/%s/' % (parts[0],pth)), '')) - finally: - z.close() - - prefixes.sort(); prefixes.reverse() - return prefixes - - -def parse_requirement_arg(spec): - try: - return Requirement.parse(spec) - except ValueError: - raise DistutilsError( - "Not a URL, existing file, or requirement spec: %r" % (spec,) - ) - -class PthDistributions(Environment): - """A .pth file with Distribution paths in it""" - - dirty = False - - def __init__(self, filename): - self.filename = filename - self.basedir = normalize_path(os.path.dirname(self.filename)) - self._load(); Environment.__init__(self, [], None, None) - for path in yield_lines(self.paths): - map(self.add, find_distributions(path, True)) - - def _load(self): - self.paths = [] - saw_import = False - seen = {} - if os.path.isfile(self.filename): - for line in open(self.filename,'rt'): - if line.startswith('import'): - saw_import = True - continue - path = line.rstrip() - self.paths.append(path) - if not path.strip() or path.strip().startswith('#'): - 
continue - # skip non-existent paths, in case somebody deleted a package - # manually, and duplicate paths as well - path = self.paths[-1] = normalize_path( - os.path.join(self.basedir,path) - ) - if not os.path.exists(path) or path in seen: - self.paths.pop() # skip it - self.dirty = True # we cleaned up, so we're dirty now :) - continue - seen[path] = 1 - - if self.paths and not saw_import: - self.dirty = True # ensure anything we touch has import wrappers - while self.paths and not self.paths[-1].strip(): - self.paths.pop() - - def save(self): - """Write changed .pth file back to disk""" - if not self.dirty: - return - - data = '\n'.join(map(self.make_relative,self.paths)) - if data: - log.debug("Saving %s", self.filename) - data = ( - "import sys; sys.__plen = len(sys.path)\n" - "%s\n" - "import sys; new=sys.path[sys.__plen:];" - " del sys.path[sys.__plen:];" - " p=getattr(sys,'__egginsert',0); sys.path[p:p]=new;" - " sys.__egginsert = p+len(new)\n" - ) % data - - if os.path.islink(self.filename): - os.unlink(self.filename) - f = open(self.filename,'wb') - f.write(data); f.close() - - elif os.path.exists(self.filename): - log.debug("Deleting empty %s", self.filename) - os.unlink(self.filename) - - self.dirty = False - - def add(self,dist): - """Add `dist` to the distribution map""" - if dist.location not in self.paths: - self.paths.append(dist.location); self.dirty = True - Environment.add(self,dist) - - def remove(self,dist): - """Remove `dist` from the distribution map""" - while dist.location in self.paths: - self.paths.remove(dist.location); self.dirty = True - Environment.remove(self,dist) - - - def make_relative(self,path): - if normalize_path(os.path.dirname(path))==self.basedir: - return os.path.basename(path) - return path - - -def get_script_header(script_text, executable=sys_executable): - """Create a #! 
line, getting options (if any) from script_text""" - from distutils.command.build_scripts import first_line_re - first, rest = (script_text+'\n').split('\n',1) - match = first_line_re.match(first) - options = '' - if match: - script_text = rest - options = match.group(1) or '' - if options: - options = ' '+options - return "#!%(executable)s%(options)s\n" % locals() - - -def auto_chmod(func, arg, exc): - if func is os.remove and os.name=='nt': - os.chmod(arg, stat.S_IWRITE) - return func(arg) - exc = sys.exc_info() - raise exc[0], (exc[1][0], exc[1][1] + (" %s %s" % (func,arg))) - - -def uncache_zipdir(path): - """Ensure that the zip directory cache doesn't have stale info for path""" - from zipimport import _zip_directory_cache as zdc - if path in zdc: - del zdc[path] - else: - path = normalize_path(path) - for p in zdc: - if normalize_path(p)==path: - del zdc[p] - return - - -def get_script_args(dist, executable=sys_executable): - """Yield write_script() argument tuples for a distribution's entrypoints""" - spec = str(dist.as_requirement()) - header = get_script_header("", executable) - for group in 'console_scripts', 'gui_scripts': - for name,ep in dist.get_entry_map(group).items(): - script_text = ( - "# EASY-INSTALL-ENTRY-SCRIPT: %(spec)r,%(group)r,%(name)r\n" - "__requires__ = %(spec)r\n" - "import sys\n" - "from pkg_resources import load_entry_point\n" - "\n" - "sys.exit(\n" - " load_entry_point(%(spec)r, %(group)r, %(name)r)()\n" - ")\n" - ) % locals() - if sys.platform=='win32': - # On Windows, add a .py extension and an .exe launcher - if group=='gui_scripts': - ext, launcher = '-script.pyw', 'gui.exe' - old = ['.pyw'] - new_header = re.sub('(?i)python.exe','pythonw.exe',header) - else: - ext, launcher = '-script.py', 'cli.exe' - old = ['.py','.pyc','.pyo'] - new_header = re.sub('(?i)pythonw.exe','pythonw.exe',header) - - if os.path.exists(new_header[2:-1]): - hdr = new_header - else: - hdr = header - yield (name+ext, hdr+script_text, 't', [name+x for x in old]) - yield ( - name+'.exe', resource_string('setuptools', launcher), - 'b' # write in binary mode - ) - else: - # On other platforms, we assume the right thing to do is to - # just write the stub with no extension. - yield (name, header+script_text) - -def rmtree(path, ignore_errors=False, onerror=auto_chmod): - """Recursively delete a directory tree. - - This code is taken from the Python 2.4 version of 'shutil', because - the 2.3 version doesn't really work right. - """ - if ignore_errors: - def onerror(*args): - pass - elif onerror is None: - def onerror(*args): - raise - names = [] - try: - names = os.listdir(path) - except os.error, err: - onerror(os.listdir, path, sys.exc_info()) - for name in names: - fullname = os.path.join(path, name) - try: - mode = os.lstat(fullname).st_mode - except os.error: - mode = 0 - if stat.S_ISDIR(mode): - rmtree(fullname, ignore_errors, onerror) - else: - try: - os.remove(fullname) - except os.error, err: - onerror(os.remove, fullname, sys.exc_info()) - try: - os.rmdir(path) - except os.error: - onerror(os.rmdir, path, sys.exc_info()) - - - - - - - -def main(argv=None, **kw): - from setuptools import setup - from setuptools.dist import Distribution - import distutils.core - - USAGE = """\ -usage: %(script)s [options] requirement_or_url ... 
- or: %(script)s --help -""" - - def gen_usage (script_name): - script = os.path.basename(script_name) - return USAGE % vars() - - def with_ei_usage(f): - old_gen_usage = distutils.core.gen_usage - try: - distutils.core.gen_usage = gen_usage - return f() - finally: - distutils.core.gen_usage = old_gen_usage - - class DistributionWithoutHelpCommands(Distribution): - def _show_help(self,*args,**kw): - with_ei_usage(lambda: Distribution._show_help(self,*args,**kw)) - - if argv is None: - argv = sys.argv[1:] - - with_ei_usage(lambda: - setup( - script_args = ['-q','easy_install', '-v']+argv, - script_name = sys.argv[0] or 'easy_install', - distclass=DistributionWithoutHelpCommands, **kw - ) - ) diff --git a/Lib/setuptools/command/egg_info.py b/Lib/setuptools/command/egg_info.py deleted file mode 100755 index b68fb39..0000000 --- a/Lib/setuptools/command/egg_info.py +++ /dev/null @@ -1,365 +0,0 @@ -"""setuptools.command.egg_info - -Create a distribution's .egg-info directory and contents""" - -# This module should be kept compatible with Python 2.3 -import os, re -from setuptools import Command -from distutils.errors import * -from distutils import log -from setuptools.command.sdist import sdist -from distutils import file_util -from distutils.util import convert_path -from distutils.filelist import FileList -from pkg_resources import parse_requirements, safe_name, parse_version, \ - safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename -from sdist import walk_revctrl - -class egg_info(Command): - description = "create a distribution's .egg-info directory" - - user_options = [ - ('egg-base=', 'e', "directory containing .egg-info directories" - " (default: top of the source tree)"), - ('tag-svn-revision', 'r', - "Add subversion revision ID to version number"), - ('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"), - ('tag-build=', 'b', "Specify explicit tag to add to version number"), - ] - - boolean_options = ['tag-date','tag-svn-revision'] - - def initialize_options (self): - self.egg_name = None - self.egg_version = None - self.egg_base = None - self.egg_info = None - self.tag_build = None - self.tag_svn_revision = 0 - self.tag_date = 0 - self.broken_egg_info = False - - def finalize_options (self): - self.egg_name = safe_name(self.distribution.get_name()) - self.egg_version = self.tagged_version() - - try: - list( - parse_requirements('%s==%s' % (self.egg_name,self.egg_version)) - ) - except ValueError: - raise DistutilsOptionError( - "Invalid distribution name or version syntax: %s-%s" % - (self.egg_name,self.egg_version) - ) - - if self.egg_base is None: - dirs = self.distribution.package_dir - self.egg_base = (dirs or {}).get('',os.curdir) - - self.ensure_dirname('egg_base') - self.egg_info = to_filename(self.egg_name)+'.egg-info' - if self.egg_base != os.curdir: - self.egg_info = os.path.join(self.egg_base, self.egg_info) - if '-' in self.egg_name: self.check_broken_egg_info() - - # Set package version for the benefit of dumber commands - # (e.g. sdist, bdist_wininst, etc.) 
- # - self.distribution.metadata.version = self.egg_version - - # If we bootstrapped around the lack of a PKG-INFO, as might be the - # case in a fresh checkout, make sure that any special tags get added - # to the version info - # - pd = self.distribution._patched_dist - if pd is not None and pd.key==self.egg_name.lower(): - pd._version = self.egg_version - pd._parsed_version = parse_version(self.egg_version) - self.distribution._patched_dist = None - - - - def write_or_delete_file(self, what, filename, data, force=False): - """Write `data` to `filename` or delete if empty - - If `data` is non-empty, this routine is the same as ``write_file()``. - If `data` is empty but not ``None``, this is the same as calling - ``delete_file(filename)`. If `data` is ``None``, then this is a no-op - unless `filename` exists, in which case a warning is issued about the - orphaned file (if `force` is false), or deleted (if `force` is true). - """ - if data: - self.write_file(what, filename, data) - elif os.path.exists(filename): - if data is None and not force: - log.warn( - "%s not set in setup(), but %s exists", what, filename - ) - return - else: - self.delete_file(filename) - - def write_file(self, what, filename, data): - """Write `data` to `filename` (if not a dry run) after announcing it - - `what` is used in a log message to identify what is being written - to the file. - """ - log.info("writing %s to %s", what, filename) - if not self.dry_run: - f = open(filename, 'wb') - f.write(data) - f.close() - - def delete_file(self, filename): - """Delete `filename` (if not a dry run) after announcing it""" - log.info("deleting %s", filename) - if not self.dry_run: - os.unlink(filename) - - - - - def run(self): - self.mkpath(self.egg_info) - installer = self.distribution.fetch_build_egg - for ep in iter_entry_points('egg_info.writers'): - writer = ep.load(installer=installer) - writer(self, ep.name, os.path.join(self.egg_info,ep.name)) - self.find_sources() - - def tagged_version(self): - version = self.distribution.get_version() - if self.tag_build: - version+=self.tag_build - if self.tag_svn_revision and ( - os.path.exists('.svn') or os.path.exists('PKG-INFO') - ): version += '-r%s' % self.get_svn_revision() - if self.tag_date: - import time; version += time.strftime("-%Y%m%d") - return safe_version(version) - - def get_svn_revision(self): - revision = 0 - urlre = re.compile('url="([^"]+)"') - revre = re.compile('committed-rev="(\d+)"') - for base,dirs,files in os.walk(os.curdir): - if '.svn' not in dirs: - dirs[:] = [] - continue # no sense walking uncontrolled subdirs - dirs.remove('.svn') - f = open(os.path.join(base,'.svn','entries')) - data = f.read() - f.close() - dirurl = urlre.search(data).group(1) # get repository URL - if base==os.curdir: - base_url = dirurl+'/' # save the root url - elif not dirurl.startswith(base_url): - dirs[:] = [] - continue # not part of the same svn tree, skip it - for match in revre.finditer(data): - revision = max(revision, int(match.group(1))) - return str(revision or get_pkg_info_revision()) - - def find_sources(self): - """Generate SOURCES.txt manifest file""" - manifest_filename = os.path.join(self.egg_info,"SOURCES.txt") - mm = manifest_maker(self.distribution) - mm.manifest = manifest_filename - mm.run() - self.filelist = mm.filelist - - def check_broken_egg_info(self): - bei = self.egg_name+'.egg-info' - if self.egg_base != os.curdir: - bei = os.path.join(self.egg_base, bei) - if os.path.exists(bei): - log.warn( - "-"*78+'\n' - "Note: Your current .egg-info 
directory has a '-' in its name;" - '\nthis will not work correctly with "setup.py develop".\n\n' - 'Please rename %s to %s to correct this problem.\n'+'-'*78, - bei, self.egg_info - ) - self.broken_egg_info = self.egg_info - self.egg_info = bei # make it work for now - -class FileList(FileList): - """File list that accepts only existing, platform-independent paths""" - - def append(self, item): - path = convert_path(item) - if os.path.exists(path): - self.files.append(path) - - - - - - - - - - - -class manifest_maker(sdist): - - template = "MANIFEST.in" - - def initialize_options (self): - self.use_defaults = 1 - self.prune = 1 - self.manifest_only = 1 - self.force_manifest = 1 - - def finalize_options(self): - pass - - def run(self): - self.filelist = FileList() - if not os.path.exists(self.manifest): - self.write_manifest() # it must exist so it'll get in the list - self.filelist.findall() - self.add_defaults() - if os.path.exists(self.template): - self.read_template() - self.prune_file_list() - self.filelist.sort() - self.filelist.remove_duplicates() - self.write_manifest() - - def write_manifest (self): - """Write the file list in 'self.filelist' (presumably as filled in - by 'add_defaults()' and 'read_template()') to the manifest file - named by 'self.manifest'. - """ - files = self.filelist.files - if os.sep!='/': - files = [f.replace(os.sep,'/') for f in files] - self.execute(file_util.write_file, (self.manifest, files), - "writing manifest file '%s'" % self.manifest) - - - - - - def add_defaults(self): - sdist.add_defaults(self) - self.filelist.append(self.template) - self.filelist.append(self.manifest) - rcfiles = list(walk_revctrl()) - if rcfiles: - self.filelist.extend(rcfiles) - elif os.path.exists(self.manifest): - self.read_manifest() - ei_cmd = self.get_finalized_command('egg_info') - self.filelist.include_pattern("*", prefix=ei_cmd.egg_info) - - def prune_file_list (self): - build = self.get_finalized_command('build') - base_dir = self.distribution.get_fullname() - self.filelist.exclude_pattern(None, prefix=build.build_base) - self.filelist.exclude_pattern(None, prefix=base_dir) - sep = re.escape(os.sep) - self.filelist.exclude_pattern(sep+r'(RCS|CVS|\.svn)'+sep, is_regex=1) - - - - - - - - - - - - - - - - - - - - - - -def write_pkg_info(cmd, basename, filename): - log.info("writing %s", filename) - if not cmd.dry_run: - metadata = cmd.distribution.metadata - metadata.version, oldver = cmd.egg_version, metadata.version - metadata.name, oldname = cmd.egg_name, metadata.name - try: - # write unescaped data to PKG-INFO, so older pkg_resources - # can still parse it - metadata.write_pkg_info(cmd.egg_info) - finally: - metadata.name, metadata.version = oldname, oldver - - safe = getattr(cmd.distribution,'zip_safe',None) - import bdist_egg; bdist_egg.write_safety_flag(cmd.egg_info, safe) - -def warn_depends_obsolete(cmd, basename, filename): - if os.path.exists(filename): - log.warn( - "WARNING: 'depends.txt' is not used by setuptools 0.6!\n" - "Use the install_requires/extras_require setup() args instead." 
- ) - - -def write_requirements(cmd, basename, filename): - dist = cmd.distribution - data = ['\n'.join(yield_lines(dist.install_requires or ()))] - for extra,reqs in (dist.extras_require or {}).items(): - data.append('\n\n[%s]\n%s' % (extra, '\n'.join(yield_lines(reqs)))) - cmd.write_or_delete_file("requirements", filename, ''.join(data)) - -def write_toplevel_names(cmd, basename, filename): - pkgs = dict.fromkeys( - [k.split('.',1)[0] - for k in cmd.distribution.iter_distribution_names() - ] - ) - cmd.write_file("top-level names", filename, '\n'.join(pkgs)+'\n') - - - -def overwrite_arg(cmd, basename, filename): - write_arg(cmd, basename, filename, True) - -def write_arg(cmd, basename, filename, force=False): - argname = os.path.splitext(basename)[0] - value = getattr(cmd.distribution, argname, None) - if value is not None: - value = '\n'.join(value)+'\n' - cmd.write_or_delete_file(argname, filename, value, force) - -def write_entries(cmd, basename, filename): - ep = cmd.distribution.entry_points - - if isinstance(ep,basestring) or ep is None: - data = ep - elif ep is not None: - data = [] - for section, contents in ep.items(): - if not isinstance(contents,basestring): - contents = EntryPoint.parse_group(section, contents) - contents = '\n'.join(map(str,contents.values())) - data.append('[%s]\n%s\n\n' % (section,contents)) - data = ''.join(data) - - cmd.write_or_delete_file('entry points', filename, data, True) - -def get_pkg_info_revision(): - # See if we can get a -r### off of PKG-INFO, in case this is an sdist of - # a subversion revision - # - if os.path.exists('PKG-INFO'): - f = open('PKG-INFO','rU') - for line in f: - match = re.match(r"Version:.*-r(\d+)\s*$", line) - if match: - return int(match.group(1)) - return 0 diff --git a/Lib/setuptools/command/install.py b/Lib/setuptools/command/install.py deleted file mode 100644 index bfb9af5..0000000 --- a/Lib/setuptools/command/install.py +++ /dev/null @@ -1,101 +0,0 @@ -import setuptools, sys -from distutils.command.install import install as _install -from distutils.errors import DistutilsArgError - -class install(_install): - """Use easy_install to install the package, w/dependencies""" - - user_options = _install.user_options + [ - ('old-and-unmanageable', None, "Try not to use this!"), - ('single-version-externally-managed', None, - "used by system package builders to create 'flat' eggs"), - ] - boolean_options = _install.boolean_options + [ - 'old-and-unmanageable', 'single-version-externally-managed', - ] - new_commands = [ - ('install_egg_info', lambda self: True), - ('install_scripts', lambda self: True), - ] - _nc = dict(new_commands) - sub_commands = [ - cmd for cmd in _install.sub_commands if cmd[0] not in _nc - ] + new_commands - - def initialize_options(self): - _install.initialize_options(self) - self.old_and_unmanageable = None - self.single_version_externally_managed = None - self.no_compile = None # make DISTUTILS_DEBUG work right! - - def finalize_options(self): - _install.finalize_options(self) - if self.root: - self.single_version_externally_managed = True - elif self.single_version_externally_managed: - if not self.root and not self.record: - raise DistutilsArgError( - "You must specify --record or --root when building system" - " packages" - ) - - def handle_extra_path(self): - # We always ignore extra_path, because we install as .egg or .egg-info - self.path_file = None - self.extra_dirs = '' - - def run(self): - # Explicit request for old-style install? 
Just do it - if self.old_and_unmanageable or self.single_version_externally_managed: - return _install.run(self) - - # Attempt to detect whether we were called from setup() or by another - # command. If we were called by setup(), our caller will be the - # 'run_command' method in 'distutils.dist', and *its* caller will be - # the 'run_commands' method. If we were called any other way, our - # immediate caller *might* be 'run_command', but it won't have been - # called by 'run_commands'. This is slightly kludgy, but seems to - # work. - # - caller = sys._getframe(2) - caller_module = caller.f_globals.get('__name__','') - caller_name = caller.f_code.co_name - - if caller_module != 'distutils.dist' or caller_name!='run_commands': - # We weren't called from the command line or setup(), so we - # should run in backward-compatibility mode to support bdist_* - # commands. - _install.run(self) - else: - self.do_egg_install() - - - - - - - - - - - - - def do_egg_install(self): - - from setuptools.command.easy_install import easy_install - - cmd = easy_install( - self.distribution, args="x", root=self.root, record=self.record, - ) - cmd.ensure_finalized() # finalize before bdist_egg munges install cmd - - self.run_command('bdist_egg') - args = [self.distribution.get_command_obj('bdist_egg').egg_output] - - if setuptools.bootstrap_install_from: - # Bootstrap self-installation of setuptools - args.insert(0, setuptools.bootstrap_install_from) - - cmd.args = args - cmd.run() - setuptools.bootstrap_install_from = None diff --git a/Lib/setuptools/command/install_egg_info.py b/Lib/setuptools/command/install_egg_info.py deleted file mode 100755 index 193e91a..0000000 --- a/Lib/setuptools/command/install_egg_info.py +++ /dev/null @@ -1,81 +0,0 @@ -from setuptools import Command -from setuptools.archive_util import unpack_archive -from distutils import log, dir_util -import os, shutil, pkg_resources - -class install_egg_info(Command): - """Install an .egg-info directory for the package""" - - description = "Install an .egg-info directory for the package" - - user_options = [ - ('install-dir=', 'd', "directory to install to"), - ] - - def initialize_options(self): - self.install_dir = None - - def finalize_options(self): - self.set_undefined_options('install_lib',('install_dir','install_dir')) - ei_cmd = self.get_finalized_command("egg_info") - basename = pkg_resources.Distribution( - None, None, ei_cmd.egg_name, ei_cmd.egg_version - ).egg_name()+'.egg-info' - self.source = ei_cmd.egg_info - self.target = os.path.join(self.install_dir, basename) - self.outputs = [self.target] - - def run(self): - self.run_command('egg_info') - target = self.target - if os.path.isdir(self.target) and not os.path.islink(self.target): - dir_util.remove_tree(self.target, dry_run=self.dry_run) - elif os.path.exists(self.target): - self.execute(os.unlink,(self.target,),"Removing "+self.target) - if not self.dry_run: - pkg_resources.ensure_directory(self.target) - self.execute(self.copytree, (), - "Copying %s to %s" % (self.source, self.target) - ) - self.install_namespaces() - - def get_outputs(self): - return self.outputs - - def copytree(self): - # Copy the .egg-info tree to site-packages - def skimmer(src,dst): - # filter out source-control directories; note that 'src' is always - # a '/'-separated path, regardless of platform. 'dst' is a - # platform-specific path. 
- for skip in '.svn/','CVS/': - if src.startswith(skip) or '/'+skip in src: - return None - self.outputs.append(dst) - log.debug("Copying %s to %s", src, dst) - return dst - unpack_archive(self.source, self.target, skimmer) - - def install_namespaces(self): - nsp = (self.distribution.namespace_packages or [])[:] - if not nsp: return - nsp.sort() # set up shorter names first - filename,ext = os.path.splitext(self.target) - filename += '-nspkg.pth'; self.outputs.append(filename) - log.info("Installing %s",filename) - if not self.dry_run: - f = open(filename,'wb') - for pkg in nsp: - pth = tuple(pkg.split('.')) - f.write( - "import sys,new,os; " - "p = os.path.join(sys._getframe(1).f_locals['sitedir'], " - "*%(pth)r); " - "ie = os.path.exists(os.path.join(p,'__init__.py')); " - "m = not ie and " - "sys.modules.setdefault(%(pkg)r,new.module(%(pkg)r)); " - "mp = (m or []) and m.__dict__.setdefault('__path__',[]); " - "(p not in mp) and mp.append(p)\n" - % locals() - ) - f.close() diff --git a/Lib/setuptools/command/install_lib.py b/Lib/setuptools/command/install_lib.py deleted file mode 100644 index 96c8dfe..0000000 --- a/Lib/setuptools/command/install_lib.py +++ /dev/null @@ -1,76 +0,0 @@ -from distutils.command.install_lib import install_lib as _install_lib -import os - -class install_lib(_install_lib): - """Don't add compiled flags to filenames of non-Python files""" - - def _bytecode_filenames (self, py_filenames): - bytecode_files = [] - for py_file in py_filenames: - if not py_file.endswith('.py'): - continue - if self.compile: - bytecode_files.append(py_file + "c") - if self.optimize > 0: - bytecode_files.append(py_file + "o") - - return bytecode_files - - def run(self): - self.build() - outfiles = self.install() - if outfiles is not None: - # always compile, in case we have any extension stubs to deal with - self.byte_compile(outfiles) - - def get_exclusions(self): - exclude = {} - nsp = self.distribution.namespace_packages - - if (nsp and self.get_finalized_command('install') - .single_version_externally_managed - ): - for pkg in nsp: - parts = pkg.split('.') - while parts: - pkgdir = os.path.join(self.install_dir, *parts) - for f in '__init__.py', '__init__.pyc', '__init__.pyo': - exclude[os.path.join(pkgdir,f)] = 1 - parts.pop() - return exclude - - def copy_tree( - self, infile, outfile, - preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1 - ): - assert preserve_mode and preserve_times and not preserve_symlinks - exclude = self.get_exclusions() - - if not exclude: - return _install_lib.copy_tree(self, infile, outfile) - - # Exclude namespace package __init__.py* files from the output - - from setuptools.archive_util import unpack_directory - from distutils import log - - outfiles = [] - - def pf(src, dst): - if dst in exclude: - log.warn("Skipping installation of %s (namespace package)",dst) - return False - - log.info("copying %s -> %s", src, os.path.dirname(dst)) - outfiles.append(dst) - return dst - - unpack_directory(infile, outfile, pf) - return outfiles - - def get_outputs(self): - outputs = _install_lib.get_outputs(self) - exclude = self.get_exclusions() - if exclude: - return [f for f in outputs if f not in exclude] - return outputs diff --git a/Lib/setuptools/command/install_scripts.py b/Lib/setuptools/command/install_scripts.py deleted file mode 100755 index 69558bf..0000000 --- a/Lib/setuptools/command/install_scripts.py +++ /dev/null @@ -1,56 +0,0 @@ -from distutils.command.install_scripts import install_scripts \ - as _install_scripts -from easy_install 
import get_script_args, sys_executable -from pkg_resources import Distribution, PathMetadata, ensure_directory -import os -from distutils import log - -class install_scripts(_install_scripts): - """Do normal script install, plus any egg_info wrapper scripts""" - - def initialize_options(self): - _install_scripts.initialize_options(self) - self.no_ep = False - - def run(self): - self.run_command("egg_info") - if self.distribution.scripts: - _install_scripts.run(self) # run first to set up self.outfiles - else: - self.outfiles = [] - if self.no_ep: - # don't install entry point scripts into .egg file! - return - - ei_cmd = self.get_finalized_command("egg_info") - dist = Distribution( - ei_cmd.egg_base, PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info), - ei_cmd.egg_name, ei_cmd.egg_version, - ) - bs_cmd = self.get_finalized_command('build_scripts') - executable = getattr(bs_cmd,'executable',sys_executable) - - for args in get_script_args(dist, executable): - self.write_script(*args) - - - - - - - - def write_script(self, script_name, contents, mode="t", *ignored): - """Write an executable file to the scripts directory""" - log.info("Installing %s script to %s", script_name, self.install_dir) - target = os.path.join(self.install_dir, script_name) - self.outfiles.append(target) - - if not self.dry_run: - ensure_directory(target) - f = open(target,"w"+mode) - f.write(contents) - f.close() - try: - os.chmod(target,0755) - except (AttributeError, os.error): - pass diff --git a/Lib/setuptools/command/rotate.py b/Lib/setuptools/command/rotate.py deleted file mode 100755 index 8aab312..0000000 --- a/Lib/setuptools/command/rotate.py +++ /dev/null @@ -1,57 +0,0 @@ -import distutils, os -from setuptools import Command -from distutils.util import convert_path -from distutils import log -from distutils.errors import * - -class rotate(Command): - """Delete older distributions""" - - description = "delete older distributions, keeping N newest files" - user_options = [ - ('match=', 'm', "patterns to match (required)"), - ('dist-dir=', 'd', "directory where the distributions are"), - ('keep=', 'k', "number of matching distributions to keep"), - ] - - boolean_options = [] - - def initialize_options(self): - self.match = None - self.dist_dir = None - self.keep = None - - def finalize_options(self): - if self.match is None: - raise DistutilsOptionError( - "Must specify one or more (comma-separated) match patterns " - "(e.g. 
'.zip' or '.egg')" - ) - if self.keep is None: - raise DistutilsOptionError("Must specify number of files to keep") - try: - self.keep = int(self.keep) - except ValueError: - raise DistutilsOptionError("--keep must be an integer") - if isinstance(self.match, basestring): - self.match = [ - convert_path(p.strip()) for p in self.match.split(',') - ] - self.set_undefined_options('bdist',('dist_dir', 'dist_dir')) - - def run(self): - self.run_command("egg_info") - from glob import glob - for pattern in self.match: - pattern = self.distribution.get_name()+'*'+pattern - files = glob(os.path.join(self.dist_dir,pattern)) - files = [(os.path.getmtime(f),f) for f in files] - files.sort() - files.reverse() - - log.info("%d file(s) matching %s", len(files), pattern) - files = files[self.keep:] - for (t,f) in files: - log.info("Deleting %s", f) - if not self.dry_run: - os.unlink(f) diff --git a/Lib/setuptools/command/saveopts.py b/Lib/setuptools/command/saveopts.py deleted file mode 100755 index 9c58d72..0000000 --- a/Lib/setuptools/command/saveopts.py +++ /dev/null @@ -1,24 +0,0 @@ -import distutils, os -from setuptools import Command -from setuptools.command.setopt import edit_config, option_base - -class saveopts(option_base): - """Save command-line options to a file""" - - description = "save supplied options to setup.cfg or other config file" - - def run(self): - dist = self.distribution - commands = dist.command_options.keys() - settings = {} - - for cmd in commands: - - if cmd=='saveopts': - continue # don't save our own options! - - for opt,(src,val) in dist.get_option_dict(cmd).items(): - if src=="command line": - settings.setdefault(cmd,{})[opt] = val - - edit_config(self.filename, settings, self.dry_run) diff --git a/Lib/setuptools/command/sdist.py b/Lib/setuptools/command/sdist.py deleted file mode 100755 index 829cd3c..0000000 --- a/Lib/setuptools/command/sdist.py +++ /dev/null @@ -1,163 +0,0 @@ -from distutils.command.sdist import sdist as _sdist -from distutils.util import convert_path -import os, re, sys, pkg_resources - -entities = [ - ("<","<"), (">", ">"), (""", '"'), ("'", "'"), - ("&", "&") -] - -def unescape(data): - for old,new in entities: - data = data.replace(old,new) - return data - -def re_finder(pattern, postproc=None): - def find(dirname, filename): - f = open(filename,'rU') - data = f.read() - f.close() - for match in pattern.finditer(data): - path = match.group(1) - if postproc: - path = postproc(path) - yield joinpath(dirname,path) - return find - -def joinpath(prefix,suffix): - if not prefix: - return suffix - return os.path.join(prefix,suffix) - - - - - - - - - - - -def walk_revctrl(dirname=''): - """Find all files under revision control""" - for ep in pkg_resources.iter_entry_points('setuptools.file_finders'): - for item in ep.load()(dirname): - yield item - -def _default_revctrl(dirname=''): - for path, finder in finders: - path = joinpath(dirname,path) - if os.path.isfile(path): - for path in finder(dirname,path): - if os.path.isfile(path): - yield path - elif os.path.isdir(path): - for item in _default_revctrl(path): - yield item - -def externals_finder(dirname, filename): - """Find any 'svn:externals' directories""" - found = False - f = open(filename,'rb') - for line in iter(f.readline, ''): # can't use direct iter! 
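        # Note: plain 'for line in f' would use the file object's internal
        # read-ahead buffer, which gets out of step with the
        # f.read(int(length)) call below; iter(f.readline, '') reads exactly
        # one line at a time, so the two can be mixed safely.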
- parts = line.split() - if len(parts)==2: - kind,length = parts - data = f.read(int(length)) - if kind=='K' and data=='svn:externals': - found = True - elif kind=='V' and found: - f.close() - break - else: - f.close() - return - - for line in data.splitlines(): - parts = line.split() - if parts: - yield joinpath(dirname, parts[0]) - - -finders = [ - (convert_path('CVS/Entries'), - re_finder(re.compile(r"^\w?/([^/]+)/", re.M))), - (convert_path('.svn/entries'), - re_finder( - re.compile(r'name="([^"]+)"(?![^>]+deleted="true")', re.I), - unescape - ) - ), - (convert_path('.svn/dir-props'), externals_finder), -] - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -class sdist(_sdist): - """Smart sdist that finds anything supported by revision control""" - - user_options = [ - ('formats=', None, - "formats for source distribution (comma-separated list)"), - ('keep-temp', 'k', - "keep the distribution tree around after creating " + - "archive file(s)"), - ('dist-dir=', 'd', - "directory to put the source distribution archive(s) in " - "[default: dist]"), - ] - - negative_opt = {} - - def run(self): - self.run_command('egg_info') - ei_cmd = self.get_finalized_command('egg_info') - self.filelist = ei_cmd.filelist - self.filelist.append(os.path.join(ei_cmd.egg_info,'SOURCES.txt')) - - self.check_metadata() - self.make_distribution() - - dist_files = getattr(self.distribution,'dist_files',[]) - for file in self.archive_files: - data = ('sdist', '', file) - if data not in dist_files: - dist_files.append(data) - - def read_template(self): - try: - _sdist.read_template(self) - except: - # grody hack to close the template file (MANIFEST.in) - # this prevents easy_install's attempt at deleting the file from - # dying and thus masking the real error - sys.exc_info()[2].tb_next.tb_frame.f_locals['template'].close() - raise diff --git a/Lib/setuptools/command/setopt.py b/Lib/setuptools/command/setopt.py deleted file mode 100755 index e0c1058..0000000 --- a/Lib/setuptools/command/setopt.py +++ /dev/null @@ -1,158 +0,0 @@ -import distutils, os -from setuptools import Command -from distutils.util import convert_path -from distutils import log -from distutils.errors import * - -__all__ = ['config_file', 'edit_config', 'option_base', 'setopt'] - - -def config_file(kind="local"): - """Get the filename of the distutils, local, global, or per-user config - - `kind` must be one of "local", "global", or "user" - """ - if kind=='local': - return 'setup.cfg' - if kind=='global': - return os.path.join( - os.path.dirname(distutils.__file__),'distutils.cfg' - ) - if kind=='user': - dot = os.name=='posix' and '.' or '' - return os.path.expanduser(convert_path("~/%spydistutils.cfg" % dot)) - raise ValueError( - "config_file() type must be 'local', 'global', or 'user'", kind - ) - - - - - - - - - - - - - - - -def edit_config(filename, settings, dry_run=False): - """Edit a configuration file to include `settings` - - `settings` is a dictionary of dictionaries or ``None`` values, keyed by - command/section name. A ``None`` value means to delete the entire section, - while a dictionary lists settings to be changed or deleted in that section. - A setting of ``None`` means to delete that setting. 
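    A hypothetical `settings` value (an illustrative sketch, not from the
    original docstring) showing these rules::

        {'egg_info': {'tag_build': 'dev',   # set [egg_info] tag_build = dev
                      'tag_date': None},    # delete just that option
         'aliases':  None}                  # delete the whole [aliases] section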
- """ - from ConfigParser import RawConfigParser - log.debug("Reading configuration from %s", filename) - opts = RawConfigParser() - opts.read([filename]) - for section, options in settings.items(): - if options is None: - log.info("Deleting section [%s] from %s", section, filename) - opts.remove_section(section) - else: - if not opts.has_section(section): - log.debug("Adding new section [%s] to %s", section, filename) - opts.add_section(section) - for option,value in options.items(): - if value is None: - log.debug("Deleting %s.%s from %s", - section, option, filename - ) - opts.remove_option(section,option) - if not opts.options(section): - log.info("Deleting empty [%s] section from %s", - section, filename) - opts.remove_section(section) - else: - log.debug( - "Setting %s.%s to %r in %s", - section, option, value, filename - ) - opts.set(section,option,value) - - log.info("Writing %s", filename) - if not dry_run: - f = open(filename,'w'); opts.write(f); f.close() - -class option_base(Command): - """Abstract base class for commands that mess with config files""" - - user_options = [ - ('global-config', 'g', - "save options to the site-wide distutils.cfg file"), - ('user-config', 'u', - "save options to the current user's pydistutils.cfg file"), - ('filename=', 'f', - "configuration file to use (default=setup.cfg)"), - ] - - boolean_options = [ - 'global-config', 'user-config', - ] - - def initialize_options(self): - self.global_config = None - self.user_config = None - self.filename = None - - def finalize_options(self): - filenames = [] - if self.global_config: - filenames.append(config_file('global')) - if self.user_config: - filenames.append(config_file('user')) - if self.filename is not None: - filenames.append(self.filename) - if not filenames: - filenames.append(config_file('local')) - if len(filenames)>1: - raise DistutilsOptionError( - "Must specify only one configuration file option", - filenames - ) - self.filename, = filenames - - - - -class setopt(option_base): - """Save command-line options to a file""" - - description = "set an option in setup.cfg or another config file" - - user_options = [ - ('command=', 'c', 'command to set an option for'), - ('option=', 'o', 'option to set'), - ('set-value=', 's', 'value of the option'), - ('remove', 'r', 'remove (unset) the value'), - ] + option_base.user_options - - boolean_options = option_base.boolean_options + ['remove'] - - def initialize_options(self): - option_base.initialize_options(self) - self.command = None - self.option = None - self.set_value = None - self.remove = None - - def finalize_options(self): - option_base.finalize_options(self) - if self.command is None or self.option is None: - raise DistutilsOptionError("Must specify --command *and* --option") - if self.set_value is None and not self.remove: - raise DistutilsOptionError("Must specify --set-value or --remove") - - def run(self): - edit_config( - self.filename, { - self.command: {self.option.replace('-','_'):self.set_value} - }, - self.dry_run - ) diff --git a/Lib/setuptools/command/test.py b/Lib/setuptools/command/test.py deleted file mode 100644 index 01fca35..0000000 --- a/Lib/setuptools/command/test.py +++ /dev/null @@ -1,119 +0,0 @@ -from setuptools import Command -from distutils.errors import DistutilsOptionError -import sys -from pkg_resources import * -from unittest import TestLoader, main - -class ScanningLoader(TestLoader): - - def loadTestsFromModule(self, module): - """Return a suite of all tests cases contained in the given module - - If the module 
is a package, load tests from all the modules in it. - If the module has an ``additional_tests`` function, call it and add - the return value to the tests. - """ - tests = [] - if module.__name__!='setuptools.tests.doctest': # ugh - tests.append(TestLoader.loadTestsFromModule(self,module)) - - if hasattr(module, "additional_tests"): - tests.append(module.additional_tests()) - - if hasattr(module, '__path__'): - for file in resource_listdir(module.__name__, ''): - if file.endswith('.py') and file!='__init__.py': - submodule = module.__name__+'.'+file[:-3] - else: - if resource_exists( - module.__name__, file+'/__init__.py' - ): - submodule = module.__name__+'.'+file - else: - continue - tests.append(self.loadTestsFromName(submodule)) - - if len(tests)!=1: - return self.suiteClass(tests) - else: - return tests[0] # don't create a nested suite for only one return - - -class test(Command): - - """Command to run unit tests after in-place build""" - - description = "run unit tests after in-place build" - - user_options = [ - ('test-module=','m', "Run 'test_suite' in specified module"), - ('test-suite=','s', - "Test suite to run (e.g. 'some_module.test_suite')"), - ] - - def initialize_options(self): - self.test_suite = None - self.test_module = None - self.test_loader = None - - - def finalize_options(self): - - if self.test_suite is None: - if self.test_module is None: - self.test_suite = self.distribution.test_suite - else: - self.test_suite = self.test_module+".test_suite" - elif self.test_module: - raise DistutilsOptionError( - "You may specify a module or a suite, but not both" - ) - - self.test_args = [self.test_suite] - - if self.verbose: - self.test_args.insert(0,'--verbose') - if self.test_loader is None: - self.test_loader = getattr(self.distribution,'test_loader',None) - if self.test_loader is None: - self.test_loader = "setuptools.command.test:ScanningLoader" - - - - def run(self): - # Ensure metadata is up-to-date - self.run_command('egg_info') - - # Build extensions in-place - self.reinitialize_command('build_ext', inplace=1) - self.run_command('build_ext') - - if self.distribution.tests_require: - self.distribution.fetch_build_eggs(self.distribution.tests_require) - - if self.test_suite: - cmd = ' '.join(self.test_args) - if self.dry_run: - self.announce('skipping "unittest %s" (dry run)' % cmd) - else: - self.announce('running "unittest %s"' % cmd) - self.run_tests() - - - def run_tests(self): - import unittest - old_path = sys.path[:] - ei_cmd = self.get_finalized_command("egg_info") - path_item = normalize_path(ei_cmd.egg_base) - metadata = PathMetadata( - path_item, normalize_path(ei_cmd.egg_info) - ) - dist = Distribution(path_item, metadata, project_name=ei_cmd.egg_name) - working_set.add(dist) - require(str(dist.as_requirement())) - loader_ep = EntryPoint.parse("x="+self.test_loader) - loader_class = loader_ep.load(require=False) - unittest.main( - None, None, [unittest.__file__]+self.test_args, - testLoader = loader_class() - ) diff --git a/Lib/setuptools/command/upload.py b/Lib/setuptools/command/upload.py deleted file mode 100755 index 644c400..0000000 --- a/Lib/setuptools/command/upload.py +++ /dev/null @@ -1,178 +0,0 @@ -"""distutils.command.upload - -Implements the Distutils 'upload' subcommand (upload package to PyPI).""" - -from distutils.errors import * -from distutils.core import Command -from distutils.spawn import spawn -from distutils import log -from md5 import md5 -import os -import socket -import platform -import ConfigParser -import httplib -import base64 
-import urlparse -import cStringIO as StringIO - -class upload(Command): - - description = "upload binary package to PyPI" - - DEFAULT_REPOSITORY = 'http://www.python.org/pypi' - - user_options = [ - ('repository=', 'r', - "url of repository [default: %s]" % DEFAULT_REPOSITORY), - ('show-response', None, - 'display full response text from server'), - ('sign', 's', - 'sign files to upload using gpg'), - ('identity=', 'i', 'GPG identity used to sign files'), - ] - boolean_options = ['show-response', 'sign'] - - def initialize_options(self): - self.username = '' - self.password = '' - self.repository = '' - self.show_response = 0 - self.sign = False - self.identity = None - - def finalize_options(self): - if self.identity and not self.sign: - raise DistutilsOptionError( - "Must use --sign for --identity to have meaning" - ) - if os.environ.has_key('HOME'): - rc = os.path.join(os.environ['HOME'], '.pypirc') - if os.path.exists(rc): - self.announce('Using PyPI login from %s' % rc) - config = ConfigParser.ConfigParser({ - 'username':'', - 'password':'', - 'repository':''}) - config.read(rc) - if not self.repository: - self.repository = config.get('server-login', 'repository') - if not self.username: - self.username = config.get('server-login', 'username') - if not self.password: - self.password = config.get('server-login', 'password') - if not self.repository: - self.repository = self.DEFAULT_REPOSITORY - - def run(self): - if not self.distribution.dist_files: - raise DistutilsOptionError("No dist file created in earlier command") - for command, pyversion, filename in self.distribution.dist_files: - self.upload_file(command, pyversion, filename) - - def upload_file(self, command, pyversion, filename): - # Sign if requested - if self.sign: - gpg_args = ["gpg", "--detach-sign", "-a", filename] - if self.identity: - gpg_args[2:2] = ["--local-user", self.identity] - spawn(gpg_args, - dry_run=self.dry_run) - - # Fill in the data - content = open(filename,'rb').read() - basename = os.path.basename(filename) - comment = '' - if command=='bdist_egg' and self.distribution.has_ext_modules(): - comment = "built on %s" % platform.platform(terse=1) - data = { - ':action':'file_upload', - 'protcol_version':'1', - 'name':self.distribution.get_name(), - 'version':self.distribution.get_version(), - 'content':(basename,content), - 'filetype':command, - 'pyversion':pyversion, - 'md5_digest':md5(content).hexdigest(), - } - if command == 'bdist_rpm': - dist, version, id = platform.dist() - if dist: - comment = 'built for %s %s' % (dist, version) - elif command == 'bdist_dumb': - comment = 'built for %s' % platform.platform(terse=1) - data['comment'] = comment - - if self.sign: - data['gpg_signature'] = (os.path.basename(filename) + ".asc", - open(filename+".asc").read()) - - # set up the authentication - auth = "Basic " + base64.encodestring(self.username + ":" + self.password).strip() - - # Build up the MIME payload for the POST data - boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254' - sep_boundary = '\n--' + boundary - end_boundary = sep_boundary + '--' - body = StringIO.StringIO() - for key, value in data.items(): - # handle multiple entries for the same name - if type(value) != type([]): - value = [value] - for value in value: - if type(value) is tuple: - fn = ';filename="%s"' % value[0] - value = value[1] - else: - fn = "" - value = str(value) - body.write(sep_boundary) - body.write('\nContent-Disposition: form-data; name="%s"'%key) - body.write(fn) - body.write("\n\n") - body.write(value) - if 
value and value[-1] == '\r': - body.write('\n') # write an extra newline (lurve Macs) - body.write(end_boundary) - body.write("\n") - body = body.getvalue() - - self.announce("Submitting %s to %s" % (filename, self.repository), log.INFO) - - # build the Request - # We can't use urllib2 since we need to send the Basic - # auth right with the first request - schema, netloc, url, params, query, fragments = \ - urlparse.urlparse(self.repository) - assert not params and not query and not fragments - if schema == 'http': - http = httplib.HTTPConnection(netloc) - elif schema == 'https': - http = httplib.HTTPSConnection(netloc) - else: - raise AssertionError, "unsupported schema "+schema - - data = '' - loglevel = log.INFO - try: - http.connect() - http.putrequest("POST", url) - http.putheader('Content-type', - 'multipart/form-data; boundary=%s'%boundary) - http.putheader('Content-length', str(len(body))) - http.putheader('Authorization', auth) - http.endheaders() - http.send(body) - except socket.error, e: - self.announce(e.msg, log.ERROR) - return - - r = http.getresponse() - if r.status == 200: - self.announce('Server response (%s): %s' % (r.status, r.reason), - log.INFO) - else: - self.announce('Upload failed (%s): %s' % (r.status, r.reason), - log.ERROR) - if self.show_response: - print '-'*75, r.read(), '-'*75 diff --git a/Lib/setuptools/depends.py b/Lib/setuptools/depends.py deleted file mode 100644 index 68d8194..0000000 --- a/Lib/setuptools/depends.py +++ /dev/null @@ -1,239 +0,0 @@ -from __future__ import generators -import sys, imp, marshal -from imp import PKG_DIRECTORY, PY_COMPILED, PY_SOURCE, PY_FROZEN -from distutils.version import StrictVersion, LooseVersion - -__all__ = [ - 'Require', 'find_module', 'get_module_constant', 'extract_constant' -] - -class Require: - """A prerequisite to building or installing a distribution""" - - def __init__(self,name,requested_version,module,homepage='', - attribute=None,format=None - ): - - if format is None and requested_version is not None: - format = StrictVersion - - if format is not None: - requested_version = format(requested_version) - if attribute is None: - attribute = '__version__' - - self.__dict__.update(locals()) - del self.self - - - def full_name(self): - """Return full package/distribution name, w/version""" - if self.requested_version is not None: - return '%s-%s' % (self.name,self.requested_version) - return self.name - - - def version_ok(self,version): - """Is 'version' sufficiently up-to-date?""" - return self.attribute is None or self.format is None or \ - str(version)<>"unknown" and version >= self.requested_version - - - def get_version(self, paths=None, default="unknown"): - - """Get version number of installed module, 'None', or 'default' - - Search 'paths' for module. If not found, return 'None'. If found, - return the extracted version attribute, or 'default' if no version - attribute was specified, or the value cannot be determined without - importing the module. The version is formatted according to the - requirement's version format (if any), unless it is 'None' or the - supplied 'default'. 
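        Illustrative sketch (hypothetical, not from the original docstring):
        for a requirement declared as Require('Twisted', '1.3', 'twisted'),
        this would locate the 'twisted' module on 'paths', extract its
        '__version__' constant (without importing the module when possible),
        and return it as a StrictVersion; if the module cannot be found at
        all, 'None' is returned.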
- """ - - if self.attribute is None: - try: - f,p,i = find_module(self.module,paths) - if f: f.close() - return default - except ImportError: - return None - - v = get_module_constant(self.module,self.attribute,default,paths) - - if v is not None and v is not default and self.format is not None: - return self.format(v) - - return v - - - def is_present(self,paths=None): - """Return true if dependency is present on 'paths'""" - return self.get_version(paths) is not None - - - def is_current(self,paths=None): - """Return true if dependency is present and up-to-date on 'paths'""" - version = self.get_version(paths) - if version is None: - return False - return self.version_ok(version) - - -def _iter_code(code): - - """Yield '(op,arg)' pair for each operation in code object 'code'""" - - from array import array - from dis import HAVE_ARGUMENT, EXTENDED_ARG - - bytes = array('b',code.co_code) - eof = len(code.co_code) - - ptr = 0 - extended_arg = 0 - - while ptr=HAVE_ARGUMENT: - - arg = bytes[ptr+1] + bytes[ptr+2]*256 + extended_arg - ptr += 3 - - if op==EXTENDED_ARG: - extended_arg = arg * 65536L - continue - - else: - arg = None - ptr += 1 - - yield op,arg - - - - - - - - - - -def find_module(module, paths=None): - """Just like 'imp.find_module()', but with package support""" - - parts = module.split('.') - - while parts: - part = parts.pop(0) - f, path, (suffix,mode,kind) = info = imp.find_module(part, paths) - - if kind==PKG_DIRECTORY: - parts = parts or ['__init__'] - paths = [path] - - elif parts: - raise ImportError("Can't find %r in %s" % (parts,module)) - - return info - - - - - - - - - - - - - - - - - - - - - - - - -def get_module_constant(module, symbol, default=-1, paths=None): - - """Find 'module' by searching 'paths', and extract 'symbol' - - Return 'None' if 'module' does not exist on 'paths', or it does not define - 'symbol'. If the module defines 'symbol' as a constant, return the - constant. Otherwise, return 'default'.""" - - try: - f, path, (suffix,mode,kind) = find_module(module,paths) - except ImportError: - # Module doesn't exist - return None - - try: - if kind==PY_COMPILED: - f.read(8) # skip magic & date - code = marshal.load(f) - elif kind==PY_FROZEN: - code = imp.get_frozen_object(module) - elif kind==PY_SOURCE: - code = compile(f.read(), path, 'exec') - else: - # Not something we can parse; we'll have to import it. :( - if module not in sys.modules: - imp.load_module(module,f,path,(suffix,mode,kind)) - return getattr(sys.modules[module],symbol,None) - - finally: - if f: - f.close() - - return extract_constant(code,symbol,default) - - - - - - - - -def extract_constant(code,symbol,default=-1): - - """Extract the constant value of 'symbol' from 'code' - - If the name 'symbol' is bound to a constant value by the Python code - object 'code', return that value. If 'symbol' is bound to an expression, - return 'default'. Otherwise, return 'None'. - - Return value is based on the first assignment to 'symbol'. 'symbol' must - be a global, or at least a non-"fast" local in the code block. That is, - only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol' - must be present in 'code.co_names'. 
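    Illustrative sketch (not from the original docstring)::

        code = compile("__version__ = '1.0'; x = f()", '<example>', 'exec')
        extract_constant(code, '__version__')   # -> '1.0'
        extract_constant(code, 'x')             # -> -1 (default): bound, but not to a constant
        extract_constant(code, 'missing')       # -> None: never assigned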
- """ - - if symbol not in code.co_names: - # name's not there, can't possibly be an assigment - return None - - name_idx = list(code.co_names).index(symbol) - - STORE_NAME = 90 - STORE_GLOBAL = 97 - LOAD_CONST = 100 - - const = default - - for op, arg in _iter_code(code): - - if op==LOAD_CONST: - const = code.co_consts[arg] - elif arg==name_idx and (op==STORE_NAME or op==STORE_GLOBAL): - return const - else: - const = default diff --git a/Lib/setuptools/dist.py b/Lib/setuptools/dist.py deleted file mode 100644 index f0417c1..0000000 --- a/Lib/setuptools/dist.py +++ /dev/null @@ -1,798 +0,0 @@ -__all__ = ['Distribution'] - -from distutils.core import Distribution as _Distribution -from setuptools.depends import Require -from setuptools.command.install import install -from setuptools.command.sdist import sdist -from setuptools.command.install_lib import install_lib -from distutils.errors import DistutilsOptionError, DistutilsPlatformError -from distutils.errors import DistutilsSetupError -import setuptools, pkg_resources, distutils.core, distutils.dist, distutils.cmd -import os - -def _get_unpatched(cls): - """Protect against re-patching the distutils if reloaded - - Also ensures that no other distutils extension monkeypatched the distutils - first. - """ - while cls.__module__.startswith('setuptools'): - cls, = cls.__bases__ - if not cls.__module__.startswith('distutils'): - raise AssertionError( - "distutils has already been patched by %r" % cls - ) - return cls - -_Distribution = _get_unpatched(_Distribution) - -sequence = tuple, list - -def check_importable(dist, attr, value): - try: - ep = pkg_resources.EntryPoint.parse('x='+value) - assert not ep.extras - except (TypeError,ValueError,AttributeError,AssertionError): - raise DistutilsSetupError( - "%r must be importable 'module:attrs' string (got %r)" - % (attr,value) - ) - - -def assert_string_list(dist, attr, value): - """Verify that value is a string list or None""" - try: - assert ''.join(value)!=value - except (TypeError,ValueError,AttributeError,AssertionError): - raise DistutilsSetupError( - "%r must be a list of strings (got %r)" % (attr,value) - ) - -def check_nsp(dist, attr, value): - """Verify that namespace packages are valid""" - assert_string_list(dist,attr,value) - - for nsp in value: - if not dist.has_contents_for(nsp): - raise DistutilsSetupError( - "Distribution contains no modules or packages for " + - "namespace package %r" % nsp - ) - -def check_extras(dist, attr, value): - """Verify that extras_require mapping is valid""" - try: - for k,v in value.items(): - list(pkg_resources.parse_requirements(v)) - except (TypeError,ValueError,AttributeError): - raise DistutilsSetupError( - "'extras_require' must be a dictionary whose values are " - "strings or lists of strings containing valid project/version " - "requirement specifiers." 
- ) - -def assert_bool(dist, attr, value): - """Verify that value is True, False, 0, or 1""" - if bool(value) != value: - raise DistutilsSetupError( - "%r must be a boolean value (got %r)" % (attr,value) - ) - - - -def check_requirements(dist, attr, value): - """Verify that install_requires is a valid requirements list""" - try: - list(pkg_resources.parse_requirements(value)) - except (TypeError,ValueError): - raise DistutilsSetupError( - "%r must be a string or list of strings " - "containing valid project/version requirement specifiers" % (attr,) - ) - -def check_entry_points(dist, attr, value): - """Verify that entry_points map is parseable""" - try: - pkg_resources.EntryPoint.parse_map(value) - except ValueError, e: - raise DistutilsSetupError(e) - - -def check_test_suite(dist, attr, value): - if not isinstance(value,basestring): - raise DistutilsSetupError("test_suite must be a string") - - -def check_package_data(dist, attr, value): - """Verify that value is a dictionary of package names to glob lists""" - if isinstance(value,dict): - for k,v in value.items(): - if not isinstance(k,str): break - try: iter(v) - except TypeError: - break - else: - return - raise DistutilsSetupError( - attr+" must be a dictionary mapping package names to lists of " - "wildcard patterns" - ) - - - - -class Distribution(_Distribution): - """Distribution with support for features, tests, and package data - - This is an enhanced version of 'distutils.dist.Distribution' that - effectively adds the following new optional keyword arguments to 'setup()': - - 'install_requires' -- a string or sequence of strings specifying project - versions that the distribution requires when installed, in the format - used by 'pkg_resources.require()'. They will be installed - automatically when the package is installed. If you wish to use - packages that are not available in PyPI, or want to give your users an - alternate download location, you can add a 'find_links' option to the - '[easy_install]' section of your project's 'setup.cfg' file, and then - setuptools will scan the listed web pages for links that satisfy the - requirements. - - 'extras_require' -- a dictionary mapping names of optional "extras" to the - additional requirement(s) that using those extras incurs. For example, - this:: - - extras_require = dict(reST = ["docutils>=0.3", "reSTedit"]) - - indicates that the distribution can optionally provide an extra - capability called "reST", but it can only be used if docutils and - reSTedit are installed. If the user installs your package using - EasyInstall and requests one of your extras, the corresponding - additional requirements will be installed if needed. - - 'features' -- a dictionary mapping option names to 'setuptools.Feature' - objects. Features are a portion of the distribution that can be - included or excluded based on user options, inter-feature dependencies, - and availability on the current system. Excluded features are omitted - from all setup commands, including source and binary distributions, so - you can create multiple distributions from the same source tree. - Feature names should be valid Python identifiers, except that they may - contain the '-' (minus) sign. Features can be included or excluded - via the command line options '--with-X' and '--without-X', where 'X' is - the name of the feature. Whether a feature is included by default, and - whether you are allowed to control this from the command line, is - determined by the Feature object. See the 'Feature' class for more - information. 
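    A hypothetical 'features' mapping (an illustrative sketch, not from the
    original docstring) might look like::

        features = {'tests': Feature("test suite", standard=False,
                                      packages=['mypkg.tests'])}

    which makes '--with-tests' and '--without-tests' available on the
    setup.py command line.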
- - 'test_suite' -- the name of a test suite to run for the 'test' command. - If the user runs 'python setup.py test', the package will be installed, - and the named test suite will be run. The format is the same as - would be used on a 'unittest.py' command line. That is, it is the - dotted name of an object to import and call to generate a test suite. - - 'package_data' -- a dictionary mapping package names to lists of filenames - or globs to use to find data files contained in the named packages. - If the dictionary has filenames or globs listed under '""' (the empty - string), those names will be searched for in every package, in addition - to any names for the specific package. Data files found using these - names/globs will be installed along with the package, in the same - location as the package. Note that globs are allowed to reference - the contents of non-package subdirectories, as long as you use '/' as - a path separator. (Globs are automatically converted to - platform-specific paths at runtime.) - - In addition to these new keywords, this class also has several new methods - for manipulating the distribution's contents. For example, the 'include()' - and 'exclude()' methods can be thought of as in-place add and subtract - commands that add or remove packages, modules, extensions, and so on from - the distribution. They are used by the feature subsystem to configure the - distribution for the included and excluded features. - """ - - _patched_dist = None - - def patch_missing_pkg_info(self, attrs): - # Fake up a replacement for the data that would normally come from - # PKG-INFO, but which might not yet be built if this is a fresh - # checkout. - # - if not attrs or 'name' not in attrs or 'version' not in attrs: - return - key = pkg_resources.safe_name(str(attrs['name'])).lower() - dist = pkg_resources.working_set.by_key.get(key) - if dist is not None and not dist.has_metadata('PKG-INFO'): - dist._version = pkg_resources.safe_version(str(attrs['version'])) - self._patched_dist = dist - - def __init__ (self, attrs=None): - have_package_data = hasattr(self, "package_data") - if not have_package_data: - self.package_data = {} - self.require_features = [] - self.features = {} - self.dist_files = [] - self.patch_missing_pkg_info(attrs) - # Make sure we have any eggs needed to interpret 'attrs' - if attrs and 'dependency_links' in attrs: - self.dependency_links = attrs.pop('dependency_links') - assert_string_list(self,'dependency_links',self.dependency_links) - if attrs and 'setup_requires' in attrs: - self.fetch_build_eggs(attrs.pop('setup_requires')) - for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'): - if not hasattr(self,ep.name): - setattr(self,ep.name,None) - _Distribution.__init__(self,attrs) - if isinstance(self.metadata.version, (int,long,float)): - # Some people apparently take "version number" too literally :) - self.metadata.version = str(self.metadata.version) - - def parse_command_line(self): - """Process features after parsing command line options""" - result = _Distribution.parse_command_line(self) - if self.features: - self._finalize_features() - return result - - def _feature_attrname(self,name): - """Convert feature name to corresponding option attribute name""" - return 'with_'+name.replace('-','_') - - def fetch_build_eggs(self, requires): - """Resolve pre-setup requirements""" - from pkg_resources import working_set, parse_requirements - for dist in working_set.resolve( - parse_requirements(requires), installer=self.fetch_build_egg - ): - 
working_set.add(dist) - - def finalize_options(self): - _Distribution.finalize_options(self) - if self.features: - self._set_global_opts_from_features() - - for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'): - value = getattr(self,ep.name,None) - if value is not None: - ep.require(installer=self.fetch_build_egg) - ep.load()(self, ep.name, value) - - def fetch_build_egg(self, req): - """Fetch an egg needed for building""" - try: - cmd = self._egg_fetcher - except AttributeError: - from setuptools.command.easy_install import easy_install - dist = self.__class__({'script_args':['easy_install']}) - dist.parse_config_files() - opts = dist.get_option_dict('easy_install') - keep = ( - 'find_links', 'site_dirs', 'index_url', 'optimize', - 'site_dirs', 'allow_hosts' - ) - for key in opts.keys(): - if key not in keep: - del opts[key] # don't use any other settings - if self.dependency_links: - links = self.dependency_links[:] - if 'find_links' in opts: - links = opts['find_links'][1].split() + links - opts['find_links'] = ('setup', links) - cmd = easy_install( - dist, args=["x"], install_dir=os.curdir, exclude_scripts=True, - always_copy=False, build_directory=None, editable=False, - upgrade=False, multi_version=True, no_report = True - ) - cmd.ensure_finalized() - self._egg_fetcher = cmd - return cmd.easy_install(req) - - def _set_global_opts_from_features(self): - """Add --with-X/--without-X options based on optional features""" - - go = [] - no = self.negative_opt.copy() - - for name,feature in self.features.items(): - self._set_feature(name,None) - feature.validate(self) - - if feature.optional: - descr = feature.description - incdef = ' (default)' - excdef='' - if not feature.include_by_default(): - excdef, incdef = incdef, excdef - - go.append(('with-'+name, None, 'include '+descr+incdef)) - go.append(('without-'+name, None, 'exclude '+descr+excdef)) - no['without-'+name] = 'with-'+name - - self.global_options = self.feature_options = go + self.global_options - self.negative_opt = self.feature_negopt = no - - - - - - - - - - - - - - - - - - - def _finalize_features(self): - """Add/remove features and resolve dependencies between them""" - - # First, flag all the enabled items (and thus their dependencies) - for name,feature in self.features.items(): - enabled = self.feature_is_included(name) - if enabled or (enabled is None and feature.include_by_default()): - feature.include_in(self) - self._set_feature(name,1) - - # Then disable the rest, so that off-by-default features don't - # get flagged as errors when they're required by an enabled feature - for name,feature in self.features.items(): - if not self.feature_is_included(name): - feature.exclude_from(self) - self._set_feature(name,0) - - - def get_command_class(self, command): - """Pluggable version of get_command_class()""" - if command in self.cmdclass: - return self.cmdclass[command] - - for ep in pkg_resources.iter_entry_points('distutils.commands',command): - ep.require(installer=self.fetch_build_egg) - self.cmdclass[command] = cmdclass = ep.load() - return cmdclass - else: - return _Distribution.get_command_class(self, command) - - def print_commands(self): - for ep in pkg_resources.iter_entry_points('distutils.commands'): - if ep.name not in self.cmdclass: - cmdclass = ep.load(False) # don't require extras, we're not running - self.cmdclass[ep.name] = cmdclass - return _Distribution.print_commands(self) - - - - - - def _set_feature(self,name,status): - """Set feature's inclusion status""" - 
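        # Concretely (hypothetical feature name): _set_feature('tests', 1)
        # sets self.with_tests = 1 via _feature_attrname() above, and
        # feature_is_included('tests') reads that same attribute back.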
setattr(self,self._feature_attrname(name),status) - - def feature_is_included(self,name): - """Return 1 if feature is included, 0 if excluded, 'None' if unknown""" - return getattr(self,self._feature_attrname(name)) - - def include_feature(self,name): - """Request inclusion of feature named 'name'""" - - if self.feature_is_included(name)==0: - descr = self.features[name].description - raise DistutilsOptionError( - descr + " is required, but was excluded or is not available" - ) - self.features[name].include_in(self) - self._set_feature(name,1) - - def include(self,**attrs): - """Add items to distribution that are named in keyword arguments - - For example, 'dist.exclude(py_modules=["x"])' would add 'x' to - the distribution's 'py_modules' attribute, if it was not already - there. - - Currently, this method only supports inclusion for attributes that are - lists or tuples. If you need to add support for adding to other - attributes in this or a subclass, you can add an '_include_X' method, - where 'X' is the name of the attribute. The method will be called with - the value passed to 'include()'. So, 'dist.include(foo={"bar":"baz"})' - will try to call 'dist._include_foo({"bar":"baz"})', which can then - handle whatever special inclusion logic is needed. - """ - for k,v in attrs.items(): - include = getattr(self, '_include_'+k, None) - if include: - include(v) - else: - self._include_misc(k,v) - - def exclude_package(self,package): - """Remove packages, modules, and extensions in named package""" - - pfx = package+'.' - if self.packages: - self.packages = [ - p for p in self.packages - if p<>package and not p.startswith(pfx) - ] - - if self.py_modules: - self.py_modules = [ - p for p in self.py_modules - if p<>package and not p.startswith(pfx) - ] - - if self.ext_modules: - self.ext_modules = [ - p for p in self.ext_modules - if p.name<>package and not p.name.startswith(pfx) - ] - - - def has_contents_for(self,package): - """Return true if 'exclude_package(package)' would do something""" - - pfx = package+'.' 
- - for p in self.iter_distribution_names(): - if p==package or p.startswith(pfx): - return True - - - - - - - - - - - def _exclude_misc(self,name,value): - """Handle 'exclude()' for list/tuple attrs without a special handler""" - if not isinstance(value,sequence): - raise DistutilsSetupError( - "%s: setting must be a list or tuple (%r)" % (name, value) - ) - try: - old = getattr(self,name) - except AttributeError: - raise DistutilsSetupError( - "%s: No such distribution setting" % name - ) - if old is not None and not isinstance(old,sequence): - raise DistutilsSetupError( - name+": this setting cannot be changed via include/exclude" - ) - elif old: - setattr(self,name,[item for item in old if item not in value]) - - def _include_misc(self,name,value): - """Handle 'include()' for list/tuple attrs without a special handler""" - - if not isinstance(value,sequence): - raise DistutilsSetupError( - "%s: setting must be a list (%r)" % (name, value) - ) - try: - old = getattr(self,name) - except AttributeError: - raise DistutilsSetupError( - "%s: No such distribution setting" % name - ) - if old is None: - setattr(self,name,value) - elif not isinstance(old,sequence): - raise DistutilsSetupError( - name+": this setting cannot be changed via include/exclude" - ) - else: - setattr(self,name,old+[item for item in value if item not in old]) - - def exclude(self,**attrs): - """Remove items from distribution that are named in keyword arguments - - For example, 'dist.exclude(py_modules=["x"])' would remove 'x' from - the distribution's 'py_modules' attribute. Excluding packages uses - the 'exclude_package()' method, so all of the package's contained - packages, modules, and extensions are also excluded. - - Currently, this method only supports exclusion from attributes that are - lists or tuples. If you need to add support for excluding from other - attributes in this or a subclass, you can add an '_exclude_X' method, - where 'X' is the name of the attribute. The method will be called with - the value passed to 'exclude()'. So, 'dist.exclude(foo={"bar":"baz"})' - will try to call 'dist._exclude_foo({"bar":"baz"})', which can then - handle whatever special exclusion logic is needed. - """ - for k,v in attrs.items(): - exclude = getattr(self, '_exclude_'+k, None) - if exclude: - exclude(v) - else: - self._exclude_misc(k,v) - - def _exclude_packages(self,packages): - if not isinstance(packages,sequence): - raise DistutilsSetupError( - "packages: setting must be a list or tuple (%r)" % (packages,) - ) - map(self.exclude_package, packages) - - - - - - - - - - - - - def _parse_command_opts(self, parser, args): - # Remove --with-X/--without-X options when processing command args - self.global_options = self.__class__.global_options - self.negative_opt = self.__class__.negative_opt - - # First, expand any aliases - command = args[0] - aliases = self.get_option_dict('aliases') - while command in aliases: - src,alias = aliases[command] - del aliases[command] # ensure each alias can expand only once! 
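            # Hypothetical example: an alias 'rel = sdist bdist_egg' defined
            # under [aliases] expands here via shlex.split(), so
            # 'setup.py rel' behaves like 'setup.py sdist bdist_egg'; deleting
            # the entry first keeps a self-referencing alias from expanding
            # forever.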
- import shlex - args[:1] = shlex.split(alias,True) - command = args[0] - - nargs = _Distribution._parse_command_opts(self, parser, args) - - # Handle commands that want to consume all remaining arguments - cmd_class = self.get_command_class(command) - if getattr(cmd_class,'command_consumes_arguments',None): - self.get_option_dict(command)['args'] = ("command line", nargs) - if nargs is not None: - return [] - - return nargs - - - - - - - - - - - - - - - - - def get_cmdline_options(self): - """Return a '{cmd: {opt:val}}' map of all command-line options - - Option names are all long, but do not include the leading '--', and - contain dashes rather than underscores. If the option doesn't take - an argument (e.g. '--quiet'), the 'val' is 'None'. - - Note that options provided by config files are intentionally excluded. - """ - - d = {} - - for cmd,opts in self.command_options.items(): - - for opt,(src,val) in opts.items(): - - if src != "command line": - continue - - opt = opt.replace('_','-') - - if val==0: - cmdobj = self.get_command_obj(cmd) - neg_opt = self.negative_opt.copy() - neg_opt.update(getattr(cmdobj,'negative_opt',{})) - for neg,pos in neg_opt.items(): - if pos==opt: - opt=neg - val=None - break - else: - raise AssertionError("Shouldn't be able to get here") - - elif val==1: - val = None - - d.setdefault(cmd,{})[opt] = val - - return d - - - def iter_distribution_names(self): - """Yield all packages, modules, and extension names in distribution""" - - for pkg in self.packages or (): - yield pkg - - for module in self.py_modules or (): - yield module - - for ext in self.ext_modules or (): - if isinstance(ext,tuple): - name,buildinfo = ext - yield name - else: - yield ext.name - -# Install it throughout the distutils -for module in distutils.dist, distutils.core, distutils.cmd: - module.Distribution = Distribution - - - - - - - - - - - - - - - - - - - - - - -class Feature: - """A subset of the distribution that can be excluded if unneeded/wanted - - Features are created using these keyword arguments: - - 'description' -- a short, human readable description of the feature, to - be used in error messages, and option help messages. - - 'standard' -- if true, the feature is included by default if it is - available on the current system. Otherwise, the feature is only - included if requested via a command line '--with-X' option, or if - another included feature requires it. The default setting is 'False'. - - 'available' -- if true, the feature is available for installation on the - current system. The default setting is 'True'. - - 'optional' -- if true, the feature's inclusion can be controlled from the - command line, using the '--with-X' or '--without-X' options. If - false, the feature's inclusion status is determined automatically, - based on 'availabile', 'standard', and whether any other feature - requires it. The default setting is 'True'. - - 'require_features' -- a string or sequence of strings naming features - that should also be included if this feature is included. Defaults to - empty list. May also contain 'Require' objects that should be - added/removed from the distribution. - - 'remove' -- a string or list of strings naming packages to be removed - from the distribution if this feature is *not* included. If the - feature *is* included, this argument is ignored. This argument exists - to support removing features that "crosscut" a distribution, such as - defining a 'tests' feature that removes all the 'tests' subpackages - provided by other features. 
The default for this argument is an empty - list. (Note: the named package(s) or modules must exist in the base - distribution when the 'setup()' function is initially called.) - - other keywords -- any other keyword arguments are saved, and passed to - the distribution's 'include()' and 'exclude()' methods when the - feature is included or excluded, respectively. So, for example, you - could pass 'packages=["a","b"]' to cause packages 'a' and 'b' to be - added or removed from the distribution as appropriate. - - A feature must include at least one 'requires', 'remove', or other - keyword argument. Otherwise, it can't affect the distribution in any way. - Note also that you can subclass 'Feature' to create your own specialized - feature types that modify the distribution in other ways when included or - excluded. See the docstrings for the various methods here for more detail. - Aside from the methods, the only feature attributes that distributions look - at are 'description' and 'optional'. - """ - def __init__(self, description, standard=False, available=True, - optional=True, require_features=(), remove=(), **extras - ): - - self.description = description - self.standard = standard - self.available = available - self.optional = optional - if isinstance(require_features,(str,Require)): - require_features = require_features, - - self.require_features = [ - r for r in require_features if isinstance(r,str) - ] - er = [r for r in require_features if not isinstance(r,str)] - if er: extras['require_features'] = er - - if isinstance(remove,str): - remove = remove, - self.remove = remove - self.extras = extras - - if not remove and not require_features and not extras: - raise DistutilsSetupError( - "Feature %s: must define 'require_features', 'remove', or at least one" - " of 'packages', 'py_modules', etc." - ) - - def include_by_default(self): - """Should this feature be included by default?""" - return self.available and self.standard - - def include_in(self,dist): - - """Ensure feature and its requirements are included in distribution - - You may override this in a subclass to perform additional operations on - the distribution. Note that this method may be called more than once - per feature, and so should be idempotent. - - """ - - if not self.available: - raise DistutilsPlatformError( - self.description+" is required," - "but is not available on this platform" - ) - - dist.include(**self.extras) - - for f in self.require_features: - dist.include_feature(f) - - - - def exclude_from(self,dist): - - """Ensure feature is excluded from distribution - - You may override this in a subclass to perform additional operations on - the distribution. This method will be called at most once per - feature, and only after all included features have been asked to - include themselves. - """ - - dist.exclude(**self.extras) - - if self.remove: - for item in self.remove: - dist.exclude_package(item) - - - - def validate(self,dist): - - """Verify that feature makes sense in context of distribution - - This method is called by the distribution just before it parses its - command line. It checks to ensure that the 'remove' attribute, if any, - contains only valid package/module names that are present in the base - distribution when 'setup()' is called. You may override it in a - subclass to perform any other required validation of the feature - against a target distribution. 
- """ - - for item in self.remove: - if not dist.has_contents_for(item): - raise DistutilsSetupError( - "%s wants to be able to remove %s, but the distribution" - " doesn't contain any packages or modules under %s" - % (self.description, item, item) - ) diff --git a/Lib/setuptools/extension.py b/Lib/setuptools/extension.py deleted file mode 100644 index cfcf55b..0000000 --- a/Lib/setuptools/extension.py +++ /dev/null @@ -1,35 +0,0 @@ -from distutils.core import Extension as _Extension -from dist import _get_unpatched -_Extension = _get_unpatched(_Extension) - -try: - from Pyrex.Distutils.build_ext import build_ext -except ImportError: - have_pyrex = False -else: - have_pyrex = True - - -class Extension(_Extension): - """Extension that uses '.c' files in place of '.pyx' files""" - - if not have_pyrex: - # convert .pyx extensions to .c - def __init__(self,*args,**kw): - _Extension.__init__(self,*args,**kw) - sources = [] - for s in self.sources: - if s.endswith('.pyx'): - sources.append(s[:-3]+'c') - else: - sources.append(s) - self.sources = sources - -class Library(Extension): - """Just like a regular Extension, but built as a library instead""" - -import sys, distutils.core, distutils.extension -distutils.core.Extension = Extension -distutils.extension.Extension = Extension -if 'distutils.command.build_ext' in sys.modules: - sys.modules['distutils.command.build_ext'].Extension = Extension diff --git a/Lib/setuptools/gui.exe b/Lib/setuptools/gui.exe deleted file mode 100755 index 63ff35f..0000000 Binary files a/Lib/setuptools/gui.exe and /dev/null differ diff --git a/Lib/setuptools/package_index.py b/Lib/setuptools/package_index.py deleted file mode 100755 index 107e222..0000000 --- a/Lib/setuptools/package_index.py +++ /dev/null @@ -1,674 +0,0 @@ -"""PyPI and direct package downloading""" - -import sys, os.path, re, urlparse, urllib2, shutil, random, socket -from pkg_resources import * -from distutils import log -from distutils.errors import DistutilsError -from md5 import md5 -from fnmatch import translate - -EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.]+)$') -HREF = re.compile("""href\\s*=\\s*['"]?([^'"> ]+)""", re.I) -# this is here to fix emacs' cruddy broken syntax highlighting -PYPI_MD5 = re.compile( - '([^<]+)\n\s+\\(md5\\)' -) - -URL_SCHEME = re.compile('([-+.a-z0-9]{2,}):',re.I).match -EXTENSIONS = ".tar.gz .tar.bz2 .tar .zip .tgz".split() - -__all__ = [ - 'PackageIndex', 'distros_for_url', 'parse_bdist_wininst', - 'interpret_distro_name', -] - - -def parse_bdist_wininst(name): - """Return (base,pyversion) or (None,None) for possible .exe name""" - - lower = name.lower() - base, py_ver = None, None - - if lower.endswith('.exe'): - if lower.endswith('.win32.exe'): - base = name[:-10] - elif lower.startswith('.win32-py',-16): - py_ver = name[-7:-4] - base = name[:-16] - - return base,py_ver - -def egg_info_for_url(url): - scheme, server, path, parameters, query, fragment = urlparse.urlparse(url) - base = urllib2.unquote(path.split('/')[-1]) - if '#' in base: base, fragment = base.split('#',1) - return base,fragment - -def distros_for_url(url, metadata=None): - """Yield egg or source distribution objects that might be found at a URL""" - base, fragment = egg_info_for_url(url) - dists = distros_for_location(url, base, metadata) - if fragment and not dists: - match = EGG_FRAGMENT.match(fragment) - if match: - return interpret_distro_name( - url, match.group(1), metadata, precedence = CHECKOUT_DIST - ) - return dists - -def distros_for_location(location, basename, metadata=None): - 
"""Yield egg or source distribution objects based on basename""" - if basename.endswith('.egg.zip'): - basename = basename[:-4] # strip the .zip - if basename.endswith('.egg'): # only one, unambiguous interpretation - return [Distribution.from_location(location, basename, metadata)] - - if basename.endswith('.exe'): - win_base, py_ver = parse_bdist_wininst(basename) - if win_base is not None: - return interpret_distro_name( - location, win_base, metadata, py_ver, BINARY_DIST, "win32" - ) - - # Try source distro extensions (.zip, .tgz, etc.) - # - for ext in EXTENSIONS: - if basename.endswith(ext): - basename = basename[:-len(ext)] - return interpret_distro_name(location, basename, metadata) - return [] # no extension matched - - -def distros_for_filename(filename, metadata=None): - """Yield possible egg or source distribution objects based on a filename""" - return distros_for_location( - normalize_path(filename), os.path.basename(filename), metadata - ) - - -def interpret_distro_name(location, basename, metadata, - py_version=None, precedence=SOURCE_DIST, platform=None -): - """Generate alternative interpretations of a source distro name - - Note: if `location` is a filesystem filename, you should call - ``pkg_resources.normalize_path()`` on it before passing it to this - routine! - """ - - # Generate alternative interpretations of a source distro name - # Because some packages are ambiguous as to name/versions split - # e.g. "adns-python-1.1.0", "egenix-mx-commercial", etc. - # So, we generate each possible interepretation (e.g. "adns, python-1.1.0" - # "adns-python, 1.1.0", and "adns-python-1.1.0, no version"). In practice, - # the spurious interpretations should be ignored, because in the event - # there's also an "adns" package, the spurious "python-1.1.0" version will - # compare lower than any numeric version number, and is therefore unlikely - # to match a request for it. It's still a potential problem, though, and - # in the long run PyPI and the distutils should go for "safe" names and - # versions in distribution archive names (sdist and bdist). 
- - parts = basename.split('-') - for p in range(1,len(parts)+1): - yield Distribution( - location, metadata, '-'.join(parts[:p]), '-'.join(parts[p:]), - py_version=py_version, precedence = precedence, - platform = platform - ) - - - - - -class PackageIndex(Environment): - """A distribution index that scans web pages for download URLs""" - - def __init__(self,index_url="http://www.python.org/pypi",hosts=('*',),*args,**kw): - Environment.__init__(self,*args,**kw) - self.index_url = index_url + "/"[:not index_url.endswith('/')] - self.scanned_urls = {} - self.fetched_urls = {} - self.package_pages = {} - self.allows = re.compile('|'.join(map(translate,hosts))).match - self.to_scan = [] - - def process_url(self, url, retrieve=False): - """Evaluate a URL as a possible download, and maybe retrieve it""" - url = fix_sf_url(url) - if url in self.scanned_urls and not retrieve: - return - self.scanned_urls[url] = True - if not URL_SCHEME(url): - self.process_filename(url) - return - else: - dists = list(distros_for_url(url)) - if dists: - if not self.url_ok(url): - return - self.debug("Found link: %s", url) - - if dists or not retrieve or url in self.fetched_urls: - map(self.add, dists) - return # don't need the actual page - - if not self.url_ok(url): - self.fetched_urls[url] = True - return - - self.info("Reading %s", url) - f = self.open_url(url) - self.fetched_urls[url] = self.fetched_urls[f.url] = True - - - if 'html' not in f.headers['content-type'].lower(): - f.close() # not html, we can't process it - return - - base = f.url # handle redirects - page = f.read() - f.close() - if url.startswith(self.index_url): - page = self.process_index(url, page) - - for match in HREF.finditer(page): - link = urlparse.urljoin(base, match.group(1)) - self.process_url(link) - - def process_filename(self, fn, nested=False): - # process filenames or directories - if not os.path.exists(fn): - self.warn("Not found: %s", url) - return - - if os.path.isdir(fn) and not nested: - path = os.path.realpath(fn) - for item in os.listdir(path): - self.process_filename(os.path.join(path,item), True) - - dists = distros_for_filename(fn) - if dists: - self.debug("Found: %s", fn) - map(self.add, dists) - - def url_ok(self, url, fatal=False): - if self.allows(urlparse.urlparse(url)[1]): - return True - msg = "\nLink to % s ***BLOCKED*** by --allow-hosts\n" - if fatal: - raise DistutilsError(msg % url) - else: - self.warn(msg, url) - - - - def process_index(self,url,page): - """Process the contents of a PyPI page""" - def scan(link): - # Process a URL to see if it's for a package page - if link.startswith(self.index_url): - parts = map( - urllib2.unquote, link[len(self.index_url):].split('/') - ) - if len(parts)==2: - # it's a package page, sanitize and index it - pkg = safe_name(parts[0]) - ver = safe_version(parts[1]) - self.package_pages.setdefault(pkg.lower(),{})[link] = True - return to_filename(pkg), to_filename(ver) - return None, None - - if url==self.index_url or 'Index of Packages' in page: - # process an index page into the package-page index - for match in HREF.finditer(page): - scan( urlparse.urljoin(url, match.group(1)) ) - else: - pkg,ver = scan(url) # ensure this page is in the page index - # process individual package page - for tag in ("Home Page", "Download URL"): - pos = page.find(tag) - if pos!=-1: - match = HREF.search(page,pos) - if match: - # Process the found URL - new_url = urlparse.urljoin(url, match.group(1)) - base, frag = egg_info_for_url(new_url) - if base.endswith('.py') and not frag: - if pkg 
and ver: - new_url+='#egg=%s-%s' % (pkg,ver) - else: - self.need_version_info(url) - self.scan_url(new_url) - return PYPI_MD5.sub( - lambda m: '%s' % m.group(1,3,2), page - ) - - def need_version_info(self, url): - self.scan_all( - "Page at %s links to .py file(s) without version info; an index " - "scan is required.", url - ) - - def scan_all(self, msg=None, *args): - if self.index_url not in self.fetched_urls: - if msg: self.warn(msg,*args) - self.warn( - "Scanning index of all packages (this may take a while)" - ) - self.scan_url(self.index_url) - - def find_packages(self, requirement): - self.scan_url(self.index_url + requirement.unsafe_name+'/') - - if not self.package_pages.get(requirement.key): - # Fall back to safe version of the name - self.scan_url(self.index_url + requirement.project_name+'/') - - if not self.package_pages.get(requirement.key): - # We couldn't find the target package, so search the index page too - self.warn( - "Couldn't find index page for %r (maybe misspelled?)", - requirement.unsafe_name - ) - self.scan_all() - - for url in self.package_pages.get(requirement.key,()): - # scan each page that might be related to the desired package - self.scan_url(url) - - def obtain(self, requirement, installer=None): - self.prescan(); self.find_packages(requirement) - for dist in self[requirement.key]: - if dist in requirement: - return dist - self.debug("%s does not match %s", requirement, dist) - return super(PackageIndex, self).obtain(requirement,installer) - - def check_md5(self, cs, info, filename, tfp): - if re.match('md5=[0-9a-f]{32}$', info): - self.debug("Validating md5 checksum for %s", filename) - if cs.hexdigest()<>info[4:]: - tfp.close() - os.unlink(filename) - raise DistutilsError( - "MD5 validation failed for "+os.path.basename(filename)+ - "; possible download problem?" - ) - - def add_find_links(self, urls): - """Add `urls` to the list that will be prescanned for searches""" - for url in urls: - if ( - self.to_scan is None # if we have already "gone online" - or not URL_SCHEME(url) # or it's a local file/directory - or url.startswith('file:') - or list(distros_for_url(url)) # or a direct package link - ): - # then go ahead and process it now - self.scan_url(url) - else: - # otherwise, defer retrieval till later - self.to_scan.append(url) - - def prescan(self): - """Scan urls scheduled for prescanning (e.g. --find-links)""" - if self.to_scan: - map(self.scan_url, self.to_scan) - self.to_scan = None # from now on, go ahead and process immediately - - - - - - - - - - - def download(self, spec, tmpdir): - """Locate and/or download `spec` to `tmpdir`, returning a local path - - `spec` may be a ``Requirement`` object, or a string containing a URL, - an existing local filename, or a project/version requirement spec - (i.e. the string form of a ``Requirement`` object). If it is the URL - of a .py file with an unambiguous ``#egg=name-version`` tag (i.e., one - that escapes ``-`` as ``_`` throughout), a trivial ``setup.py`` is - automatically created alongside the downloaded file. - - If `spec` is a ``Requirement`` object or a string containing a - project/version requirement spec, this method returns the location of - a matching distribution (possibly after downloading it to `tmpdir`). - If `spec` is a locally existing file or directory name, it is simply - returned unchanged. If `spec` is a URL, it is downloaded to a subpath - of `tmpdir`, and the local filename is returned. Various errors may be - raised if a problem occurs during downloading. 
- """ - if not isinstance(spec,Requirement): - scheme = URL_SCHEME(spec) - if scheme: - # It's a url, download it to tmpdir - found = self._download_url(scheme.group(1), spec, tmpdir) - base, fragment = egg_info_for_url(spec) - if base.endswith('.py'): - found = self.gen_setup(found,fragment,tmpdir) - return found - elif os.path.exists(spec): - # Existing file or directory, just return it - return spec - else: - try: - spec = Requirement.parse(spec) - except ValueError: - raise DistutilsError( - "Not a URL, existing file, or requirement spec: %r" % - (spec,) - ) - return getattr(self.fetch_distribution(spec, tmpdir),'location',None) - - - def fetch_distribution(self, - requirement, tmpdir, force_scan=False, source=False, develop_ok=False - ): - """Obtain a distribution suitable for fulfilling `requirement` - - `requirement` must be a ``pkg_resources.Requirement`` instance. - If necessary, or if the `force_scan` flag is set, the requirement is - searched for in the (online) package index as well as the locally - installed packages. If a distribution matching `requirement` is found, - the returned distribution's ``location`` is the value you would have - gotten from calling the ``download()`` method with the matching - distribution's URL or filename. If no matching distribution is found, - ``None`` is returned. - - If the `source` flag is set, only source distributions and source - checkout links will be considered. Unless the `develop_ok` flag is - set, development and system eggs (i.e., those using the ``.egg-info`` - format) will be ignored. - """ - - # process a Requirement - self.info("Searching for %s", requirement) - skipped = {} - - def find(req): - # Find a matching distribution; may be called more than once - - for dist in self[req.key]: - - if dist.precedence==DEVELOP_DIST and not develop_ok: - if dist not in skipped: - self.warn("Skipping development or system egg: %s",dist) - skipped[dist] = 1 - continue - - if dist in req and (dist.precedence<=SOURCE_DIST or not source): - self.info("Best match: %s", dist) - return dist.clone( - location=self.download(dist.location, tmpdir) - ) - - if force_scan: - self.prescan() - self.find_packages(requirement) - - dist = find(requirement) - if dist is None and self.to_scan is not None: - self.prescan() - dist = find(requirement) - - if dist is None and not force_scan: - self.find_packages(requirement) - dist = find(requirement) - - if dist is None: - self.warn( - "No local packages or download links found for %s%s", - (source and "a source distribution of " or ""), - requirement, - ) - return dist - - def fetch(self, requirement, tmpdir, force_scan=False, source=False): - """Obtain a file suitable for fulfilling `requirement` - - DEPRECATED; use the ``fetch_distribution()`` method now instead. For - backward compatibility, this routine is identical but returns the - ``location`` of the downloaded distribution instead of a distribution - object. - """ - dist = self.fetch_distribution(requirement,tmpdir,force_scan,source) - if dist is not None: - return dist.location - return None - - - - - - - - - def gen_setup(self, filename, fragment, tmpdir): - match = EGG_FRAGMENT.match(fragment); #import pdb; pdb.set_trace() - dists = match and [d for d in - interpret_distro_name(filename, match.group(1), None) if d.version - ] or [] - - if len(dists)==1: # unambiguous ``#egg`` fragment - basename = os.path.basename(filename) - - # Make sure the file has been downloaded to the temp dir. 
- if os.path.dirname(filename) != tmpdir: - dst = os.path.join(tmpdir, basename) - from setuptools.command.easy_install import samefile - if not samefile(filename, dst): - shutil.copy2(filename, dst) - filename=dst - - file = open(os.path.join(tmpdir, 'setup.py'), 'w') - file.write( - "from setuptools import setup\n" - "setup(name=%r, version=%r, py_modules=[%r])\n" - % ( - dists[0].project_name, dists[0].version, - os.path.splitext(basename)[0] - ) - ) - file.close() - return filename - - elif match: - raise DistutilsError( - "Can't unambiguously interpret project/version identifier %r; " - "any dashes in the name or version should be escaped using " - "underscores. %r" % (fragment,dists) - ) - else: - raise DistutilsError( - "Can't process plain .py files without an '#egg=name-version'" - " suffix to enable automatic setup script generation." - ) - - dl_blocksize = 8192 - def _download_to(self, url, filename): - self.url_ok(url,True) # raises error if not allowed - self.info("Downloading %s", url) - # Download the file - fp, tfp, info = None, None, None - try: - if '#' in url: - url, info = url.split('#', 1) - fp = self.open_url(url) - if isinstance(fp, urllib2.HTTPError): - raise DistutilsError( - "Can't download %s: %s %s" % (url, fp.code,fp.msg) - ) - cs = md5() - headers = fp.info() - blocknum = 0 - bs = self.dl_blocksize - size = -1 - if "content-length" in headers: - size = int(headers["Content-Length"]) - self.reporthook(url, filename, blocknum, bs, size) - tfp = open(filename,'wb') - while True: - block = fp.read(bs) - if block: - cs.update(block) - tfp.write(block) - blocknum += 1 - self.reporthook(url, filename, blocknum, bs, size) - else: - break - if info: self.check_md5(cs, info, filename, tfp) - return headers - finally: - if fp: fp.close() - if tfp: tfp.close() - - def reporthook(self, url, filename, blocknum, blksize, size): - pass # no-op - - def retry_sf_download(self, url, filename): - try: - return self._download_to(url, filename) - except: - scheme, server, path, param, query, frag = urlparse.urlparse(url) - if server!='dl.sourceforge.net': - raise - - mirror = get_sf_ip() - - while _sf_mirrors: - self.warn("Download failed: %s", sys.exc_info()[1]) - url = urlparse.urlunparse((scheme, mirror, path, param, '', frag)) - try: - return self._download_to(url, filename) - except: - _sf_mirrors.remove(mirror) # don't retry the same mirror - mirror = get_sf_ip() - - raise # fail if no mirror works - - - - - - - - - - - - - - - - - - - - - - def open_url(self, url): - try: - return urllib2.urlopen(url) - except urllib2.HTTPError, v: - return v - except urllib2.URLError, v: - raise DistutilsError("Download error: %s" % v.reason) - - - def _download_url(self, scheme, url, tmpdir): - - # Determine download filename - # - name = filter(None,urlparse.urlparse(url)[2].split('/')) - if name: - name = name[-1] - while '..' 
in name: - name = name.replace('..','.').replace('\\','_') - else: - name = "__downloaded__" # default if URL has no path contents - - if name.endswith('.egg.zip'): - name = name[:-4] # strip the extra .zip before download - - filename = os.path.join(tmpdir,name) - - # Download the file - # - if scheme=='svn' or scheme.startswith('svn+'): - return self._download_svn(url, filename) - else: - headers = self.retry_sf_download(url, filename) - if 'html' in headers['content-type'].lower(): - return self._download_html(url, headers, filename, tmpdir) - else: - return filename - - def scan_url(self, url): - self.process_url(url, True) - - - def _download_html(self, url, headers, filename, tmpdir): - file = open(filename) - for line in file: - if line.strip(): - # Check for a subversion index page - if re.search(r'Revision \d+:', line): - # it's a subversion index page: - file.close() - os.unlink(filename) - return self._download_svn(url, filename) - break # not an index page - file.close() - os.unlink(filename) - raise DistutilsError("Unexpected HTML page found at "+url) - - def _download_svn(self, url, filename): - url = url.split('#',1)[0] # remove any fragment for svn's sake - self.info("Doing subversion checkout from %s to %s", url, filename) - os.system("svn checkout -q %s %s" % (url, filename)) - return filename - - def debug(self, msg, *args): - log.debug(msg, *args) - - def info(self, msg, *args): - log.info(msg, *args) - - def warn(self, msg, *args): - log.warn(msg, *args) - - - - - - - - - - - - -def fix_sf_url(url): - scheme, server, path, param, query, frag = urlparse.urlparse(url) - if server!='prdownloads.sourceforge.net': - return url - return urlparse.urlunparse( - (scheme, 'dl.sourceforge.net', 'sourceforge'+path, param, '', frag) - ) - -_sf_mirrors = [] - -def get_sf_ip(): - if not _sf_mirrors: - try: - _sf_mirrors[:] = socket.gethostbyname_ex('dl.sourceforge.net')[-1] - except socket.error: - # DNS-bl0ck1n9 f1r3w4llz sUx0rs! 
- _sf_mirrors[:] = ['dl.sourceforge.net'] - return random.choice(_sf_mirrors) diff --git a/Lib/setuptools/sandbox.py b/Lib/setuptools/sandbox.py deleted file mode 100755 index 606944b..0000000 --- a/Lib/setuptools/sandbox.py +++ /dev/null @@ -1,203 +0,0 @@ -import os, sys, __builtin__, tempfile -_os = sys.modules[os.name] -_open = open -from distutils.errors import DistutilsError -__all__ = [ - "AbstractSandbox", "DirectorySandbox", "SandboxViolation", "run_setup", -] - -def run_setup(setup_script, args): - """Run a distutils setup script, sandboxed in its directory""" - - old_dir = os.getcwd() - save_argv = sys.argv[:] - save_path = sys.path[:] - setup_dir = os.path.abspath(os.path.dirname(setup_script)) - temp_dir = os.path.join(setup_dir,'temp') - if not os.path.isdir(temp_dir): os.makedirs(temp_dir) - save_tmp = tempfile.tempdir - - try: - tempfile.tempdir = temp_dir - os.chdir(setup_dir) - try: - sys.argv[:] = [setup_script]+list(args) - sys.path.insert(0, setup_dir) - DirectorySandbox(setup_dir).run( - lambda: execfile( - "setup.py", - {'__file__':setup_script, '__name__':'__main__'} - ) - ) - except SystemExit, v: - if v.args and v.args[0]: - raise - # Normal exit, just return - finally: - os.chdir(old_dir) - sys.path[:] = save_path - sys.argv[:] = save_argv - tempfile.tempdir = save_tmp - -class AbstractSandbox: - """Wrap 'os' module and 'open()' builtin for virtualizing setup scripts""" - - _active = False - - def __init__(self): - self._attrs = [ - name for name in dir(_os) - if not name.startswith('_') and hasattr(self,name) - ] - - def _copy(self, source): - for name in self._attrs: - setattr(os, name, getattr(source,name)) - - def run(self, func): - """Run 'func' under os sandboxing""" - try: - self._copy(self) - __builtin__.open = __builtin__.file = self._open - self._active = True - return func() - finally: - self._active = False - __builtin__.open = __builtin__.file = _open - self._copy(_os) - - - def _mk_dual_path_wrapper(name): - original = getattr(_os,name) - def wrap(self,src,dst,*args,**kw): - if self._active: - src,dst = self._remap_pair(name,src,dst,*args,**kw) - return original(src,dst,*args,**kw) - return wrap - - - for name in ["rename", "link", "symlink"]: - if hasattr(_os,name): locals()[name] = _mk_dual_path_wrapper(name) - - - def _mk_single_path_wrapper(name, original=None): - original = original or getattr(_os,name) - def wrap(self,path,*args,**kw): - if self._active: - path = self._remap_input(name,path,*args,**kw) - return original(path,*args,**kw) - return wrap - - _open = _mk_single_path_wrapper('file', _open) - for name in [ - "stat", "listdir", "chdir", "open", "chmod", "chown", "mkdir", - "remove", "unlink", "rmdir", "utime", "lchown", "chroot", "lstat", - "startfile", "mkfifo", "mknod", "pathconf", "access" - ]: - if hasattr(_os,name): locals()[name] = _mk_single_path_wrapper(name) - - - def _mk_single_with_return(name): - original = getattr(_os,name) - def wrap(self,path,*args,**kw): - if self._active: - path = self._remap_input(name,path,*args,**kw) - return self._remap_output(name, original(path,*args,**kw)) - return original(path,*args,**kw) - return wrap - - for name in ['readlink', 'tempnam']: - if hasattr(_os,name): locals()[name] = _mk_single_with_return(name) - - def _mk_query(name): - original = getattr(_os,name) - def wrap(self,*args,**kw): - retval = original(*args,**kw) - if self._active: - return self._remap_output(name, retval) - return retval - return wrap - - for name in ['getcwd', 'tmpnam']: - if hasattr(_os,name): locals()[name] = 
_mk_query(name) - - def _validate_path(self,path): - """Called to remap or validate any path, whether input or output""" - return path - - def _remap_input(self,operation,path,*args,**kw): - """Called for path inputs""" - return self._validate_path(path) - - def _remap_output(self,operation,path): - """Called for path outputs""" - return self._validate_path(path) - - def _remap_pair(self,operation,src,dst,*args,**kw): - """Called for path pairs like rename, link, and symlink operations""" - return ( - self._remap_input(operation+'-from',src,*args,**kw), - self._remap_input(operation+'-to',dst,*args,**kw) - ) - - -class DirectorySandbox(AbstractSandbox): - """Restrict operations to a single subdirectory - pseudo-chroot""" - - write_ops = dict.fromkeys([ - "open", "chmod", "chown", "mkdir", "remove", "unlink", "rmdir", - "utime", "lchown", "chroot", "mkfifo", "mknod", "tempnam", - ]) - - def __init__(self,sandbox): - self._sandbox = os.path.normcase(os.path.realpath(sandbox)) - self._prefix = os.path.join(self._sandbox,'') - AbstractSandbox.__init__(self) - - def _violation(self, operation, *args, **kw): - raise SandboxViolation(operation, args, kw) - - def _open(self, path, mode='r', *args, **kw): - if mode not in ('r', 'rt', 'rb', 'rU') and not self._ok(path): - self._violation("open", path, mode, *args, **kw) - return _open(path,mode,*args,**kw) - - def tmpnam(self): - self._violation("tmpnam") - - def _ok(self,path): - active = self._active - try: - self._active = False - realpath = os.path.normcase(os.path.realpath(path)) - if realpath==self._sandbox or realpath.startswith(self._prefix): - return True - finally: - self._active = active - - def _remap_input(self,operation,path,*args,**kw): - """Called for path inputs""" - if operation in self.write_ops and not self._ok(path): - self._violation(operation, os.path.realpath(path), *args, **kw) - return path - - def _remap_pair(self,operation,src,dst,*args,**kw): - """Called for path pairs like rename, link, and symlink operations""" - if not self._ok(src) or not self._ok(dst): - self._violation(operation, src, dst, *args, **kw) - return (src,dst) - - -class SandboxViolation(DistutilsError): - """A setup script attempted to modify the filesystem outside the sandbox""" - - def __str__(self): - return """SandboxViolation: %s%r %s - -The package setup script has attempted to modify files on your system -that are not within the EasyInstall build area, and has been aborted. - -This package cannot be safely installed by EasyInstall, and may not -support alternate installation locations even if you run its setup -script by hand. Please inform the package's author and the EasyInstall -maintainers to find out if a fix or workaround is available.""" % self.args diff --git a/Lib/setuptools/site-patch.py b/Lib/setuptools/site-patch.py deleted file mode 100755 index b1b27b9..0000000 --- a/Lib/setuptools/site-patch.py +++ /dev/null @@ -1,74 +0,0 @@ -def __boot(): - import sys, imp, os, os.path - PYTHONPATH = os.environ.get('PYTHONPATH') - if PYTHONPATH is None or (sys.platform=='win32' and not PYTHONPATH): - PYTHONPATH = [] - else: - PYTHONPATH = PYTHONPATH.split(os.pathsep) - - pic = getattr(sys,'path_importer_cache',{}) - stdpath = sys.path[len(PYTHONPATH):] - mydir = os.path.dirname(__file__) - #print "searching",stdpath,sys.path - - for item in stdpath: - if item==mydir or not item: - continue # skip if current dir. 
on Windows, or my own directory - importer = pic.get(item) - if importer is not None: - loader = importer.find_module('site') - if loader is not None: - # This should actually reload the current module - loader.load_module('site') - break - else: - try: - stream, path, descr = imp.find_module('site',[item]) - except ImportError: - continue - if stream is None: - continue - try: - # This should actually reload the current module - imp.load_module('site',stream,path,descr) - finally: - stream.close() - break - else: - raise ImportError("Couldn't find the real 'site' module") - - #print "loaded", __file__ - - known_paths = dict([(makepath(item)[1],1) for item in sys.path]) # 2.2 comp - - oldpos = getattr(sys,'__egginsert',0) # save old insertion position - sys.__egginsert = 0 # and reset the current one - - for item in PYTHONPATH: - addsitedir(item) - - sys.__egginsert += oldpos # restore effective old position - - d,nd = makepath(stdpath[0]) - insert_at = None - new_path = [] - - for item in sys.path: - p,np = makepath(item) - - if np==nd and insert_at is None: - # We've hit the first 'system' path entry, so added entries go here - insert_at = len(new_path) - - if np in known_paths or insert_at is None: - new_path.append(item) - else: - # new path after the insert point, back-insert it - new_path.insert(insert_at, item) - insert_at += 1 - - sys.path[:] = new_path - -if __name__=='site': - __boot() - del __boot diff --git a/Lib/setuptools/tests/__init__.py b/Lib/setuptools/tests/__init__.py deleted file mode 100644 index 8a767dc..0000000 --- a/Lib/setuptools/tests/__init__.py +++ /dev/null @@ -1,364 +0,0 @@ -"""Tests for the 'setuptools' package""" - -from unittest import TestSuite, TestCase, makeSuite, defaultTestLoader -import distutils.core, distutils.cmd -from distutils.errors import DistutilsOptionError, DistutilsPlatformError -from distutils.errors import DistutilsSetupError -import setuptools, setuptools.dist -from setuptools import Feature -from distutils.core import Extension -from setuptools.depends import extract_constant, get_module_constant -from setuptools.depends import find_module, Require -from distutils.version import StrictVersion, LooseVersion -from distutils.util import convert_path -import sys, os.path - -def additional_tests(): - import doctest - return doctest.DocFileSuite( - 'api_tests.txt', optionflags=doctest.ELLIPSIS, package=__name__, - ) - - -def makeSetup(**args): - """Return distribution from 'setup(**args)', without executing commands""" - - distutils.core._setup_stop_after = "commandline" - - # Don't let system command line leak into tests! 
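A minimal sketch, with hypothetical paths and function names, of how the DirectorySandbox machinery from setuptools/sandbox.py above is meant to be used: any attempt to write outside the designated build area is turned into a SandboxViolation:

    import tempfile
    from setuptools.sandbox import DirectorySandbox, SandboxViolation

    build_area = tempfile.mkdtemp()

    def misbehave():
        # writes outside the sandboxed directory, so it should be rejected
        open('/tmp/outside-the-sandbox.txt', 'w')

    try:
        DirectorySandbox(build_area).run(misbehave)
    except SandboxViolation:
        print "write outside the build area was blocked"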
- args.setdefault('script_args',['install']) - - try: - return setuptools.setup(**args) - finally: - distutils.core_setup_stop_after = None - - - - - - - -class DependsTests(TestCase): - - def testExtractConst(self): - - from setuptools.depends import extract_constant - - def f1(): - global x,y,z - x = "test" - y = z - - # unrecognized name - self.assertEqual(extract_constant(f1.func_code,'q', -1), None) - - # constant assigned - self.assertEqual(extract_constant(f1.func_code,'x', -1), "test") - - # expression assigned - self.assertEqual(extract_constant(f1.func_code,'y', -1), -1) - - # recognized name, not assigned - self.assertEqual(extract_constant(f1.func_code,'z', -1), None) - - - def testFindModule(self): - self.assertRaises(ImportError, find_module, 'no-such.-thing') - self.assertRaises(ImportError, find_module, 'setuptools.non-existent') - f,p,i = find_module('setuptools.tests'); f.close() - - def testModuleExtract(self): - from distutils import __version__ - self.assertEqual( - get_module_constant('distutils','__version__'), __version__ - ) - self.assertEqual( - get_module_constant('sys','version'), sys.version - ) - self.assertEqual( - get_module_constant('setuptools.tests','__doc__'),__doc__ - ) - - def testRequire(self): - - req = Require('Distutils','1.0.3','distutils') - - self.assertEqual(req.name, 'Distutils') - self.assertEqual(req.module, 'distutils') - self.assertEqual(req.requested_version, '1.0.3') - self.assertEqual(req.attribute, '__version__') - self.assertEqual(req.full_name(), 'Distutils-1.0.3') - - from distutils import __version__ - self.assertEqual(req.get_version(), __version__) - self.failUnless(req.version_ok('1.0.9')) - self.failIf(req.version_ok('0.9.1')) - self.failIf(req.version_ok('unknown')) - - self.failUnless(req.is_present()) - self.failUnless(req.is_current()) - - req = Require('Distutils 3000','03000','distutils',format=LooseVersion) - self.failUnless(req.is_present()) - self.failIf(req.is_current()) - self.failIf(req.version_ok('unknown')) - - req = Require('Do-what-I-mean','1.0','d-w-i-m') - self.failIf(req.is_present()) - self.failIf(req.is_current()) - - req = Require('Tests', None, 'tests', homepage="http://example.com") - self.assertEqual(req.format, None) - self.assertEqual(req.attribute, None) - self.assertEqual(req.requested_version, None) - self.assertEqual(req.full_name(), 'Tests') - self.assertEqual(req.homepage, 'http://example.com') - - paths = [os.path.dirname(p) for p in __path__] - self.failUnless(req.is_present(paths)) - self.failUnless(req.is_current(paths)) - - - -class DistroTests(TestCase): - - def setUp(self): - self.e1 = Extension('bar.ext',['bar.c']) - self.e2 = Extension('c.y', ['y.c']) - - self.dist = makeSetup( - packages=['a', 'a.b', 'a.b.c', 'b', 'c'], - py_modules=['b.d','x'], - ext_modules = (self.e1, self.e2), - package_dir = {}, - ) - - - def testDistroType(self): - self.failUnless(isinstance(self.dist,setuptools.dist.Distribution)) - - - def testExcludePackage(self): - self.dist.exclude_package('a') - self.assertEqual(self.dist.packages, ['b','c']) - - self.dist.exclude_package('b') - self.assertEqual(self.dist.packages, ['c']) - self.assertEqual(self.dist.py_modules, ['x']) - self.assertEqual(self.dist.ext_modules, [self.e1, self.e2]) - - self.dist.exclude_package('c') - self.assertEqual(self.dist.packages, []) - self.assertEqual(self.dist.py_modules, ['x']) - self.assertEqual(self.dist.ext_modules, [self.e1]) - - # test removals from unspecified options - makeSetup().exclude_package('x') - - - - - - - - def 
testIncludeExclude(self): - # remove an extension - self.dist.exclude(ext_modules=[self.e1]) - self.assertEqual(self.dist.ext_modules, [self.e2]) - - # add it back in - self.dist.include(ext_modules=[self.e1]) - self.assertEqual(self.dist.ext_modules, [self.e2, self.e1]) - - # should not add duplicate - self.dist.include(ext_modules=[self.e1]) - self.assertEqual(self.dist.ext_modules, [self.e2, self.e1]) - - def testExcludePackages(self): - self.dist.exclude(packages=['c','b','a']) - self.assertEqual(self.dist.packages, []) - self.assertEqual(self.dist.py_modules, ['x']) - self.assertEqual(self.dist.ext_modules, [self.e1]) - - def testEmpty(self): - dist = makeSetup() - dist.include(packages=['a'], py_modules=['b'], ext_modules=[self.e2]) - dist = makeSetup() - dist.exclude(packages=['a'], py_modules=['b'], ext_modules=[self.e2]) - - def testContents(self): - self.failUnless(self.dist.has_contents_for('a')) - self.dist.exclude_package('a') - self.failIf(self.dist.has_contents_for('a')) - - self.failUnless(self.dist.has_contents_for('b')) - self.dist.exclude_package('b') - self.failIf(self.dist.has_contents_for('b')) - - self.failUnless(self.dist.has_contents_for('c')) - self.dist.exclude_package('c') - self.failIf(self.dist.has_contents_for('c')) - - - - - def testInvalidIncludeExclude(self): - self.assertRaises(DistutilsSetupError, - self.dist.include, nonexistent_option='x' - ) - self.assertRaises(DistutilsSetupError, - self.dist.exclude, nonexistent_option='x' - ) - self.assertRaises(DistutilsSetupError, - self.dist.include, packages={'x':'y'} - ) - self.assertRaises(DistutilsSetupError, - self.dist.exclude, packages={'x':'y'} - ) - self.assertRaises(DistutilsSetupError, - self.dist.include, ext_modules={'x':'y'} - ) - self.assertRaises(DistutilsSetupError, - self.dist.exclude, ext_modules={'x':'y'} - ) - - self.assertRaises(DistutilsSetupError, - self.dist.include, package_dir=['q'] - ) - self.assertRaises(DistutilsSetupError, - self.dist.exclude, package_dir=['q'] - ) - - - - - - - - - - - - - - - -class FeatureTests(TestCase): - - def setUp(self): - self.req = Require('Distutils','1.0.3','distutils') - self.dist = makeSetup( - features={ - 'foo': Feature("foo",standard=True,require_features=['baz',self.req]), - 'bar': Feature("bar", standard=True, packages=['pkg.bar'], - py_modules=['bar_et'], remove=['bar.ext'], - ), - 'baz': Feature( - "baz", optional=False, packages=['pkg.baz'], - scripts = ['scripts/baz_it'], - libraries=[('libfoo','foo/foofoo.c')] - ), - 'dwim': Feature("DWIM", available=False, remove='bazish'), - }, - script_args=['--without-bar', 'install'], - packages = ['pkg.bar', 'pkg.foo'], - py_modules = ['bar_et', 'bazish'], - ext_modules = [Extension('bar.ext',['bar.c'])] - ) - - def testDefaults(self): - self.failIf( - Feature( - "test",standard=True,remove='x',available=False - ).include_by_default() - ) - self.failUnless( - Feature("test",standard=True,remove='x').include_by_default() - ) - # Feature must have either kwargs, removes, or require_features - self.assertRaises(DistutilsSetupError, Feature, "test") - - def testAvailability(self): - self.assertRaises( - DistutilsPlatformError, - self.dist.features['dwim'].include_in, self.dist - ) - - def testFeatureOptions(self): - dist = self.dist - self.failUnless( - ('with-dwim',None,'include DWIM') in dist.feature_options - ) - self.failUnless( - ('without-dwim',None,'exclude DWIM (default)') in dist.feature_options - ) - self.failUnless( - ('with-bar',None,'include bar (default)') in dist.feature_options - ) - 
self.failUnless( - ('without-bar',None,'exclude bar') in dist.feature_options - ) - self.assertEqual(dist.feature_negopt['without-foo'],'with-foo') - self.assertEqual(dist.feature_negopt['without-bar'],'with-bar') - self.assertEqual(dist.feature_negopt['without-dwim'],'with-dwim') - self.failIf('without-baz' in dist.feature_negopt) - - def testUseFeatures(self): - dist = self.dist - self.assertEqual(dist.with_foo,1) - self.assertEqual(dist.with_bar,0) - self.assertEqual(dist.with_baz,1) - self.failIf('bar_et' in dist.py_modules) - self.failIf('pkg.bar' in dist.packages) - self.failUnless('pkg.baz' in dist.packages) - self.failUnless('scripts/baz_it' in dist.scripts) - self.failUnless(('libfoo','foo/foofoo.c') in dist.libraries) - self.assertEqual(dist.ext_modules,[]) - self.assertEqual(dist.require_features, [self.req]) - - # If we ask for bar, it should fail because we explicitly disabled - # it on the command line - self.assertRaises(DistutilsOptionError, dist.include_feature, 'bar') - - def testFeatureWithInvalidRemove(self): - self.assertRaises( - SystemExit, makeSetup, features = {'x':Feature('x', remove='y')} - ) - -class TestCommandTests(TestCase): - - def testTestIsCommand(self): - test_cmd = makeSetup().get_command_obj('test') - self.failUnless(isinstance(test_cmd, distutils.cmd.Command)) - - def testLongOptSuiteWNoDefault(self): - ts1 = makeSetup(script_args=['test','--test-suite=foo.tests.suite']) - ts1 = ts1.get_command_obj('test') - ts1.ensure_finalized() - self.assertEqual(ts1.test_suite, 'foo.tests.suite') - - def testDefaultSuite(self): - ts2 = makeSetup(test_suite='bar.tests.suite').get_command_obj('test') - ts2.ensure_finalized() - self.assertEqual(ts2.test_suite, 'bar.tests.suite') - - def testDefaultWModuleOnCmdLine(self): - ts3 = makeSetup( - test_suite='bar.tests', - script_args=['test','-m','foo.tests'] - ).get_command_obj('test') - ts3.ensure_finalized() - self.assertEqual(ts3.test_module, 'foo.tests') - self.assertEqual(ts3.test_suite, 'foo.tests.test_suite') - - def testConflictingOptions(self): - ts4 = makeSetup( - script_args=['test','-m','bar.tests', '-s','foo.tests.suite'] - ).get_command_obj('test') - self.assertRaises(DistutilsOptionError, ts4.ensure_finalized) - - def testNoSuite(self): - ts5 = makeSetup().get_command_obj('test') - ts5.ensure_finalized() - self.assertEqual(ts5.test_suite, None) diff --git a/Lib/setuptools/tests/api_tests.txt b/Lib/setuptools/tests/api_tests.txt deleted file mode 100755 index 735ad8d..0000000 --- a/Lib/setuptools/tests/api_tests.txt +++ /dev/null @@ -1,330 +0,0 @@ -Pluggable Distributions of Python Software -========================================== - -Distributions -------------- - -A "Distribution" is a collection of files that represent a "Release" of a -"Project" as of a particular point in time, denoted by a -"Version":: - - >>> import sys, pkg_resources - >>> from pkg_resources import Distribution - >>> Distribution(project_name="Foo", version="1.2") - Foo 1.2 - -Distributions have a location, which can be a filename, URL, or really anything -else you care to use:: - - >>> dist = Distribution( - ... location="http://example.com/something", - ... project_name="Bar", version="0.9" - ... 
) - - >>> dist - Bar 0.9 (http://example.com/something) - - -Distributions have various introspectable attributes:: - - >>> dist.location - 'http://example.com/something' - - >>> dist.project_name - 'Bar' - - >>> dist.version - '0.9' - - >>> dist.py_version == sys.version[:3] - True - - >>> print dist.platform - None - -Including various computed attributes:: - - >>> from pkg_resources import parse_version - >>> dist.parsed_version == parse_version(dist.version) - True - - >>> dist.key # case-insensitive form of the project name - 'bar' - -Distributions are compared (and hashed) by version first:: - - >>> Distribution(version='1.0') == Distribution(version='1.0') - True - >>> Distribution(version='1.0') == Distribution(version='1.1') - False - >>> Distribution(version='1.0') < Distribution(version='1.1') - True - -but also by project name (case-insensitive), platform, Python version, -location, etc.:: - - >>> Distribution(project_name="Foo",version="1.0") == \ - ... Distribution(project_name="Foo",version="1.0") - True - - >>> Distribution(project_name="Foo",version="1.0") == \ - ... Distribution(project_name="foo",version="1.0") - True - - >>> Distribution(project_name="Foo",version="1.0") == \ - ... Distribution(project_name="Foo",version="1.1") - False - - >>> Distribution(project_name="Foo",py_version="2.3",version="1.0") == \ - ... Distribution(project_name="Foo",py_version="2.4",version="1.0") - False - - >>> Distribution(location="spam",version="1.0") == \ - ... Distribution(location="spam",version="1.0") - True - - >>> Distribution(location="spam",version="1.0") == \ - ... Distribution(location="baz",version="1.0") - False - - - -Hash and compare distribution by prio/plat - -Get version from metadata -provider capabilities -egg_name() -as_requirement() -from_location, from_filename (w/path normalization) - -Releases may have zero or more "Requirements", which indicate -what releases of another project the release requires in order to -function. A Requirement names the other project, expresses some criteria -as to what releases of that project are acceptable, and lists any "Extras" -that the requiring release may need from that project. (An Extra is an -optional feature of a Release, that can only be used if its additional -Requirements are satisfied.) - - - -The Working Set ---------------- - -A collection of active distributions is called a Working Set. Note that a -Working Set can contain any importable distribution, not just pluggable ones. -For example, the Python standard library is an importable distribution that -will usually be part of the Working Set, even though it is not pluggable. -Similarly, when you are doing development work on a project, the files you are -editing are also a Distribution. (And, with a little attention to the -directory names used, and including some additional metadata, such a -"development distribution" can be made pluggable as well.) - - >>> from pkg_resources import WorkingSet - -A working set's entries are the sys.path entries that correspond to the active -distributions. 
By default, the working set's entries are the items on -``sys.path``:: - - >>> ws = WorkingSet() - >>> ws.entries == sys.path - True - -But you can also create an empty working set explicitly, and add distributions -to it:: - - >>> ws = WorkingSet([]) - >>> ws.add(dist) - >>> ws.entries - ['http://example.com/something'] - >>> dist in ws - True - >>> Distribution('foo',version="") in ws - False - -And you can iterate over its distributions:: - - >>> list(ws) - [Bar 0.9 (http://example.com/something)] - -Adding the same distribution more than once is a no-op:: - - >>> ws.add(dist) - >>> list(ws) - [Bar 0.9 (http://example.com/something)] - -For that matter, adding multiple distributions for the same project also does -nothing, because a working set can only hold one active distribution per -project -- the first one added to it:: - - >>> ws.add( - ... Distribution( - ... 'http://example.com/something', project_name="Bar", - ... version="7.2" - ... ) - ... ) - >>> list(ws) - [Bar 0.9 (http://example.com/something)] - -You can append a path entry to a working set using ``add_entry()``:: - - >>> ws.entries - ['http://example.com/something'] - >>> ws.add_entry(pkg_resources.__file__) - >>> ws.entries - ['http://example.com/something', '...pkg_resources.py...'] - -Multiple additions result in multiple entries, even if the entry is already in -the working set (because ``sys.path`` can contain the same entry more than -once):: - - >>> ws.add_entry(pkg_resources.__file__) - >>> ws.entries - ['...example.com...', '...pkg_resources...', '...pkg_resources...'] - -And you can specify the path entry a distribution was found under, using the -optional second parameter to ``add()``:: - - >>> ws = WorkingSet([]) - >>> ws.add(dist,"foo") - >>> ws.entries - ['foo'] - -But even if a distribution is found under multiple path entries, it still only -shows up once when iterating the working set: - - >>> ws.add_entry(ws.entries[0]) - >>> list(ws) - [Bar 0.9 (http://example.com/something)] - -You can ask a WorkingSet to ``find()`` a distribution matching a requirement:: - - >>> from pkg_resources import Requirement - >>> print ws.find(Requirement.parse("Foo==1.0")) # no match, return None - None - - >>> ws.find(Requirement.parse("Bar==0.9")) # match, return distribution - Bar 0.9 (http://example.com/something) - -Note that asking for a conflicting version of a distribution already in a -working set triggers a ``pkg_resources.VersionConflict`` error: - - >>> ws.find(Requirement.parse("Bar==1.0")) # doctest: +NORMALIZE_WHITESPACE - Traceback (most recent call last): - ... - VersionConflict: (Bar 0.9 (http://example.com/something), - Requirement.parse('Bar==1.0')) - -You can subscribe a callback function to receive notifications whenever a new -distribution is added to a working set. 
The callback is immediately invoked -once for each existing distribution in the working set, and then is called -again for new distributions added thereafter:: - - >>> def added(dist): print "Added", dist - >>> ws.subscribe(added) - Added Bar 0.9 - >>> foo12 = Distribution(project_name="Foo", version="1.2", location="f12") - >>> ws.add(foo12) - Added Foo 1.2 - -Note, however, that only the first distribution added for a given project name -will trigger a callback, even during the initial ``subscribe()`` callback:: - - >>> foo14 = Distribution(project_name="Foo", version="1.4", location="f14") - >>> ws.add(foo14) # no callback, because Foo 1.2 is already active - - >>> ws = WorkingSet([]) - >>> ws.add(foo12) - >>> ws.add(foo14) - >>> ws.subscribe(added) - Added Foo 1.2 - -And adding a callback more than once has no effect, either:: - - >>> ws.subscribe(added) # no callbacks - - # and no double-callbacks on subsequent additions, either - >>> just_a_test = Distribution(project_name="JustATest", version="0.99") - >>> ws.add(just_a_test) - Added JustATest 0.99 - - -Finding Plugins ---------------- - -``WorkingSet`` objects can be used to figure out what plugins in an -``Environment`` can be loaded without any resolution errors:: - - >>> from pkg_resources import Environment - - >>> plugins = Environment([]) # normally, a list of plugin directories - >>> plugins.add(foo12) - >>> plugins.add(foo14) - >>> plugins.add(just_a_test) - -In the simplest case, we just get the newest version of each distribution in -the plugin environment:: - - >>> ws = WorkingSet([]) - >>> ws.find_plugins(plugins) - ([JustATest 0.99, Foo 1.4 (f14)], {}) - -But if there's a problem with a version conflict or missing requirements, the -method falls back to older versions, and the error info dict will contain an -exception instance for each unloadable plugin:: - - >>> ws.add(foo12) # this will conflict with Foo 1.4 - >>> ws.find_plugins(plugins) - ([JustATest 0.99, Foo 1.2 (f12)], {Foo 1.4 (f14): VersionConflict(...)}) - -But if you disallow fallbacks, the failed plugin will be skipped instead of -trying older versions:: - - >>> ws.find_plugins(plugins, fallback=False) - ([JustATest 0.99], {Foo 1.4 (f14): VersionConflict(...)}) - - - -Platform Compatibility Rules ----------------------------- - -On the Mac, there are potential compatibility issues for modules compiled -on newer versions of Mac OS X than what the user is running. Additionally, -Mac OS X will soon have two platforms to contend with: Intel and PowerPC. - -Basic equality works as on other platforms:: - - >>> from pkg_resources import compatible_platforms as cp - >>> reqd = 'macosx-10.4-ppc' - >>> cp(reqd, reqd) - True - >>> cp("win32", reqd) - False - -Distributions made on other machine types are not compatible:: - - >>> cp("macosx-10.4-i386", reqd) - False - -Distributions made on earlier versions of the OS are compatible, as -long as they are from the same top-level version. 
The patchlevel version -number does not matter:: - - >>> cp("macosx-10.4-ppc", reqd) - True - >>> cp("macosx-10.3-ppc", reqd) - True - >>> cp("macosx-10.5-ppc", reqd) - False - >>> cp("macosx-9.5-ppc", reqd) - False - -Backwards compatibility for packages made via earlier versions of -setuptools is provided as well:: - - >>> cp("darwin-8.2.0-Power_Macintosh", reqd) - True - >>> cp("darwin-7.2.0-Power_Macintosh", reqd) - True - >>> cp("darwin-8.2.0-Power_Macintosh", "macosx-10.3-ppc") - False - diff --git a/Lib/setuptools/tests/test_resources.py b/Lib/setuptools/tests/test_resources.py deleted file mode 100644 index f32c72e..0000000 --- a/Lib/setuptools/tests/test_resources.py +++ /dev/null @@ -1,483 +0,0 @@ -from unittest import TestCase, makeSuite -from pkg_resources import * -import pkg_resources, sys -from sets import ImmutableSet - -class Metadata(EmptyProvider): - """Mock object to return metadata as if from an on-disk distribution""" - - def __init__(self,*pairs): - self.metadata = dict(pairs) - - def has_metadata(self,name): - return name in self.metadata - - def get_metadata(self,name): - return self.metadata[name] - - def get_metadata_lines(self,name): - return yield_lines(self.get_metadata(name)) - - -class DistroTests(TestCase): - - def testCollection(self): - # empty path should produce no distributions - ad = Environment([], platform=None, python=None) - self.assertEqual(list(ad), []) - self.assertEqual(ad['FooPkg'],[]) - - ad.add(Distribution.from_filename("FooPkg-1.3_1.egg")) - ad.add(Distribution.from_filename("FooPkg-1.4-py2.4-win32.egg")) - ad.add(Distribution.from_filename("FooPkg-1.2-py2.4.egg")) - - # Name is in there now - self.failUnless(ad['FooPkg']) - - # But only 1 package - self.assertEqual(list(ad), ['foopkg']) - - - - # Distributions sort by version - self.assertEqual( - [dist.version for dist in ad['FooPkg']], ['1.4','1.3-1','1.2'] - ) - # Removing a distribution leaves sequence alone - ad.remove(ad['FooPkg'][1]) - self.assertEqual( - [dist.version for dist in ad['FooPkg']], ['1.4','1.2'] - ) - # And inserting adds them in order - ad.add(Distribution.from_filename("FooPkg-1.9.egg")) - self.assertEqual( - [dist.version for dist in ad['FooPkg']], ['1.9','1.4','1.2'] - ) - - ws = WorkingSet([]) - foo12 = Distribution.from_filename("FooPkg-1.2-py2.4.egg") - foo14 = Distribution.from_filename("FooPkg-1.4-py2.4-win32.egg") - req, = parse_requirements("FooPkg>=1.3") - - # Nominal case: no distros on path, should yield all applicable - self.assertEqual(ad.best_match(req,ws).version, '1.9') - # If a matching distro is already installed, should return only that - ws.add(foo14); self.assertEqual(ad.best_match(req,ws).version, '1.4') - - # If the first matching distro is unsuitable, it's a version conflict - ws = WorkingSet([]); ws.add(foo12); ws.add(foo14) - self.assertRaises(VersionConflict, ad.best_match, req, ws) - - # If more than one match on the path, the first one takes precedence - ws = WorkingSet([]); ws.add(foo14); ws.add(foo12); ws.add(foo14); - self.assertEqual(ad.best_match(req,ws).version, '1.4') - - def checkFooPkg(self,d): - self.assertEqual(d.project_name, "FooPkg") - self.assertEqual(d.key, "foopkg") - self.assertEqual(d.version, "1.3-1") - self.assertEqual(d.py_version, "2.4") - self.assertEqual(d.platform, "win32") - self.assertEqual(d.parsed_version, parse_version("1.3-1")) - - def testDistroBasics(self): - d = Distribution( - "/some/path", - project_name="FooPkg",version="1.3-1",py_version="2.4",platform="win32" - ) - self.checkFooPkg(d) - - d = 
Distribution("/some/path") - self.assertEqual(d.py_version, sys.version[:3]) - self.assertEqual(d.platform, None) - - def testDistroParse(self): - d = Distribution.from_filename("FooPkg-1.3_1-py2.4-win32.egg") - self.checkFooPkg(d) - d = Distribution.from_filename("FooPkg-1.3_1-py2.4-win32.egg-info") - self.checkFooPkg(d) - - def testDistroMetadata(self): - d = Distribution( - "/some/path", project_name="FooPkg", py_version="2.4", platform="win32", - metadata = Metadata( - ('PKG-INFO',"Metadata-Version: 1.0\nVersion: 1.3-1\n") - ) - ) - self.checkFooPkg(d) - - - def distRequires(self, txt): - return Distribution("/foo", metadata=Metadata(('depends.txt', txt))) - - def checkRequires(self, dist, txt, extras=()): - self.assertEqual( - list(dist.requires(extras)), - list(parse_requirements(txt)) - ) - - def testDistroDependsSimple(self): - for v in "Twisted>=1.5", "Twisted>=1.5\nZConfig>=2.0": - self.checkRequires(self.distRequires(v), v) - - - def testResolve(self): - ad = Environment([]); ws = WorkingSet([]) - # Resolving no requirements -> nothing to install - self.assertEqual( list(ws.resolve([],ad)), [] ) - # Request something not in the collection -> DistributionNotFound - self.assertRaises( - DistributionNotFound, ws.resolve, parse_requirements("Foo"), ad - ) - Foo = Distribution.from_filename( - "/foo_dir/Foo-1.2.egg", - metadata=Metadata(('depends.txt', "[bar]\nBaz>=2.0")) - ) - ad.add(Foo); ad.add(Distribution.from_filename("Foo-0.9.egg")) - - # Request thing(s) that are available -> list to activate - for i in range(3): - targets = list(ws.resolve(parse_requirements("Foo"), ad)) - self.assertEqual(targets, [Foo]) - map(ws.add,targets) - self.assertRaises(VersionConflict, ws.resolve, - parse_requirements("Foo==0.9"), ad) - ws = WorkingSet([]) # reset - - # Request an extra that causes an unresolved dependency for "Baz" - self.assertRaises( - DistributionNotFound, ws.resolve,parse_requirements("Foo[bar]"), ad - ) - Baz = Distribution.from_filename( - "/foo_dir/Baz-2.1.egg", metadata=Metadata(('depends.txt', "Foo")) - ) - ad.add(Baz) - - # Activation list now includes resolved dependency - self.assertEqual( - list(ws.resolve(parse_requirements("Foo[bar]"), ad)), [Foo,Baz] - ) - # Requests for conflicting versions produce VersionConflict - self.assertRaises( VersionConflict, - ws.resolve, parse_requirements("Foo==1.2\nFoo!=1.2"), ad - ) - - def testDistroDependsOptions(self): - d = self.distRequires(""" - Twisted>=1.5 - [docgen] - ZConfig>=2.0 - docutils>=0.3 - [fastcgi] - fcgiapp>=0.1""") - self.checkRequires(d,"Twisted>=1.5") - self.checkRequires( - d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3".split(), ["docgen"] - ) - self.checkRequires( - d,"Twisted>=1.5 fcgiapp>=0.1".split(), ["fastcgi"] - ) - self.checkRequires( - d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3 fcgiapp>=0.1".split(), - ["docgen","fastcgi"] - ) - self.checkRequires( - d,"Twisted>=1.5 fcgiapp>=0.1 ZConfig>=2.0 docutils>=0.3".split(), - ["fastcgi", "docgen"] - ) - self.assertRaises(UnknownExtra, d.requires, ["foo"]) - - - - - - - - - - - - - - - - - -class EntryPointTests(TestCase): - - def assertfields(self, ep): - self.assertEqual(ep.name,"foo") - self.assertEqual(ep.module_name,"setuptools.tests.test_resources") - self.assertEqual(ep.attrs, ("EntryPointTests",)) - self.assertEqual(ep.extras, ("x",)) - self.failUnless(ep.load() is EntryPointTests) - self.assertEqual( - str(ep), - "foo = setuptools.tests.test_resources:EntryPointTests [x]" - ) - - def setUp(self): - self.dist = Distribution.from_filename( - 
"FooPkg-1.2-py2.4.egg", metadata=Metadata(('requires.txt','[x]'))) - - def testBasics(self): - ep = EntryPoint( - "foo", "setuptools.tests.test_resources", ["EntryPointTests"], - ["x"], self.dist - ) - self.assertfields(ep) - - def testParse(self): - s = "foo = setuptools.tests.test_resources:EntryPointTests [x]" - ep = EntryPoint.parse(s, self.dist) - self.assertfields(ep) - - ep = EntryPoint.parse("bar baz= spammity[PING]") - self.assertEqual(ep.name,"bar baz") - self.assertEqual(ep.module_name,"spammity") - self.assertEqual(ep.attrs, ()) - self.assertEqual(ep.extras, ("ping",)) - - ep = EntryPoint.parse(" fizzly = wocka:foo") - self.assertEqual(ep.name,"fizzly") - self.assertEqual(ep.module_name,"wocka") - self.assertEqual(ep.attrs, ("foo",)) - self.assertEqual(ep.extras, ()) - - def testRejects(self): - for ep in [ - "foo", "x=1=2", "x=a:b:c", "q=x/na", "fez=pish:tush-z", "x=f[a]>2", - ]: - try: EntryPoint.parse(ep) - except ValueError: pass - else: raise AssertionError("Should've been bad", ep) - - def checkSubMap(self, m): - self.assertEqual(str(m), - "{" - "'feature2': EntryPoint.parse(" - "'feature2 = another.module:SomeClass [extra1,extra2]'), " - "'feature1': EntryPoint.parse(" - "'feature1 = somemodule:somefunction')" - "}" - ) - - submap_str = """ - # define features for blah blah - feature1 = somemodule:somefunction - feature2 = another.module:SomeClass [extra1,extra2] - """ - - def testParseList(self): - self.checkSubMap(EntryPoint.parse_group("xyz", self.submap_str)) - self.assertRaises(ValueError, EntryPoint.parse_group, "x a", "foo=bar") - self.assertRaises(ValueError, EntryPoint.parse_group, "x", - ["foo=baz", "foo=bar"]) - - def testParseMap(self): - m = EntryPoint.parse_map({'xyz':self.submap_str}) - self.checkSubMap(m['xyz']) - self.assertEqual(m.keys(),['xyz']) - m = EntryPoint.parse_map("[xyz]\n"+self.submap_str) - self.checkSubMap(m['xyz']) - self.assertEqual(m.keys(),['xyz']) - self.assertRaises(ValueError, EntryPoint.parse_map, ["[xyz]", "[xyz]"]) - self.assertRaises(ValueError, EntryPoint.parse_map, self.submap_str) - - -class RequirementsTests(TestCase): - - def testBasics(self): - r = Requirement.parse("Twisted>=1.2") - self.assertEqual(str(r),"Twisted>=1.2") - self.assertEqual(repr(r),"Requirement.parse('Twisted>=1.2')") - self.assertEqual(r, Requirement("Twisted", [('>=','1.2')], ())) - self.assertEqual(r, Requirement("twisTed", [('>=','1.2')], ())) - self.assertNotEqual(r, Requirement("Twisted", [('>=','2.0')], ())) - self.assertNotEqual(r, Requirement("Zope", [('>=','1.2')], ())) - self.assertNotEqual(r, Requirement("Zope", [('>=','3.0')], ())) - self.assertNotEqual(r, Requirement.parse("Twisted[extras]>=1.2")) - - def testOrdering(self): - r1 = Requirement("Twisted", [('==','1.2c1'),('>=','1.2')], ()) - r2 = Requirement("Twisted", [('>=','1.2'),('==','1.2c1')], ()) - self.assertEqual(r1,r2) - self.assertEqual(str(r1),str(r2)) - self.assertEqual(str(r2),"Twisted==1.2c1,>=1.2") - - def testBasicContains(self): - r = Requirement("Twisted", [('>=','1.2')], ()) - foo_dist = Distribution.from_filename("FooPkg-1.3_1.egg") - twist11 = Distribution.from_filename("Twisted-1.1.egg") - twist12 = Distribution.from_filename("Twisted-1.2.egg") - self.failUnless(parse_version('1.2') in r) - self.failUnless(parse_version('1.1') not in r) - self.failUnless('1.2' in r) - self.failUnless('1.1' not in r) - self.failUnless(foo_dist not in r) - self.failUnless(twist11 not in r) - self.failUnless(twist12 in r) - - def testAdvancedContains(self): - r, = 
parse_requirements("Foo>=1.2,<=1.3,==1.9,>2.0,!=2.5,<3.0,==4.5") - for v in ('1.2','1.2.2','1.3','1.9','2.0.1','2.3','2.6','3.0c1','4.5'): - self.failUnless(v in r, (v,r)) - for v in ('1.2c1','1.3.1','1.5','1.9.1','2.0','2.5','3.0','4.0'): - self.failUnless(v not in r, (v,r)) - - - def testOptionsAndHashing(self): - r1 = Requirement.parse("Twisted[foo,bar]>=1.2") - r2 = Requirement.parse("Twisted[bar,FOO]>=1.2") - r3 = Requirement.parse("Twisted[BAR,FOO]>=1.2.0") - self.assertEqual(r1,r2) - self.assertEqual(r1,r3) - self.assertEqual(r1.extras, ("foo","bar")) - self.assertEqual(r2.extras, ("bar","foo")) # extras are normalized - self.assertEqual(hash(r1), hash(r2)) - self.assertEqual( - hash(r1), hash(("twisted", ((">=",parse_version("1.2")),), - ImmutableSet(["foo","bar"]))) - ) - - def testVersionEquality(self): - r1 = Requirement.parse("setuptools==0.3a2") - r2 = Requirement.parse("setuptools!=0.3a4") - d = Distribution.from_filename - - self.failIf(d("setuptools-0.3a4.egg") in r1) - self.failIf(d("setuptools-0.3a1.egg") in r1) - self.failIf(d("setuptools-0.3a4.egg") in r2) - - self.failUnless(d("setuptools-0.3a2.egg") in r1) - self.failUnless(d("setuptools-0.3a2.egg") in r2) - self.failUnless(d("setuptools-0.3a3.egg") in r2) - self.failUnless(d("setuptools-0.3a5.egg") in r2) - - - - - - - - - - - - - - -class ParseTests(TestCase): - - def testEmptyParse(self): - self.assertEqual(list(parse_requirements('')), []) - - def testYielding(self): - for inp,out in [ - ([], []), ('x',['x']), ([[]],[]), (' x\n y', ['x','y']), - (['x\n\n','y'], ['x','y']), - ]: - self.assertEqual(list(pkg_resources.yield_lines(inp)),out) - - def testSplitting(self): - self.assertEqual( - list( - pkg_resources.split_sections(""" - x - [Y] - z - - a - [b ] - # foo - c - [ d] - [q] - v - """ - ) - ), - [(None,["x"]), ("Y",["z","a"]), ("b",["c"]), ("d",[]), ("q",["v"])] - ) - self.assertRaises(ValueError,list,pkg_resources.split_sections("[foo")) - - def testSafeName(self): - self.assertEqual(safe_name("adns-python"), "adns-python") - self.assertEqual(safe_name("WSGI Utils"), "WSGI-Utils") - self.assertEqual(safe_name("WSGI Utils"), "WSGI-Utils") - self.assertEqual(safe_name("Money$$$Maker"), "Money-Maker") - self.assertNotEqual(safe_name("peak.web"), "peak-web") - - def testSafeVersion(self): - self.assertEqual(safe_version("1.2-1"), "1.2-1") - self.assertEqual(safe_version("1.2 alpha"), "1.2.alpha") - self.assertEqual(safe_version("2.3.4 20050521"), "2.3.4.20050521") - self.assertEqual(safe_version("Money$$$Maker"), "Money-Maker") - self.assertEqual(safe_version("peak.web"), "peak.web") - - def testSimpleRequirements(self): - self.assertEqual( - list(parse_requirements('Twis-Ted>=1.2-1')), - [Requirement('Twis-Ted',[('>=','1.2-1')], ())] - ) - self.assertEqual( - list(parse_requirements('Twisted >=1.2, \ # more\n<2.0')), - [Requirement('Twisted',[('>=','1.2'),('<','2.0')], ())] - ) - self.assertEqual( - Requirement.parse("FooBar==1.99a3"), - Requirement("FooBar", [('==','1.99a3')], ()) - ) - self.assertRaises(ValueError,Requirement.parse,">=2.3") - self.assertRaises(ValueError,Requirement.parse,"x\\") - self.assertRaises(ValueError,Requirement.parse,"x==2 q") - self.assertRaises(ValueError,Requirement.parse,"X==1\nY==2") - self.assertRaises(ValueError,Requirement.parse,"#") - - def testVersionEquality(self): - def c(s1,s2): - p1, p2 = parse_version(s1),parse_version(s2) - self.assertEqual(p1,p2, (s1,s2,p1,p2)) - - c('1.2-rc1', '1.2rc1') - c('0.4', '0.4.0') - c('0.4.0.0', '0.4.0') - c('0.4.0-0', '0.4-0') - c('0pl1', 
'0.0pl1') - c('0pre1', '0.0c1') - c('0.0.0preview1', '0c1') - c('0.0c1', '0-rc1') - c('1.2a1', '1.2.a.1'); c('1.2...a', '1.2a') - - def testVersionOrdering(self): - def c(s1,s2): - p1, p2 = parse_version(s1),parse_version(s2) - self.failUnless(p1: big-endian, std. size & alignment + !: same as > + +The remaining chars indicate types of args and must match exactly; +these can be preceded by a decimal repeat count: + x: pad byte (no data); c:char; b:signed byte; B:unsigned byte; + h:short; H:unsigned short; i:int; I:unsigned int; + l:long; L:unsigned long; f:float; d:double. +Special cases (preceding decimal count indicates length): + s:string (array of char); p: pascal string (with count byte). +Special case (only available in native format): + P:an integer type that is wide enough to hold a pointer. +Special case (not in native mode unless 'long long' in platform C): + q:long long; Q:unsigned long long +Whitespace between formats is ignored. + +The variable struct.error is an exception raised on errors. +""" +__version__ = '0.1' + +from _struct import Struct, error + +_MAXCACHE = 100 +_cache = {} + +def _compile(fmt): + # Internal: compile struct pattern + if len(_cache) >= _MAXCACHE: + _cache.clear() + s = Struct(fmt) + _cache[fmt] = s + return s + +def calcsize(fmt): + """ + Return size of C struct described by format string fmt. + See struct.__doc__ for more on format strings. + """ + try: + o = _cache[fmt] + except KeyError: + o = _compile(fmt) + return o.size + +def pack(fmt, *args): + """ + Return string containing values v1, v2, ... packed according to fmt. + See struct.__doc__ for more on format strings. + """ + try: + o = _cache[fmt] + except KeyError: + o = _compile(fmt) + return o.pack(*args) + +def pack_to(fmt, buf, offset, *args): + """ + Pack the values v2, v2, ... according to fmt, write + the packed bytes into the writable buffer buf starting at offset. + See struct.__doc__ for more on format strings. + """ + try: + o = _cache[fmt] + except KeyError: + o = _compile(fmt) + return o.pack_to(buf, offset, *args) + +def unpack(fmt, s): + """ + Unpack the string, containing packed C structure data, according + to fmt. Requires len(string)==calcsize(fmt). + See struct.__doc__ for more on format strings. + """ + try: + o = _cache[fmt] + except KeyError: + o = _compile(fmt) + return o.unpack(s) + +def unpack_from(fmt, buf, offset=0): + """ + Unpack the buffer, containing packed C structure data, according to + fmt starting at offset. Requires len(buffer[offset:]) >= calcsize(fmt). + See struct.__doc__ for more on format strings. + """ + try: + o = _cache[fmt] + except KeyError: + o = _compile(fmt) + return o.unpack_from(buf, offset) diff --git a/Lib/subprocess.py b/Lib/subprocess.py index 3cd0df5..19fe847 100644 --- a/Lib/subprocess.py +++ b/Lib/subprocess.py @@ -872,7 +872,7 @@ class Popen(object): # object do the translation: It is based on stdio, which is # impossible to combine with select (unless forcing no # buffering). - if self.universal_newlines and hasattr(open, 'newlines'): + if self.universal_newlines and hasattr(file, 'newlines'): if stdout: stdout = self._translate_newlines(stdout) if stderr: @@ -1141,7 +1141,7 @@ class Popen(object): # object do the translation: It is based on stdio, which is # impossible to combine with select (unless forcing no # buffering). 
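As an aside on the new Lib/struct.py shown above (this is an illustrative note, not part of the diffs): the module is a thin caching wrapper around the new _struct.Struct type, so the functions it re-exports can be exercised in a few lines. The format string and values below are arbitrary examples chosen for the sketch:

    import struct

    fmt = ">HI"                      # big-endian, standard sizes: 2 + 4 bytes
    data = struct.pack(fmt, 0x1234, 0x56789ABC)
    assert struct.calcsize(fmt) == len(data) == 6
    assert struct.unpack(fmt, data) == (0x1234, 0x56789ABC)
    # Reusing the same format string is served from the module-level _cache
    # shown above rather than recompiling the pattern each time.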
- if self.universal_newlines and hasattr(open, 'newlines'): + if self.universal_newlines and hasattr(file, 'newlines'): if stdout: stdout = self._translate_newlines(stdout) if stderr: diff --git a/Lib/tarfile.py b/Lib/tarfile.py index 0b3d477..061d0f5 100644 --- a/Lib/tarfile.py +++ b/Lib/tarfile.py @@ -33,7 +33,7 @@ __version__ = "$Revision$" # $Source$ -version = "0.6.4" +version = "0.8.0" __author__ = "Lars Gustäbel (lars@gustaebel.de)" __date__ = "$Date$" __cvsid__ = "$Id$" @@ -132,21 +132,65 @@ TOEXEC = 0001 # execute/search by other #--------------------------------------------------------- # Some useful functions #--------------------------------------------------------- -def nts(s): - """Convert a null-terminated string buffer to a python string. + +def stn(s, length): + """Convert a python string to a null-terminated string buffer. """ - return s.rstrip(NUL) + return s[:length-1] + (length - len(s) - 1) * NUL + NUL -def calc_chksum(buf): - """Calculate the checksum for a member's header. It's a simple addition - of all bytes, treating the chksum field as if filled with spaces. - buf is a 512 byte long string buffer which holds the header. +def nti(s): + """Convert a number field to a python number. + """ + # There are two possible encodings for a number field, see + # itn() below. + if s[0] != chr(0200): + n = int(s.rstrip(NUL) or "0", 8) + else: + n = 0L + for i in xrange(len(s) - 1): + n <<= 8 + n += ord(s[i + 1]) + return n + +def itn(n, digits=8, posix=False): + """Convert a python number to a number field. + """ + # POSIX 1003.1-1988 requires numbers to be encoded as a string of + # octal digits followed by a null-byte, this allows values up to + # (8**(digits-1))-1. GNU tar allows storing numbers greater than + # that if necessary. A leading 0200 byte indicates this particular + # encoding, the following digits-1 bytes are a big-endian + # representation. This allows values up to (256**(digits-1))-1. + if 0 <= n < 8 ** (digits - 1): + s = "%0*o" % (digits - 1, n) + NUL + else: + if posix: + raise ValueError("overflow in number field") + + if n < 0: + # XXX We mimic GNU tar's behaviour with negative numbers, + # this could raise OverflowError. + n = struct.unpack("L", struct.pack("l", n))[0] + + s = "" + for i in xrange(digits - 1): + s = chr(n & 0377) + s + n >>= 8 + s = chr(0200) + s + return s + +def calc_chksums(buf): + """Calculate the checksum for a member's header by summing up all + characters except for the chksum field which is treated as if + it was filled with spaces. According to the GNU tar sources, + some tars (Sun and NeXT) calculate chksum with signed char, + which will be different if there are chars in the buffer with + the high bit set. So we calculate two checksums, unsigned and + signed. """ - chk = 256 # chksum field is treated as blanks, - # so the initial value is 8 * ord(" ") - for c in buf[:148]: chk += ord(c) # sum up all bytes before chksum - for c in buf[156:]: chk += ord(c) # sum up all bytes after chksum - return chk + unsigned_chksum = 256 + sum(struct.unpack("148B", buf[:148]) + struct.unpack("356B", buf[156:512])) + signed_chksum = 256 + sum(struct.unpack("148b", buf[:148]) + struct.unpack("356b", buf[156:512])) + return unsigned_chksum, signed_chksum def copyfileobj(src, dst, length=None): """Copy length bytes from fileobj src to fileobj dst. 
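As an aside on the nti()/itn() helpers above (not part of the patch): the two number-field encodings they implement, POSIX octal digits with a trailing NUL and the GNU base-256 form flagged by a leading 0200 byte, can be sketched standalone. The helper names below are hypothetical and merely mirror the logic above:

    NUL = "\0"

    def encode_octal(n, digits=8):
        # POSIX encoding: octal digits plus NUL; limited to (8**(digits-1))-1.
        return "%0*o" % (digits - 1, n) + NUL

    def encode_base256(n, digits=8):
        # GNU extension: 0200 marker byte followed by a big-endian binary value.
        out = ""
        for _ in xrange(digits - 1):
            out = chr(n & 0377) + out
            n >>= 8
        return chr(0200) + out

    def decode_field(s):
        # Mirrors nti(): base-256 if the first byte is 0200, octal otherwise.
        if s[0] != chr(0200):
            return int(s.rstrip(NUL) or "0", 8)
        n = 0L
        for c in s[1:]:
            n = (n << 8) | ord(c)
        return n

    assert decode_field(encode_octal(511)) == 511
    assert decode_field(encode_base256(10 ** 12, 12)) == 10 ** 12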
@@ -163,13 +207,13 @@ def copyfileobj(src, dst, length=None): for b in xrange(blocks): buf = src.read(BUFSIZE) if len(buf) < BUFSIZE: - raise IOError, "end of file reached" + raise IOError("end of file reached") dst.write(buf) if remainder != 0: buf = src.read(remainder) if len(buf) < remainder: - raise IOError, "end of file reached" + raise IOError("end of file reached") dst.write(buf) return @@ -301,7 +345,7 @@ class _Stream: try: import zlib except ImportError: - raise CompressionError, "zlib module is not available" + raise CompressionError("zlib module is not available") self.zlib = zlib self.crc = zlib.crc32("") if mode == "r": @@ -313,7 +357,7 @@ class _Stream: try: import bz2 except ImportError: - raise CompressionError, "bz2 module is not available" + raise CompressionError("bz2 module is not available") if mode == "r": self.dbuf = "" self.cmp = bz2.BZ2Decompressor() @@ -389,9 +433,9 @@ class _Stream: # taken from gzip.GzipFile with some alterations if self.__read(2) != "\037\213": - raise ReadError, "not a gzip file" + raise ReadError("not a gzip file") if self.__read(1) != "\010": - raise CompressionError, "unsupported compression method" + raise CompressionError("unsupported compression method") flag = ord(self.__read(1)) self.__read(6) @@ -427,7 +471,7 @@ class _Stream: self.read(self.bufsize) self.read(remainder) else: - raise StreamError, "seeking backwards is not allowed" + raise StreamError("seeking backwards is not allowed") return self.pos def read(self, size=None): @@ -508,6 +552,69 @@ class _StreamProxy(object): self.fileobj.close() # class StreamProxy +class _BZ2Proxy(object): + """Small proxy class that enables external file object + support for "r:bz2" and "w:bz2" modes. This is actually + a workaround for a limitation in bz2 module's BZ2File + class which (unlike gzip.GzipFile) has no support for + a file object argument. + """ + + blocksize = 16 * 1024 + + def __init__(self, fileobj, mode): + self.fileobj = fileobj + self.mode = mode + self.init() + + def init(self): + import bz2 + self.pos = 0 + if self.mode == "r": + self.bz2obj = bz2.BZ2Decompressor() + self.fileobj.seek(0) + self.buf = "" + else: + self.bz2obj = bz2.BZ2Compressor() + + def read(self, size): + b = [self.buf] + x = len(self.buf) + while x < size: + try: + raw = self.fileobj.read(self.blocksize) + data = self.bz2obj.decompress(raw) + b.append(data) + except EOFError: + break + x += len(data) + self.buf = "".join(b) + + buf = self.buf[:size] + self.buf = self.buf[size:] + self.pos += len(buf) + return buf + + def seek(self, pos): + if pos < self.pos: + self.init() + self.read(pos - self.pos) + + def tell(self): + return self.pos + + def write(self, data): + self.pos += len(data) + raw = self.bz2obj.compress(data) + self.fileobj.write(raw) + + def close(self): + if self.mode == "w": + raw = self.bz2obj.flush() + self.fileobj.write(raw) + self.fileobj.close() +# class _BZ2Proxy + #------------------------ # Extraction file object #------------------------ @@ -581,7 +688,7 @@ class ExFileObject(object): """Read operation for regular files. """ if self.closed: - raise ValueError, "file is closed" + raise ValueError("file is closed") self.fileobj.seek(self.offset + self.pos) bytesleft = self.size - self.pos if size is None: @@ -595,7 +702,7 @@ class ExFileObject(object): """Read operation for sparse files. """ if self.closed: - raise ValueError, "file is closed" + raise ValueError("file is closed") if size is None: size = self.size - self.pos @@ -684,24 +791,24 @@ class TarInfo(object): of the member. 
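The practical effect of the _BZ2Proxy class above (together with the bz2open() change further down) is that the "r:bz2" and "w:bz2" modes now accept an external file object instead of requiring a filename. A minimal usage sketch, assuming some already-open, seekable file object wrapping a bzip2-compressed archive (the file name is made up for illustration; this is not part of the patch):

    import tarfile

    f = open("example.tar.bz2", "rb")
    tar = tarfile.open(fileobj=f, mode="r:bz2")
    for member in tar.getmembers():
        print member.name, member.size
    tar.close()
    f.close()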
""" - self.name = name # member name (dirnames must end with '/') - self.mode = 0666 # file permissions - self.uid = 0 # user id - self.gid = 0 # group id - self.size = 0 # file size - self.mtime = 0 # modification time - self.chksum = 0 # header checksum - self.type = REGTYPE # member type - self.linkname = "" # link name - self.uname = "user" # user name - self.gname = "group" # group name - self.devmajor = 0 #- - self.devminor = 0 #-for use with CHRTYPE and BLKTYPE - self.prefix = "" # prefix to filename or holding information - # about sparse files - - self.offset = 0 # the tar header starts here - self.offset_data = 0 # the file's data starts here + self.name = name # member name (dirnames must end with '/') + self.mode = 0666 # file permissions + self.uid = 0 # user id + self.gid = 0 # group id + self.size = 0 # file size + self.mtime = 0 # modification time + self.chksum = 0 # header checksum + self.type = REGTYPE # member type + self.linkname = "" # link name + self.uname = "user" # user name + self.gname = "group" # group name + self.devmajor = 0 # device major number + self.devminor = 0 # device minor number + self.prefix = "" # prefix to filename or information + # about sparse files + + self.offset = 0 # the tar header starts here + self.offset_data = 0 # the file's data starts here def __repr__(self): return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self)) @@ -710,95 +817,57 @@ class TarInfo(object): def frombuf(cls, buf): """Construct a TarInfo object from a 512 byte string buffer. """ + if len(buf) != BLOCKSIZE: + raise ValueError("truncated header") + if buf.count(NUL) == BLOCKSIZE: + raise ValueError("empty header") + tarinfo = cls() - tarinfo.name = nts(buf[0:100]) - tarinfo.mode = int(buf[100:108], 8) - tarinfo.uid = int(buf[108:116],8) - tarinfo.gid = int(buf[116:124],8) - - # There are two possible codings for the size field we - # have to discriminate, see comment in tobuf() below. - if buf[124] != chr(0200): - tarinfo.size = long(buf[124:136], 8) - else: - tarinfo.size = 0L - for i in range(11): - tarinfo.size <<= 8 - tarinfo.size += ord(buf[125 + i]) - - tarinfo.mtime = long(buf[136:148], 8) - tarinfo.chksum = int(buf[148:156], 8) - tarinfo.type = buf[156:157] - tarinfo.linkname = nts(buf[157:257]) - tarinfo.uname = nts(buf[265:297]) - tarinfo.gname = nts(buf[297:329]) - try: - tarinfo.devmajor = int(buf[329:337], 8) - tarinfo.devminor = int(buf[337:345], 8) - except ValueError: - tarinfo.devmajor = tarinfo.devmajor = 0 + tarinfo.buf = buf + tarinfo.name = buf[0:100].rstrip(NUL) + tarinfo.mode = nti(buf[100:108]) + tarinfo.uid = nti(buf[108:116]) + tarinfo.gid = nti(buf[116:124]) + tarinfo.size = nti(buf[124:136]) + tarinfo.mtime = nti(buf[136:148]) + tarinfo.chksum = nti(buf[148:156]) + tarinfo.type = buf[156:157] + tarinfo.linkname = buf[157:257].rstrip(NUL) + tarinfo.uname = buf[265:297].rstrip(NUL) + tarinfo.gname = buf[297:329].rstrip(NUL) + tarinfo.devmajor = nti(buf[329:337]) + tarinfo.devminor = nti(buf[337:345]) tarinfo.prefix = buf[345:500] - # Some old tar programs represent a directory as a regular - # file with a trailing slash. - if tarinfo.isreg() and tarinfo.name.endswith("/"): - tarinfo.type = DIRTYPE - - # The prefix field is used for filenames > 100 in - # the POSIX standard. - # name = prefix + '/' + name - if tarinfo.type != GNUTYPE_SPARSE: - tarinfo.name = normpath(os.path.join(nts(tarinfo.prefix), tarinfo.name)) - - # Directory names should have a '/' at the end. 
- if tarinfo.isdir(): - tarinfo.name += "/" + if tarinfo.chksum not in calc_chksums(buf): + raise ValueError("invalid header") return tarinfo - def tobuf(self): + def tobuf(self, posix=False): """Return a tar header block as a 512 byte string. """ - # Prefer the size to be encoded as 11 octal ascii digits - # which is the most portable. If the size exceeds this - # limit (>= 8 GB), encode it as an 88-bit value which is - # a GNU tar feature. - if self.size <= MAXSIZE_MEMBER: - size = "%011o" % self.size - else: - s = self.size - size = "" - for i in range(11): - size = chr(s & 0377) + size - s >>= 8 - size = chr(0200) + size - - # The following code was contributed by Detlef Lannert. - parts = [] - for value, fieldsize in ( - (self.name, 100), - ("%07o" % (self.mode & 07777), 8), - ("%07o" % self.uid, 8), - ("%07o" % self.gid, 8), - (size, 12), - ("%011o" % self.mtime, 12), - (" ", 8), - (self.type, 1), - (self.linkname, 100), - (MAGIC, 6), - (VERSION, 2), - (self.uname, 32), - (self.gname, 32), - ("%07o" % self.devmajor, 8), - ("%07o" % self.devminor, 8), - (self.prefix, 155) - ): - l = len(value) - parts.append(value[:fieldsize] + (fieldsize - l) * NUL) - - buf = "".join(parts) - chksum = calc_chksum(buf) + parts = [ + stn(self.name, 100), + itn(self.mode & 07777, 8, posix), + itn(self.uid, 8, posix), + itn(self.gid, 8, posix), + itn(self.size, 12, posix), + itn(self.mtime, 12, posix), + " ", # checksum field + self.type, + stn(self.linkname, 100), + stn(MAGIC, 6), + stn(VERSION, 2), + stn(self.uname, 32), + stn(self.gname, 32), + itn(self.devmajor, 8, posix), + itn(self.devminor, 8, posix), + stn(self.prefix, 155) + ] + + buf = struct.pack("%ds" % BLOCKSIZE, "".join(parts)) + chksum = calc_chksums(buf)[0] buf = buf[:148] + "%06o\0" % chksum + buf[155:] - buf += (BLOCKSIZE - len(buf)) * NUL self.buf = buf return buf @@ -857,7 +926,7 @@ class TarFile(object): self.name = name if len(mode) > 1 or mode not in "raw": - raise ValueError, "mode must be 'r', 'a' or 'w'" + raise ValueError("mode must be 'r', 'a' or 'w'") self._mode = mode self.mode = {"r": "rb", "a": "r+b", "w": "wb"}[mode] @@ -873,12 +942,12 @@ class TarFile(object): self.fileobj = fileobj # Init datastructures - self.closed = False - self.members = [] # list of members as TarInfo objects - self._loaded = False # flag if all members have been read - self.offset = 0L # current position in the archive file - self.inodes = {} # dictionary caching the inodes of - # archive members already added + self.closed = False + self.members = [] # list of members as TarInfo objects + self._loaded = False # flag if all members have been read + self.offset = 0L # current position in the archive file + self.inodes = {} # dictionary caching the inodes of + # archive members already added if self._mode == "r": self.firstmember = None @@ -937,7 +1006,7 @@ class TarFile(object): """ if not name and not fileobj: - raise ValueError, "nothing to open" + raise ValueError("nothing to open") if mode in ("r", "r:*"): # Find out which *open() is appropriate for opening the file. 
@@ -947,7 +1016,7 @@ class TarFile(object): return func(name, "r", fileobj) except (ReadError, CompressionError): continue - raise ReadError, "file could not be opened successfully" + raise ReadError("file could not be opened successfully") elif ":" in mode: filemode, comptype = mode.split(":", 1) @@ -959,7 +1028,7 @@ class TarFile(object): if comptype in cls.OPEN_METH: func = getattr(cls, cls.OPEN_METH[comptype]) else: - raise CompressionError, "unknown compression type %r" % comptype + raise CompressionError("unknown compression type %r" % comptype) return func(name, filemode, fileobj) elif "|" in mode: @@ -968,7 +1037,7 @@ class TarFile(object): comptype = comptype or "tar" if filemode not in "rw": - raise ValueError, "mode must be 'r' or 'w'" + raise ValueError("mode must be 'r' or 'w'") t = cls(name, filemode, _Stream(name, filemode, comptype, fileobj, bufsize)) @@ -978,14 +1047,14 @@ class TarFile(object): elif mode in "aw": return cls.taropen(name, mode, fileobj) - raise ValueError, "undiscernible mode" + raise ValueError("undiscernible mode") @classmethod def taropen(cls, name, mode="r", fileobj=None): """Open uncompressed tar archive name for reading or writing. """ if len(mode) > 1 or mode not in "raw": - raise ValueError, "mode must be 'r', 'a' or 'w'" + raise ValueError("mode must be 'r', 'a' or 'w'") return cls(name, mode, fileobj) @classmethod @@ -994,13 +1063,13 @@ class TarFile(object): Appending is not allowed. """ if len(mode) > 1 or mode not in "rw": - raise ValueError, "mode must be 'r' or 'w'" + raise ValueError("mode must be 'r' or 'w'") try: import gzip gzip.GzipFile except (ImportError, AttributeError): - raise CompressionError, "gzip module is not available" + raise CompressionError("gzip module is not available") pre, ext = os.path.splitext(name) pre = os.path.basename(pre) @@ -1021,7 +1090,7 @@ class TarFile(object): gzip.GzipFile(name, mode, compresslevel, fileobj) ) except IOError: - raise ReadError, "not a gzip file" + raise ReadError("not a gzip file") t._extfileobj = False return t @@ -1031,12 +1100,12 @@ class TarFile(object): Appending is not allowed. """ if len(mode) > 1 or mode not in "rw": - raise ValueError, "mode must be 'r' or 'w'." 
+ raise ValueError("mode must be 'r' or 'w'.") try: import bz2 except ImportError: - raise CompressionError, "bz2 module is not available" + raise CompressionError("bz2 module is not available") pre, ext = os.path.splitext(name) pre = os.path.basename(pre) @@ -1047,12 +1116,14 @@ class TarFile(object): tarname = pre + ext if fileobj is not None: - raise ValueError, "no support for external file objects" + fileobj = _BZ2Proxy(fileobj, mode) + else: + fileobj = bz2.BZ2File(name, mode, compresslevel=compresslevel) try: - t = cls.taropen(tarname, mode, bz2.BZ2File(name, mode, compresslevel=compresslevel)) + t = cls.taropen(tarname, mode, fileobj) except IOError: - raise ReadError, "not a bzip2 file" + raise ReadError("not a bzip2 file") t._extfileobj = False return t @@ -1094,7 +1165,7 @@ class TarFile(object): """ tarinfo = self._getmember(name) if tarinfo is None: - raise KeyError, "filename %r not found" % name + raise KeyError("filename %r not found" % name) return tarinfo def getmembers(self): @@ -1313,15 +1384,14 @@ class TarFile(object): if tarinfo.size > MAXSIZE_MEMBER: if self.posix: - raise ValueError, "file is too large (>= 8 GB)" + raise ValueError("file is too large (>= 8 GB)") else: self._dbg(2, "tarfile: Created GNU tar largefile header") if len(tarinfo.linkname) > LENGTH_LINK: if self.posix: - raise ValueError, "linkname is too long (>%d)" \ - % (LENGTH_LINK) + raise ValueError("linkname is too long (>%d)" % (LENGTH_LINK)) else: self._create_gnulong(tarinfo.linkname, GNUTYPE_LONGLINK) tarinfo.linkname = tarinfo.linkname[:LENGTH_LINK -1] @@ -1337,8 +1407,7 @@ class TarFile(object): prefix = prefix[:-1] if not prefix or len(name) > LENGTH_NAME: - raise ValueError, "name is too long (>%d)" \ - % (LENGTH_NAME) + raise ValueError("name is too long (>%d)" % (LENGTH_NAME)) tarinfo.name = name tarinfo.prefix = prefix @@ -1347,7 +1416,7 @@ class TarFile(object): tarinfo.name = tarinfo.name[:LENGTH_NAME - 1] self._dbg(2, "tarfile: Created GNU tar extension LONGNAME") - self.fileobj.write(tarinfo.tobuf()) + self.fileobj.write(tarinfo.tobuf(self.posix)) self.offset += BLOCKSIZE # If there's data to follow, append it. @@ -1464,7 +1533,7 @@ class TarFile(object): # A small but ugly workaround for the case that someone tries # to extract a (sym)link as a file-object from a non-seekable # stream of tar blocks. - raise StreamError, "cannot extract (sym)link as file object" + raise StreamError("cannot extract (sym)link as file object") else: # A (sym)link's file object is its target's file object. return self.extractfile(self._getmember(tarinfo.linkname, @@ -1564,13 +1633,13 @@ class TarFile(object): if hasattr(os, "mkfifo"): os.mkfifo(targetpath) else: - raise ExtractError, "fifo not supported by system" + raise ExtractError("fifo not supported by system") def makedev(self, tarinfo, targetpath): """Make a character or block device called targetpath. """ if not hasattr(os, "mknod") or not hasattr(os, "makedev"): - raise ExtractError, "special devices not supported by system" + raise ExtractError("special devices not supported by system") mode = tarinfo.mode if tarinfo.isblk(): @@ -1606,7 +1675,7 @@ class TarFile(object): try: shutil.copy2(linkpath, targetpath) except EnvironmentError, e: - raise IOError, "link could not be created" + raise IOError("link could not be created") def chown(self, tarinfo, targetpath): """Set owner of targetpath according to tarinfo. 
@@ -1634,7 +1703,7 @@ class TarFile(object): if sys.platform != "os2emx": os.chown(targetpath, u, g) except EnvironmentError, e: - raise ExtractError, "could not change owner" + raise ExtractError("could not change owner") def chmod(self, tarinfo, targetpath): """Set file permissions of targetpath according to tarinfo. @@ -1643,7 +1712,7 @@ class TarFile(object): try: os.chmod(targetpath, tarinfo.mode) except EnvironmentError, e: - raise ExtractError, "could not change mode" + raise ExtractError("could not change mode") def utime(self, tarinfo, targetpath): """Set modification time of targetpath according to tarinfo. @@ -1657,10 +1726,9 @@ class TarFile(object): try: os.utime(targetpath, (tarinfo.mtime, tarinfo.mtime)) except EnvironmentError, e: - raise ExtractError, "could not change modification time" + raise ExtractError("could not change modification time") #-------------------------------------------------------------------------- - def next(self): """Return the next member of the archive as a TarInfo object, when TarFile is opened for reading. Return None if there is no more @@ -1678,70 +1746,91 @@ class TarFile(object): buf = self.fileobj.read(BLOCKSIZE) if not buf: return None + try: tarinfo = TarInfo.frombuf(buf) - except ValueError: + + # We shouldn't rely on this checksum, because some tar programs + # calculate it differently and it is merely validating the + # header block. We could just as well skip this part, which would + # have a slight effect on performance... + if tarinfo.chksum not in calc_chksums(buf): + self._dbg(1, "tarfile: Bad Checksum %r" % tarinfo.name) + + # Set the TarInfo object's offset to the current position of the + # TarFile and set self.offset to the position where the data blocks + # should begin. + tarinfo.offset = self.offset + self.offset += BLOCKSIZE + + tarinfo = self.proc_member(tarinfo) + + except ValueError, e: if self.ignore_zeros: - if buf.count(NUL) == BLOCKSIZE: - adj = "empty" - else: - adj = "invalid" - self._dbg(2, "0x%X: %s block" % (self.offset, adj)) + self._dbg(2, "0x%X: empty or invalid block: %s" % + (self.offset, e)) self.offset += BLOCKSIZE continue else: - # Block is empty or unreadable. if self.offset == 0: - # If the first block is invalid. That does not - # look like a tar archive we can handle. - raise ReadError,"empty, unreadable or compressed file" + raise ReadError("empty, unreadable or compressed " + "file: %s" % e) return None break - # We shouldn't rely on this checksum, because some tar programs - # calculate it differently and it is merely validating the - # header block. We could just as well skip this part, which would - # have a slight effect on performance... - if tarinfo.chksum != calc_chksum(buf): - self._dbg(1, "tarfile: Bad Checksum %r" % tarinfo.name) - - # Set the TarInfo object's offset to the current position of the - # TarFile and set self.offset to the position where the data blocks - # should begin. - tarinfo.offset = self.offset - self.offset += BLOCKSIZE + # Some old tar programs represent a directory as a regular + # file with a trailing slash. + if tarinfo.isreg() and tarinfo.name.endswith("/"): + tarinfo.type = DIRTYPE - # Check if the TarInfo object has a typeflag for which a callback - # method is registered in the TYPE_METH. If so, then call it. - if tarinfo.type in self.TYPE_METH: - return self.TYPE_METH[tarinfo.type](self, tarinfo) + # The prefix field is used for filenames > 100 in + # the POSIX standard. 
+ # name = prefix + '/' + name + tarinfo.name = normpath(os.path.join(tarinfo.prefix.rstrip(NUL), + tarinfo.name)) - tarinfo.offset_data = self.offset - if tarinfo.isreg() or tarinfo.type not in SUPPORTED_TYPES: - # Skip the following data blocks. - self.offset += self._block(tarinfo.size) + # Directory names should have a '/' at the end. + if tarinfo.isdir(): + tarinfo.name += "/" self.members.append(tarinfo) return tarinfo #-------------------------------------------------------------------------- - # Below are some methods which are called for special typeflags in the - # next() method, e.g. for unwrapping GNU longname/longlink blocks. They - # are registered in TYPE_METH below. You can register your own methods - # with this mapping. - # A registered method is called with a TarInfo object as only argument. - # - # During its execution the method MUST perform the following tasks: - # 1. set tarinfo.offset_data to the position where the data blocks begin, - # if there is data to follow. - # 2. set self.offset to the position where the next member's header will + # The following are methods that are called depending on the type of a + # member. The entry point is proc_member() which is called with a TarInfo + # object created from the header block from the current offset. The + # proc_member() method can be overridden in a subclass to add custom + # proc_*() methods. A proc_*() method MUST implement the following + # operations: + # 1. Set tarinfo.offset_data to the position where the data blocks begin, + # if there is data that follows. + # 2. Set self.offset to the position where the next member's header will # begin. - # 3. append the tarinfo object to self.members, if it is supposed to appear - # as a member of the TarFile object. - # 4. return tarinfo or another valid TarInfo object. + # 3. Return tarinfo or another valid TarInfo object. + def proc_member(self, tarinfo): + """Choose the right processing method for tarinfo depending + on its type and call it. + """ + if tarinfo.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK): + return self.proc_gnulong(tarinfo) + elif tarinfo.type == GNUTYPE_SPARSE: + return self.proc_sparse(tarinfo) + else: + return self.proc_builtin(tarinfo) + + def proc_builtin(self, tarinfo): + """Process a builtin type member or an unknown member + which will be treated as a regular file. + """ + tarinfo.offset_data = self.offset + if tarinfo.isreg() or tarinfo.type not in SUPPORTED_TYPES: + # Skip the following data blocks. + self.offset += self._block(tarinfo.size) + return tarinfo def proc_gnulong(self, tarinfo): - """Evaluate the blocks that hold a GNU longname + """Process the blocks that hold a GNU longname or longlink member. """ buf = "" @@ -1752,21 +1841,27 @@ class TarFile(object): self.offset += BLOCKSIZE count -= BLOCKSIZE - # Fetch the next header - next = self.next() + # Fetch the next header and process it. + b = self.fileobj.read(BLOCKSIZE) + t = TarInfo.frombuf(b) + t.offset = self.offset + self.offset += BLOCKSIZE + next = self.proc_member(t) + # Patch the TarInfo object from the next header with + # the longname information. next.offset = tarinfo.offset if tarinfo.type == GNUTYPE_LONGNAME: - next.name = nts(buf) + next.name = buf.rstrip(NUL) elif tarinfo.type == GNUTYPE_LONGLINK: - next.linkname = nts(buf) + next.linkname = buf.rstrip(NUL) return next def proc_sparse(self, tarinfo): - """Analyze a GNU sparse header plus extra headers. + """Process a GNU sparse header plus extra headers. 
""" - buf = tarinfo.tobuf() + buf = tarinfo.buf sp = _ringbuffer() pos = 386 lastpos = 0L @@ -1775,8 +1870,8 @@ class TarFile(object): # first header. for i in xrange(4): try: - offset = int(buf[pos:pos + 12], 8) - numbytes = int(buf[pos + 12:pos + 24], 8) + offset = nti(buf[pos:pos + 12]) + numbytes = nti(buf[pos + 12:pos + 24]) except ValueError: break if offset > lastpos: @@ -1787,7 +1882,7 @@ class TarFile(object): pos += 24 isextended = ord(buf[482]) - origsize = int(buf[483:495], 8) + origsize = nti(buf[483:495]) # If the isextended flag is given, # there are extra headers to process. @@ -1797,8 +1892,8 @@ class TarFile(object): pos = 0 for i in xrange(21): try: - offset = int(buf[pos:pos + 12], 8) - numbytes = int(buf[pos + 12:pos + 24], 8) + offset = nti(buf[pos:pos + 12]) + numbytes = nti(buf[pos + 12:pos + 24]) except ValueError: break if offset > lastpos: @@ -1818,17 +1913,11 @@ class TarFile(object): self.offset += self._block(tarinfo.size) tarinfo.size = origsize - self.members.append(tarinfo) - return tarinfo + # Clear the prefix field so that it is not used + # as a pathname in next(). + tarinfo.prefix = "" - # The type mapping for the next() method. The keys are single character - # strings, the typeflag. The values are methods which are called when - # next() encounters such a typeflag. - TYPE_METH = { - GNUTYPE_LONGNAME: proc_gnulong, - GNUTYPE_LONGLINK: proc_gnulong, - GNUTYPE_SPARSE: proc_sparse - } + return tarinfo #-------------------------------------------------------------------------- # Little helper methods: @@ -1873,9 +1962,9 @@ class TarFile(object): corresponds to TarFile's mode. """ if self.closed: - raise IOError, "%s is closed" % self.__class__.__name__ + raise IOError("%s is closed" % self.__class__.__name__) if mode is not None and self._mode not in mode: - raise IOError, "bad operation for mode %r" % self._mode + raise IOError("bad operation for mode %r" % self._mode) def __iter__(self): """Provide an iterator object. @@ -2011,7 +2100,7 @@ class TarFileCompat: elif compression == TAR_GZIPPED: self.tarfile = TarFile.gzopen(file, mode) else: - raise ValueError, "unknown compression constant" + raise ValueError("unknown compression constant") if mode[0:1] == "r": members = self.tarfile.getmembers() for m in members: diff --git a/Lib/test/exception_hierarchy.txt b/Lib/test/exception_hierarchy.txt index 9ed92d0..58131d7 100644 --- a/Lib/test/exception_hierarchy.txt +++ b/Lib/test/exception_hierarchy.txt @@ -15,6 +15,7 @@ BaseException | | +-- IOError | | +-- OSError | | +-- WindowsError (Windows) + | | +-- VMSError (VMS) | +-- EOFError | +-- ImportError | +-- LookupError @@ -43,4 +44,4 @@ BaseException +-- SyntaxWarning +-- UserWarning +-- FutureWarning - +-- OverflowWarning [not generated by the interpreter] + +-- ImportWarning diff --git a/Lib/test/output/test_logging b/Lib/test/output/test_logging index 7be3a3e..c0d6e06 100644 --- a/Lib/test/output/test_logging +++ b/Lib/test/output/test_logging @@ -488,12 +488,12 @@ INFO:a.b.c.d:Info 5 -- log_test4 begin --------------------------------------------------- config0: ok. config1: ok. -config2: -config3: +config2: +config3: -- log_test4 end --------------------------------------------------- -- log_test5 begin --------------------------------------------------- ERROR:root:just testing -... Don't panic! +... Don't panic! 
-- log_test5 end --------------------------------------------------- -- logrecv output begin --------------------------------------------------- ERR -> CRITICAL: Message 0 (via logrecv.tcp.ERR) diff --git a/Lib/test/pickletester.py b/Lib/test/pickletester.py index 85e1dea..5b9da56 100644 --- a/Lib/test/pickletester.py +++ b/Lib/test/pickletester.py @@ -4,7 +4,8 @@ import cPickle import pickletools import copy_reg -from test.test_support import TestFailed, have_unicode, TESTFN +from test.test_support import TestFailed, have_unicode, TESTFN, \ + run_with_locale # Tests that try a number of pickle protocols should have a # for proto in protocols: @@ -527,6 +528,11 @@ class AbstractPickleTests(unittest.TestCase): got = self.loads(p) self.assertEqual(n, got) + @run_with_locale('LC_ALL', 'de_DE', 'fr_FR') + def test_float_format(self): + # make sure that floats are formatted locale independent + self.assertEqual(self.dumps(1.2)[0:3], 'F1.') + def test_reduce(self): pass diff --git a/Lib/test/regrtest.py b/Lib/test/regrtest.py index 566e54b..86961b0 100755 --- a/Lib/test/regrtest.py +++ b/Lib/test/regrtest.py @@ -25,6 +25,7 @@ Command line options: -N: nocoverdir -- Put coverage files alongside modules -L: runleaks -- run the leaks(1) command just before exit -R: huntrleaks -- search for reference leaks (needs debug build, v. slow) +-M: memlimit -- run very large memory-consuming tests If non-option arguments are present, they are names for tests to run, unless -x is given, in which case they are names for tests not to run. @@ -63,6 +64,19 @@ of times further it is run and 'fname' is the name of the file the reports are written to. These parameters all have defaults (5, 4 and "reflog.txt" respectively), so the minimal invocation is '-R ::'. +-M runs tests that require an exorbitant amount of memory. These tests +typically try to ascertain that containers keep working when containing more than +2 billion objects, and only work on 64-bit systems. The passed-in memlimit, +which is a string in the form of '2.5Gb', determines how much memory the +tests will limit themselves to (but they may go slightly over.) The number +shouldn't be more memory than the machine has (including swap memory). You +should also keep in mind that swap memory is generally much, much slower +than RAM, and setting memlimit to all available RAM or higher will heavily +tax the machine. On the other hand, it is no use running these tests with a +limit of less than 2.5Gb, and many require more than 20Gb. Tests that expect +to use more than memlimit memory will be skipped. The big-memory tests +generally run very, very long. + -u is used to specify which special resource intensive tests to run, such as those requiring large file support or network connectivity. The argument is a comma-separated list of words indicating the @@ -124,6 +138,14 @@ if sys.maxint > 0x7fffffff: warnings.filterwarnings("ignore", "hex/oct constants", FutureWarning, "") +# Ignore ImportWarnings that only occur in the source tree, +# (because of modules with the same name as source-directories in Modules/) +for mod in ("ctypes", "gzip", "zipfile", "tarfile", "encodings.zlib_codec", + "test.test_zipimport", "test.test_zlib", "test.test_zipfile", + "test.test_codecs", "test.string_tests"): + warnings.filterwarnings(module=".*%s$" % (mod,), + action="ignore", category=ImportWarning) + # MacOSX (a.k.a. Darwin) has a default stack size that is too small # for deeply recursive regular expressions.
We see this as crashes in # the Python test suite when running test_re.py and test_sre.py. The @@ -180,12 +202,12 @@ def main(tests=None, testdir=None, verbose=0, quiet=False, generate=False, test_support.record_original_stdout(sys.stdout) try: - opts, args = getopt.getopt(sys.argv[1:], 'hvgqxsrf:lu:t:TD:NLR:w', + opts, args = getopt.getopt(sys.argv[1:], 'hvgqxsrf:lu:t:TD:NLR:wM:', ['help', 'verbose', 'quiet', 'generate', 'exclude', 'single', 'random', 'fromfile', 'findleaks', 'use=', 'threshold=', 'trace', 'coverdir=', 'nocoverdir', 'runleaks', - 'huntrleaks=', 'verbose2', + 'huntrleaks=', 'verbose2', 'memlimit=', ]) except getopt.error, msg: usage(2, msg) @@ -241,6 +263,8 @@ def main(tests=None, testdir=None, verbose=0, quiet=False, generate=False, huntrleaks[1] = int(huntrleaks[1]) if len(huntrleaks[2]) == 0: huntrleaks[2] = "reflog.txt" + elif o in ('-M', '--memlimit'): + test_support.set_memlimit(a) elif o in ('-u', '--use'): u = [x.lower() for x in a.split(',')] for r in u: @@ -521,6 +545,7 @@ def runtest(test, generate, verbose, quiet, testdir=None, huntrleaks=False): def cleanup(): import _strptime, linecache, warnings, dircache import urlparse, urllib, urllib2, mimetypes, doctest + import struct from distutils.dir_util import _path_created _path_created.clear() warnings.filters[:] = fs @@ -537,6 +562,7 @@ def runtest(test, generate, verbose, quiet, testdir=None, huntrleaks=False): dircache.reset() linecache.clearcache() mimetypes._default_mime_types() + struct._cache.clear() doctest.master = None if indirect_test: def run_the_test(): diff --git a/Lib/test/string_tests.py b/Lib/test/string_tests.py index aab98c2..489af20 100644 --- a/Lib/test/string_tests.py +++ b/Lib/test/string_tests.py @@ -243,29 +243,72 @@ class CommonTest(unittest.TestCase): self.checkequal(['a', 'b', 'c d'], 'a b c d', 'split', None, 2) self.checkequal(['a', 'b', 'c', 'd'], 'a b c d', 'split', None, 3) self.checkequal(['a', 'b', 'c', 'd'], 'a b c d', 'split', None, 4) + self.checkequal(['a', 'b', 'c', 'd'], 'a b c d', 'split', None, + sys.maxint-1) self.checkequal(['a b c d'], 'a b c d', 'split', None, 0) + self.checkequal(['a b c d'], ' a b c d', 'split', None, 0) self.checkequal(['a', 'b', 'c d'], 'a b c d', 'split', None, 2) + self.checkequal([], ' ', 'split') + self.checkequal(['a'], ' a ', 'split') + self.checkequal(['a', 'b'], ' a b ', 'split') + self.checkequal(['a', 'b '], ' a b ', 'split', None, 1) + self.checkequal(['a', 'b c '], ' a b c ', 'split', None, 1) + self.checkequal(['a', 'b', 'c '], ' a b c ', 'split', None, 2) + self.checkequal(['a', 'b'], '\n\ta \t\r b \v ', 'split') + aaa = ' a '*20 + self.checkequal(['a']*20, aaa, 'split') + self.checkequal(['a'] + [aaa[4:]], aaa, 'split', None, 1) + self.checkequal(['a']*19 + ['a '], aaa, 'split', None, 19) + # by a char self.checkequal(['a', 'b', 'c', 'd'], 'a|b|c|d', 'split', '|') + self.checkequal(['a|b|c|d'], 'a|b|c|d', 'split', '|', 0) self.checkequal(['a', 'b|c|d'], 'a|b|c|d', 'split', '|', 1) self.checkequal(['a', 'b', 'c|d'], 'a|b|c|d', 'split', '|', 2) self.checkequal(['a', 'b', 'c', 'd'], 'a|b|c|d', 'split', '|', 3) self.checkequal(['a', 'b', 'c', 'd'], 'a|b|c|d', 'split', '|', 4) + self.checkequal(['a', 'b', 'c', 'd'], 'a|b|c|d', 'split', '|', + sys.maxint-2) self.checkequal(['a|b|c|d'], 'a|b|c|d', 'split', '|', 0) self.checkequal(['a', '', 'b||c||d'], 'a||b||c||d', 'split', '|', 2) self.checkequal(['endcase ', ''], 'endcase |', 'split', '|') + self.checkequal(['', ' startcase'], '| startcase', 'split', '|') + self.checkequal(['', 
'bothcase', ''], '|bothcase|', 'split', '|') self.checkequal(['a', '', 'b\x00c\x00d'], 'a\x00\x00b\x00c\x00d', 'split', '\x00', 2) + self.checkequal(['a']*20, ('a|'*20)[:-1], 'split', '|') + self.checkequal(['a']*15 +['a|a|a|a|a'], + ('a|'*20)[:-1], 'split', '|', 15) + # by string self.checkequal(['a', 'b', 'c', 'd'], 'a//b//c//d', 'split', '//') self.checkequal(['a', 'b//c//d'], 'a//b//c//d', 'split', '//', 1) self.checkequal(['a', 'b', 'c//d'], 'a//b//c//d', 'split', '//', 2) self.checkequal(['a', 'b', 'c', 'd'], 'a//b//c//d', 'split', '//', 3) self.checkequal(['a', 'b', 'c', 'd'], 'a//b//c//d', 'split', '//', 4) + self.checkequal(['a', 'b', 'c', 'd'], 'a//b//c//d', 'split', '//', + sys.maxint-10) self.checkequal(['a//b//c//d'], 'a//b//c//d', 'split', '//', 0) self.checkequal(['a', '', 'b////c////d'], 'a////b////c////d', 'split', '//', 2) self.checkequal(['endcase ', ''], 'endcase test', 'split', 'test') + self.checkequal(['', ' begincase'], 'test begincase', 'split', 'test') + self.checkequal(['', ' bothcase ', ''], 'test bothcase test', + 'split', 'test') + self.checkequal(['a', 'bc'], 'abbbc', 'split', 'bb') + self.checkequal(['', ''], 'aaa', 'split', 'aaa') + self.checkequal(['aaa'], 'aaa', 'split', 'aaa', 0) + self.checkequal(['ab', 'ab'], 'abbaab', 'split', 'ba') + self.checkequal(['aaaa'], 'aaaa', 'split', 'aab') + self.checkequal([''], '', 'split', 'aaa') + self.checkequal(['aa'], 'aa', 'split', 'aaa') + self.checkequal(['A', 'bobb'], 'Abbobbbobb', 'split', 'bbobb') + self.checkequal(['A', 'B', ''], 'AbbobbBbbobb', 'split', 'bbobb') + + self.checkequal(['a']*20, ('aBLAH'*20)[:-4], 'split', 'BLAH') + self.checkequal(['a']*20, ('aBLAH'*20)[:-4], 'split', 'BLAH', 19) + self.checkequal(['a']*18 + ['aBLAHa'], ('aBLAH'*20)[:-4], + 'split', 'BLAH', 18) # mixed use of str and unicode self.checkequal([u'a', u'b', u'c d'], 'a b c d', 'split', u' ', 2) @@ -273,6 +316,10 @@ class CommonTest(unittest.TestCase): # argument type self.checkraises(TypeError, 'hello', 'split', 42, 42, 42) + # null case + self.checkraises(ValueError, 'hello', 'split', '') + self.checkraises(ValueError, 'hello', 'split', '', 0) + def test_rsplit(self): self.checkequal(['this', 'is', 'the', 'rsplit', 'function'], 'this is the rsplit function', 'rsplit') @@ -283,29 +330,75 @@ class CommonTest(unittest.TestCase): self.checkequal(['a b', 'c', 'd'], 'a b c d', 'rsplit', None, 2) self.checkequal(['a', 'b', 'c', 'd'], 'a b c d', 'rsplit', None, 3) self.checkequal(['a', 'b', 'c', 'd'], 'a b c d', 'rsplit', None, 4) + self.checkequal(['a', 'b', 'c', 'd'], 'a b c d', 'rsplit', None, + sys.maxint-20) self.checkequal(['a b c d'], 'a b c d', 'rsplit', None, 0) + self.checkequal(['a b c d'], 'a b c d ', 'rsplit', None, 0) self.checkequal(['a b', 'c', 'd'], 'a b c d', 'rsplit', None, 2) + self.checkequal([], ' ', 'rsplit') + self.checkequal(['a'], ' a ', 'rsplit') + self.checkequal(['a', 'b'], ' a b ', 'rsplit') + self.checkequal([' a', 'b'], ' a b ', 'rsplit', None, 1) + self.checkequal([' a b','c'], ' a b c ', 'rsplit', + None, 1) + self.checkequal([' a', 'b', 'c'], ' a b c ', 'rsplit', + None, 2) + self.checkequal(['a', 'b'], '\n\ta \t\r b \v ', 'rsplit', None, 88) + aaa = ' a '*20 + self.checkequal(['a']*20, aaa, 'rsplit') + self.checkequal([aaa[:-4]] + ['a'], aaa, 'rsplit', None, 1) + self.checkequal([' a a'] + ['a']*18, aaa, 'rsplit', None, 18) + + # by a char self.checkequal(['a', 'b', 'c', 'd'], 'a|b|c|d', 'rsplit', '|') self.checkequal(['a|b|c', 'd'], 'a|b|c|d', 'rsplit', '|', 1) self.checkequal(['a|b', 'c', 'd'], 
'a|b|c|d', 'rsplit', '|', 2) self.checkequal(['a', 'b', 'c', 'd'], 'a|b|c|d', 'rsplit', '|', 3) self.checkequal(['a', 'b', 'c', 'd'], 'a|b|c|d', 'rsplit', '|', 4) + self.checkequal(['a', 'b', 'c', 'd'], 'a|b|c|d', 'rsplit', '|', + sys.maxint-100) self.checkequal(['a|b|c|d'], 'a|b|c|d', 'rsplit', '|', 0) self.checkequal(['a||b||c', '', 'd'], 'a||b||c||d', 'rsplit', '|', 2) self.checkequal(['', ' begincase'], '| begincase', 'rsplit', '|') + self.checkequal(['endcase ', ''], 'endcase |', 'rsplit', '|') + self.checkequal(['', 'bothcase', ''], '|bothcase|', 'rsplit', '|') + self.checkequal(['a\x00\x00b', 'c', 'd'], 'a\x00\x00b\x00c\x00d', 'rsplit', '\x00', 2) + self.checkequal(['a']*20, ('a|'*20)[:-1], 'rsplit', '|') + self.checkequal(['a|a|a|a|a']+['a']*15, + ('a|'*20)[:-1], 'rsplit', '|', 15) + # by string self.checkequal(['a', 'b', 'c', 'd'], 'a//b//c//d', 'rsplit', '//') self.checkequal(['a//b//c', 'd'], 'a//b//c//d', 'rsplit', '//', 1) self.checkequal(['a//b', 'c', 'd'], 'a//b//c//d', 'rsplit', '//', 2) self.checkequal(['a', 'b', 'c', 'd'], 'a//b//c//d', 'rsplit', '//', 3) self.checkequal(['a', 'b', 'c', 'd'], 'a//b//c//d', 'rsplit', '//', 4) + self.checkequal(['a', 'b', 'c', 'd'], 'a//b//c//d', 'rsplit', '//', + sys.maxint-5) self.checkequal(['a//b//c//d'], 'a//b//c//d', 'rsplit', '//', 0) self.checkequal(['a////b////c', '', 'd'], 'a////b////c////d', 'rsplit', '//', 2) self.checkequal(['', ' begincase'], 'test begincase', 'rsplit', 'test') + self.checkequal(['endcase ', ''], 'endcase test', 'rsplit', 'test') + self.checkequal(['', ' bothcase ', ''], 'test bothcase test', + 'rsplit', 'test') + self.checkequal(['ab', 'c'], 'abbbc', 'rsplit', 'bb') + self.checkequal(['', ''], 'aaa', 'rsplit', 'aaa') + self.checkequal(['aaa'], 'aaa', 'rsplit', 'aaa', 0) + self.checkequal(['ab', 'ab'], 'abbaab', 'rsplit', 'ba') + self.checkequal(['aaaa'], 'aaaa', 'rsplit', 'aab') + self.checkequal([''], '', 'rsplit', 'aaa') + self.checkequal(['aa'], 'aa', 'rsplit', 'aaa') + self.checkequal(['bbob', 'A'], 'bbobbbobbA', 'rsplit', 'bbobb') + self.checkequal(['', 'B', 'A'], 'bbobbBbbobbA', 'rsplit', 'bbobb') + + self.checkequal(['a']*20, ('aBLAH'*20)[:-4], 'rsplit', 'BLAH') + self.checkequal(['a']*20, ('aBLAH'*20)[:-4], 'rsplit', 'BLAH', 19) + self.checkequal(['aBLAHa'] + ['a']*18, ('aBLAH'*20)[:-4], + 'rsplit', 'BLAH', 18) # mixed use of str and unicode self.checkequal([u'a b', u'c', u'd'], 'a b c d', 'rsplit', u' ', 2) @@ -313,6 +406,10 @@ class CommonTest(unittest.TestCase): # argument type self.checkraises(TypeError, 'hello', 'rsplit', 42, 42, 42) + # null case + self.checkraises(ValueError, 'hello', 'rsplit', '') + self.checkraises(ValueError, 'hello', 'rsplit', '', 0) + def test_strip(self): self.checkequal('hello', ' hello ', 'strip') self.checkequal('hello ', ' hello ', 'lstrip') @@ -376,6 +473,158 @@ class CommonTest(unittest.TestCase): self.checkraises(TypeError, 'hello', 'swapcase', 42) def test_replace(self): + EQ = self.checkequal + + # Operations on the empty string + EQ("", "", "replace", "", "") + + #EQ("A", "", "replace", "", "A") + # That was the correct result; this is the result we actually get + # now (for str, but not for unicode): + #EQ("", "", "replace", "", "A") + + EQ("", "", "replace", "A", "") + EQ("", "", "replace", "A", "A") + EQ("", "", "replace", "", "", 100) + EQ("", "", "replace", "", "", sys.maxint) + + # interleave (from=="", 'to' gets inserted everywhere) + EQ("A", "A", "replace", "", "") + EQ("*A*", "A", "replace", "", "*") + EQ("*1A*1", "A", "replace", "", "*1") + 
EQ("*-#A*-#", "A", "replace", "", "*-#") + EQ("*-A*-A*-", "AA", "replace", "", "*-") + EQ("*-A*-A*-", "AA", "replace", "", "*-", -1) + EQ("*-A*-A*-", "AA", "replace", "", "*-", sys.maxint) + EQ("*-A*-A*-", "AA", "replace", "", "*-", 4) + EQ("*-A*-A*-", "AA", "replace", "", "*-", 3) + EQ("*-A*-A", "AA", "replace", "", "*-", 2) + EQ("*-AA", "AA", "replace", "", "*-", 1) + EQ("AA", "AA", "replace", "", "*-", 0) + + # single character deletion (from=="A", to=="") + EQ("", "A", "replace", "A", "") + EQ("", "AAA", "replace", "A", "") + EQ("", "AAA", "replace", "A", "", -1) + EQ("", "AAA", "replace", "A", "", sys.maxint) + EQ("", "AAA", "replace", "A", "", 4) + EQ("", "AAA", "replace", "A", "", 3) + EQ("A", "AAA", "replace", "A", "", 2) + EQ("AA", "AAA", "replace", "A", "", 1) + EQ("AAA", "AAA", "replace", "A", "", 0) + EQ("", "AAAAAAAAAA", "replace", "A", "") + EQ("BCD", "ABACADA", "replace", "A", "") + EQ("BCD", "ABACADA", "replace", "A", "", -1) + EQ("BCD", "ABACADA", "replace", "A", "", sys.maxint) + EQ("BCD", "ABACADA", "replace", "A", "", 5) + EQ("BCD", "ABACADA", "replace", "A", "", 4) + EQ("BCDA", "ABACADA", "replace", "A", "", 3) + EQ("BCADA", "ABACADA", "replace", "A", "", 2) + EQ("BACADA", "ABACADA", "replace", "A", "", 1) + EQ("ABACADA", "ABACADA", "replace", "A", "", 0) + EQ("BCD", "ABCAD", "replace", "A", "") + EQ("BCD", "ABCADAA", "replace", "A", "") + EQ("BCD", "BCD", "replace", "A", "") + EQ("*************", "*************", "replace", "A", "") + EQ("^A^", "^"+"A"*1000+"^", "replace", "A", "", 999) + + # substring deletion (from=="the", to=="") + EQ("", "the", "replace", "the", "") + EQ("ater", "theater", "replace", "the", "") + EQ("", "thethe", "replace", "the", "") + EQ("", "thethethethe", "replace", "the", "") + EQ("aaaa", "theatheatheathea", "replace", "the", "") + EQ("that", "that", "replace", "the", "") + EQ("thaet", "thaet", "replace", "the", "") + EQ("here and re", "here and there", "replace", "the", "") + EQ("here and re and re", "here and there and there", + "replace", "the", "", sys.maxint) + EQ("here and re and re", "here and there and there", + "replace", "the", "", -1) + EQ("here and re and re", "here and there and there", + "replace", "the", "", 3) + EQ("here and re and re", "here and there and there", + "replace", "the", "", 2) + EQ("here and re and there", "here and there and there", + "replace", "the", "", 1) + EQ("here and there and there", "here and there and there", + "replace", "the", "", 0) + EQ("here and re and re", "here and there and there", "replace", "the", "") + + EQ("abc", "abc", "replace", "the", "") + EQ("abcdefg", "abcdefg", "replace", "the", "") + + # substring deletion (from=="bob", to=="") + EQ("bob", "bbobob", "replace", "bob", "") + EQ("bobXbob", "bbobobXbbobob", "replace", "bob", "") + EQ("aaaaaaa", "aaaaaaabob", "replace", "bob", "") + EQ("aaaaaaa", "aaaaaaa", "replace", "bob", "") + + # single character replace in place (len(from)==len(to)==1) + EQ("Who goes there?", "Who goes there?", "replace", "o", "o") + EQ("WhO gOes there?", "Who goes there?", "replace", "o", "O") + EQ("WhO gOes there?", "Who goes there?", "replace", "o", "O", sys.maxint) + EQ("WhO gOes there?", "Who goes there?", "replace", "o", "O", -1) + EQ("WhO gOes there?", "Who goes there?", "replace", "o", "O", 3) + EQ("WhO gOes there?", "Who goes there?", "replace", "o", "O", 2) + EQ("WhO goes there?", "Who goes there?", "replace", "o", "O", 1) + EQ("Who goes there?", "Who goes there?", "replace", "o", "O", 0) + + EQ("Who goes there?", "Who goes there?", "replace", "a", "q") + 
EQ("who goes there?", "Who goes there?", "replace", "W", "w") + EQ("wwho goes there?ww", "WWho goes there?WW", "replace", "W", "w") + EQ("Who goes there!", "Who goes there?", "replace", "?", "!") + EQ("Who goes there!!", "Who goes there??", "replace", "?", "!") + + EQ("Who goes there?", "Who goes there?", "replace", ".", "!") + + # substring replace in place (len(from)==len(to) > 1) + EQ("Th** ** a t**sue", "This is a tissue", "replace", "is", "**") + EQ("Th** ** a t**sue", "This is a tissue", "replace", "is", "**", sys.maxint) + EQ("Th** ** a t**sue", "This is a tissue", "replace", "is", "**", -1) + EQ("Th** ** a t**sue", "This is a tissue", "replace", "is", "**", 4) + EQ("Th** ** a t**sue", "This is a tissue", "replace", "is", "**", 3) + EQ("Th** ** a tissue", "This is a tissue", "replace", "is", "**", 2) + EQ("Th** is a tissue", "This is a tissue", "replace", "is", "**", 1) + EQ("This is a tissue", "This is a tissue", "replace", "is", "**", 0) + EQ("cobob", "bobob", "replace", "bob", "cob") + EQ("cobobXcobocob", "bobobXbobobob", "replace", "bob", "cob") + EQ("bobob", "bobob", "replace", "bot", "bot") + + # replace single character (len(from)==1, len(to)>1) + EQ("ReyKKjaviKK", "Reykjavik", "replace", "k", "KK") + EQ("ReyKKjaviKK", "Reykjavik", "replace", "k", "KK", -1) + EQ("ReyKKjaviKK", "Reykjavik", "replace", "k", "KK", sys.maxint) + EQ("ReyKKjaviKK", "Reykjavik", "replace", "k", "KK", 2) + EQ("ReyKKjavik", "Reykjavik", "replace", "k", "KK", 1) + EQ("Reykjavik", "Reykjavik", "replace", "k", "KK", 0) + EQ("A----B----C----", "A.B.C.", "replace", ".", "----") + + EQ("Reykjavik", "Reykjavik", "replace", "q", "KK") + + # replace substring (len(from)>1, len(to)!=len(from)) + EQ("ham, ham, eggs and ham", "spam, spam, eggs and spam", + "replace", "spam", "ham") + EQ("ham, ham, eggs and ham", "spam, spam, eggs and spam", + "replace", "spam", "ham", sys.maxint) + EQ("ham, ham, eggs and ham", "spam, spam, eggs and spam", + "replace", "spam", "ham", -1) + EQ("ham, ham, eggs and ham", "spam, spam, eggs and spam", + "replace", "spam", "ham", 4) + EQ("ham, ham, eggs and ham", "spam, spam, eggs and spam", + "replace", "spam", "ham", 3) + EQ("ham, ham, eggs and spam", "spam, spam, eggs and spam", + "replace", "spam", "ham", 2) + EQ("ham, spam, eggs and spam", "spam, spam, eggs and spam", + "replace", "spam", "ham", 1) + EQ("spam, spam, eggs and spam", "spam, spam, eggs and spam", + "replace", "spam", "ham", 0) + + EQ("bobob", "bobobob", "replace", "bobob", "bob") + EQ("bobobXbobob", "bobobobXbobobob", "replace", "bobob", "bob") + EQ("BOBOBOB", "BOBOBOB", "replace", "bob", "bobby") + + # self.checkequal('one@two!three!', 'one!two!three!', 'replace', '!', '@', 1) self.checkequal('onetwothree', 'one!two!three!', 'replace', '!', '') self.checkequal('one@two@three!', 'one!two!three!', 'replace', '!', '@', 2) @@ -403,6 +652,15 @@ class CommonTest(unittest.TestCase): self.checkraises(TypeError, 'hello', 'replace', 42, 'h') self.checkraises(TypeError, 'hello', 'replace', 'h', 42) + def test_replace_overflow(self): + # Check for overflow checking on 32 bit machines + if sys.maxint != 2147483647: + return + A2_16 = "A" * (2**16) + self.checkraises(OverflowError, A2_16, "replace", "", A2_16) + self.checkraises(OverflowError, A2_16, "replace", "A", A2_16) + self.checkraises(OverflowError, A2_16, "replace", "AA", A2_16+A2_16) + def test_zfill(self): self.checkequal('123', '123', 'zfill', 2) self.checkequal('123', '123', 'zfill', 3) @@ -720,6 +978,55 @@ class MixinStrUnicodeUserStringTest: else: 
self.checkcall(format, "__mod__", value) + def test_inplace_rewrites(self): + # Check that strings don't copy and modify cached single-character strings + self.checkequal('a', 'A', 'lower') + self.checkequal(True, 'A', 'isupper') + self.checkequal('A', 'a', 'upper') + self.checkequal(True, 'a', 'islower') + + self.checkequal('a', 'A', 'replace', 'A', 'a') + self.checkequal(True, 'A', 'isupper') + + self.checkequal('A', 'a', 'capitalize') + self.checkequal(True, 'a', 'islower') + + self.checkequal('A', 'a', 'swapcase') + self.checkequal(True, 'a', 'islower') + + self.checkequal('A', 'a', 'title') + self.checkequal(True, 'a', 'islower') + + def test_partition(self): + + self.checkequal(('this is the par', 'ti', 'tion method'), + 'this is the partition method', 'partition', 'ti') + + # from raymond's original specification + S = 'http://www.python.org' + self.checkequal(('http', '://', 'www.python.org'), S, 'partition', '://') + self.checkequal(('http://www.python.org', '', ''), S, 'partition', '?') + self.checkequal(('', 'http://', 'www.python.org'), S, 'partition', 'http://') + self.checkequal(('http://www.python.', 'org', ''), S, 'partition', 'org') + + self.checkraises(ValueError, S, 'partition', '') + self.checkraises(TypeError, S, 'partition', None) + + def test_rpartition(self): + + self.checkequal(('this is the rparti', 'ti', 'on method'), + 'this is the rpartition method', 'rpartition', 'ti') + + # from raymond's original specification + S = 'http://www.python.org' + self.checkequal(('http', '://', 'www.python.org'), S, 'rpartition', '://') + self.checkequal(('http://www.python.org', '', ''), S, 'rpartition', '?') + self.checkequal(('', 'http://', 'www.python.org'), S, 'rpartition', 'http://') + self.checkequal(('http://www.python.', 'org', ''), S, 'rpartition', 'org') + + self.checkraises(ValueError, S, 'rpartition', '') + self.checkraises(TypeError, S, 'rpartition', None) + class MixinStrStringUserStringTest: # Additional tests for 8bit strings, i.e. str, UserString and diff --git a/Lib/test/test_bigmem.py b/Lib/test/test_bigmem.py new file mode 100644 index 0000000..255428f --- /dev/null +++ b/Lib/test/test_bigmem.py @@ -0,0 +1,964 @@ +from test import test_support +from test.test_support import bigmemtest, _1G, _2G + +import unittest +import operator +import string +import sys + +# Bigmem testing houserules: +# +# - Try not to allocate too many large objects. It's okay to rely on +# refcounting semantics, but don't forget that 's = create_largestring()' +# doesn't release the old 's' (if it exists) until well after its new +# value has been created. Use 'del s' before the create_largestring call. +# +# - Do *not* compare large objects using assertEquals or similar. It's a +# lengthy operation and the error message will be utterly useless due to +# its size. To check whether a result has the right contents, it is better +# to use the strip or count methods, or compare meaningful slices. +# +# - Don't forget to test for large indices, offsets and results and such, +# in addition to large sizes. +# +# - When repeating an object (say, a substring, or a small list) to create +# a large object, make the subobject of a length that is not a power of +# 2. That way, int-wrapping problems are more easily detected. +# +# - While the bigmemtest decorator speaks of 'minsize', all tests will +# actually be called with a much smaller number too, in the normal +# test run (5Kb currently.)
This is so the tests themselves get frequent +# testing. Consequently, always make all large allocations based on the +# passed-in 'size', and don't rely on the size being very large. Also, +# memuse-per-size should remain sane (less than a few thousand); if your +# test uses more, adjust 'size' upward, instead. + +class StrTest(unittest.TestCase): + @bigmemtest(minsize=_2G, memuse=2) + def test_capitalize(self, size): + SUBSTR = ' abc def ghi' + s = '-' * size + SUBSTR + caps = s.capitalize() + self.assertEquals(caps[-len(SUBSTR):], + SUBSTR.capitalize()) + self.assertEquals(caps.lstrip('-'), SUBSTR) + + @bigmemtest(minsize=_2G + 10, memuse=1) + def test_center(self, size): + SUBSTR = ' abc def ghi' + s = SUBSTR.center(size) + self.assertEquals(len(s), size) + lpadsize = rpadsize = (len(s) - len(SUBSTR)) // 2 + if len(s) % 2: + lpadsize += 1 + self.assertEquals(s[lpadsize:-rpadsize], SUBSTR) + self.assertEquals(s.strip(), SUBSTR.strip()) + + @bigmemtest(minsize=_2G, memuse=2) + def test_count(self, size): + SUBSTR = ' abc def ghi' + s = '.' * size + SUBSTR + self.assertEquals(s.count('.'), size) + s += '.' + self.assertEquals(s.count('.'), size + 1) + self.assertEquals(s.count(' '), 3) + self.assertEquals(s.count('i'), 1) + self.assertEquals(s.count('j'), 0) + + @bigmemtest(minsize=0, memuse=1) + def test_decode(self, size): + pass + + @bigmemtest(minsize=0, memuse=1) + def test_encode(self, size): + pass + + @bigmemtest(minsize=_2G, memuse=2) + def test_endswith(self, size): + SUBSTR = ' abc def ghi' + s = '-' * size + SUBSTR + self.failUnless(s.endswith(SUBSTR)) + self.failUnless(s.endswith(s)) + s2 = '...' + s + self.failUnless(s2.endswith(s)) + self.failIf(s.endswith('a' + SUBSTR)) + self.failIf(SUBSTR.endswith(s)) + + @bigmemtest(minsize=_2G + 10, memuse=2) + def test_expandtabs(self, size): + s = '-' * size + tabsize = 8 + self.assertEquals(s.expandtabs(), s) + del s + slen, remainder = divmod(size, tabsize) + s = ' \t' * slen + s = s.expandtabs(tabsize) + self.assertEquals(len(s), size - remainder) + self.assertEquals(len(s.strip(' ')), 0) + + @bigmemtest(minsize=_2G, memuse=2) + def test_find(self, size): + SUBSTR = ' abc def ghi' + sublen = len(SUBSTR) + s = ''.join([SUBSTR, '-' * size, SUBSTR]) + self.assertEquals(s.find(' '), 0) + self.assertEquals(s.find(SUBSTR), 0) + self.assertEquals(s.find(' ', sublen), sublen + size) + self.assertEquals(s.find(SUBSTR, len(SUBSTR)), sublen + size) + self.assertEquals(s.find('i'), SUBSTR.find('i')) + self.assertEquals(s.find('i', sublen), + sublen + size + SUBSTR.find('i')) + self.assertEquals(s.find('i', size), + sublen + size + SUBSTR.find('i')) + self.assertEquals(s.find('j'), -1) + + @bigmemtest(minsize=_2G, memuse=2) + def test_index(self, size): + SUBSTR = ' abc def ghi' + sublen = len(SUBSTR) + s = ''.join([SUBSTR, '-' * size, SUBSTR]) + self.assertEquals(s.index(' '), 0) + self.assertEquals(s.index(SUBSTR), 0) + self.assertEquals(s.index(' ', sublen), sublen + size) + self.assertEquals(s.index(SUBSTR, sublen), sublen + size) + self.assertEquals(s.index('i'), SUBSTR.index('i')) + self.assertEquals(s.index('i', sublen), + sublen + size + SUBSTR.index('i')) + self.assertEquals(s.index('i', size), + sublen + size + SUBSTR.index('i')) + self.assertRaises(ValueError, s.index, 'j') + + @bigmemtest(minsize=_2G, memuse=2) + def test_isalnum(self, size): + SUBSTR = '123456' + s = 'a' * size + SUBSTR + self.failUnless(s.isalnum()) + s += '.'
+ self.failIf(s.isalnum()) + + @bigmemtest(minsize=_2G, memuse=2) + def test_isalpha(self, size): + SUBSTR = 'zzzzzzz' + s = 'a' * size + SUBSTR + self.failUnless(s.isalpha()) + s += '.' + self.failIf(s.isalpha()) + + @bigmemtest(minsize=_2G, memuse=2) + def test_isdigit(self, size): + SUBSTR = '123456' + s = '9' * size + SUBSTR + self.failUnless(s.isdigit()) + s += 'z' + self.failIf(s.isdigit()) + + @bigmemtest(minsize=_2G, memuse=2) + def test_islower(self, size): + chars = ''.join([ chr(c) for c in range(255) if not chr(c).isupper() ]) + repeats = size // len(chars) + 2 + s = chars * repeats + self.failUnless(s.islower()) + s += 'A' + self.failIf(s.islower()) + + @bigmemtest(minsize=_2G, memuse=2) + def test_isspace(self, size): + whitespace = ' \f\n\r\t\v' + repeats = size // len(whitespace) + 2 + s = whitespace * repeats + self.failUnless(s.isspace()) + s += 'j' + self.failIf(s.isspace()) + + @bigmemtest(minsize=_2G, memuse=2) + def test_istitle(self, size): + SUBSTR = '123456' + s = ''.join(['A', 'a' * size, SUBSTR]) + self.failUnless(s.istitle()) + s += 'A' + self.failUnless(s.istitle()) + s += 'aA' + self.failIf(s.istitle()) + + @bigmemtest(minsize=_2G, memuse=2) + def test_isupper(self, size): + chars = ''.join([ chr(c) for c in range(255) if not chr(c).islower() ]) + repeats = size // len(chars) + 2 + s = chars * repeats + self.failUnless(s.isupper()) + s += 'a' + self.failIf(s.isupper()) + + @bigmemtest(minsize=_2G, memuse=2) + def test_join(self, size): + s = 'A' * size + x = s.join(['aaaaa', 'bbbbb']) + self.assertEquals(x.count('a'), 5) + self.assertEquals(x.count('b'), 5) + self.failUnless(x.startswith('aaaaaA')) + self.failUnless(x.endswith('Abbbbb')) + + @bigmemtest(minsize=_2G + 10, memuse=1) + def test_ljust(self, size): + SUBSTR = ' abc def ghi' + s = SUBSTR.ljust(size) + self.failUnless(s.startswith(SUBSTR + ' ')) + self.assertEquals(len(s), size) + self.assertEquals(s.strip(), SUBSTR.strip()) + + @bigmemtest(minsize=_2G + 10, memuse=2) + def test_lower(self, size): + s = 'A' * size + s = s.lower() + self.assertEquals(len(s), size) + self.assertEquals(s.count('a'), size) + + @bigmemtest(minsize=_2G + 10, memuse=1) + def test_lstrip(self, size): + SUBSTR = 'abc def ghi' + s = SUBSTR.rjust(size) + self.assertEquals(len(s), size) + self.assertEquals(s.lstrip(), SUBSTR.lstrip()) + del s + s = SUBSTR.ljust(size) + self.assertEquals(len(s), size) + stripped = s.lstrip() + self.failUnless(stripped is s) + + @bigmemtest(minsize=_2G + 10, memuse=2) + def test_replace(self, size): + replacement = 'a' + s = ' ' * size + s = s.replace(' ', replacement) + self.assertEquals(len(s), size) + self.assertEquals(s.count(replacement), size) + s = s.replace(replacement, ' ', size - 4) + self.assertEquals(len(s), size) + self.assertEquals(s.count(replacement), 4) + self.assertEquals(s[-10:], ' aaaa') + + @bigmemtest(minsize=_2G, memuse=2) + def test_rfind(self, size): + SUBSTR = ' abc def ghi' + sublen = len(SUBSTR) + s = ''.join([SUBSTR, '-' * size, SUBSTR]) + self.assertEquals(s.rfind(' '), sublen + size + SUBSTR.rfind(' ')) + self.assertEquals(s.rfind(SUBSTR), sublen + size) + self.assertEquals(s.rfind(' ', 0, size), SUBSTR.rfind(' ')) + self.assertEquals(s.rfind(SUBSTR, 0, sublen + size), 0) + self.assertEquals(s.rfind('i'), sublen + size + SUBSTR.rfind('i')) + self.assertEquals(s.rfind('i', 0, sublen), SUBSTR.rfind('i')) + self.assertEquals(s.rfind('i', 0, sublen + size), + SUBSTR.rfind('i')) + self.assertEquals(s.rfind('j'), -1) + + @bigmemtest(minsize=_2G, memuse=2) + def 
test_rindex(self, size): + SUBSTR = ' abc def ghi' + sublen = len(SUBSTR) + s = ''.join([SUBSTR, '-' * size, SUBSTR]) + self.assertEquals(s.rindex(' '), + sublen + size + SUBSTR.rindex(' ')) + self.assertEquals(s.rindex(SUBSTR), sublen + size) + self.assertEquals(s.rindex(' ', 0, sublen + size - 1), + SUBSTR.rindex(' ')) + self.assertEquals(s.rindex(SUBSTR, 0, sublen + size), 0) + self.assertEquals(s.rindex('i'), + sublen + size + SUBSTR.rindex('i')) + self.assertEquals(s.rindex('i', 0, sublen), SUBSTR.rindex('i')) + self.assertEquals(s.rindex('i', 0, sublen + size), + SUBSTR.rindex('i')) + self.assertRaises(ValueError, s.rindex, 'j') + + @bigmemtest(minsize=_2G + 10, memuse=1) + def test_rjust(self, size): + SUBSTR = ' abc def ghi' + s = SUBSTR.ljust(size) + self.failUnless(s.startswith(SUBSTR + ' ')) + self.assertEquals(len(s), size) + self.assertEquals(s.strip(), SUBSTR.strip()) + + @bigmemtest(minsize=_2G + 10, memuse=1) + def test_rstrip(self, size): + SUBSTR = ' abc def ghi' + s = SUBSTR.ljust(size) + self.assertEquals(len(s), size) + self.assertEquals(s.rstrip(), SUBSTR.rstrip()) + del s + s = SUBSTR.rjust(size) + self.assertEquals(len(s), size) + stripped = s.rstrip() + self.failUnless(stripped is s) + + # The test takes about size bytes to build a string, and then about + # sqrt(size) substrings of sqrt(size) in size and a list to + # hold sqrt(size) items. It's close but just over 2x size. + @bigmemtest(minsize=_2G, memuse=2.1) + def test_split_small(self, size): + # Crudely calculate an estimate so that the result of s.split won't + # take up an inordinate amount of memory + chunksize = int(size ** 0.5 + 2) + SUBSTR = 'a' + ' ' * chunksize + s = SUBSTR * chunksize + l = s.split() + self.assertEquals(len(l), chunksize) + self.assertEquals(set(l), set(['a'])) + del l + l = s.split('a') + self.assertEquals(len(l), chunksize + 1) + self.assertEquals(set(l), set(['', ' ' * chunksize])) + + # Allocates a string of twice size (and briefly two) and a list of + # size. Because of internal affairs, the s.split() call produces a + # list of size times the same one-character string, so we only + # suffer for the list size. (Otherwise, it'd cost another 48 times + # size in bytes!) Nevertheless, a list of size takes + # 8*size bytes. 
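+ # As a rough worked illustration of that estimate (assuming the 8-byte + # pointers of the 64-bit builds these bigmem runs target): the ' a' * size + # source string is about 2*size bytes, and the size-element result list + # costs another 8*size bytes in pointer slots, so the peak is roughly + # 2*size + 8*size = 10*size bytes -- presumably why memuse is 10 here.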
+ @bigmemtest(minsize=_2G + 5, memuse=10) + def test_split_large(self, size): + s = ' a' * size + ' ' + l = s.split() + self.assertEquals(len(l), size) + self.assertEquals(set(l), set(['a'])) + del l + l = s.split('a') + self.assertEquals(len(l), size + 1) + self.assertEquals(set(l), set([' '])) + + @bigmemtest(minsize=_2G, memuse=2.1) + def test_splitlines(self, size): + # Crudely calculate an estimate so that the result of s.split won't + # take up an inordinate amount of memory + chunksize = int(size ** 0.5 + 2) // 2 + SUBSTR = ' ' * chunksize + '\n' + ' ' * chunksize + '\r\n' + s = SUBSTR * chunksize + l = s.splitlines() + self.assertEquals(len(l), chunksize * 2) + self.assertEquals(set(l), set([' ' * chunksize])) + + @bigmemtest(minsize=_2G, memuse=2) + def test_startswith(self, size): + SUBSTR = ' abc def ghi' + s = '-' * size + SUBSTR + self.failUnless(s.startswith(s)) + self.failUnless(s.startswith('-' * size)) + self.failIf(s.startswith(SUBSTR)) + + @bigmemtest(minsize=_2G, memuse=1) + def test_strip(self, size): + SUBSTR = ' abc def ghi ' + s = SUBSTR.rjust(size) + self.assertEquals(len(s), size) + self.assertEquals(s.strip(), SUBSTR.strip()) + del s + s = SUBSTR.ljust(size) + self.assertEquals(len(s), size) + self.assertEquals(s.strip(), SUBSTR.strip()) + + @bigmemtest(minsize=_2G, memuse=2) + def test_swapcase(self, size): + SUBSTR = "aBcDeFG12.'\xa9\x00" + sublen = len(SUBSTR) + repeats = size // sublen + 2 + s = SUBSTR * repeats + s = s.swapcase() + self.assertEquals(len(s), sublen * repeats) + self.assertEquals(s[:sublen * 3], SUBSTR.swapcase() * 3) + self.assertEquals(s[-sublen * 3:], SUBSTR.swapcase() * 3) + + @bigmemtest(minsize=_2G, memuse=2) + def test_title(self, size): + SUBSTR = 'SpaaHAaaAaham' + s = SUBSTR * (size // len(SUBSTR) + 2) + s = s.title() + self.failUnless(s.startswith((SUBSTR * 3).title())) + self.failUnless(s.endswith(SUBSTR.lower() * 3)) + + @bigmemtest(minsize=_2G, memuse=2) + def test_translate(self, size): + trans = string.maketrans('.aZ', '-!$') + SUBSTR = 'aZz.z.Aaz.' + sublen = len(SUBSTR) + repeats = size // sublen + 2 + s = SUBSTR * repeats + s = s.translate(trans) + self.assertEquals(len(s), repeats * sublen) + self.assertEquals(s[:sublen], SUBSTR.translate(trans)) + self.assertEquals(s[-sublen:], SUBSTR.translate(trans)) + self.assertEquals(s.count('.'), 0) + self.assertEquals(s.count('!'), repeats * 2) + self.assertEquals(s.count('z'), repeats * 3) + + @bigmemtest(minsize=_2G + 5, memuse=2) + def test_upper(self, size): + s = 'a' * size + s = s.upper() + self.assertEquals(len(s), size) + self.assertEquals(s.count('A'), size) + + @bigmemtest(minsize=_2G + 20, memuse=1) + def test_zfill(self, size): + SUBSTR = '-568324723598234' + s = SUBSTR.zfill(size) + self.failUnless(s.endswith('0' + SUBSTR[1:])) + self.failUnless(s.startswith('-0')) + self.assertEquals(len(s), size) + self.assertEquals(s.count('0'), size - len(SUBSTR)) + + @bigmemtest(minsize=_2G + 10, memuse=2) + def test_format(self, size): + s = '-' * size + sf = '%s' % (s,) + self.failUnless(s == sf) + del sf + sf = '..%s..' % (s,) + self.assertEquals(len(sf), len(s) + 4) + self.failUnless(sf.startswith('..-')) + self.failUnless(sf.endswith('-..')) + del s, sf + + size //= 2 + edge = '-' * size + s = ''.join([edge, '%s', edge]) + del edge + s = s % '...' 
+ self.assertEquals(len(s), size * 2 + 3) + self.assertEquals(s.count('.'), 3) + self.assertEquals(s.count('-'), size * 2) + + @bigmemtest(minsize=_2G + 10, memuse=2) + def test_repr_small(self, size): + s = '-' * size + s = repr(s) + self.assertEquals(len(s), size + 2) + self.assertEquals(s[0], "'") + self.assertEquals(s[-1], "'") + self.assertEquals(s.count('-'), size) + del s + # repr() will create a string four times as large as this 'binary + # string', but we don't want to allocate much more than twice + # size in total. (We do extra testing in test_repr_large()) + size = size // 5 * 2 + s = '\x00' * size + s = repr(s) + self.assertEquals(len(s), size * 4 + 2) + self.assertEquals(s[0], "'") + self.assertEquals(s[-1], "'") + self.assertEquals(s.count('\\'), size) + self.assertEquals(s.count('0'), size * 2) + + @bigmemtest(minsize=_2G + 10, memuse=5) + def test_repr_large(self, size): + s = '\x00' * size + s = repr(s) + self.assertEquals(len(s), size * 4 + 2) + self.assertEquals(s[0], "'") + self.assertEquals(s[-1], "'") + self.assertEquals(s.count('\\'), size) + self.assertEquals(s.count('0'), size * 2) + + # This test is meaningful even with size < 2G, as long as the + # doubled string is > 2G (but it tests more if both are > 2G :) + @bigmemtest(minsize=_1G + 2, memuse=3) + def test_concat(self, size): + s = '.' * size + self.assertEquals(len(s), size) + s = s + s + self.assertEquals(len(s), size * 2) + self.assertEquals(s.count('.'), size * 2) + + # This test is meaningful even with size < 2G, as long as the + # repeated string is > 2G (but it tests more if both are > 2G :) + @bigmemtest(minsize=_1G + 2, memuse=3) + def test_repeat(self, size): + s = '.' * size + self.assertEquals(len(s), size) + s = s * 2 + self.assertEquals(len(s), size * 2) + self.assertEquals(s.count('.'), size * 2) + + @bigmemtest(minsize=_2G + 20, memuse=1) + def test_slice_and_getitem(self, size): + SUBSTR = '0123456789' + sublen = len(SUBSTR) + s = SUBSTR * (size // sublen) + stepsize = len(s) // 100 + stepsize = stepsize - (stepsize % sublen) + for i in range(0, len(s) - stepsize, stepsize): + self.assertEquals(s[i], SUBSTR[0]) + self.assertEquals(s[i:i + sublen], SUBSTR) + self.assertEquals(s[i:i + sublen:2], SUBSTR[::2]) + if i > 0: + self.assertEquals(s[i + sublen - 1:i - 1:-3], + SUBSTR[sublen::-3]) + # Make sure we do some slicing and indexing near the end of the + # string, too. 
+ self.assertEquals(s[len(s) - 1], SUBSTR[-1]) + self.assertEquals(s[-1], SUBSTR[-1]) + self.assertEquals(s[len(s) - 10], SUBSTR[0]) + self.assertEquals(s[-sublen], SUBSTR[0]) + self.assertEquals(s[len(s):], '') + self.assertEquals(s[len(s) - 1:], SUBSTR[-1]) + self.assertEquals(s[-1:], SUBSTR[-1]) + self.assertEquals(s[len(s) - sublen:], SUBSTR) + self.assertEquals(s[-sublen:], SUBSTR) + self.assertEquals(len(s[:]), len(s)) + self.assertEquals(len(s[:len(s) - 5]), len(s) - 5) + self.assertEquals(len(s[5:-5]), len(s) - 10) + + self.assertRaises(IndexError, operator.getitem, s, len(s)) + self.assertRaises(IndexError, operator.getitem, s, len(s) + 1) + self.assertRaises(IndexError, operator.getitem, s, len(s) + 1<<31) + + @bigmemtest(minsize=_2G, memuse=2) + def test_contains(self, size): + SUBSTR = '0123456789' + edge = '-' * (size // 2) + s = ''.join([edge, SUBSTR, edge]) + del edge + self.failUnless(SUBSTR in s) + self.failIf(SUBSTR * 2 in s) + self.failUnless('-' in s) + self.failIf('a' in s) + s += 'a' + self.failUnless('a' in s) + + @bigmemtest(minsize=_2G + 10, memuse=2) + def test_compare(self, size): + s1 = '-' * size + s2 = '-' * size + self.failUnless(s1 == s2) + del s2 + s2 = s1 + 'a' + self.failIf(s1 == s2) + del s2 + s2 = '.' * size + self.failIf(s1 == s2) + + @bigmemtest(minsize=_2G + 10, memuse=1) + def test_hash(self, size): + # Not sure if we can do any meaningful tests here... Even if we + # start relying on the exact algorithm used, the result will be + # different depending on the size of the C 'long int'. Even this + # test is dodgy (there's no *guarantee* that the two things should + # have a different hash, even if they, in the current + # implementation, almost always do.) + s = '\x00' * size + h1 = hash(s) + del s + s = '\x00' * (size + 1) + self.failIf(h1 == hash(s)) + +class TupleTest(unittest.TestCase): + + # Tuples have a small, fixed-sized head and an array of pointers to + # data. Since we're testing 64-bit addressing, we can assume that the + # pointers are 8 bytes, and that thus that the tuples take up 8 bytes + # per size. + + # As a side-effect of testing long tuples, these tests happen to test + # having more than 2<<31 references to any given object. Hence the + # use of different types of objects as contents in different tests. + + @bigmemtest(minsize=_2G + 2, memuse=16) + def test_compare(self, size): + t1 = (u'',) * size + t2 = (u'',) * size + self.failUnless(t1 == t2) + del t2 + t2 = (u'',) * (size + 1) + self.failIf(t1 == t2) + del t2 + t2 = (1,) * size + self.failIf(t1 == t2) + + # Test concatenating into a single tuple of more than 2G in length, + # and concatenating a tuple of more than 2G in length separately, so + # the smaller test still gets run even if there isn't memory for the + # larger test (but we still let the tester know the larger test is + # skipped, in verbose mode.) 
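+ # Rough accounting for the concat tests below (using the 8-bytes-per-slot + # figure from the comment at the top of this class): the original tuple + # costs about 8*size bytes and the t + t result another 16*size bytes, + # which is roughly where the memuse=24 in the decorators comes from.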
+ def basic_concat_test(self, size): + t = ((),) * size + self.assertEquals(len(t), size) + t = t + t + self.assertEquals(len(t), size * 2) + + @bigmemtest(minsize=_2G // 2 + 2, memuse=24) + def test_concat_small(self, size): + return self.basic_concat_test(size) + + @bigmemtest(minsize=_2G + 2, memuse=24) + def test_concat_large(self, size): + return self.basic_concat_test(size) + + @bigmemtest(minsize=_2G // 5 + 10, memuse=8 * 5) + def test_contains(self, size): + t = (1, 2, 3, 4, 5) * size + self.assertEquals(len(t), size * 5) + self.failUnless(5 in t) + self.failIf((1, 2, 3, 4, 5) in t) + self.failIf(0 in t) + + @bigmemtest(minsize=_2G + 10, memuse=8) + def test_hash(self, size): + t1 = (0,) * size + h1 = hash(t1) + del t1 + t2 = (0,) * (size + 1) + self.failIf(h1 == hash(t2)) + + @bigmemtest(minsize=_2G + 10, memuse=8) + def test_index_and_slice(self, size): + t = (None,) * size + self.assertEquals(len(t), size) + self.assertEquals(t[-1], None) + self.assertEquals(t[5], None) + self.assertEquals(t[size - 1], None) + self.assertRaises(IndexError, operator.getitem, t, size) + self.assertEquals(t[:5], (None,) * 5) + self.assertEquals(t[-5:], (None,) * 5) + self.assertEquals(t[20:25], (None,) * 5) + self.assertEquals(t[-25:-20], (None,) * 5) + self.assertEquals(t[size - 5:], (None,) * 5) + self.assertEquals(t[size - 5:size], (None,) * 5) + self.assertEquals(t[size - 6:size - 2], (None,) * 4) + self.assertEquals(t[size:size], ()) + self.assertEquals(t[size:size+5], ()) + + # Like test_concat, split in two. + def basic_test_repeat(self, size): + t = ('',) * size + self.assertEquals(len(t), size) + t = t * 2 + self.assertEquals(len(t), size * 2) + + @bigmemtest(minsize=_2G // 2 + 2, memuse=24) + def test_repeat_small(self, size): + return self.basic_test_repeat(size) + + @bigmemtest(minsize=_2G + 2, memuse=24) + def test_repeat_large(self, size): + return self.basic_test_repeat(size) + + # Like test_concat, split in two. + def basic_test_repr(self, size): + t = (0,) * size + s = repr(t) + # The repr of a tuple of 0's is exactly three times the tuple length. + self.assertEquals(len(s), size * 3) + self.assertEquals(s[:5], '(0, 0') + self.assertEquals(s[-5:], '0, 0)') + self.assertEquals(s.count('0'), size) + + @bigmemtest(minsize=_2G // 3 + 2, memuse=8 + 3) + def test_repr_small(self, size): + return self.basic_test_repr(size) + + @bigmemtest(minsize=_2G + 2, memuse=8 + 3) + def test_repr_large(self, size): + return self.basic_test_repr(size) + +class ListTest(unittest.TestCase): + + # Like tuples, lists have a small, fixed-sized head and an array of + # pointers to data, so 8 bytes per size. Also like tuples, we make the + # lists hold references to various objects to test their refcount + # limits. + + @bigmemtest(minsize=_2G + 2, memuse=16) + def test_compare(self, size): + l1 = [u''] * size + l2 = [u''] * size + self.failUnless(l1 == l2) + del l2 + l2 = [u''] * (size + 1) + self.failIf(l1 == l2) + del l2 + l2 = [2] * size + self.failIf(l1 == l2) + + # Test concatenating into a single list of more than 2G in length, + # and concatenating a list of more than 2G in length separately, so + # the smaller test still gets run even if there isn't memory for the + # larger test (but we still let the tester know the larger test is + # skipped, in verbose mode.) 
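+ # Note that the [[]] * size below creates size references to one and the + # same empty list (e.g. l = [[]] * 3; l[0] is l[2] evaluates to True), so + # the shared object's refcount itself becomes huge at these sizes -- part + # of what these tests are meant to exercise. The memuse=24 accounting + # mirrors TupleTest: roughly 8*size bytes for the original list plus + # 16*size for the concatenated result.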
+ def basic_test_concat(self, size): + l = [[]] * size + self.assertEquals(len(l), size) + l = l + l + self.assertEquals(len(l), size * 2) + + @bigmemtest(minsize=_2G // 2 + 2, memuse=24) + def test_concat_small(self, size): + return self.basic_test_concat(size) + + @bigmemtest(minsize=_2G + 2, memuse=24) + def test_concat_large(self, size): + return self.basic_test_concat(size) + + def basic_test_inplace_concat(self, size): + l = [sys.stdout] * size + l += l + self.assertEquals(len(l), size * 2) + self.failUnless(l[0] is l[-1]) + self.failUnless(l[size - 1] is l[size + 1]) + + @bigmemtest(minsize=_2G // 2 + 2, memuse=24) + def test_inplace_concat_small(self, size): + return self.basic_test_inplace_concat(size) + + @bigmemtest(minsize=_2G + 2, memuse=24) + def test_inplace_concat_large(self, size): + return self.basic_test_inplace_concat(size) + + @bigmemtest(minsize=_2G // 5 + 10, memuse=8 * 5) + def test_contains(self, size): + l = [1, 2, 3, 4, 5] * size + self.assertEquals(len(l), size * 5) + self.failUnless(5 in l) + self.failIf([1, 2, 3, 4, 5] in l) + self.failIf(0 in l) + + @bigmemtest(minsize=_2G + 10, memuse=8) + def test_hash(self, size): + l = [0] * size + self.failUnlessRaises(TypeError, hash, l) + + @bigmemtest(minsize=_2G + 10, memuse=8) + def test_index_and_slice(self, size): + l = [None] * size + self.assertEquals(len(l), size) + self.assertEquals(l[-1], None) + self.assertEquals(l[5], None) + self.assertEquals(l[size - 1], None) + self.assertRaises(IndexError, operator.getitem, l, size) + self.assertEquals(l[:5], [None] * 5) + self.assertEquals(l[-5:], [None] * 5) + self.assertEquals(l[20:25], [None] * 5) + self.assertEquals(l[-25:-20], [None] * 5) + self.assertEquals(l[size - 5:], [None] * 5) + self.assertEquals(l[size - 5:size], [None] * 5) + self.assertEquals(l[size - 6:size - 2], [None] * 4) + self.assertEquals(l[size:size], []) + self.assertEquals(l[size:size+5], []) + + l[size - 2] = 5 + self.assertEquals(len(l), size) + self.assertEquals(l[-3:], [None, 5, None]) + self.assertEquals(l.count(5), 1) + self.assertRaises(IndexError, operator.setitem, l, size, 6) + self.assertEquals(len(l), size) + + l[size - 7:] = [1, 2, 3, 4, 5] + size -= 2 + self.assertEquals(len(l), size) + self.assertEquals(l[-7:], [None, None, 1, 2, 3, 4, 5]) + + l[:7] = [1, 2, 3, 4, 5] + size -= 2 + self.assertEquals(len(l), size) + self.assertEquals(l[:7], [1, 2, 3, 4, 5, None, None]) + + del l[size - 1] + size -= 1 + self.assertEquals(len(l), size) + self.assertEquals(l[-1], 4) + + del l[-2:] + size -= 2 + self.assertEquals(len(l), size) + self.assertEquals(l[-1], 2) + + del l[0] + size -= 1 + self.assertEquals(len(l), size) + self.assertEquals(l[0], 2) + + del l[:2] + size -= 2 + self.assertEquals(len(l), size) + self.assertEquals(l[0], 4) + + # Like test_concat, split in two. 
+ def basic_test_repeat(self, size): + l = [] * size + self.failIf(l) + l = [''] * size + self.assertEquals(len(l), size) + l = l * 2 + self.assertEquals(len(l), size * 2) + + @bigmemtest(minsize=_2G // 2 + 2, memuse=24) + def test_repeat_small(self, size): + return self.basic_test_repeat(size) + + @bigmemtest(minsize=_2G + 2, memuse=24) + def test_repeat_large(self, size): + return self.basic_test_repeat(size) + + def basic_test_inplace_repeat(self, size): + l = [''] + l *= size + self.assertEquals(len(l), size) + self.failUnless(l[0] is l[-1]) + del l + + l = [''] * size + l *= 2 + self.assertEquals(len(l), size * 2) + self.failUnless(l[size - 1] is l[-1]) + + @bigmemtest(minsize=_2G // 2 + 2, memuse=16) + def test_inplace_repeat_small(self, size): + return self.basic_test_inplace_repeat(size) + + @bigmemtest(minsize=_2G + 2, memuse=16) + def test_inplace_repeat_large(self, size): + return self.basic_test_inplace_repeat(size) + + def basic_test_repr(self, size): + l = [0] * size + s = repr(l) + # The repr of a list of 0's is exactly three times the list length. + self.assertEquals(len(s), size * 3) + self.assertEquals(s[:5], '[0, 0') + self.assertEquals(s[-5:], '0, 0]') + self.assertEquals(s.count('0'), size) + + @bigmemtest(minsize=_2G // 3 + 2, memuse=8 + 3) + def test_repr_small(self, size): + return self.basic_test_repr(size) + + @bigmemtest(minsize=_2G + 2, memuse=8 + 3) + def test_repr_large(self, size): + return self.basic_test_repr(size) + + # list overallocates ~1/8th of the total size (on first expansion) so + # the single list.append call puts memuse at 9 bytes per size. + @bigmemtest(minsize=_2G, memuse=9) + def test_append(self, size): + l = [object()] * size + l.append(object()) + self.assertEquals(len(l), size+1) + self.failUnless(l[-3] is l[-2]) + self.failIf(l[-2] is l[-1]) + + @bigmemtest(minsize=_2G // 5 + 2, memuse=8 * 5) + def test_count(self, size): + l = [1, 2, 3, 4, 5] * size + self.assertEquals(l.count(1), size) + self.assertEquals(l.count("1"), 0) + + def basic_test_extend(self, size): + l = [file] * size + l.extend(l) + self.assertEquals(len(l), size * 2) + self.failUnless(l[0] is l[-1]) + self.failUnless(l[size - 1] is l[size + 1]) + + @bigmemtest(minsize=_2G // 2 + 2, memuse=16) + def test_extend_small(self, size): + return self.basic_test_extend(size) + + @bigmemtest(minsize=_2G + 2, memuse=16) + def test_extend_large(self, size): + return self.basic_test_extend(size) + + @bigmemtest(minsize=_2G // 5 + 2, memuse=8 * 5) + def test_index(self, size): + l = [1L, 2L, 3L, 4L, 5L] * size + size *= 5 + self.assertEquals(l.index(1), 0) + self.assertEquals(l.index(5, size - 5), size - 1) + self.assertEquals(l.index(5, size - 5, size), size - 1) + self.assertRaises(ValueError, l.index, 1, size - 4, size) + self.assertRaises(ValueError, l.index, 6L) + + # This tests suffers from overallocation, just like test_append. 
+ @bigmemtest(minsize=_2G + 10, memuse=9) + def test_insert(self, size): + l = [1.0] * size + l.insert(size - 1, "A") + size += 1 + self.assertEquals(len(l), size) + self.assertEquals(l[-3:], [1.0, "A", 1.0]) + + l.insert(size + 1, "B") + size += 1 + self.assertEquals(len(l), size) + self.assertEquals(l[-3:], ["A", 1.0, "B"]) + + l.insert(1, "C") + size += 1 + self.assertEquals(len(l), size) + self.assertEquals(l[:3], [1.0, "C", 1.0]) + self.assertEquals(l[size - 3:], ["A", 1.0, "B"]) + + @bigmemtest(minsize=_2G // 5 + 4, memuse=8 * 5) + def test_pop(self, size): + l = [u"a", u"b", u"c", u"d", u"e"] * size + size *= 5 + self.assertEquals(len(l), size) + + item = l.pop() + size -= 1 + self.assertEquals(len(l), size) + self.assertEquals(item, u"e") + self.assertEquals(l[-2:], [u"c", u"d"]) + + item = l.pop(0) + size -= 1 + self.assertEquals(len(l), size) + self.assertEquals(item, u"a") + self.assertEquals(l[:2], [u"b", u"c"]) + + item = l.pop(size - 2) + size -= 1 + self.assertEquals(len(l), size) + self.assertEquals(item, u"c") + self.assertEquals(l[-2:], [u"b", u"d"]) + + @bigmemtest(minsize=_2G + 10, memuse=8) + def test_remove(self, size): + l = [10] * size + self.assertEquals(len(l), size) + + l.remove(10) + size -= 1 + self.assertEquals(len(l), size) + + # Because of the earlier l.remove(), this append doesn't trigger + # a resize. + l.append(5) + size += 1 + self.assertEquals(len(l), size) + self.assertEquals(l[-2:], [10, 5]) + l.remove(5) + size -= 1 + self.assertEquals(len(l), size) + self.assertEquals(l[-2:], [10, 10]) + + @bigmemtest(minsize=_2G // 5 + 2, memuse=8 * 5) + def test_reverse(self, size): + l = [1, 2, 3, 4, 5] * size + l.reverse() + self.assertEquals(len(l), size * 5) + self.assertEquals(l[-5:], [5, 4, 3, 2, 1]) + self.assertEquals(l[:5], [5, 4, 3, 2, 1]) + + @bigmemtest(minsize=_2G // 5 + 2, memuse=8 * 5) + def test_sort(self, size): + l = [1, 2, 3, 4, 5] * size + l.sort() + self.assertEquals(len(l), size * 5) + self.assertEquals(l.count(1), size) + self.assertEquals(l[:10], [1] * 10) + self.assertEquals(l[-10:], [5] * 10) + +def test_main(): + test_support.run_unittest(StrTest, TupleTest, ListTest) + +if __name__ == '__main__': + if len(sys.argv) > 1: + test_support.set_memlimit(sys.argv[1]) + test_main() diff --git a/Lib/test/test_builtin.py b/Lib/test/test_builtin.py index ef4f407..71e2b0a 100644 --- a/Lib/test/test_builtin.py +++ b/Lib/test/test_builtin.py @@ -1,7 +1,8 @@ # Python test set -- built-in functions import test.test_support, unittest -from test.test_support import fcmp, have_unicode, TESTFN, unlink, run_unittest +from test.test_support import fcmp, have_unicode, TESTFN, unlink, \ + run_unittest, run_with_locale from operator import neg import sys, warnings, cStringIO, random, UserDict @@ -528,33 +529,20 @@ class BuiltinTest(unittest.TestCase): # Implementation limitation in PyFloat_FromString() self.assertRaises(ValueError, float, unicode("1"*10000)) + @run_with_locale('LC_NUMERIC', 'fr_FR', 'de_DE') def test_float_with_comma(self): # set locale to something that doesn't use '.' 
for the decimal point - try: - import locale - orig_locale = locale.setlocale(locale.LC_NUMERIC) - locale.setlocale(locale.LC_NUMERIC, 'fr_FR') - except: - # if we can't set the locale, just ignore this test - return - - try: - self.assertEqual(locale.localeconv()['decimal_point'], ',') - except: - # this test is worthless, just skip it and reset the locale - locale.setlocale(locale.LC_NUMERIC, orig_locale) + import locale + if not locale.localeconv()['decimal_point'] == ',': return - try: - self.assertEqual(float(" 3,14 "), 3.14) - self.assertEqual(float(" +3,14 "), 3.14) - self.assertEqual(float(" -3,14 "), -3.14) - self.assertRaises(ValueError, float, " 0x3.1 ") - self.assertRaises(ValueError, float, " -0x3.p-1 ") - self.assertEqual(float(" 25.e-1 "), 2.5) - self.assertEqual(fcmp(float(" .25e-1 "), .025), 0) - finally: - locale.setlocale(locale.LC_NUMERIC, orig_locale) + self.assertEqual(float(" 3,14 "), 3.14) + self.assertEqual(float(" +3,14 "), 3.14) + self.assertEqual(float(" -3,14 "), -3.14) + self.assertRaises(ValueError, float, " 0x3.1 ") + self.assertRaises(ValueError, float, " -0x3.p-1 ") + self.assertEqual(float(" 25.e-1 "), 2.5) + self.assertEqual(fcmp(float(" .25e-1 "), .025), 0) def test_floatconversion(self): # Make sure that calls to __float__() work properly @@ -693,6 +681,84 @@ class BuiltinTest(unittest.TestCase): self.assertEqual(int('0123', 0), 83) self.assertEqual(int('0x123', 16), 291) + # SF bug 1334662: int(string, base) wrong answers + # Various representations of 2**32 evaluated to 0 + # rather than 2**32 in previous versions + + self.assertEqual(int('100000000000000000000000000000000', 2), 4294967296L) + self.assertEqual(int('102002022201221111211', 3), 4294967296L) + self.assertEqual(int('10000000000000000', 4), 4294967296L) + self.assertEqual(int('32244002423141', 5), 4294967296L) + self.assertEqual(int('1550104015504', 6), 4294967296L) + self.assertEqual(int('211301422354', 7), 4294967296L) + self.assertEqual(int('40000000000', 8), 4294967296L) + self.assertEqual(int('12068657454', 9), 4294967296L) + self.assertEqual(int('4294967296', 10), 4294967296L) + self.assertEqual(int('1904440554', 11), 4294967296L) + self.assertEqual(int('9ba461594', 12), 4294967296L) + self.assertEqual(int('535a79889', 13), 4294967296L) + self.assertEqual(int('2ca5b7464', 14), 4294967296L) + self.assertEqual(int('1a20dcd81', 15), 4294967296L) + self.assertEqual(int('100000000', 16), 4294967296L) + self.assertEqual(int('a7ffda91', 17), 4294967296L) + self.assertEqual(int('704he7g4', 18), 4294967296L) + self.assertEqual(int('4f5aff66', 19), 4294967296L) + self.assertEqual(int('3723ai4g', 20), 4294967296L) + self.assertEqual(int('281d55i4', 21), 4294967296L) + self.assertEqual(int('1fj8b184', 22), 4294967296L) + self.assertEqual(int('1606k7ic', 23), 4294967296L) + self.assertEqual(int('mb994ag', 24), 4294967296L) + self.assertEqual(int('hek2mgl', 25), 4294967296L) + self.assertEqual(int('dnchbnm', 26), 4294967296L) + self.assertEqual(int('b28jpdm', 27), 4294967296L) + self.assertEqual(int('8pfgih4', 28), 4294967296L) + self.assertEqual(int('76beigg', 29), 4294967296L) + self.assertEqual(int('5qmcpqg', 30), 4294967296L) + self.assertEqual(int('4q0jto4', 31), 4294967296L) + self.assertEqual(int('4000000', 32), 4294967296L) + self.assertEqual(int('3aokq94', 33), 4294967296L) + self.assertEqual(int('2qhxjli', 34), 4294967296L) + self.assertEqual(int('2br45qb', 35), 4294967296L) + self.assertEqual(int('1z141z4', 36), 4294967296L) + + # SF bug 1334662: int(string, base) wrong answers + # 
Checks for proper evaluation of 2**32 + 1 + self.assertEqual(int('100000000000000000000000000000001', 2), 4294967297L) + self.assertEqual(int('102002022201221111212', 3), 4294967297L) + self.assertEqual(int('10000000000000001', 4), 4294967297L) + self.assertEqual(int('32244002423142', 5), 4294967297L) + self.assertEqual(int('1550104015505', 6), 4294967297L) + self.assertEqual(int('211301422355', 7), 4294967297L) + self.assertEqual(int('40000000001', 8), 4294967297L) + self.assertEqual(int('12068657455', 9), 4294967297L) + self.assertEqual(int('4294967297', 10), 4294967297L) + self.assertEqual(int('1904440555', 11), 4294967297L) + self.assertEqual(int('9ba461595', 12), 4294967297L) + self.assertEqual(int('535a7988a', 13), 4294967297L) + self.assertEqual(int('2ca5b7465', 14), 4294967297L) + self.assertEqual(int('1a20dcd82', 15), 4294967297L) + self.assertEqual(int('100000001', 16), 4294967297L) + self.assertEqual(int('a7ffda92', 17), 4294967297L) + self.assertEqual(int('704he7g5', 18), 4294967297L) + self.assertEqual(int('4f5aff67', 19), 4294967297L) + self.assertEqual(int('3723ai4h', 20), 4294967297L) + self.assertEqual(int('281d55i5', 21), 4294967297L) + self.assertEqual(int('1fj8b185', 22), 4294967297L) + self.assertEqual(int('1606k7id', 23), 4294967297L) + self.assertEqual(int('mb994ah', 24), 4294967297L) + self.assertEqual(int('hek2mgm', 25), 4294967297L) + self.assertEqual(int('dnchbnn', 26), 4294967297L) + self.assertEqual(int('b28jpdn', 27), 4294967297L) + self.assertEqual(int('8pfgih5', 28), 4294967297L) + self.assertEqual(int('76beigh', 29), 4294967297L) + self.assertEqual(int('5qmcpqh', 30), 4294967297L) + self.assertEqual(int('4q0jto5', 31), 4294967297L) + self.assertEqual(int('4000001', 32), 4294967297L) + self.assertEqual(int('3aokq95', 33), 4294967297L) + self.assertEqual(int('2qhxjlj', 34), 4294967297L) + self.assertEqual(int('2br45qc', 35), 4294967297L) + self.assertEqual(int('1z141z5', 36), 4294967297L) + def test_intconversion(self): # Test __int__() class Foo0: @@ -886,6 +952,81 @@ class BuiltinTest(unittest.TestCase): self.assertRaises(ValueError, long, '53', 40) self.assertRaises(TypeError, long, 1, 12) + self.assertEqual(long('100000000000000000000000000000000', 2), + 4294967296) + self.assertEqual(long('102002022201221111211', 3), 4294967296) + self.assertEqual(long('10000000000000000', 4), 4294967296) + self.assertEqual(long('32244002423141', 5), 4294967296) + self.assertEqual(long('1550104015504', 6), 4294967296) + self.assertEqual(long('211301422354', 7), 4294967296) + self.assertEqual(long('40000000000', 8), 4294967296) + self.assertEqual(long('12068657454', 9), 4294967296) + self.assertEqual(long('4294967296', 10), 4294967296) + self.assertEqual(long('1904440554', 11), 4294967296) + self.assertEqual(long('9ba461594', 12), 4294967296) + self.assertEqual(long('535a79889', 13), 4294967296) + self.assertEqual(long('2ca5b7464', 14), 4294967296) + self.assertEqual(long('1a20dcd81', 15), 4294967296) + self.assertEqual(long('100000000', 16), 4294967296) + self.assertEqual(long('a7ffda91', 17), 4294967296) + self.assertEqual(long('704he7g4', 18), 4294967296) + self.assertEqual(long('4f5aff66', 19), 4294967296) + self.assertEqual(long('3723ai4g', 20), 4294967296) + self.assertEqual(long('281d55i4', 21), 4294967296) + self.assertEqual(long('1fj8b184', 22), 4294967296) + self.assertEqual(long('1606k7ic', 23), 4294967296) + self.assertEqual(long('mb994ag', 24), 4294967296) + self.assertEqual(long('hek2mgl', 25), 4294967296) + self.assertEqual(long('dnchbnm', 26), 4294967296) 
+ self.assertEqual(long('b28jpdm', 27), 4294967296) + self.assertEqual(long('8pfgih4', 28), 4294967296) + self.assertEqual(long('76beigg', 29), 4294967296) + self.assertEqual(long('5qmcpqg', 30), 4294967296) + self.assertEqual(long('4q0jto4', 31), 4294967296) + self.assertEqual(long('4000000', 32), 4294967296) + self.assertEqual(long('3aokq94', 33), 4294967296) + self.assertEqual(long('2qhxjli', 34), 4294967296) + self.assertEqual(long('2br45qb', 35), 4294967296) + self.assertEqual(long('1z141z4', 36), 4294967296) + + self.assertEqual(long('100000000000000000000000000000001', 2), + 4294967297) + self.assertEqual(long('102002022201221111212', 3), 4294967297) + self.assertEqual(long('10000000000000001', 4), 4294967297) + self.assertEqual(long('32244002423142', 5), 4294967297) + self.assertEqual(long('1550104015505', 6), 4294967297) + self.assertEqual(long('211301422355', 7), 4294967297) + self.assertEqual(long('40000000001', 8), 4294967297) + self.assertEqual(long('12068657455', 9), 4294967297) + self.assertEqual(long('4294967297', 10), 4294967297) + self.assertEqual(long('1904440555', 11), 4294967297) + self.assertEqual(long('9ba461595', 12), 4294967297) + self.assertEqual(long('535a7988a', 13), 4294967297) + self.assertEqual(long('2ca5b7465', 14), 4294967297) + self.assertEqual(long('1a20dcd82', 15), 4294967297) + self.assertEqual(long('100000001', 16), 4294967297) + self.assertEqual(long('a7ffda92', 17), 4294967297) + self.assertEqual(long('704he7g5', 18), 4294967297) + self.assertEqual(long('4f5aff67', 19), 4294967297) + self.assertEqual(long('3723ai4h', 20), 4294967297) + self.assertEqual(long('281d55i5', 21), 4294967297) + self.assertEqual(long('1fj8b185', 22), 4294967297) + self.assertEqual(long('1606k7id', 23), 4294967297) + self.assertEqual(long('mb994ah', 24), 4294967297) + self.assertEqual(long('hek2mgm', 25), 4294967297) + self.assertEqual(long('dnchbnn', 26), 4294967297) + self.assertEqual(long('b28jpdn', 27), 4294967297) + self.assertEqual(long('8pfgih5', 28), 4294967297) + self.assertEqual(long('76beigh', 29), 4294967297) + self.assertEqual(long('5qmcpqh', 30), 4294967297) + self.assertEqual(long('4q0jto5', 31), 4294967297) + self.assertEqual(long('4000001', 32), 4294967297) + self.assertEqual(long('3aokq95', 33), 4294967297) + self.assertEqual(long('2qhxjlj', 34), 4294967297) + self.assertEqual(long('2br45qc', 35), 4294967297) + self.assertEqual(long('1z141z5', 36), 4294967297) + + def test_longconversion(self): # Test __long__() class Foo0: diff --git a/Lib/test/test_cmd_line.py b/Lib/test/test_cmd_line.py index 018bec6..ec860d1 100644 --- a/Lib/test/test_cmd_line.py +++ b/Lib/test/test_cmd_line.py @@ -15,8 +15,11 @@ class CmdLineTest(unittest.TestCase): popen2._cleanup() return data - def exit_code(self, cmd_line): - return subprocess.call([sys.executable, cmd_line], stderr=subprocess.PIPE) + def exit_code(self, *args): + cmd_line = [sys.executable] + cmd_line.extend(args) + return subprocess.call(cmd_line, stdout=subprocess.PIPE, + stderr=subprocess.PIPE) def test_directories(self): self.assertNotEqual(self.exit_code('.'), 0) @@ -50,6 +53,38 @@ class CmdLineTest(unittest.TestCase): version = 'Python %d.%d' % sys.version_info[:2] self.assertTrue(self.start_python('-V').startswith(version)) + def test_run_module(self): + # Test expected operation of the '-m' switch + # Switch needs an argument + self.assertNotEqual(self.exit_code('-m'), 0) + # Check we get an error for a nonexistent module + self.assertNotEqual( + self.exit_code('-m', 'fnord43520xyz'), + 0) + # Check the 
runpy module also gives an error for + # a nonexistent module + self.assertNotEqual( + self.exit_code('-m', 'runpy', 'fnord43520xyz'), + 0) + # All good if module is located and run successfully + self.assertEqual( + self.exit_code('-m', 'timeit', '-n', '1'), + 0) + + def test_run_code(self): + # Test expected operation of the '-c' switch + # Switch needs an argument + self.assertNotEqual(self.exit_code('-c'), 0) + # Check we get an error for an uncaught exception + self.assertNotEqual( + self.exit_code('-c', 'raise Exception'), + 0) + # All good if execution is successful + self.assertEqual( + self.exit_code('-c', 'pass'), + 0) + + def test_main(): test.test_support.run_unittest(CmdLineTest) diff --git a/Lib/test/test_codeccallbacks.py b/Lib/test/test_codeccallbacks.py index c6e56c9..159c86d 100644 --- a/Lib/test/test_codeccallbacks.py +++ b/Lib/test/test_codeccallbacks.py @@ -18,30 +18,12 @@ class PosReturn: self.pos = len(exc.object) return (u"", oldpos) -# A UnicodeEncodeError object without a start attribute -class NoStartUnicodeEncodeError(UnicodeEncodeError): - def __init__(self): - UnicodeEncodeError.__init__(self, "ascii", u"", 0, 1, "bad") - del self.start - # A UnicodeEncodeError object with a bad start attribute class BadStartUnicodeEncodeError(UnicodeEncodeError): def __init__(self): UnicodeEncodeError.__init__(self, "ascii", u"", 0, 1, "bad") self.start = [] -# A UnicodeEncodeError object without an end attribute -class NoEndUnicodeEncodeError(UnicodeEncodeError): - def __init__(self): - UnicodeEncodeError.__init__(self, "ascii", u"", 0, 1, "bad") - del self.end - -# A UnicodeEncodeError object without an object attribute -class NoObjectUnicodeEncodeError(UnicodeEncodeError): - def __init__(self): - UnicodeEncodeError.__init__(self, "ascii", u"", 0, 1, "bad") - del self.object - # A UnicodeEncodeError object with a bad object attribute class BadObjectUnicodeEncodeError(UnicodeEncodeError): def __init__(self): @@ -478,55 +460,15 @@ class CodecCallbackTest(unittest.TestCase): UnicodeError("ouch") ) self.assertRaises( - AttributeError, - codecs.replace_errors, - NoStartUnicodeEncodeError() - ) - self.assertRaises( - TypeError, - codecs.replace_errors, - BadStartUnicodeEncodeError() - ) - self.assertRaises( - AttributeError, - codecs.replace_errors, - NoEndUnicodeEncodeError() - ) - self.assertRaises( - AttributeError, - codecs.replace_errors, - NoObjectUnicodeEncodeError() - ) - self.assertRaises( TypeError, codecs.replace_errors, BadObjectUnicodeEncodeError() ) self.assertRaises( - AttributeError, - codecs.replace_errors, - NoEndUnicodeDecodeError() - ) - self.assertRaises( TypeError, codecs.replace_errors, BadObjectUnicodeDecodeError() ) - self.assertRaises( - AttributeError, - codecs.replace_errors, - NoStartUnicodeTranslateError() - ) - self.assertRaises( - AttributeError, - codecs.replace_errors, - NoEndUnicodeTranslateError() - ) - self.assertRaises( - AttributeError, - codecs.replace_errors, - NoObjectUnicodeTranslateError() - ) # With the correct exception, "replace" returns an "?" 
or u"\ufffd" replacement self.assertEquals( codecs.replace_errors(UnicodeEncodeError("ascii", u"\u3042", 0, 1, "ouch")), @@ -565,21 +507,6 @@ class CodecCallbackTest(unittest.TestCase): codecs.xmlcharrefreplace_errors, UnicodeTranslateError(u"\u3042", 0, 1, "ouch") ) - self.assertRaises( - AttributeError, - codecs.xmlcharrefreplace_errors, - NoStartUnicodeEncodeError() - ) - self.assertRaises( - AttributeError, - codecs.xmlcharrefreplace_errors, - NoEndUnicodeEncodeError() - ) - self.assertRaises( - AttributeError, - codecs.xmlcharrefreplace_errors, - NoObjectUnicodeEncodeError() - ) # Use the correct exception cs = (0, 1, 9, 10, 99, 100, 999, 1000, 9999, 10000, 0x3042) s = "".join(unichr(c) for c in cs) diff --git a/Lib/test/test_codecencodings_cn.py b/Lib/test/test_codecencodings_cn.py index 0638f4f..1bf8583 100644 --- a/Lib/test/test_codecencodings_cn.py +++ b/Lib/test/test_codecencodings_cn.py @@ -3,7 +3,6 @@ # test_codecencodings_cn.py # Codec encoding tests for PRC encodings. # -# $CJKCodecs: test_codecencodings_cn.py,v 1.2 2004/06/19 06:09:55 perky Exp $ from test import test_support from test import test_multibytecodec_support diff --git a/Lib/test/test_codecencodings_hk.py b/Lib/test/test_codecencodings_hk.py index e7fad90..1cd020f 100644 --- a/Lib/test/test_codecencodings_hk.py +++ b/Lib/test/test_codecencodings_hk.py @@ -3,7 +3,6 @@ # test_codecencodings_hk.py # Codec encoding tests for HongKong encodings. # -# $CJKCodecs: test_codecencodings_hk.py,v 1.1 2004/07/10 17:35:20 perky Exp $ from test import test_support from test import test_multibytecodec_support diff --git a/Lib/test/test_codecencodings_jp.py b/Lib/test/test_codecencodings_jp.py index 483b7db..558598a 100644 --- a/Lib/test/test_codecencodings_jp.py +++ b/Lib/test/test_codecencodings_jp.py @@ -3,7 +3,6 @@ # test_codecencodings_jp.py # Codec encoding tests for Japanese encodings. # -# $CJKCodecs: test_codecencodings_jp.py,v 1.3 2004/06/19 06:09:55 perky Exp $ from test import test_support from test import test_multibytecodec_support diff --git a/Lib/test/test_codecencodings_kr.py b/Lib/test/test_codecencodings_kr.py index 489c9f1..8139f76 100644 --- a/Lib/test/test_codecencodings_kr.py +++ b/Lib/test/test_codecencodings_kr.py @@ -3,7 +3,6 @@ # test_codecencodings_kr.py # Codec encoding tests for ROK encodings. # -# $CJKCodecs: test_codecencodings_kr.py,v 1.2 2004/06/19 06:09:55 perky Exp $ from test import test_support from test import test_multibytecodec_support diff --git a/Lib/test/test_codecencodings_tw.py b/Lib/test/test_codecencodings_tw.py index fb8a4d0..7c59478 100644 --- a/Lib/test/test_codecencodings_tw.py +++ b/Lib/test/test_codecencodings_tw.py @@ -3,7 +3,6 @@ # test_codecencodings_tw.py # Codec encoding tests for ROC encodings. 
# -# $CJKCodecs: test_codecencodings_tw.py,v 1.2 2004/06/19 06:09:55 perky Exp $ from test import test_support from test import test_multibytecodec_support diff --git a/Lib/test/test_codecmaps_cn.py b/Lib/test/test_codecmaps_cn.py index 25ecc02..8cbee76 100644 --- a/Lib/test/test_codecmaps_cn.py +++ b/Lib/test/test_codecmaps_cn.py @@ -3,7 +3,6 @@ # test_codecmaps_cn.py # Codec mapping tests for PRC encodings # -# $CJKCodecs: test_codecmaps_cn.py,v 1.3 2004/06/19 06:09:55 perky Exp $ from test import test_support from test import test_multibytecodec_support diff --git a/Lib/test/test_codecmaps_hk.py b/Lib/test/test_codecmaps_hk.py index 2335c51..e7f7b96 100644 --- a/Lib/test/test_codecmaps_hk.py +++ b/Lib/test/test_codecmaps_hk.py @@ -3,7 +3,6 @@ # test_codecmaps_hk.py # Codec mapping tests for HongKong encodings # -# $CJKCodecs: test_codecmaps_hk.py,v 1.1 2004/07/10 17:35:20 perky Exp $ from test import test_support from test import test_multibytecodec_support diff --git a/Lib/test/test_codecmaps_jp.py b/Lib/test/test_codecmaps_jp.py index e75a5a8..08052d4 100644 --- a/Lib/test/test_codecmaps_jp.py +++ b/Lib/test/test_codecmaps_jp.py @@ -3,7 +3,6 @@ # test_codecmaps_jp.py # Codec mapping tests for Japanese encodings # -# $CJKCodecs: test_codecmaps_jp.py,v 1.3 2004/06/19 06:09:55 perky Exp $ from test import test_support from test import test_multibytecodec_support diff --git a/Lib/test/test_codecmaps_kr.py b/Lib/test/test_codecmaps_kr.py index db65c01..7484a66 100644 --- a/Lib/test/test_codecmaps_kr.py +++ b/Lib/test/test_codecmaps_kr.py @@ -3,7 +3,6 @@ # test_codecmaps_kr.py # Codec mapping tests for ROK encodings # -# $CJKCodecs: test_codecmaps_kr.py,v 1.3 2004/06/19 06:09:55 perky Exp $ from test import test_support from test import test_multibytecodec_support diff --git a/Lib/test/test_codecmaps_tw.py b/Lib/test/test_codecmaps_tw.py index 2d469b0..0b195f4 100644 --- a/Lib/test/test_codecmaps_tw.py +++ b/Lib/test/test_codecmaps_tw.py @@ -3,7 +3,6 @@ # test_codecmaps_tw.py # Codec mapping tests for ROC encodings # -# $CJKCodecs: test_codecmaps_tw.py,v 1.3 2004/06/19 06:09:55 perky Exp $ from test import test_support from test import test_multibytecodec_support diff --git a/Lib/test/test_compiler.py b/Lib/test/test_compiler.py index a59d6aa..48f1643 100644 --- a/Lib/test/test_compiler.py +++ b/Lib/test/test_compiler.py @@ -26,6 +26,7 @@ class CompilerTest(unittest.TestCase): next_time = time.time() + _PRINT_WORKING_MSG_INTERVAL print >>sys.__stdout__, \ ' testCompileLibrary still working, be patient...' + sys.__stdout__.flush() if not basename.endswith(".py"): continue @@ -55,6 +56,9 @@ class CompilerTest(unittest.TestCase): def testYieldExpr(self): compiler.compile("def g(): yield\n\n", "", "exec") + def testDefaultArgs(self): + self.assertRaises(SyntaxError, compiler.parse, "def foo(a=1, b): pass") + def testLineNo(self): # Test that all nodes except Module have a correct lineno attribute. 
filename = __file__ diff --git a/Lib/test/test_contextlib.py b/Lib/test/test_contextlib.py index 97470c7..2cf39ae 100644 --- a/Lib/test/test_contextlib.py +++ b/Lib/test/test_contextlib.py @@ -51,7 +51,7 @@ class ContextManagerTestCase(unittest.TestCase): @contextmanager def whee(): yield - ctx = whee().__context__() + ctx = whee() ctx.__enter__() # Calling __exit__ should not result in an exception self.failIf(ctx.__exit__(TypeError, TypeError("foo"), None)) @@ -63,7 +63,7 @@ class ContextManagerTestCase(unittest.TestCase): yield except: yield - ctx = whoo().__context__() + ctx = whoo() ctx.__enter__() self.assertRaises( RuntimeError, ctx.__exit__, TypeError, TypeError("foo"), None @@ -146,6 +146,29 @@ class NestedTestCase(unittest.TestCase): else: self.fail("Didn't raise ZeroDivisionError") + def test_nested_right_exception(self): + state = [] + @contextmanager + def a(): + yield 1 + class b(object): + def __enter__(self): + return 2 + def __exit__(self, *exc_info): + try: + raise Exception() + except: + pass + try: + with nested(a(), b()) as (x, y): + 1/0 + except ZeroDivisionError: + self.assertEqual((x, y), (1, 2)) + except Exception: + self.fail("Reraised wrong exception") + else: + self.fail("Didn't raise ZeroDivisionError") + def test_nested_b_swallows(self): @contextmanager def a(): @@ -316,12 +339,12 @@ class DecimalContextTestCase(unittest.TestCase): orig_context = ctx.copy() try: ctx.prec = save_prec = decimal.ExtendedContext.prec + 5 - with decimal.ExtendedContext: + with decimal.ExtendedContext.get_manager(): self.assertEqual(decimal.getcontext().prec, decimal.ExtendedContext.prec) self.assertEqual(decimal.getcontext().prec, save_prec) try: - with decimal.ExtendedContext: + with decimal.ExtendedContext.get_manager(): self.assertEqual(decimal.getcontext().prec, decimal.ExtendedContext.prec) 1/0 diff --git a/Lib/test/test_cookielib.py b/Lib/test/test_cookielib.py index 49e7d47..991506c 100644 --- a/Lib/test/test_cookielib.py +++ b/Lib/test/test_cookielib.py @@ -695,6 +695,22 @@ class CookieTests(TestCase): 'foo=bar; domain=friendly.org; Version="1"') self.assertEquals(len(c), 0) + def test_strict_domain(self): + # Cookies whose domain is a country-code tld like .co.uk should + # not be set if CookiePolicy.strict_domain is true. + from cookielib import CookieJar, DefaultCookiePolicy + + cp = DefaultCookiePolicy(strict_domain=True) + cj = CookieJar(policy=cp) + interact_netscape(cj, "http://example.co.uk/", 'no=problemo') + interact_netscape(cj, "http://example.co.uk/", + 'okey=dokey; Domain=.example.co.uk') + self.assertEquals(len(cj), 2) + for pseudo_tld in [".co.uk", ".org.za", ".tx.us", ".name.us"]: + interact_netscape(cj, "http://example.%s/" % pseudo_tld, + 'spam=eggs; Domain=.co.uk') + self.assertEquals(len(cj), 2) + def test_two_component_domain_ns(self): # Netscape: .www.bar.com, www.bar.com, .bar.com, bar.com, no domain # should all get accepted, as should .acme.com, acme.com and no domain diff --git a/Lib/test/test_datetime.py b/Lib/test/test_datetime.py index 2528b4a..203bea1 100644 --- a/Lib/test/test_datetime.py +++ b/Lib/test/test_datetime.py @@ -1400,6 +1400,12 @@ class TestDateTime(TestDate): got = self.theclass.utcfromtimestamp(ts) self.verify_field_equality(expected, got) + def test_microsecond_rounding(self): + # Test whether fromtimestamp "rounds up" floats that are less + # than one microsecond smaller than an integer. 
+ self.assertEquals(self.theclass.fromtimestamp(0.9999999), + self.theclass.fromtimestamp(1)) + def test_insane_fromtimestamp(self): # It's possible that some platform maps time_t to double, # and that this test will fail there. This test should diff --git a/Lib/test/test_doctest.py b/Lib/test/test_doctest.py index b17607d..443c962 100644 --- a/Lib/test/test_doctest.py +++ b/Lib/test/test_doctest.py @@ -1079,6 +1079,25 @@ output to match any substring in the actual output: ... # doctest: +NORMALIZE_WHITESPACE [0, 1, ..., 18, 19] +The SKIP flag causes an example to be skipped entirely. I.e., the +example is not run. It can be useful in contexts where doctest +examples serve as both documentation and test cases, and an example +should be included for documentation purposes, but should not be +checked (e.g., because its output is random, or depends on resources +which would be unavailable.) The SKIP flag can also be used for +'commenting out' broken examples. + + >>> import unavailable_resource # doctest: +SKIP + >>> unavailable_resource.do_something() # doctest: +SKIP + >>> unavailable_resource.blow_up() # doctest: +SKIP + Traceback (most recent call last): + ... + UncheckedBlowUpError: Nobody checks me. + + >>> import random + >>> print random.random() # doctest: +SKIP + 0.721216923889 + The REPORT_UDIFF flag causes failures that involve multi-line expected and actual outputs to be displayed using a unified diff: @@ -1281,6 +1300,26 @@ count as failures: ValueError: 2 (3, 5) +New option flags can also be registered, via register_optionflag(). Here +we reach into doctest's internals a bit. + + >>> unlikely = "UNLIKELY_OPTION_NAME" + >>> unlikely in doctest.OPTIONFLAGS_BY_NAME + False + >>> new_flag_value = doctest.register_optionflag(unlikely) + >>> unlikely in doctest.OPTIONFLAGS_BY_NAME + True + +Before 2.4.4/2.5, registering a name more than once erroneously created +more than one flag value. Here we verify that's fixed: + + >>> redundant_flag_value = doctest.register_optionflag(unlikely) + >>> redundant_flag_value == new_flag_value + True + +Clean up. + >>> del doctest.OPTIONFLAGS_BY_NAME[unlikely] + """ def option_directives(): r""" diff --git a/Lib/test/test_exceptions.py b/Lib/test/test_exceptions.py index 7946142..8f995f7 100644 --- a/Lib/test/test_exceptions.py +++ b/Lib/test/test_exceptions.py @@ -81,14 +81,6 @@ try: x = undefined_variable except NameError: pass r(OverflowError) -# XXX -# Obscure: in 2.2 and 2.3, this test relied on changing OverflowWarning -# into an error, in order to trigger OverflowError. In 2.4, OverflowWarning -# should no longer be generated, so the focus of the test shifts to showing -# that OverflowError *isn't* generated. OverflowWarning should be gone -# in Python 2.5, and then the filterwarnings() call, and this comment, -# should go away. -warnings.filterwarnings("error", "", OverflowWarning, __name__) x = 1 for dummy in range(128): x += x # this simply shouldn't blow up @@ -224,3 +216,88 @@ if not sys.platform.startswith('java'): test_capi3() unlink(TESTFN) + +# test that exception attributes are happy. 
+try: str(u'Hello \u00E1') +except Exception, e: sampleUnicodeEncodeError = e +try: unicode('\xff') +except Exception, e: sampleUnicodeDecodeError = e +exceptionList = [ + ( BaseException, (), { 'message' : '', 'args' : () }), + ( BaseException, (1, ), { 'message' : 1, 'args' : ( 1, ) }), + ( BaseException, ('foo', ), { 'message' : 'foo', 'args' : ( 'foo', ) }), + ( BaseException, ('foo', 1), { 'message' : '', 'args' : ( 'foo', 1 ) }), + ( SystemExit, ('foo',), { 'message' : 'foo', 'args' : ( 'foo', ), + 'code' : 'foo' }), + ( IOError, ('foo',), { 'message' : 'foo', 'args' : ( 'foo', ), }), + ( IOError, ('foo', 'bar'), { 'message' : '', + 'args' : ('foo', 'bar'), }), + ( IOError, ('foo', 'bar', 'baz'), + { 'message' : '', 'args' : ('foo', 'bar'), }), + ( EnvironmentError, ('errnoStr', 'strErrorStr', 'filenameStr'), + { 'message' : '', 'args' : ('errnoStr', 'strErrorStr'), + 'strerror' : 'strErrorStr', + 'errno' : 'errnoStr', 'filename' : 'filenameStr' }), + ( EnvironmentError, (1, 'strErrorStr', 'filenameStr'), + { 'message' : '', 'args' : (1, 'strErrorStr'), + 'strerror' : 'strErrorStr', 'errno' : 1, + 'filename' : 'filenameStr' }), + ( SyntaxError, ('msgStr',), + { 'message' : 'msgStr', 'args' : ('msgStr', ), + 'print_file_and_line' : None, 'msg' : 'msgStr', + 'filename' : None, 'lineno' : None, 'offset' : None, + 'text' : None }), + ( SyntaxError, ('msgStr', ('filenameStr', 'linenoStr', 'offsetStr', + 'textStr')), + { 'message' : '', 'args' : ('msgStr', ('filenameStr', + 'linenoStr', 'offsetStr', 'textStr' )), + 'print_file_and_line' : None, 'msg' : 'msgStr', + 'filename' : 'filenameStr', 'lineno' : 'linenoStr', + 'offset' : 'offsetStr', 'text' : 'textStr' }), + ( SyntaxError, ('msgStr', 'filenameStr', 'linenoStr', 'offsetStr', + 'textStr', 'print_file_and_lineStr'), + { 'message' : '', 'args' : ('msgStr', 'filenameStr', + 'linenoStr', 'offsetStr', 'textStr', + 'print_file_and_lineStr'), + 'print_file_and_line' : None, 'msg' : 'msgStr', + 'filename' : None, 'lineno' : None, 'offset' : None, + 'text' : None }), + ( UnicodeError, (), + { 'message' : '', 'args' : (), }), + ( sampleUnicodeEncodeError, + { 'message' : '', 'args' : ('ascii', u'Hello \xe1', 6, 7, + 'ordinal not in range(128)'), + 'encoding' : 'ascii', 'object' : u'Hello \xe1', + 'start' : 6, 'reason' : 'ordinal not in range(128)' }), + ( sampleUnicodeDecodeError, + { 'message' : '', 'args' : ('ascii', '\xff', 0, 1, + 'ordinal not in range(128)'), + 'encoding' : 'ascii', 'object' : '\xff', + 'start' : 0, 'reason' : 'ordinal not in range(128)' }), + ( UnicodeTranslateError, (u"\u3042", 0, 1, "ouch"), + { 'message' : '', 'args' : (u'\u3042', 0, 1, 'ouch'), + 'object' : u'\u3042', 'reason' : 'ouch', + 'start' : 0, 'end' : 1 }), + ] +try: + exceptionList.append( + ( WindowsError, (1, 'strErrorStr', 'filenameStr'), + { 'message' : '', 'args' : (1, 'strErrorStr'), + 'strerror' : 'strErrorStr', + 'errno' : 22, 'filename' : 'filenameStr', + 'winerror' : 1 })) +except NameError: pass + +for args in exceptionList: + expected = args[-1] + try: + if len(args) == 2: raise args[0] + else: raise apply(args[0], args[1]) + except BaseException, e: + for checkArgName in expected.keys(): + if repr(getattr(e, checkArgName)) != repr(expected[checkArgName]): + raise TestFailed('Checking exception arguments, exception ' + '"%s", attribute "%s" expected %s got %s.' 
% + ( repr(e), checkArgName, + repr(expected[checkArgName]), + repr(getattr(e, checkArgName)) )) diff --git a/Lib/test/test_file.py b/Lib/test/test_file.py index cfc1019..ca1c6ba 100644 --- a/Lib/test/test_file.py +++ b/Lib/test/test_file.py @@ -147,7 +147,7 @@ f.close() bad_mode = "qwerty" try: open(TESTFN, bad_mode) -except IOError, msg: +except ValueError, msg: if msg[0] != 0: s = str(msg) if s.find(TESTFN) != -1 or s.find(bad_mode) == -1: diff --git a/Lib/test/test_grp.py b/Lib/test/test_grp.py index 2c3ab29..08958ba 100755 --- a/Lib/test/test_grp.py +++ b/Lib/test/test_grp.py @@ -31,7 +31,10 @@ class GroupDatabaseTestCase(unittest.TestCase): self.assertEqual(e2.gr_gid, e.gr_gid) e2 = grp.getgrnam(e.gr_name) self.check_value(e2) - self.assertEqual(e2.gr_name, e.gr_name) + # There are instances where getgrall() returns group names in + # lowercase while getgrgid() returns proper casing. + # Discovered on Ubuntu 5.04 (custom). + self.assertEqual(e2.gr_name.lower(), e.gr_name.lower()) def test_errors(self): self.assertRaises(TypeError, grp.getgrgid) diff --git a/Lib/test/test_import.py b/Lib/test/test_import.py index a72b8bd..effba3c 100644 --- a/Lib/test/test_import.py +++ b/Lib/test/test_import.py @@ -205,3 +205,20 @@ def test_import_name_binding(): assert y is test.test_support, y.__name__ test_import_name_binding() + +def test_import_initless_directory_warning(): + import warnings + oldfilters = warnings.filters[:] + warnings.simplefilter('error', ImportWarning); + try: + # Just a random non-package directory we always expect to be + # somewhere in sys.path... + __import__("site-packages") + except ImportWarning: + pass + else: + raise AssertionError + finally: + warnings.filters = oldfilters + +test_import_initless_directory_warning() diff --git a/Lib/test/test_importhooks.py b/Lib/test/test_importhooks.py index 0693581..e8b4695 100644 --- a/Lib/test/test_importhooks.py +++ b/Lib/test/test_importhooks.py @@ -14,6 +14,7 @@ def get_file(): absimp = "import sub\n" relimp = "from . import sub\n" +deeprelimp = "from .... import sub\n" futimp = "from __future__ import absolute_import\n" reload_src = test_src+"""\ @@ -26,6 +27,7 @@ reload_co = compile(reload_src, "", "exec") test2_oldabs_co = compile(absimp + test_src, "", "exec") test2_newabs_co = compile(futimp + absimp + test_src, "", "exec") test2_newrel_co = compile(relimp + test_src, "", "exec") +test2_deeprel_co = compile(deeprelimp + test_src, "", "exec") test2_futrel_co = compile(futimp + relimp + test_src, "", "exec") test_path = "!!!_test_!!!" 
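
The test_contextlib hunks near the top of this patch drop the old __context__() indirection: the object returned by a @contextmanager-decorated generator is now the context manager itself, so the tests drive __enter__/__exit__ on it directly. A minimal sketch of both styles, assuming Python 2.5's contextlib; the tag() helper is invented for illustration.

    from __future__ import with_statement   # required on Python 2.5
    from contextlib import contextmanager

    @contextmanager
    def tag(name):
        # Code before the yield runs in __enter__(), code after it in __exit__().
        print "<%s>" % name
        yield name
        print "</%s>" % name

    # Normal use: the generator's return value is the context manager.
    with tag("b") as t:
        print "hello from", t

    # What the updated tests now do by hand (no __context__() step any more):
    ctx = tag("i")
    ctx.__enter__()
    ctx.__exit__(None, None, None)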
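
The doctest additions above document the new SKIP option flag. A small self-contained sketch of how it is typically used, again assuming Python 2.5's doctest; the fetch() function and its docstring are made up.

    import doctest

    def fetch():
        """Pretend to hit the network.

        >>> fetch()        # doctest: +SKIP
        'whatever the server said today'
        >>> 1 + 1          # still checked as usual
        2
        """
        return "unpredictable"

    if __name__ == "__main__":
        doctest.testmod()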
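
The test_exceptions table above pins down which attributes PEP 352-style exceptions carry in 2.5: args, message, and class-specific fields such as errno/strerror/filename. A quick sketch of the same behaviour for a three-argument IOError; the filename is hypothetical.

    try:
        raise IOError(2, 'No such file or directory', 'missing.txt')
    except IOError, e:
        print e.args       # (2, 'No such file or directory')
        print e.errno      # 2
        print e.strerror   # 'No such file or directory'
        print e.filename   # 'missing.txt'
        print e.message    # '' -- message is only set for 1-argument exceptions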
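
test_import_initless_directory_warning above relies on temporarily escalating the new ImportWarning to an error around a single import and then restoring the filter list. Roughly the same pattern as a small helper, sketched here for orientation; the helper name and the directory in the usage note are invented.

    import warnings

    def import_strictly(name):
        # Escalate ImportWarning (e.g. "Not importing directory ...: missing
        # __init__.py") to an error for one import, then restore the filters.
        saved = warnings.filters[:]
        warnings.simplefilter('error', ImportWarning)
        try:
            return __import__(name)
        finally:
            warnings.filters = saved

    # import_strictly('some_initless_directory') would raise ImportWarning here.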
@@ -46,10 +48,11 @@ class TestImporter: "hooktestmodule": (False, test_co), "hooktestpackage": (True, test_co), "hooktestpackage.sub": (True, test_co), - "hooktestpackage.sub.subber": (False, test_co), + "hooktestpackage.sub.subber": (True, test_co), "hooktestpackage.oldabs": (False, test2_oldabs_co), "hooktestpackage.newabs": (False, test2_newabs_co), "hooktestpackage.newrel": (False, test2_newrel_co), + "hooktestpackage.sub.subber.subest": (True, test2_deeprel_co), "hooktestpackage.futrel": (False, test2_futrel_co), "sub": (False, test_co), "reloadmodule": (False, test_co), @@ -203,6 +206,12 @@ class ImportHooksTestCase(ImportHooksBaseTestCase): self.assertEqual(hooktestpackage.newrel.sub, hooktestpackage.sub) + import hooktestpackage.sub.subber.subest as subest + self.assertEqual(subest.get_name(), + "hooktestpackage.sub.subber.subest") + self.assertEqual(subest.sub, + hooktestpackage.sub) + import hooktestpackage.futrel self.assertEqual(hooktestpackage.futrel.get_name(), "hooktestpackage.futrel") diff --git a/Lib/test/test_locale.py b/Lib/test/test_locale.py index 1523e77..9e264b9 100644 --- a/Lib/test/test_locale.py +++ b/Lib/test/test_locale.py @@ -20,14 +20,14 @@ for tloc in tlocs: else: raise ImportError, "test locale not supported (tried %s)"%(', '.join(tlocs)) -def testformat(formatstr, value, grouping = 0, output=None): +def testformat(formatstr, value, grouping = 0, output=None, func=locale.format): if verbose: if output: print "%s %% %s =? %s ..." %\ (repr(formatstr), repr(value), repr(output)), else: print "%s %% %s works? ..." % (repr(formatstr), repr(value)), - result = locale.format(formatstr, value, grouping = grouping) + result = func(formatstr, value, grouping = grouping) if output and result != output: if verbose: print 'no' @@ -49,6 +49,30 @@ try: testformat("%-10.f", 4200, grouping=1, output='4%s200 ' % sep) # Invoke getpreferredencoding to make sure it does not cause exceptions, locale.getpreferredencoding() + + # === Test format() with more complex formatting strings + # test if grouping is independent from other characters in formatting string + testformat("One million is %i", 1000000, grouping=1, + output='One million is 1%s000%s000' % (sep, sep), + func=locale.format_string) + testformat("One million is %i", 1000000, grouping=1, + output='One million is 1%s000%s000' % (sep, sep), + func=locale.format_string) + # test dots in formatting string + testformat(".%f.", 1000.0, output='.1000.000000.', func=locale.format_string) + # test floats + testformat("--> %10.2f", 1000.0, grouping=1, output='--> 1%s000.00' % sep, + func=locale.format_string) + # test asterisk formats + testformat("%10.*f", (2, 1000.0), grouping=0, output=' 1000.00', + func=locale.format_string) + testformat("%*.*f", (10, 2, 1000.0), grouping=1, output=' 1%s000.00' % sep, + func=locale.format_string) + # test more-in-one + testformat("int %i float %.2f str %s", (1000, 1000.0, 'str'), grouping=1, + output='int 1%s000 float 1%s000.00 str str' % (sep, sep), + func=locale.format_string) + finally: locale.setlocale(locale.LC_NUMERIC, oldlocale) diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py index b689dc8..73f8288 100644 --- a/Lib/test/test_logging.py +++ b/Lib/test/test_logging.py @@ -28,6 +28,7 @@ import select import os, sys, string, struct, types, cPickle, cStringIO import socket, tempfile, threading, time import logging, logging.handlers, logging.config +from test.test_support import run_with_locale BANNER = "-- %-10s %-6s ---------------------------------------------------\n" @@ 
-657,19 +658,11 @@ def test_main_inner(): pass rootLogger.removeHandler(hdlr) +# Set the locale to the platform-dependent default. I have no idea +# why the test does this, but in any case we save the current locale +# first and restore it at the end. +@run_with_locale('LC_ALL', '') def test_main(): - import locale - # Set the locale to the platform-dependent default. I have no idea - # why the test does this, but in any case we save the current locale - # first so we can restore it at the end. - try: - original_locale = locale.setlocale(locale.LC_ALL) - locale.setlocale(locale.LC_ALL, '') - except (ValueError, locale.Error): - # this happens on a Solaris box which only supports "C" locale - # or a Mac OS X box which supports very little locale stuff at all - original_locale = None - # Save and restore the original root logger level across the tests. # Otherwise, e.g., if any test using cookielib runs after test_logging, # cookielib's debug-level logger tries to log messages, leading to @@ -681,8 +674,6 @@ def test_main(): try: test_main_inner() finally: - if original_locale is not None: - locale.setlocale(locale.LC_ALL, original_locale) root_logger.setLevel(original_logging_level) if __name__ == "__main__": diff --git a/Lib/test/test_mailbox.py b/Lib/test/test_mailbox.py index 77d39a6..914a20c 100644 --- a/Lib/test/test_mailbox.py +++ b/Lib/test/test_mailbox.py @@ -1,15 +1,1576 @@ -import mailbox import os import time -import unittest +import stat +import socket +import email +import email.Message +import rfc822 +import re +import StringIO from test import test_support - -# cleanup earlier tests +import unittest +import mailbox +import glob try: - os.unlink(test_support.TESTFN) -except os.error: + import fcntl +except ImportError: pass + +class TestBase(unittest.TestCase): + + def _check_sample(self, msg): + # Inspect a mailbox.Message representation of the sample message + self.assert_(isinstance(msg, email.Message.Message)) + self.assert_(isinstance(msg, mailbox.Message)) + for key, value in _sample_headers.iteritems(): + self.assert_(value in msg.get_all(key)) + self.assert_(msg.is_multipart()) + self.assert_(len(msg.get_payload()) == len(_sample_payloads)) + for i, payload in enumerate(_sample_payloads): + part = msg.get_payload(i) + self.assert_(isinstance(part, email.Message.Message)) + self.assert_(not isinstance(part, mailbox.Message)) + self.assert_(part.get_payload() == payload) + + def _delete_recursively(self, target): + # Delete a file or delete a directory recursively + if os.path.isdir(target): + for path, dirs, files in os.walk(target, topdown=False): + for name in files: + os.remove(os.path.join(path, name)) + for name in dirs: + os.rmdir(os.path.join(path, name)) + os.rmdir(target) + elif os.path.exists(target): + os.remove(target) + + +class TestMailbox(TestBase): + + _factory = None # Overridden by subclasses to reuse tests + _template = 'From: foo\n\n%s' + + def setUp(self): + self._path = test_support.TESTFN + self._box = self._factory(self._path) + + def tearDown(self): + self._box.close() + self._delete_recursively(self._path) + + def test_add(self): + # Add copies of a sample message + keys = [] + keys.append(self._box.add(self._template % 0)) + self.assert_(len(self._box) == 1) + keys.append(self._box.add(mailbox.Message(_sample_message))) + self.assert_(len(self._box) == 2) + keys.append(self._box.add(email.message_from_string(_sample_message))) + self.assert_(len(self._box) == 3) + keys.append(self._box.add(StringIO.StringIO(_sample_message))) + 
self.assert_(len(self._box) == 4) + keys.append(self._box.add(_sample_message)) + self.assert_(len(self._box) == 5) + self.assert_(self._box.get_string(keys[0]) == self._template % 0) + for i in (1, 2, 3, 4): + self._check_sample(self._box[keys[i]]) + + def test_remove(self): + # Remove messages using remove() + self._test_remove_or_delitem(self._box.remove) + + def test_delitem(self): + # Remove messages using __delitem__() + self._test_remove_or_delitem(self._box.__delitem__) + + def _test_remove_or_delitem(self, method): + # (Used by test_remove() and test_delitem().) + key0 = self._box.add(self._template % 0) + key1 = self._box.add(self._template % 1) + self.assert_(len(self._box) == 2) + method(key0) + l = len(self._box) + self.assert_(l == 1, "actual l: %s" % l) + self.assertRaises(KeyError, lambda: self._box[key0]) + self.assertRaises(KeyError, lambda: method(key0)) + self.assert_(self._box.get_string(key1) == self._template % 1) + key2 = self._box.add(self._template % 2) + self.assert_(len(self._box) == 2) + method(key2) + l = len(self._box) + self.assert_(l == 1, "actual l: %s" % l) + self.assertRaises(KeyError, lambda: self._box[key2]) + self.assertRaises(KeyError, lambda: method(key2)) + self.assert_(self._box.get_string(key1) == self._template % 1) + method(key1) + self.assert_(len(self._box) == 0) + self.assertRaises(KeyError, lambda: self._box[key1]) + self.assertRaises(KeyError, lambda: method(key1)) + + def test_discard(self, repetitions=10): + # Discard messages + key0 = self._box.add(self._template % 0) + key1 = self._box.add(self._template % 1) + self.assert_(len(self._box) == 2) + self._box.discard(key0) + self.assert_(len(self._box) == 1) + self.assertRaises(KeyError, lambda: self._box[key0]) + self._box.discard(key0) + self.assert_(len(self._box) == 1) + self.assertRaises(KeyError, lambda: self._box[key0]) + + def test_get(self): + # Retrieve messages using get() + key0 = self._box.add(self._template % 0) + msg = self._box.get(key0) + self.assert_(msg['from'] == 'foo') + self.assert_(msg.get_payload() == '0') + self.assert_(self._box.get('foo') is None) + self.assert_(self._box.get('foo', False) is False) + self._box.close() + self._box = self._factory(self._path, factory=rfc822.Message) + key1 = self._box.add(self._template % 1) + msg = self._box.get(key1) + self.assert_(msg['from'] == 'foo') + self.assert_(msg.fp.read() == '1') + + def test_getitem(self): + # Retrieve message using __getitem__() + key0 = self._box.add(self._template % 0) + msg = self._box[key0] + self.assert_(msg['from'] == 'foo') + self.assert_(msg.get_payload() == '0') + self.assertRaises(KeyError, lambda: self._box['foo']) + self._box.discard(key0) + self.assertRaises(KeyError, lambda: self._box[key0]) + + def test_get_message(self): + # Get Message representations of messages + key0 = self._box.add(self._template % 0) + key1 = self._box.add(_sample_message) + msg0 = self._box.get_message(key0) + self.assert_(isinstance(msg0, mailbox.Message)) + self.assert_(msg0['from'] == 'foo') + self.assert_(msg0.get_payload() == '0') + self._check_sample(self._box.get_message(key1)) + + def test_get_string(self): + # Get string representations of messages + key0 = self._box.add(self._template % 0) + key1 = self._box.add(_sample_message) + self.assert_(self._box.get_string(key0) == self._template % 0) + self.assert_(self._box.get_string(key1) == _sample_message) + + def test_get_file(self): + # Get file representations of messages + key0 = self._box.add(self._template % 0) + key1 = 
self._box.add(_sample_message) + self.assert_(self._box.get_file(key0).read().replace(os.linesep, '\n') + == self._template % 0) + self.assert_(self._box.get_file(key1).read().replace(os.linesep, '\n') + == _sample_message) + + def test_iterkeys(self): + # Get keys using iterkeys() + self._check_iteration(self._box.iterkeys, do_keys=True, do_values=False) + + def test_keys(self): + # Get keys using keys() + self._check_iteration(self._box.keys, do_keys=True, do_values=False) + + def test_itervalues(self): + # Get values using itervalues() + self._check_iteration(self._box.itervalues, do_keys=False, + do_values=True) + + def test_iter(self): + # Get values using __iter__() + self._check_iteration(self._box.__iter__, do_keys=False, + do_values=True) + + def test_values(self): + # Get values using values() + self._check_iteration(self._box.values, do_keys=False, do_values=True) + + def test_iteritems(self): + # Get keys and values using iteritems() + self._check_iteration(self._box.iteritems, do_keys=True, + do_values=True) + + def test_items(self): + # Get keys and values using items() + self._check_iteration(self._box.items, do_keys=True, do_values=True) + + def _check_iteration(self, method, do_keys, do_values, repetitions=10): + for value in method(): + self.fail("Not empty") + keys, values = [], [] + for i in xrange(repetitions): + keys.append(self._box.add(self._template % i)) + values.append(self._template % i) + if do_keys and not do_values: + returned_keys = list(method()) + elif do_values and not do_keys: + returned_values = list(method()) + else: + returned_keys, returned_values = [], [] + for key, value in method(): + returned_keys.append(key) + returned_values.append(value) + if do_keys: + self.assert_(len(keys) == len(returned_keys)) + self.assert_(set(keys) == set(returned_keys)) + if do_values: + count = 0 + for value in returned_values: + self.assert_(value['from'] == 'foo') + self.assert_(int(value.get_payload()) < repetitions) + count += 1 + self.assert_(len(values) == count) + + def test_has_key(self): + # Check existence of keys using has_key() + self._test_has_key_or_contains(self._box.has_key) + + def test_contains(self): + # Check existence of keys using __contains__() + self._test_has_key_or_contains(self._box.__contains__) + + def _test_has_key_or_contains(self, method): + # (Used by test_has_key() and test_contains().) 
+ self.assert_(not method('foo')) + key0 = self._box.add(self._template % 0) + self.assert_(method(key0)) + self.assert_(not method('foo')) + key1 = self._box.add(self._template % 1) + self.assert_(method(key1)) + self.assert_(method(key0)) + self.assert_(not method('foo')) + self._box.remove(key0) + self.assert_(not method(key0)) + self.assert_(method(key1)) + self.assert_(not method('foo')) + self._box.remove(key1) + self.assert_(not method(key1)) + self.assert_(not method(key0)) + self.assert_(not method('foo')) + + def test_len(self, repetitions=10): + # Get message count + keys = [] + for i in xrange(repetitions): + self.assert_(len(self._box) == i) + keys.append(self._box.add(self._template % i)) + self.assert_(len(self._box) == i + 1) + for i in xrange(repetitions): + self.assert_(len(self._box) == repetitions - i) + self._box.remove(keys[i]) + self.assert_(len(self._box) == repetitions - i - 1) + + def test_set_item(self): + # Modify messages using __setitem__() + key0 = self._box.add(self._template % 'original 0') + self.assert_(self._box.get_string(key0) == \ + self._template % 'original 0') + key1 = self._box.add(self._template % 'original 1') + self.assert_(self._box.get_string(key1) == \ + self._template % 'original 1') + self._box[key0] = self._template % 'changed 0' + self.assert_(self._box.get_string(key0) == \ + self._template % 'changed 0') + self._box[key1] = self._template % 'changed 1' + self.assert_(self._box.get_string(key1) == \ + self._template % 'changed 1') + self._box[key0] = _sample_message + self._check_sample(self._box[key0]) + self._box[key1] = self._box[key0] + self._check_sample(self._box[key1]) + self._box[key0] = self._template % 'original 0' + self.assert_(self._box.get_string(key0) == + self._template % 'original 0') + self._check_sample(self._box[key1]) + self.assertRaises(KeyError, + lambda: self._box.__setitem__('foo', 'bar')) + self.assertRaises(KeyError, lambda: self._box['foo']) + self.assert_(len(self._box) == 2) + + def test_clear(self, iterations=10): + # Remove all messages using clear() + keys = [] + for i in xrange(iterations): + self._box.add(self._template % i) + for i, key in enumerate(keys): + self.assert_(self._box.get_string(key) == self._template % i) + self._box.clear() + self.assert_(len(self._box) == 0) + for i, key in enumerate(keys): + self.assertRaises(KeyError, lambda: self._box.get_string(key)) + + def test_pop(self): + # Get and remove a message using pop() + key0 = self._box.add(self._template % 0) + self.assert_(key0 in self._box) + key1 = self._box.add(self._template % 1) + self.assert_(key1 in self._box) + self.assert_(self._box.pop(key0).get_payload() == '0') + self.assert_(key0 not in self._box) + self.assert_(key1 in self._box) + key2 = self._box.add(self._template % 2) + self.assert_(key2 in self._box) + self.assert_(self._box.pop(key2).get_payload() == '2') + self.assert_(key2 not in self._box) + self.assert_(key1 in self._box) + self.assert_(self._box.pop(key1).get_payload() == '1') + self.assert_(key1 not in self._box) + self.assert_(len(self._box) == 0) + + def test_popitem(self, iterations=10): + # Get and remove an arbitrary (key, message) using popitem() + keys = [] + for i in xrange(10): + keys.append(self._box.add(self._template % i)) + seen = [] + for i in xrange(10): + key, msg = self._box.popitem() + self.assert_(key in keys) + self.assert_(key not in seen) + seen.append(key) + self.assert_(int(msg.get_payload()) == keys.index(key)) + self.assert_(len(self._box) == 0) + for key in keys: + 
self.assertRaises(KeyError, lambda: self._box[key]) + + def test_update(self): + # Modify multiple messages using update() + key0 = self._box.add(self._template % 'original 0') + key1 = self._box.add(self._template % 'original 1') + key2 = self._box.add(self._template % 'original 2') + self._box.update({key0: self._template % 'changed 0', + key2: _sample_message}) + self.assert_(len(self._box) == 3) + self.assert_(self._box.get_string(key0) == + self._template % 'changed 0') + self.assert_(self._box.get_string(key1) == + self._template % 'original 1') + self._check_sample(self._box[key2]) + self._box.update([(key2, self._template % 'changed 2'), + (key1, self._template % 'changed 1'), + (key0, self._template % 'original 0')]) + self.assert_(len(self._box) == 3) + self.assert_(self._box.get_string(key0) == + self._template % 'original 0') + self.assert_(self._box.get_string(key1) == + self._template % 'changed 1') + self.assert_(self._box.get_string(key2) == + self._template % 'changed 2') + self.assertRaises(KeyError, + lambda: self._box.update({'foo': 'bar', + key0: self._template % "changed 0"})) + self.assert_(len(self._box) == 3) + self.assert_(self._box.get_string(key0) == + self._template % "changed 0") + self.assert_(self._box.get_string(key1) == + self._template % "changed 1") + self.assert_(self._box.get_string(key2) == + self._template % "changed 2") + + def test_flush(self): + # Write changes to disk + self._test_flush_or_close(self._box.flush) + + def test_lock_unlock(self): + # Lock and unlock the mailbox + self.assert_(not os.path.exists(self._get_lock_path())) + self._box.lock() + self.assert_(os.path.exists(self._get_lock_path())) + self._box.unlock() + self.assert_(not os.path.exists(self._get_lock_path())) + + def test_close(self): + # Close mailbox and flush changes to disk + self._test_flush_or_close(self._box.close) + + def _test_flush_or_close(self, method): + contents = [self._template % i for i in xrange(3)] + self._box.add(contents[0]) + self._box.add(contents[1]) + self._box.add(contents[2]) + method() + self._box = self._factory(self._path) + keys = self._box.keys() + self.assert_(len(keys) == 3) + for key in keys: + self.assert_(self._box.get_string(key) in contents) + + def test_dump_message(self): + # Write message representations to disk + for input in (email.message_from_string(_sample_message), + _sample_message, StringIO.StringIO(_sample_message)): + output = StringIO.StringIO() + self._box._dump_message(input, output) + self.assert_(output.getvalue() == + _sample_message.replace('\n', os.linesep)) + output = StringIO.StringIO() + self.assertRaises(TypeError, + lambda: self._box._dump_message(None, output)) + + def _get_lock_path(self): + # Return the path of the dot lock file. May be overridden. + return self._path + '.lock' + + +class TestMailboxSuperclass(TestBase): + + def test_notimplemented(self): + # Test that all Mailbox methods raise NotImplementedException. 
+ box = mailbox.Mailbox('path') + self.assertRaises(NotImplementedError, lambda: box.add('')) + self.assertRaises(NotImplementedError, lambda: box.remove('')) + self.assertRaises(NotImplementedError, lambda: box.__delitem__('')) + self.assertRaises(NotImplementedError, lambda: box.discard('')) + self.assertRaises(NotImplementedError, lambda: box.__setitem__('', '')) + self.assertRaises(NotImplementedError, lambda: box.iterkeys()) + self.assertRaises(NotImplementedError, lambda: box.keys()) + self.assertRaises(NotImplementedError, lambda: box.itervalues().next()) + self.assertRaises(NotImplementedError, lambda: box.__iter__().next()) + self.assertRaises(NotImplementedError, lambda: box.values()) + self.assertRaises(NotImplementedError, lambda: box.iteritems().next()) + self.assertRaises(NotImplementedError, lambda: box.items()) + self.assertRaises(NotImplementedError, lambda: box.get('')) + self.assertRaises(NotImplementedError, lambda: box.__getitem__('')) + self.assertRaises(NotImplementedError, lambda: box.get_message('')) + self.assertRaises(NotImplementedError, lambda: box.get_string('')) + self.assertRaises(NotImplementedError, lambda: box.get_file('')) + self.assertRaises(NotImplementedError, lambda: box.has_key('')) + self.assertRaises(NotImplementedError, lambda: box.__contains__('')) + self.assertRaises(NotImplementedError, lambda: box.__len__()) + self.assertRaises(NotImplementedError, lambda: box.clear()) + self.assertRaises(NotImplementedError, lambda: box.pop('')) + self.assertRaises(NotImplementedError, lambda: box.popitem()) + self.assertRaises(NotImplementedError, lambda: box.update((('', ''),))) + self.assertRaises(NotImplementedError, lambda: box.flush()) + self.assertRaises(NotImplementedError, lambda: box.lock()) + self.assertRaises(NotImplementedError, lambda: box.unlock()) + self.assertRaises(NotImplementedError, lambda: box.close()) + + +class TestMaildir(TestMailbox): + + _factory = lambda self, path, factory=None: mailbox.Maildir(path, factory) + + def setUp(self): + TestMailbox.setUp(self) + if os.name == 'nt': + self._box.colon = '!' 
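
Further up, the test_locale hunk adds coverage for locale.format_string(), which applies grouping across a whole format string rather than to a single %-directive as locale.format() does. A short sketch, assuming a locale with thousands grouping is available; the exact output naturally varies by locale.

    import locale

    locale.setlocale(locale.LC_ALL, '')   # may raise on minimal locale setups
    print locale.format("%d", 1234567, grouping=True)
    # e.g. '1,234,567' under an en_US locale
    print locale.format_string("int %i float %.2f str %s",
                               (1234567, 1234.5, 'x'), grouping=True)
    # grouping is applied to each numeric directive,
    # e.g. 'int 1,234,567 float 1,234.50 str x'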
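
The TestMailbox/TestMaildir cases around this point exercise the new dict-like mailbox API end to end: add() returns a key, messages are fetched, replaced and removed by key, and close() flushes everything. For orientation, a minimal sketch of that API outside the test harness, assuming the rewritten mailbox module merged here; the on-disk path is a placeholder.

    import mailbox

    # factory=None makes lookups return mailbox.MaildirMessage objects
    # instead of rfc822.Message ones.
    box = mailbox.Maildir('/tmp/scratch-maildir', factory=None)
    key = box.add('From: foo\n\nbody 0')
    print len(box)              # 1
    print box.get_string(key)   # the original text back
    msg = box[key]              # a MaildirMessage
    msg.set_flags('S')          # mark it as seen
    box[key] = msg              # write the modified message back
    box.remove(key)
    box.close()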
+ + def test_add_MM(self): + # Add a MaildirMessage instance + msg = mailbox.MaildirMessage(self._template % 0) + msg.set_subdir('cur') + msg.set_info('foo') + key = self._box.add(msg) + self.assert_(os.path.exists(os.path.join(self._path, 'cur', '%s%sfoo' % + (key, self._box.colon)))) + + def test_get_MM(self): + # Get a MaildirMessage instance + msg = mailbox.MaildirMessage(self._template % 0) + msg.set_subdir('cur') + msg.set_flags('RF') + key = self._box.add(msg) + msg_returned = self._box.get_message(key) + self.assert_(isinstance(msg_returned, mailbox.MaildirMessage)) + self.assert_(msg_returned.get_subdir() == 'cur') + self.assert_(msg_returned.get_flags() == 'FR') + + def test_set_MM(self): + # Set with a MaildirMessage instance + msg0 = mailbox.MaildirMessage(self._template % 0) + msg0.set_flags('TP') + key = self._box.add(msg0) + msg_returned = self._box.get_message(key) + self.assert_(msg_returned.get_subdir() == 'new') + self.assert_(msg_returned.get_flags() == 'PT') + msg1 = mailbox.MaildirMessage(self._template % 1) + self._box[key] = msg1 + msg_returned = self._box.get_message(key) + self.assert_(msg_returned.get_subdir() == 'new') + self.assert_(msg_returned.get_flags() == '') + self.assert_(msg_returned.get_payload() == '1') + msg2 = mailbox.MaildirMessage(self._template % 2) + msg2.set_info('2,S') + self._box[key] = msg2 + self._box[key] = self._template % 3 + msg_returned = self._box.get_message(key) + self.assert_(msg_returned.get_subdir() == 'new') + self.assert_(msg_returned.get_flags() == 'S') + self.assert_(msg_returned.get_payload() == '3') + + def test_initialize_new(self): + # Initialize a non-existent mailbox + self.tearDown() + self._box = mailbox.Maildir(self._path) + self._check_basics(factory=rfc822.Message) + self._delete_recursively(self._path) + self._box = self._factory(self._path, factory=None) + self._check_basics() + + def test_initialize_existing(self): + # Initialize an existing mailbox + self.tearDown() + for subdir in '', 'tmp', 'new', 'cur': + os.mkdir(os.path.join(self._path, subdir)) + self._box = mailbox.Maildir(self._path) + self._check_basics(factory=rfc822.Message) + self._box = mailbox.Maildir(self._path, factory=None) + self._check_basics() + + def _check_basics(self, factory=None): + # (Used by test_open_new() and test_open_existing().) 
+ self.assertEqual(self._box._path, os.path.abspath(self._path)) + self.assertEqual(self._box._factory, factory) + for subdir in '', 'tmp', 'new', 'cur': + path = os.path.join(self._path, subdir) + mode = os.stat(path)[stat.ST_MODE] + self.assert_(stat.S_ISDIR(mode), "Not a directory: '%s'" % path) + + def test_list_folders(self): + # List folders + self._box.add_folder('one') + self._box.add_folder('two') + self._box.add_folder('three') + self.assert_(len(self._box.list_folders()) == 3) + self.assert_(set(self._box.list_folders()) == + set(('one', 'two', 'three'))) + + def test_get_folder(self): + # Open folders + self._box.add_folder('foo.bar') + folder0 = self._box.get_folder('foo.bar') + folder0.add(self._template % 'bar') + self.assert_(os.path.isdir(os.path.join(self._path, '.foo.bar'))) + folder1 = self._box.get_folder('foo.bar') + self.assert_(folder1.get_string(folder1.keys()[0]) == \ + self._template % 'bar') + + def test_add_and_remove_folders(self): + # Delete folders + self._box.add_folder('one') + self._box.add_folder('two') + self.assert_(len(self._box.list_folders()) == 2) + self.assert_(set(self._box.list_folders()) == set(('one', 'two'))) + self._box.remove_folder('one') + self.assert_(len(self._box.list_folders()) == 1) + self.assert_(set(self._box.list_folders()) == set(('two',))) + self._box.add_folder('three') + self.assert_(len(self._box.list_folders()) == 2) + self.assert_(set(self._box.list_folders()) == set(('two', 'three'))) + self._box.remove_folder('three') + self.assert_(len(self._box.list_folders()) == 1) + self.assert_(set(self._box.list_folders()) == set(('two',))) + self._box.remove_folder('two') + self.assert_(len(self._box.list_folders()) == 0) + self.assert_(self._box.list_folders() == []) + + def test_clean(self): + # Remove old files from 'tmp' + foo_path = os.path.join(self._path, 'tmp', 'foo') + bar_path = os.path.join(self._path, 'tmp', 'bar') + f = open(foo_path, 'w') + f.write("@") + f.close() + f = open(bar_path, 'w') + f.write("@") + f.close() + self._box.clean() + self.assert_(os.path.exists(foo_path)) + self.assert_(os.path.exists(bar_path)) + foo_stat = os.stat(foo_path) + os.utime(foo_path, (time.time() - 129600 - 2, + foo_stat.st_mtime)) + self._box.clean() + self.assert_(not os.path.exists(foo_path)) + self.assert_(os.path.exists(bar_path)) + + def test_create_tmp(self, repetitions=10): + # Create files in tmp directory + hostname = socket.gethostname() + if '/' in hostname: + hostname = hostname.replace('/', r'\057') + if ':' in hostname: + hostname = hostname.replace(':', r'\072') + pid = os.getpid() + pattern = re.compile(r"(?P