author     mdejong <mdejong>   2001-06-22 09:10:38 (GMT)
committer  mdejong <mdejong>   2001-06-22 09:10:38 (GMT)
commit     83ca1ca9b40094b453deb82faa202aa2d9fb7a03 (patch)
tree       b08db2b610248f3e1db623b6d3e5eef73fea2257 /win
parent     a9040f5c279e1c15647daad577a4b2c11f0a1f43 (diff)
download   tk-83ca1ca9b40094b453deb82faa202aa2d9fb7a03.zip
           tk-83ca1ca9b40094b453deb82faa202aa2d9fb7a03.tar.gz
           tk-83ca1ca9b40094b453deb82faa202aa2d9fb7a03.tar.bz2
* win/configure:
* win/tcl.m4: Update from Tcl.
Diffstat (limited to 'win')
-rwxr-xr-x  win/configure   4
-rw-r--r--  win/tcl.m4      4
2 files changed, 4 insertions, 4 deletions
diff --git a/win/configure b/win/configure
index 7de82f6..e04096d 100755
--- a/win/configure
+++ b/win/configure
@@ -1229,8 +1229,8 @@ echo "configure:1202: checking whether $ld_prog supports -shared option" >&5
CFLAGS_DEBUG=-g
CFLAGS_OPTIMIZE=-O
CFLAGS_WARNING="-Wall -Wconversion"
- LDFLAGS_DEBUG=-g
- LDFLAGS_OPTIMIZE=-O
+ LDFLAGS_DEBUG=
+ LDFLAGS_OPTIMIZE=
# Specify the CC output file names based on the target name
CC_OBJNAME="-o \$@"
diff --git a/win/tcl.m4 b/win/tcl.m4
index cc719dd..ba58cad 100644
--- a/win/tcl.m4
+++ b/win/tcl.m4
@@ -428,8 +428,8 @@ AC_DEFUN(SC_CONFIG_CFLAGS, [
CFLAGS_DEBUG=-g
CFLAGS_OPTIMIZE=-O
CFLAGS_WARNING="-Wall -Wconversion"
- LDFLAGS_DEBUG=-g
- LDFLAGS_OPTIMIZE=-O
+ LDFLAGS_DEBUG=
+ LDFLAGS_OPTIMIZE=
# Specify the CC output file names based on the target name
CC_OBJNAME="-o \[$]@"
mail/utils.py61
-rw-r--r--Lib/encodings/mbcs.py35
-rw-r--r--Lib/encodings/punycode.py6
-rw-r--r--Lib/encodings/utf_8_sig.py4
-rw-r--r--Lib/encodings/uu_codec.py4
-rw-r--r--Lib/gzip.py8
-rw-r--r--Lib/httplib.py8
-rw-r--r--Lib/idlelib/Bindings.py26
-rw-r--r--Lib/idlelib/CREDITS.txt15
-rw-r--r--Lib/idlelib/CallTipWindow.py6
-rw-r--r--Lib/idlelib/CallTips.py2
-rw-r--r--Lib/idlelib/CodeContext.py3
-rw-r--r--Lib/idlelib/ColorDelegator.py30
-rw-r--r--Lib/idlelib/Debugger.py9
-rw-r--r--Lib/idlelib/EditorWindow.py49
-rw-r--r--Lib/idlelib/NEWS.txt43
-rw-r--r--Lib/idlelib/ParenMatch.py7
-rw-r--r--Lib/idlelib/PyShell.py31
-rw-r--r--Lib/idlelib/ScriptBinding.py5
-rw-r--r--Lib/idlelib/ZoomHeight.py9
-rw-r--r--Lib/idlelib/buildapp.py17
-rw-r--r--Lib/idlelib/config-keys.def53
-rw-r--r--Lib/idlelib/configHandler.py16
-rw-r--r--Lib/idlelib/configHelpSourceEdit.py5
-rw-r--r--Lib/idlelib/idlever.py2
-rw-r--r--Lib/idlelib/keybindingDialog.py4
-rw-r--r--Lib/idlelib/macosxSupport.py112
-rw-r--r--Lib/inspect.py58
-rw-r--r--Lib/lib-tk/Tkinter.py99
-rw-r--r--Lib/lib-tk/tkMessageBox.py7
-rw-r--r--Lib/lib-tk/turtle.py109
-rw-r--r--Lib/linecache.py4
-rw-r--r--Lib/logging/config.py1
-rw-r--r--Lib/logging/handlers.py42
-rwxr-xr-xLib/mailbox.py30
-rw-r--r--Lib/mimetypes.py4
-rw-r--r--Lib/msilib/__init__.py10
-rw-r--r--Lib/optparse.py18
-rw-r--r--Lib/os.py2
-rwxr-xr-xLib/pdb.py3
-rw-r--r--Lib/pkgutil.py53
-rw-r--r--Lib/popen2.py4
-rw-r--r--Lib/pstats.py9
-rwxr-xr-xLib/pydoc.py22
-rw-r--r--Lib/random.py18
-rw-r--r--Lib/sgmllib.py99
-rw-r--r--Lib/shelve.py3
-rw-r--r--Lib/shutil.py8
-rw-r--r--Lib/site.py9
-rw-r--r--Lib/socket.py29
-rw-r--r--Lib/sqlite3/test/hooks.py2
-rw-r--r--Lib/sqlite3/test/regression.py8
-rw-r--r--Lib/sqlite3/test/types.py52
-rw-r--r--Lib/sqlite3/test/userfunctions.py107
-rw-r--r--Lib/string.py6
-rw-r--r--Lib/struct.py2
-rw-r--r--Lib/subprocess.py27
-rw-r--r--Lib/tarfile.py15
-rw-r--r--Lib/telnetlib.py16
-rw-r--r--Lib/tempfile.py2
-rw-r--r--Lib/test/crashers/bogus_code_obj.py19
-rw-r--r--Lib/test/crashers/borrowed_ref_1.py29
-rw-r--r--Lib/test/crashers/borrowed_ref_2.py38
-rw-r--r--Lib/test/crashers/coerce.py9
-rw-r--r--Lib/test/crashers/gc_inspection.py32
-rw-r--r--Lib/test/crashers/infinite_rec_3.py9
-rw-r--r--Lib/test/crashers/recursion_limit_too_high.py16
-rw-r--r--Lib/test/crashers/recursive_call.py5
-rw-r--r--Lib/test/crashers/xml_parsers.py56
-rw-r--r--Lib/test/fork_wait.py9
-rw-r--r--Lib/test/output/test_ossaudiodev3
-rw-r--r--Lib/test/output/test_thread12
-rwxr-xr-xLib/test/regrtest.py86
-rw-r--r--Lib/test/string_tests.py43
-rw-r--r--Lib/test/test__locale.py3
-rw-r--r--Lib/test/test_ast.py2
-rw-r--r--Lib/test/test_asynchat.py3
-rw-r--r--Lib/test/test_bigaddrspace.py46
-rw-r--r--Lib/test/test_bigmem.py2
-rwxr-xr-xLib/test/test_bsddb.py3
-rw-r--r--Lib/test/test_builtin.py30
-rw-r--r--Lib/test/test_bz2.py3
-rw-r--r--Lib/test/test_cmd_line.py1
-rw-r--r--Lib/test/test_code.py17
-rw-r--r--Lib/test/test_codecs.py6
-rw-r--r--Lib/test/test_commands.py3
-rw-r--r--Lib/test/test_compile.py35
-rw-r--r--Lib/test/test_compiler.py38
-rw-r--r--Lib/test/test_curses.py7
-rw-r--r--Lib/test/test_defaultdict.py6
-rw-r--r--Lib/test/test_descr.py34
-rw-r--r--Lib/test/test_dis.py24
-rw-r--r--Lib/test/test_doctest.py7
-rw-r--r--Lib/test/test_email_codecs.py12
-rw-r--r--Lib/test/test_exceptions.py12
-rwxr-xr-xLib/test/test_fcntl.py7
-rw-r--r--Lib/test/test_file.py78
-rw-r--r--Lib/test/test_filecmp.py3
-rw-r--r--Lib/test/test_fork1.py13
-rw-r--r--Lib/test/test_generators.py39
-rw-r--r--Lib/test/test_genexps.py2
-rw-r--r--Lib/test/test_getargs2.py19
-rw-r--r--Lib/test/test_grammar.py5
-rw-r--r--Lib/test/test_inspect.py32
-rw-r--r--Lib/test/test_iterlen.py7
-rw-r--r--Lib/test/test_logging.py2
-rw-r--r--Lib/test/test_mailbox.py29
-rw-r--r--Lib/test/test_mimetools.py4
-rw-r--r--Lib/test/test_mimetypes.py5
-rw-r--r--Lib/test/test_minidom.py92
-rw-r--r--Lib/test/test_multibytecodec.py38
-rw-r--r--Lib/test/test_optparse.py49
-rw-r--r--Lib/test/test_os.py14
-rw-r--r--Lib/test/test_ossaudiodev.py37
-rw-r--r--Lib/test/test_pep292.py7
-rw-r--r--Lib/test/test_popen.py3
-rw-r--r--Lib/test/test_popen2.py3
-rw-r--r--Lib/test/test_pyexpat.py21
-rw-r--r--Lib/test/test_sax.py82
-rw-r--r--Lib/test/test_scope.py11
-rw-r--r--Lib/test/test_select.py3
-rw-r--r--Lib/test/test_sgmllib.py111
-rw-r--r--Lib/test/test_shutil.py47
-rw-r--r--Lib/test/test_signal.py51
-rw-r--r--Lib/test/test_socket.py67
-rw-r--r--Lib/test/test_socket_ssl.py26
-rw-r--r--Lib/test/test_socketserver.py30
-rw-r--r--Lib/test/test_struct.py95
-rw-r--r--Lib/test/test_subprocess.py49
-rw-r--r--Lib/test/test_support.py82
-rw-r--r--Lib/test/test_sys.py84
-rw-r--r--Lib/test/test_tcl.py6
-rw-r--r--Lib/test/test_textwrap.py61
-rw-r--r--Lib/test/test_thread.py43
-rw-r--r--Lib/test/test_threaded_import.py7
-rw-r--r--Lib/test/test_threadedtempfile.py4
-rw-r--r--Lib/test/test_threading.py26
-rw-r--r--Lib/test/test_time.py44
-rw-r--r--Lib/test/test_timeout.py2
-rw-r--r--Lib/test/test_trace.py4
-rw-r--r--Lib/test/test_traceback.py45
-rw-r--r--Lib/test/test_types.py8
-rw-r--r--Lib/test/test_urllib2.py14
-rw-r--r--Lib/test/test_urllib2net.py13
-rw-r--r--Lib/test/test_urllibnet.py2
-rw-r--r--Lib/test/test_uuid.py434
-rw-r--r--Lib/test/test_wait3.py12
-rw-r--r--Lib/test/test_wait4.py12
-rw-r--r--Lib/test/test_warnings.py13
-rw-r--r--Lib/test/test_winreg.py3
-rwxr-xr-xLib/test/test_wsgiref.py615
-rw-r--r--Lib/test/test_xml_etree.py44
-rw-r--r--Lib/test/test_xml_etree_c.py6
-rw-r--r--Lib/test/test_zipfile.py252
-rw-r--r--Lib/test/test_zipfile64.py101
-rw-r--r--Lib/test/test_zlib.py116
-rw-r--r--Lib/textwrap.py75
-rw-r--r--Lib/threading.py4
-rw-r--r--Lib/trace.py2
-rw-r--r--Lib/traceback.py85
-rw-r--r--Lib/types.py14
-rw-r--r--Lib/urllib.py12
-rw-r--r--Lib/urllib2.py26
-rw-r--r--Lib/uuid.py515
-rw-r--r--Lib/warnings.py6
-rw-r--r--Lib/webbrowser.py7
-rw-r--r--Lib/wsgiref.egg-info8
-rw-r--r--Lib/wsgiref/__init__.py23
-rw-r--r--Lib/wsgiref/handlers.py492
-rw-r--r--Lib/wsgiref/headers.py205
-rw-r--r--Lib/wsgiref/simple_server.py205
-rw-r--r--Lib/wsgiref/util.py205
-rw-r--r--Lib/wsgiref/validate.py432
-rw-r--r--Lib/xml/__init__.py (renamed from Lib/xml.py)8
-rw-r--r--Lib/xml/dom/NodeFilter.py (renamed from Lib/xmlcore/dom/NodeFilter.py)0
-rw-r--r--Lib/xml/dom/__init__.py (renamed from Lib/xmlcore/dom/__init__.py)2
-rw-r--r--Lib/xml/dom/domreg.py (renamed from Lib/xmlcore/dom/domreg.py)2
-rw-r--r--Lib/xml/dom/expatbuilder.py (renamed from Lib/xmlcore/dom/expatbuilder.py)12
-rw-r--r--Lib/xml/dom/minicompat.py (renamed from Lib/xmlcore/dom/minicompat.py)3
-rw-r--r--Lib/xml/dom/minidom.py (renamed from Lib/xmlcore/dom/minidom.py)146
-rw-r--r--Lib/xml/dom/pulldom.py (renamed from Lib/xmlcore/dom/pulldom.py)18
-rw-r--r--Lib/xml/dom/xmlbuilder.py (renamed from Lib/xmlcore/dom/xmlbuilder.py)22
-rw-r--r--Lib/xml/etree/ElementInclude.py (renamed from Lib/xmlcore/etree/ElementInclude.py)2
-rw-r--r--Lib/xml/etree/ElementPath.py (renamed from Lib/xmlcore/etree/ElementPath.py)0
-rw-r--r--Lib/xml/etree/ElementTree.py (renamed from Lib/xmlcore/etree/ElementTree.py)9
-rw-r--r--Lib/xml/etree/__init__.py (renamed from Lib/xmlcore/etree/__init__.py)0
-rw-r--r--Lib/xml/etree/cElementTree.py (renamed from Lib/xmlcore/etree/cElementTree.py)0
-rw-r--r--Lib/xml/parsers/__init__.py (renamed from Lib/xmlcore/parsers/__init__.py)0
-rw-r--r--Lib/xml/parsers/expat.py (renamed from Lib/xmlcore/parsers/expat.py)0
-rw-r--r--Lib/xml/sax/__init__.py (renamed from Lib/xmlcore/sax/__init__.py)14
-rw-r--r--Lib/xml/sax/_exceptions.py (renamed from Lib/xmlcore/sax/_exceptions.py)0
-rw-r--r--Lib/xml/sax/expatreader.py (renamed from Lib/xmlcore/sax/expatreader.py)26
-rw-r--r--Lib/xml/sax/handler.py (renamed from Lib/xmlcore/sax/handler.py)0
-rw-r--r--Lib/xml/sax/saxutils.py (renamed from Lib/xmlcore/sax/saxutils.py)6
-rw-r--r--Lib/xml/sax/xmlreader.py (renamed from Lib/xmlcore/sax/xmlreader.py)6
-rw-r--r--Lib/xmlcore/__init__.py20
-rw-r--r--Lib/zipfile.py385
243 files changed, 8137 insertions, 1462 deletions
diff --git a/Lib/Queue.py b/Lib/Queue.py
index 51ad354..0f80584 100644
--- a/Lib/Queue.py
+++ b/Lib/Queue.py
@@ -14,11 +14,11 @@ class Full(Exception):
pass
class Queue:
- def __init__(self, maxsize=0):
- """Initialize a queue object with a given maximum size.
+ """Create a queue object with a given maximum size.
- If maxsize is <= 0, the queue size is infinite.
- """
+ If maxsize is <= 0, the queue size is infinite.
+ """
+ def __init__(self, maxsize=0):
try:
import threading
except ImportError:
diff --git a/Lib/SimpleHTTPServer.py b/Lib/SimpleHTTPServer.py
index 089936f..fae551a 100644
--- a/Lib/SimpleHTTPServer.py
+++ b/Lib/SimpleHTTPServer.py
@@ -192,6 +192,8 @@ class SimpleHTTPRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
else:
return self.extensions_map['']
+ if not mimetypes.inited:
+ mimetypes.init() # try to read system mime.types
extensions_map = mimetypes.types_map.copy()
extensions_map.update({
'': 'application/octet-stream', # Default
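The hunk above makes the handler initialize the mimetypes registry lazily before copying its type map, so the class attribute picks up the system mime.types files when they are readable. A minimal standalone sketch of the same pattern (illustration only, not part of the patch):

    import mimetypes

    if not mimetypes.inited:
        mimetypes.init()                      # read the system mime.types tables once
    extensions_map = mimetypes.types_map.copy()
    extensions_map.setdefault('', 'application/octet-stream')   # default for unknown types
    print extensions_map.get('.html')         # typically 'text/html'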
diff --git a/Lib/UserString.py b/Lib/UserString.py
index 473ee88..60dc34b 100755
--- a/Lib/UserString.py
+++ b/Lib/UserString.py
@@ -5,14 +5,13 @@
Note: string objects have grown methods in Python 1.6
This module requires Python 1.6 or later.
"""
-from types import StringTypes
import sys
__all__ = ["UserString","MutableString"]
class UserString:
def __init__(self, seq):
- if isinstance(seq, StringTypes):
+ if isinstance(seq, basestring):
self.data = seq
elif isinstance(seq, UserString):
self.data = seq.data[:]
@@ -43,12 +42,12 @@ class UserString:
def __add__(self, other):
if isinstance(other, UserString):
return self.__class__(self.data + other.data)
- elif isinstance(other, StringTypes):
+ elif isinstance(other, basestring):
return self.__class__(self.data + other)
else:
return self.__class__(self.data + str(other))
def __radd__(self, other):
- if isinstance(other, StringTypes):
+ if isinstance(other, basestring):
return self.__class__(other + self.data)
else:
return self.__class__(str(other) + self.data)
@@ -163,7 +162,7 @@ class MutableString(UserString):
start = max(start, 0); end = max(end, 0)
if isinstance(sub, UserString):
self.data = self.data[:start]+sub.data+self.data[end:]
- elif isinstance(sub, StringTypes):
+ elif isinstance(sub, basestring):
self.data = self.data[:start]+sub+self.data[end:]
else:
self.data = self.data[:start]+str(sub)+self.data[end:]
@@ -175,7 +174,7 @@ class MutableString(UserString):
def __iadd__(self, other):
if isinstance(other, UserString):
self.data += other.data
- elif isinstance(other, StringTypes):
+ elif isinstance(other, basestring):
self.data += other
else:
self.data += str(other)
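Replacing types.StringTypes with the basestring built-in lets a single isinstance check accept both str and unicode. A stripped-down illustration of the idiom used throughout the class:

    def _coerce(other):
        # basestring is the common base of str and unicode on Python 2
        if isinstance(other, basestring):
            return other
        return str(other)

    print _coerce(u'abc'), _coerce(42)        # -> abc 42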
diff --git a/Lib/_MozillaCookieJar.py b/Lib/_MozillaCookieJar.py
index 1776b93..4fd6de3 100644
--- a/Lib/_MozillaCookieJar.py
+++ b/Lib/_MozillaCookieJar.py
@@ -63,8 +63,7 @@ class MozillaCookieJar(FileCookieJar):
if line.endswith("\n"): line = line[:-1]
# skip comments and blank lines XXX what is $ for?
- if (line.strip().startswith("#") or
- line.strip().startswith("$") or
+ if (line.strip().startswith(("#", "$")) or
line.strip() == ""):
continue
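The two chained tests collapse into one call because str.startswith accepts a tuple of prefixes from Python 2.5 on. For example:

    line = "# Netscape HTTP Cookie File"
    if line.strip().startswith(("#", "$")) or line.strip() == "":
        print "comment or blank line, skipping"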
diff --git a/Lib/binhex.py b/Lib/binhex.py
index 4f3882a..0f3e3c4 100644
--- a/Lib/binhex.py
+++ b/Lib/binhex.py
@@ -44,22 +44,14 @@ RUNCHAR=chr(0x90) # run-length introducer
#
# Workarounds for non-mac machines.
-if os.name == 'mac':
- import macfs
- import MacOS
- try:
- openrf = MacOS.openrf
- except AttributeError:
- # Backward compatibility
- openrf = open
-
- def FInfo():
- return macfs.FInfo()
+try:
+ from Carbon.File import FSSpec, FInfo
+ from MacOS import openrf
def getfileinfo(name):
- finfo = macfs.FSSpec(name).GetFInfo()
+ finfo = FSSpec(name).FSpGetFInfo()
dir, file = os.path.split(name)
- # XXXX Get resource/data sizes
+ # XXX Get resource/data sizes
fp = open(name, 'rb')
fp.seek(0, 2)
dlen = fp.tell()
@@ -75,7 +67,7 @@ if os.name == 'mac':
mode = '*' + mode[0]
return openrf(name, mode)
-else:
+except ImportError:
#
# Glue code for non-macintosh usage
#
@@ -183,7 +175,7 @@ class BinHex:
ofname = ofp
ofp = open(ofname, 'w')
if os.name == 'mac':
- fss = macfs.FSSpec(ofname)
+ fss = FSSpec(ofname)
fss.SetCreatorType('BnHq', 'TEXT')
ofp.write('(This file must be converted with BinHex 4.0)\n\n:')
hqxer = _Hqxcoderengine(ofp)
@@ -486,7 +478,7 @@ def hexbin(inp, out):
if not out:
out = ifp.FName
if os.name == 'mac':
- ofss = macfs.FSSpec(out)
+ ofss = FSSpec(out)
out = ofss.as_pathname()
ofp = open(out, 'wb')
@@ -519,6 +511,7 @@ def hexbin(inp, out):
def _test():
if os.name == 'mac':
+ import macfs
fss, ok = macfs.PromptGetFile('File to convert:')
if not ok:
sys.exit(0)
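The rewrite swaps the os.name == 'mac' branch for feature detection: attempt the Carbon imports and fall back on ImportError. A hedged sketch of that idiom on its own (module names as in the patch, the flag is illustrative):

    try:
        from Carbon.File import FSSpec, FInfo     # only importable on Mac builds
        from MacOS import openrf
        _have_carbon = True
    except ImportError:
        _have_carbon = False                      # generic, non-Macintosh fallback

    print "Carbon available:", _have_carbon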
diff --git a/Lib/bsddb/__init__.py b/Lib/bsddb/__init__.py
index 90ed362..cf32668 100644
--- a/Lib/bsddb/__init__.py
+++ b/Lib/bsddb/__init__.py
@@ -33,7 +33,10 @@
#----------------------------------------------------------------------
-"""Support for BerkeleyDB 3.2 through 4.2.
+"""Support for BerkeleyDB 3.3 through 4.4 with a simple interface.
+
+For the full featured object oriented interface use the bsddb.db module
+instead. It mirrors the Sleepycat BerkeleyDB C API.
"""
try:
@@ -43,8 +46,10 @@ try:
# python as bsddb._bsddb.
import _pybsddb
_bsddb = _pybsddb
+ from bsddb3.dbutils import DeadlockWrap as _DeadlockWrap
else:
import _bsddb
+ from bsddb.dbutils import DeadlockWrap as _DeadlockWrap
except ImportError:
# Remove ourselves from sys.modules
import sys
@@ -70,7 +75,7 @@ if sys.version >= '2.3':
exec """
class _iter_mixin(UserDict.DictMixin):
def _make_iter_cursor(self):
- cur = self.db.cursor()
+ cur = _DeadlockWrap(self.db.cursor)
key = id(cur)
self._cursor_refs[key] = ref(cur, self._gen_cref_cleaner(key))
return cur
@@ -90,19 +95,19 @@ class _iter_mixin(UserDict.DictMixin):
# since we're only returning keys, we call the cursor
# methods with flags=0, dlen=0, dofs=0
- key = cur.first(0,0,0)[0]
+ key = _DeadlockWrap(cur.first, 0,0,0)[0]
yield key
next = cur.next
while 1:
try:
- key = next(0,0,0)[0]
+ key = _DeadlockWrap(next, 0,0,0)[0]
yield key
except _bsddb.DBCursorClosedError:
cur = self._make_iter_cursor()
# FIXME-20031101-greg: race condition. cursor could
# be closed by another thread before this call.
- cur.set(key,0,0,0)
+ _DeadlockWrap(cur.set, key,0,0,0)
next = cur.next
except _bsddb.DBNotFoundError:
return
@@ -119,21 +124,21 @@ class _iter_mixin(UserDict.DictMixin):
# FIXME-20031102-greg: race condition. cursor could
# be closed by another thread before this call.
- kv = cur.first()
+ kv = _DeadlockWrap(cur.first)
key = kv[0]
yield kv
next = cur.next
while 1:
try:
- kv = next()
+ kv = _DeadlockWrap(next)
key = kv[0]
yield kv
except _bsddb.DBCursorClosedError:
cur = self._make_iter_cursor()
# FIXME-20031101-greg: race condition. cursor could
# be closed by another thread before this call.
- cur.set(key,0,0,0)
+ _DeadlockWrap(cur.set, key,0,0,0)
next = cur.next
except _bsddb.DBNotFoundError:
return
@@ -177,9 +182,9 @@ class _DBWithCursor(_iter_mixin):
def _checkCursor(self):
if self.dbc is None:
- self.dbc = self.db.cursor()
+ self.dbc = _DeadlockWrap(self.db.cursor)
if self.saved_dbc_key is not None:
- self.dbc.set(self.saved_dbc_key)
+ _DeadlockWrap(self.dbc.set, self.saved_dbc_key)
self.saved_dbc_key = None
# This method is needed for all non-cursor DB calls to avoid
@@ -192,15 +197,15 @@ class _DBWithCursor(_iter_mixin):
self.dbc = None
if save:
try:
- self.saved_dbc_key = c.current(0,0,0)[0]
+ self.saved_dbc_key = _DeadlockWrap(c.current, 0,0,0)[0]
except db.DBError:
pass
- c.close()
+ _DeadlockWrap(c.close)
del c
for cref in self._cursor_refs.values():
c = cref()
if c is not None:
- c.close()
+ _DeadlockWrap(c.close)
def _checkOpen(self):
if self.db is None:
@@ -211,73 +216,77 @@ class _DBWithCursor(_iter_mixin):
def __len__(self):
self._checkOpen()
- return len(self.db)
+ return _DeadlockWrap(lambda: len(self.db)) # len(self.db)
def __getitem__(self, key):
self._checkOpen()
- return self.db[key]
+ return _DeadlockWrap(lambda: self.db[key]) # self.db[key]
def __setitem__(self, key, value):
self._checkOpen()
self._closeCursors()
- self.db[key] = value
+ def wrapF():
+ self.db[key] = value
+ _DeadlockWrap(wrapF) # self.db[key] = value
def __delitem__(self, key):
self._checkOpen()
self._closeCursors()
- del self.db[key]
+ def wrapF():
+ del self.db[key]
+ _DeadlockWrap(wrapF) # del self.db[key]
def close(self):
self._closeCursors(save=0)
if self.dbc is not None:
- self.dbc.close()
+ _DeadlockWrap(self.dbc.close)
v = 0
if self.db is not None:
- v = self.db.close()
+ v = _DeadlockWrap(self.db.close)
self.dbc = None
self.db = None
return v
def keys(self):
self._checkOpen()
- return self.db.keys()
+ return _DeadlockWrap(self.db.keys)
def has_key(self, key):
self._checkOpen()
- return self.db.has_key(key)
+ return _DeadlockWrap(self.db.has_key, key)
def set_location(self, key):
self._checkOpen()
self._checkCursor()
- return self.dbc.set_range(key)
+ return _DeadlockWrap(self.dbc.set_range, key)
def next(self):
self._checkOpen()
self._checkCursor()
- rv = self.dbc.next()
+ rv = _DeadlockWrap(self.dbc.next)
return rv
def previous(self):
self._checkOpen()
self._checkCursor()
- rv = self.dbc.prev()
+ rv = _DeadlockWrap(self.dbc.prev)
return rv
def first(self):
self._checkOpen()
self._checkCursor()
- rv = self.dbc.first()
+ rv = _DeadlockWrap(self.dbc.first)
return rv
def last(self):
self._checkOpen()
self._checkCursor()
- rv = self.dbc.last()
+ rv = _DeadlockWrap(self.dbc.last)
return rv
def sync(self):
self._checkOpen()
- return self.db.sync()
+ return _DeadlockWrap(self.db.sync)
#----------------------------------------------------------------------
@@ -385,5 +394,4 @@ try:
except ImportError:
db.DB_THREAD = 0
-
#----------------------------------------------------------------------
diff --git a/Lib/bsddb/dbrecio.py b/Lib/bsddb/dbrecio.py
index 22e382a..d439f32 100644
--- a/Lib/bsddb/dbrecio.py
+++ b/Lib/bsddb/dbrecio.py
@@ -75,7 +75,7 @@ class DBRecIO:
dlen = newpos - self.pos
- r = self.db.get(key, txn=self.txn, dlen=dlen, doff=self.pos)
+ r = self.db.get(self.key, txn=self.txn, dlen=dlen, doff=self.pos)
self.pos = newpos
return r
@@ -121,7 +121,7 @@ class DBRecIO:
"Negative size not allowed")
elif size < self.pos:
self.pos = size
- self.db.put(key, "", txn=self.txn, dlen=self.len-size, doff=size)
+ self.db.put(self.key, "", txn=self.txn, dlen=self.len-size, doff=size)
def write(self, s):
if self.closed:
@@ -131,7 +131,7 @@ class DBRecIO:
self.buflist.append('\0'*(self.pos - self.len))
self.len = self.pos
newpos = self.pos + len(s)
- self.db.put(key, s, txn=self.txn, dlen=len(s), doff=self.pos)
+ self.db.put(self.key, s, txn=self.txn, dlen=len(s), doff=self.pos)
self.pos = newpos
def writelines(self, list):
diff --git a/Lib/bsddb/dbtables.py b/Lib/bsddb/dbtables.py
index 369db43..492d5fd 100644
--- a/Lib/bsddb/dbtables.py
+++ b/Lib/bsddb/dbtables.py
@@ -32,6 +32,12 @@ except ImportError:
# For Python 2.3
from bsddb.db import *
+# XXX(nnorwitz): is this correct? DBIncompleteError is conditional in _bsddb.c
+try:
+ DBIncompleteError
+except NameError:
+ class DBIncompleteError(Exception):
+ pass
class TableDBError(StandardError):
pass
diff --git a/Lib/bsddb/dbutils.py b/Lib/bsddb/dbutils.py
index 3f63842..6dcfdd5 100644
--- a/Lib/bsddb/dbutils.py
+++ b/Lib/bsddb/dbutils.py
@@ -22,14 +22,14 @@
#
# import the time.sleep function in a namespace safe way to allow
-# "from bsddb.db import *"
+# "from bsddb.dbutils import *"
#
from time import sleep as _sleep
import db
# always sleep at least N seconds between retrys
-_deadlock_MinSleepTime = 1.0/64
+_deadlock_MinSleepTime = 1.0/128
# never sleep more than N seconds between retrys
_deadlock_MaxSleepTime = 3.14159
@@ -57,7 +57,7 @@ def DeadlockWrap(function, *_args, **_kwargs):
max_retries = _kwargs.get('max_retries', -1)
if _kwargs.has_key('max_retries'):
del _kwargs['max_retries']
- while 1:
+ while True:
try:
return function(*_args, **_kwargs)
except db.DBLockDeadlockError:
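DeadlockWrap retries a callable whenever BerkeleyDB raises DBLockDeadlockError, sleeping between attempts. A simplified sketch of that retry loop (the real function also honours a max_retries keyword and keeps the sleep time between the minimum and maximum shown):

    import time
    from bsddb import db

    def deadlock_wrap_sketch(function, *args, **kwargs):
        sleeptime = 1.0 / 128                         # _deadlock_MinSleepTime in the patch
        while True:
            try:
                return function(*args, **kwargs)
            except db.DBLockDeadlockError:
                time.sleep(sleeptime)
                sleeptime = min(sleeptime * 2, 3.14159)   # capped at _deadlock_MaxSleepTime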
diff --git a/Lib/bsddb/test/test_basics.py b/Lib/bsddb/test/test_basics.py
index bec5da3..d6d507f 100644
--- a/Lib/bsddb/test/test_basics.py
+++ b/Lib/bsddb/test/test_basics.py
@@ -562,6 +562,9 @@ class BasicTestCase(unittest.TestCase):
num = d.truncate()
assert num == 0, "truncate on empty DB returned nonzero (%r)" % (num,)
+ #----------------------------------------
+
+
#----------------------------------------------------------------------
@@ -583,18 +586,40 @@ class BasicHashWithThreadFlagTestCase(BasicTestCase):
dbopenflags = db.DB_THREAD
-class BasicBTreeWithEnvTestCase(BasicTestCase):
- dbtype = db.DB_BTREE
+class BasicWithEnvTestCase(BasicTestCase):
dbopenflags = db.DB_THREAD
useEnv = 1
envflags = db.DB_THREAD | db.DB_INIT_MPOOL | db.DB_INIT_LOCK
+ #----------------------------------------
+
+ def test07_EnvRemoveAndRename(self):
+ if not self.env:
+ return
+
+ if verbose:
+ print '\n', '-=' * 30
+ print "Running %s.test07_EnvRemoveAndRename..." % self.__class__.__name__
+
+ # can't rename or remove an open DB
+ self.d.close()
+
+ newname = self.filename + '.renamed'
+ self.env.dbrename(self.filename, None, newname)
+ self.env.dbremove(newname)
+
+ # dbremove and dbrename are in 4.1 and later
+ if db.version() < (4,1):
+ del test07_EnvRemoveAndRename
-class BasicHashWithEnvTestCase(BasicTestCase):
+ #----------------------------------------
+
+class BasicBTreeWithEnvTestCase(BasicWithEnvTestCase):
+ dbtype = db.DB_BTREE
+
+
+class BasicHashWithEnvTestCase(BasicWithEnvTestCase):
dbtype = db.DB_HASH
- dbopenflags = db.DB_THREAD
- useEnv = 1
- envflags = db.DB_THREAD | db.DB_INIT_MPOOL | db.DB_INIT_LOCK
#----------------------------------------------------------------------
diff --git a/Lib/compiler/future.py b/Lib/compiler/future.py
index 39c3bb9..fef189e 100644
--- a/Lib/compiler/future.py
+++ b/Lib/compiler/future.py
@@ -23,14 +23,7 @@ class FutureParser:
def visitModule(self, node):
stmt = node.node
- found_docstring = False
for s in stmt.nodes:
- # Skip over docstrings
- if not found_docstring and isinstance(s, ast.Discard) \
- and isinstance(s.expr, ast.Const) \
- and isinstance(s.expr.value, str):
- found_docstring = True
- continue
if not self.check_stmt(s):
break
diff --git a/Lib/compiler/pycodegen.py b/Lib/compiler/pycodegen.py
index c093128..c8a9779 100644
--- a/Lib/compiler/pycodegen.py
+++ b/Lib/compiler/pycodegen.py
@@ -380,16 +380,7 @@ class CodeGenerator:
self.set_lineno(node)
for default in node.defaults:
self.visit(default)
- frees = gen.scope.get_free_vars()
- if frees:
- for name in frees:
- self.emit('LOAD_CLOSURE', name)
- self.emit('LOAD_CONST', gen)
- self.emit('MAKE_CLOSURE', len(node.defaults))
- else:
- self.emit('LOAD_CONST', gen)
- self.emit('MAKE_FUNCTION', len(node.defaults))
-
+ self._makeClosure(gen, len(node.defaults))
for i in range(ndecorators):
self.emit('CALL_FUNCTION', 1)
@@ -403,14 +394,7 @@ class CodeGenerator:
for base in node.bases:
self.visit(base)
self.emit('BUILD_TUPLE', len(node.bases))
- frees = gen.scope.get_free_vars()
- for name in frees:
- self.emit('LOAD_CLOSURE', name)
- self.emit('LOAD_CONST', gen)
- if frees:
- self.emit('MAKE_CLOSURE', 0)
- else:
- self.emit('MAKE_FUNCTION', 0)
+ self._makeClosure(gen, 0)
self.emit('CALL_FUNCTION', 0)
self.emit('BUILD_CLASS')
self.storeName(node.name)
@@ -642,22 +626,25 @@ class CodeGenerator:
self.newBlock()
self.emit('POP_TOP')
- def visitGenExpr(self, node):
- gen = GenExprCodeGenerator(node, self.scopes, self.class_name,
- self.get_module())
- walk(node.code, gen)
- gen.finish()
- self.set_lineno(node)
+ def _makeClosure(self, gen, args):
frees = gen.scope.get_free_vars()
if frees:
for name in frees:
self.emit('LOAD_CLOSURE', name)
+ self.emit('BUILD_TUPLE', len(frees))
self.emit('LOAD_CONST', gen)
- self.emit('MAKE_CLOSURE', 0)
+ self.emit('MAKE_CLOSURE', args)
else:
self.emit('LOAD_CONST', gen)
- self.emit('MAKE_FUNCTION', 0)
+ self.emit('MAKE_FUNCTION', args)
+ def visitGenExpr(self, node):
+ gen = GenExprCodeGenerator(node, self.scopes, self.class_name,
+ self.get_module())
+ walk(node.code, gen)
+ gen.finish()
+ self.set_lineno(node)
+ self._makeClosure(gen, 0)
# precomputation of outmost iterable
self.visit(node.code.quals[0].iter)
self.emit('GET_ITER')
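The BUILD_TUPLE emitted before MAKE_CLOSURE mirrors what CPython's own compiler generates for a function with free variables on Python 2.5 and later, which can be checked with the dis module:

    import dis

    def outer(x):
        def inner():
            return x              # x is a free variable of inner
        return inner

    dis.dis(outer)                # shows LOAD_CLOSURE, BUILD_TUPLE, LOAD_CONST, MAKE_CLOSURE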
diff --git a/Lib/compiler/symbols.py b/Lib/compiler/symbols.py
index c608f64..8eb5fce 100644
--- a/Lib/compiler/symbols.py
+++ b/Lib/compiler/symbols.py
@@ -191,7 +191,7 @@ class GenExprScope(Scope):
self.add_param('[outmost-iterable]')
def get_names(self):
- keys = Scope.get_names()
+ keys = Scope.get_names(self)
return keys
class LambdaScope(FunctionScope):
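The one-line fix passes self explicitly: accessed through the class, Scope.get_names is an unbound method and needs the instance as its first argument. A stripped-down illustration (the real classes carry more state):

    class Scope:
        def get_names(self):
            return ['a', 'b']

    class GenExprScope(Scope):
        def get_names(self):
            keys = Scope.get_names(self)      # call the base implementation via the class
            return keys

    print GenExprScope().get_names()          # -> ['a', 'b']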
diff --git a/Lib/compiler/transformer.py b/Lib/compiler/transformer.py
index 96bcce3..8d256ed 100644
--- a/Lib/compiler/transformer.py
+++ b/Lib/compiler/transformer.py
@@ -536,12 +536,7 @@ class Transformer:
lineno=nodelist[0][2])
def try_stmt(self, nodelist):
- # 'try' ':' suite (except_clause ':' suite)+ ['else' ':' suite]
- # | 'try' ':' suite 'finally' ':' suite
- if nodelist[3][0] != symbol.except_clause:
- return self.com_try_finally(nodelist)
-
- return self.com_try_except(nodelist)
+ return self.com_try_except_finally(nodelist)
def with_stmt(self, nodelist):
return self.com_with(nodelist)
@@ -729,22 +724,20 @@ class Transformer:
def atom(self, nodelist):
return self._atom_dispatch[nodelist[0][0]](nodelist)
- n.lineno = nodelist[0][2]
- return n
def atom_lpar(self, nodelist):
if nodelist[1][0] == token.RPAR:
- return Tuple(())
+ return Tuple((), lineno=nodelist[0][2])
return self.com_node(nodelist[1])
def atom_lsqb(self, nodelist):
if nodelist[1][0] == token.RSQB:
- return List(())
+ return List((), lineno=nodelist[0][2])
return self.com_list_constructor(nodelist[1])
def atom_lbrace(self, nodelist):
if nodelist[1][0] == token.RBRACE:
- return Dict(())
+ return Dict((), lineno=nodelist[0][2])
return self.com_dictmaker(nodelist[1])
def atom_backquote(self, nodelist):
@@ -919,18 +912,21 @@ class Transformer:
bases.append(self.com_node(node[i]))
return bases
- def com_try_finally(self, nodelist):
- # try_fin_stmt: "try" ":" suite "finally" ":" suite
- return TryFinally(self.com_node(nodelist[2]),
- self.com_node(nodelist[5]),
- lineno=nodelist[0][2])
+ def com_try_except_finally(self, nodelist):
+ # ('try' ':' suite
+ # ((except_clause ':' suite)+ ['else' ':' suite] ['finally' ':' suite]
+ # | 'finally' ':' suite))
+
+ if nodelist[3][0] == token.NAME:
+ # first clause is a finally clause: only try-finally
+ return TryFinally(self.com_node(nodelist[2]),
+ self.com_node(nodelist[5]),
+ lineno=nodelist[0][2])
- def com_try_except(self, nodelist):
- # try_except: 'try' ':' suite (except_clause ':' suite)* ['else' suite]
#tryexcept: [TryNode, [except_clauses], elseNode)]
- stmt = self.com_node(nodelist[2])
clauses = []
elseNode = None
+ finallyNode = None
for i in range(3, len(nodelist), 3):
node = nodelist[i]
if node[0] == symbol.except_clause:
@@ -946,9 +942,16 @@ class Transformer:
clauses.append((expr1, expr2, self.com_node(nodelist[i+2])))
if node[0] == token.NAME:
- elseNode = self.com_node(nodelist[i+2])
- return TryExcept(self.com_node(nodelist[2]), clauses, elseNode,
- lineno=nodelist[0][2])
+ if node[1] == 'else':
+ elseNode = self.com_node(nodelist[i+2])
+ elif node[1] == 'finally':
+ finallyNode = self.com_node(nodelist[i+2])
+ try_except = TryExcept(self.com_node(nodelist[2]), clauses, elseNode,
+ lineno=nodelist[0][2])
+ if finallyNode:
+ return TryFinally(try_except, finallyNode, lineno=nodelist[0][2])
+ else:
+ return try_except
def com_with(self, nodelist):
# with_stmt: 'with' expr [with_var] ':' suite
@@ -1138,7 +1141,7 @@ class Transformer:
values = []
for i in range(1, len(nodelist), 2):
values.append(self.com_node(nodelist[i]))
- return List(values)
+ return List(values, lineno=values[0].lineno)
if hasattr(symbol, 'gen_for'):
def com_generator_expression(self, expr, node):
@@ -1185,7 +1188,7 @@ class Transformer:
for i in range(1, len(nodelist), 4):
items.append((self.com_node(nodelist[i]),
self.com_node(nodelist[i+2])))
- return Dict(items)
+ return Dict(items, lineno=items[0][0].lineno)
def com_apply_trailer(self, primaryNode, nodelist):
t = nodelist[1][0]
@@ -1379,6 +1382,7 @@ _doc_nodes = [
symbol.testlist,
symbol.testlist_safe,
symbol.test,
+ symbol.or_test,
symbol.and_test,
symbol.not_test,
symbol.comparison,
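com_try_except_finally exists because Python 2.5 unifies try/except and try/finally into one statement, which the old grammar treated as two separate forms. The shape the transformer now has to accept looks like:

    try:
        value = int("42")
    except ValueError:
        value = 0
    else:
        print "parsed", value
    finally:
        print "cleanup always runs"

When a finally clause is present alongside except clauses, the method wraps the TryExcept node in a TryFinally node, as the hunk shows.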
diff --git a/Lib/ctypes/__init__.py b/Lib/ctypes/__init__.py
index f2ddbaa..a4e3c36 100644
--- a/Lib/ctypes/__init__.py
+++ b/Lib/ctypes/__init__.py
@@ -1,9 +1,11 @@
+######################################################################
+# This file should be kept compatible with Python 2.3, see PEP 291. #
+######################################################################
"""create and manipulate C data types in Python"""
import os as _os, sys as _sys
-from itertools import chain as _chain
-__version__ = "0.9.9.6"
+__version__ = "1.0.0"
from _ctypes import Union, Structure, Array
from _ctypes import _Pointer
@@ -20,6 +22,23 @@ if __version__ != _ctypes_version:
if _os.name in ("nt", "ce"):
from _ctypes import FormatError
+DEFAULT_MODE = RTLD_LOCAL
+if _os.name == "posix" and _sys.platform == "darwin":
+ import gestalt
+
+ # gestalt.gestalt("sysv") returns the version number of the
+ # currently active system file as BCD.
+ # On OS X 10.4.6 -> 0x1046
+ # On OS X 10.2.8 -> 0x1028
+ # See also http://www.rgaros.nl/gestalt/
+ #
+ # On OS X 10.3, we use RTLD_GLOBAL as default mode
+ # because RTLD_LOCAL does not work at least on some
+ # libraries.
+
+ if gestalt.gestalt("sysv") < 0x1040:
+ DEFAULT_MODE = RTLD_GLOBAL
+
from _ctypes import FUNCFLAG_CDECL as _FUNCFLAG_CDECL, \
FUNCFLAG_PYTHONAPI as _FUNCFLAG_PYTHONAPI
@@ -67,7 +86,7 @@ def CFUNCTYPE(restype, *argtypes):
restype: the result type
argtypes: a sequence specifying the argument types
- The function prototype can be called in three ways to create a
+ The function prototype can be called in different ways to create a
callable object:
prototype(integer address) -> foreign function
@@ -111,7 +130,7 @@ if _os.name in ("nt", "ce"):
elif _os.name == "posix":
from _ctypes import dlopen as _dlopen
-from _ctypes import sizeof, byref, addressof, alignment
+from _ctypes import sizeof, byref, addressof, alignment, resize
from _ctypes import _SimpleCData
class py_object(_SimpleCData):
@@ -282,7 +301,7 @@ class CDLL(object):
_flags_ = _FUNCFLAG_CDECL
_restype_ = c_int # default, can be overridden in instances
- def __init__(self, name, mode=RTLD_LOCAL, handle=None):
+ def __init__(self, name, mode=DEFAULT_MODE, handle=None):
self._name = name
if handle is None:
self._handle = _dlopen(self._name, mode)
@@ -293,18 +312,19 @@ class CDLL(object):
return "<%s '%s', handle %x at %x>" % \
(self.__class__.__name__, self._name,
(self._handle & (_sys.maxint*2 + 1)),
- id(self))
+ id(self) & (_sys.maxint*2 + 1))
def __getattr__(self, name):
if name.startswith('__') and name.endswith('__'):
raise AttributeError, name
- return self.__getitem__(name)
+ func = self.__getitem__(name)
+ setattr(self, name, func)
+ return func
def __getitem__(self, name_or_ordinal):
func = self._FuncPtr((name_or_ordinal, self))
if not isinstance(name_or_ordinal, (int, long)):
func.__name__ = name_or_ordinal
- setattr(self, name_or_ordinal, func)
return func
class PyDLL(CDLL):
@@ -419,12 +439,10 @@ def PYFUNCTYPE(restype, *argtypes):
_restype_ = restype
_flags_ = _FUNCFLAG_CDECL | _FUNCFLAG_PYTHONAPI
return CFunctionType
-_cast = PYFUNCTYPE(py_object, c_void_p, py_object)(_cast_addr)
+_cast = PYFUNCTYPE(py_object, c_void_p, py_object, py_object)(_cast_addr)
def cast(obj, typ):
- result = _cast(obj, typ)
- result.__keepref = obj
- return result
+ return _cast(obj, obj, typ)
_string_at = CFUNCTYPE(py_object, c_void_p, c_int)(_string_at_addr)
def string_at(ptr, size=0):
@@ -446,52 +464,21 @@ else:
return _wstring_at(ptr, size)
-if _os.name == "nt": # COM stuff
+if _os.name in ("nt", "ce"): # COM stuff
def DllGetClassObject(rclsid, riid, ppv):
- # First ask ctypes.com.server than comtypes.server for the
- # class object.
-
- # trick py2exe by doing dynamic imports
- result = -2147221231 # CLASS_E_CLASSNOTAVAILABLE
try:
- ctcom = __import__("ctypes.com.server", globals(), locals(), ['*'])
+ ccom = __import__("comtypes.server.inprocserver", globals(), locals(), ['*'])
except ImportError:
- pass
+ return -2147221231 # CLASS_E_CLASSNOTAVAILABLE
else:
- result = ctcom.DllGetClassObject(rclsid, riid, ppv)
-
- if result == -2147221231: # CLASS_E_CLASSNOTAVAILABLE
- try:
- ccom = __import__("comtypes.server", globals(), locals(), ['*'])
- except ImportError:
- pass
- else:
- result = ccom.DllGetClassObject(rclsid, riid, ppv)
-
- return result
+ return ccom.DllGetClassObject(rclsid, riid, ppv)
def DllCanUnloadNow():
- # First ask ctypes.com.server than comtypes.server if we can unload or not.
- # trick py2exe by doing dynamic imports
- result = 0 # S_OK
- try:
- ctcom = __import__("ctypes.com.server", globals(), locals(), ['*'])
- except ImportError:
- pass
- else:
- result = ctcom.DllCanUnloadNow()
- if result != 0: # != S_OK
- return result
-
try:
- ccom = __import__("comtypes.server", globals(), locals(), ['*'])
+ ccom = __import__("comtypes.server.inprocserver", globals(), locals(), ['*'])
except ImportError:
- return result
- try:
- return ccom.DllCanUnloadNow()
- except AttributeError:
- pass
- return result
+ return 0 # S_OK
+ return ccom.DllCanUnloadNow()
from ctypes._endian import BigEndianStructure, LittleEndianStructure
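CDLL.__getattr__ now caches the foreign function on the instance, so later attribute lookups bypass __getattr__ entirely, while indexing always constructs a fresh object. A rough usage sketch (the library name is an assumption; real code would resolve it with ctypes.util.find_library):

    from ctypes import CDLL

    libc = CDLL("libc.so.6")      # assumed Linux soname
    f1 = libc.strlen              # first access: __getattr__ builds and caches it
    f2 = libc.strlen              # second access: plain attribute hit, same object
    print f1 is f2                # -> True
    print libc["strlen"] is f1    # -> False, __getitem__ always makes a new one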
diff --git a/Lib/ctypes/_endian.py b/Lib/ctypes/_endian.py
index 5818ae1..6de0d47 100644
--- a/Lib/ctypes/_endian.py
+++ b/Lib/ctypes/_endian.py
@@ -1,3 +1,6 @@
+######################################################################
+# This file should be kept compatible with Python 2.3, see PEP 291. #
+######################################################################
import sys
from ctypes import *
diff --git a/Lib/ctypes/macholib/__init__.py b/Lib/ctypes/macholib/__init__.py
index 5621def..36149d2 100644
--- a/Lib/ctypes/macholib/__init__.py
+++ b/Lib/ctypes/macholib/__init__.py
@@ -1,3 +1,6 @@
+######################################################################
+# This file should be kept compatible with Python 2.3, see PEP 291. #
+######################################################################
"""
Enough Mach-O to make your head spin.
diff --git a/Lib/ctypes/macholib/dyld.py b/Lib/ctypes/macholib/dyld.py
index a336fd0..14e2139 100644
--- a/Lib/ctypes/macholib/dyld.py
+++ b/Lib/ctypes/macholib/dyld.py
@@ -1,3 +1,6 @@
+######################################################################
+# This file should be kept compatible with Python 2.3, see PEP 291. #
+######################################################################
"""
dyld emulation
"""
diff --git a/Lib/ctypes/macholib/dylib.py b/Lib/ctypes/macholib/dylib.py
index aa10750..ea3dd38 100644
--- a/Lib/ctypes/macholib/dylib.py
+++ b/Lib/ctypes/macholib/dylib.py
@@ -1,3 +1,6 @@
+######################################################################
+# This file should be kept compatible with Python 2.3, see PEP 291. #
+######################################################################
"""
Generic dylib path manipulation
"""
diff --git a/Lib/ctypes/macholib/framework.py b/Lib/ctypes/macholib/framework.py
index ad6ed55..dd7fb2f 100644
--- a/Lib/ctypes/macholib/framework.py
+++ b/Lib/ctypes/macholib/framework.py
@@ -1,3 +1,6 @@
+######################################################################
+# This file should be kept compatible with Python 2.3, see PEP 291. #
+######################################################################
"""
Generic framework path manipulation
"""
diff --git a/Lib/ctypes/test/test_anon.py b/Lib/ctypes/test/test_anon.py
new file mode 100644
index 0000000..99e02cb
--- /dev/null
+++ b/Lib/ctypes/test/test_anon.py
@@ -0,0 +1,60 @@
+import unittest
+from ctypes import *
+
+class AnonTest(unittest.TestCase):
+
+ def test_anon(self):
+ class ANON(Union):
+ _fields_ = [("a", c_int),
+ ("b", c_int)]
+
+ class Y(Structure):
+ _fields_ = [("x", c_int),
+ ("_", ANON),
+ ("y", c_int)]
+ _anonymous_ = ["_"]
+
+ self.failUnlessEqual(Y.a.offset, sizeof(c_int))
+ self.failUnlessEqual(Y.b.offset, sizeof(c_int))
+
+ self.failUnlessEqual(ANON.a.offset, 0)
+ self.failUnlessEqual(ANON.b.offset, 0)
+
+ def test_anon_nonseq(self):
+ # TypeError: _anonymous_ must be a sequence
+ self.failUnlessRaises(TypeError,
+ lambda: type(Structure)("Name",
+ (Structure,),
+ {"_fields_": [], "_anonymous_": 42}))
+
+ def test_anon_nonmember(self):
+ # AttributeError: type object 'Name' has no attribute 'x'
+ self.failUnlessRaises(AttributeError,
+ lambda: type(Structure)("Name",
+ (Structure,),
+ {"_fields_": [],
+ "_anonymous_": ["x"]}))
+
+ def test_nested(self):
+ class ANON_S(Structure):
+ _fields_ = [("a", c_int)]
+
+ class ANON_U(Union):
+ _fields_ = [("_", ANON_S),
+ ("b", c_int)]
+ _anonymous_ = ["_"]
+
+ class Y(Structure):
+ _fields_ = [("x", c_int),
+ ("_", ANON_U),
+ ("y", c_int)]
+ _anonymous_ = ["_"]
+
+ self.failUnlessEqual(Y.x.offset, 0)
+ self.failUnlessEqual(Y.a.offset, sizeof(c_int))
+ self.failUnlessEqual(Y.b.offset, sizeof(c_int))
+ self.failUnlessEqual(Y._.offset, sizeof(c_int))
+ self.failUnlessEqual(Y.y.offset, sizeof(c_int) * 2)
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/Lib/ctypes/test/test_cast.py b/Lib/ctypes/test/test_cast.py
index 821ce3f..09e928f 100644
--- a/Lib/ctypes/test/test_cast.py
+++ b/Lib/ctypes/test/test_cast.py
@@ -30,17 +30,32 @@ class Test(unittest.TestCase):
ptr = cast(address, POINTER(c_int))
self.failUnlessEqual([ptr[i] for i in range(3)], [42, 17, 2])
-
- def test_ptr2array(self):
- array = (c_int * 3)(42, 17, 2)
-
- from sys import getrefcount
-
- before = getrefcount(array)
- ptr = cast(array, POINTER(c_int))
- self.failUnlessEqual(getrefcount(array), before + 1)
- del ptr
- self.failUnlessEqual(getrefcount(array), before)
+ def test_p2a_objects(self):
+ array = (c_char_p * 5)()
+ self.failUnlessEqual(array._objects, None)
+ array[0] = "foo bar"
+ self.failUnlessEqual(array._objects, {'0': "foo bar"})
+
+ p = cast(array, POINTER(c_char_p))
+ # array and p share a common _objects attribute
+ self.failUnless(p._objects is array._objects)
+ self.failUnlessEqual(array._objects, {'0': "foo bar", id(array): array})
+ p[0] = "spam spam"
+ self.failUnlessEqual(p._objects, {'0': "spam spam", id(array): array})
+ self.failUnless(array._objects is p._objects)
+ p[1] = "foo bar"
+ self.failUnlessEqual(p._objects, {'1': 'foo bar', '0': "spam spam", id(array): array})
+ self.failUnless(array._objects is p._objects)
+
+ def test_other(self):
+ p = cast((c_int * 4)(1, 2, 3, 4), POINTER(c_int))
+ self.failUnlessEqual(p[:4], [1,2, 3, 4])
+ c_int()
+ self.failUnlessEqual(p[:4], [1, 2, 3, 4])
+ p[2] = 96
+ self.failUnlessEqual(p[:4], [1, 2, 96, 4])
+ c_int()
+ self.failUnlessEqual(p[:4], [1, 2, 96, 4])
if __name__ == "__main__":
unittest.main()
diff --git a/Lib/ctypes/test/test_keeprefs.py b/Lib/ctypes/test/test_keeprefs.py
index 7318f29..80b6ca2 100644
--- a/Lib/ctypes/test/test_keeprefs.py
+++ b/Lib/ctypes/test/test_keeprefs.py
@@ -61,6 +61,8 @@ class StructureTestCase(unittest.TestCase):
r.ul.x = 22
r.ul.y = 44
self.assertEquals(r._objects, {'0': {}})
+ r.lr = POINT()
+ self.assertEquals(r._objects, {'0': {}, '1': {}})
class ArrayTestCase(unittest.TestCase):
def test_cint_array(self):
@@ -86,9 +88,10 @@ class ArrayTestCase(unittest.TestCase):
self.assertEquals(x._objects, {'1': {}})
class PointerTestCase(unittest.TestCase):
- def X_test_p_cint(self):
- x = pointer(c_int(42))
- print x._objects
+ def test_p_cint(self):
+ i = c_int(42)
+ x = pointer(i)
+ self.failUnlessEqual(x._objects, {'1': i})
class DeletePointerTestCase(unittest.TestCase):
def X_test(self):
diff --git a/Lib/ctypes/test/test_loading.py b/Lib/ctypes/test/test_loading.py
index 45585ae..28c83fd4 100644
--- a/Lib/ctypes/test/test_loading.py
+++ b/Lib/ctypes/test/test_loading.py
@@ -9,18 +9,10 @@ if os.name == "nt":
libc_name = "msvcrt"
elif os.name == "ce":
libc_name = "coredll"
-elif sys.platform == "darwin":
- libc_name = "libc.dylib"
elif sys.platform == "cygwin":
libc_name = "cygwin1.dll"
else:
- for line in os.popen("ldd %s" % sys.executable):
- if "libc.so" in line:
- if sys.platform == "openbsd3":
- libc_name = line.split()[4]
- else:
- libc_name = line.split()[2]
- break
+ libc_name = find_library("c")
if is_resource_enabled("printing"):
print "libc_name is", libc_name
diff --git a/Lib/ctypes/test/test_objects.py b/Lib/ctypes/test/test_objects.py
new file mode 100644
index 0000000..4d921d2
--- /dev/null
+++ b/Lib/ctypes/test/test_objects.py
@@ -0,0 +1,70 @@
+r'''
+This tests the '_objects' attribute of ctypes instances. '_objects'
+holds references to objects that must be kept alive as long as the
+ctypes instance, to make sure that the memory buffer is valid.
+
+WARNING: The '_objects' attribute is exposed ONLY for debugging ctypes itself,
+it MUST NEVER BE MODIFIED!
+
+'_objects' is initialized to a dictionary on first use, before that it
+is None.
+
+Here is an array of string pointers:
+
+>>> from ctypes import *
+>>> array = (c_char_p * 5)()
+>>> print array._objects
+None
+>>>
+
+The memory block stores pointers to strings, and the strings itself
+assigned from Python must be kept.
+
+>>> array[4] = 'foo bar'
+>>> array._objects
+{'4': 'foo bar'}
+>>> array[4]
+'foo bar'
+>>>
+
+It gets more complicated when the ctypes instance itself is contained
+in a 'base' object.
+
+>>> class X(Structure):
+... _fields_ = [("x", c_int), ("y", c_int), ("array", c_char_p * 5)]
+...
+>>> x = X()
+>>> print x._objects
+None
+>>>
+
+The'array' attribute of the 'x' object shares part of the memory buffer
+of 'x' ('_b_base_' is either None, or the root object owning the memory block):
+
+>>> print x.array._b_base_ # doctest: +ELLIPSIS
+<ctypes.test.test_objects.X object at 0x...>
+>>>
+
+>>> x.array[0] = 'spam spam spam'
+>>> x._objects
+{'0:2': 'spam spam spam'}
+>>> x.array._b_base_._objects
+{'0:2': 'spam spam spam'}
+>>>
+
+'''
+
+import unittest, doctest, sys
+
+import ctypes.test.test_objects
+
+class TestCase(unittest.TestCase):
+ if sys.hexversion > 0x02040000:
+ # Python 2.3 has no ELLIPSIS flag, so we don't test with this
+ # version:
+ def test(self):
+ doctest.testmod(ctypes.test.test_objects)
+
+if __name__ == '__main__':
+ if sys.hexversion > 0x02040000:
+ doctest.testmod(ctypes.test.test_objects)
diff --git a/Lib/ctypes/test/test_parameters.py b/Lib/ctypes/test/test_parameters.py
index 9537400..1b7f0dc 100644
--- a/Lib/ctypes/test/test_parameters.py
+++ b/Lib/ctypes/test/test_parameters.py
@@ -147,6 +147,41 @@ class SimpleTypesTestCase(unittest.TestCase):
## def test_performance(self):
## check_perf()
+ def test_noctypes_argtype(self):
+ import _ctypes_test
+ from ctypes import CDLL, c_void_p, ArgumentError
+
+ func = CDLL(_ctypes_test.__file__)._testfunc_p_p
+ func.restype = c_void_p
+ # TypeError: has no from_param method
+ self.assertRaises(TypeError, setattr, func, "argtypes", (object,))
+
+ class Adapter(object):
+ def from_param(cls, obj):
+ return None
+
+ func.argtypes = (Adapter(),)
+ self.failUnlessEqual(func(None), None)
+ self.failUnlessEqual(func(object()), None)
+
+ class Adapter(object):
+ def from_param(cls, obj):
+ return obj
+
+ func.argtypes = (Adapter(),)
+ # don't know how to convert parameter 1
+ self.assertRaises(ArgumentError, func, object())
+ self.failUnlessEqual(func(c_void_p(42)), 42)
+
+ class Adapter(object):
+ def from_param(cls, obj):
+ raise ValueError(obj)
+
+ func.argtypes = (Adapter(),)
+ # ArgumentError: argument 1: ValueError: 99
+ self.assertRaises(ArgumentError, func, 99)
+
+
################################################################
if __name__ == '__main__':
diff --git a/Lib/ctypes/test/test_pointers.py b/Lib/ctypes/test/test_pointers.py
index a7a2802..586655a 100644
--- a/Lib/ctypes/test/test_pointers.py
+++ b/Lib/ctypes/test/test_pointers.py
@@ -157,6 +157,23 @@ class PointersTestCase(unittest.TestCase):
q = pointer(y)
pp[0] = q # <==
self.failUnlessEqual(p[0], 6)
+ def test_c_void_p(self):
+ # http://sourceforge.net/tracker/?func=detail&aid=1518190&group_id=5470&atid=105470
+ if sizeof(c_void_p) == 4:
+ self.failUnlessEqual(c_void_p(0xFFFFFFFFL).value,
+ c_void_p(-1).value)
+ self.failUnlessEqual(c_void_p(0xFFFFFFFFFFFFFFFFL).value,
+ c_void_p(-1).value)
+ elif sizeof(c_void_p) == 8:
+ self.failUnlessEqual(c_void_p(0xFFFFFFFFL).value,
+ 0xFFFFFFFFL)
+ self.failUnlessEqual(c_void_p(0xFFFFFFFFFFFFFFFFL).value,
+ c_void_p(-1).value)
+ self.failUnlessEqual(c_void_p(0xFFFFFFFFFFFFFFFFFFFFFFFFL).value,
+ c_void_p(-1).value)
+
+ self.assertRaises(TypeError, c_void_p, 3.14) # make sure floats are NOT accepted
+ self.assertRaises(TypeError, c_void_p, object()) # nor other objects
if __name__ == '__main__':
unittest.main()
diff --git a/Lib/ctypes/test/test_slicing.py b/Lib/ctypes/test/test_slicing.py
index 08c811e..511c3d3 100644
--- a/Lib/ctypes/test/test_slicing.py
+++ b/Lib/ctypes/test/test_slicing.py
@@ -35,7 +35,7 @@ class SlicesTestCase(unittest.TestCase):
self.assertRaises(ValueError, setslice, a, 0, 5, range(32))
def test_char_ptr(self):
- s = "abcdefghijklmnopqrstuvwxyz\0"
+ s = "abcdefghijklmnopqrstuvwxyz"
dll = CDLL(_ctypes_test.__file__)
dll.my_strdup.restype = POINTER(c_char)
@@ -50,9 +50,31 @@ class SlicesTestCase(unittest.TestCase):
dll.my_strdup.restype = POINTER(c_byte)
res = dll.my_strdup(s)
- self.failUnlessEqual(res[:len(s)-1], range(ord("a"), ord("z")+1))
+ self.failUnlessEqual(res[:len(s)], range(ord("a"), ord("z")+1))
dll.my_free(res)
+ def test_char_ptr_with_free(self):
+ dll = CDLL(_ctypes_test.__file__)
+ s = "abcdefghijklmnopqrstuvwxyz"
+
+ class allocated_c_char_p(c_char_p):
+ pass
+
+ dll.my_free.restype = None
+ def errcheck(result, func, args):
+ retval = result.value
+ dll.my_free(result)
+ return retval
+
+ dll.my_strdup.restype = allocated_c_char_p
+ dll.my_strdup.errcheck = errcheck
+ try:
+ res = dll.my_strdup(s)
+ self.failUnlessEqual(res, s)
+ finally:
+ del dll.my_strdup.errcheck
+
+
def test_char_array(self):
s = "abcdefghijklmnopqrstuvwxyz\0"
diff --git a/Lib/ctypes/test/test_structures.py b/Lib/ctypes/test/test_structures.py
index 49f064b..8a4531d 100644
--- a/Lib/ctypes/test/test_structures.py
+++ b/Lib/ctypes/test/test_structures.py
@@ -138,8 +138,8 @@ class StructureTestCase(unittest.TestCase):
self.failUnlessEqual(X.y.size, sizeof(c_char))
# readonly
- self.assertRaises(AttributeError, setattr, X.x, "offset", 92)
- self.assertRaises(AttributeError, setattr, X.x, "size", 92)
+ self.assertRaises((TypeError, AttributeError), setattr, X.x, "offset", 92)
+ self.assertRaises((TypeError, AttributeError), setattr, X.x, "size", 92)
class X(Union):
_fields_ = [("x", c_int),
@@ -152,8 +152,8 @@ class StructureTestCase(unittest.TestCase):
self.failUnlessEqual(X.y.size, sizeof(c_char))
# readonly
- self.assertRaises(AttributeError, setattr, X.x, "offset", 92)
- self.assertRaises(AttributeError, setattr, X.x, "size", 92)
+ self.assertRaises((TypeError, AttributeError), setattr, X.x, "offset", 92)
+ self.assertRaises((TypeError, AttributeError), setattr, X.x, "size", 92)
# XXX Should we check nested data types also?
# offset is always relative to the class...
@@ -298,7 +298,7 @@ class StructureTestCase(unittest.TestCase):
"expected string or Unicode object, int found")
else:
self.failUnlessEqual(msg,
- "(Phone) TypeError: "
+ "(Phone) exceptions.TypeError: "
"expected string or Unicode object, int found")
cls, msg = self.get_except(Person, "Someone", ("a", "b", "c"))
@@ -307,7 +307,7 @@ class StructureTestCase(unittest.TestCase):
self.failUnlessEqual(msg,
"(Phone) <type 'exceptions.ValueError'>: too many initializers")
else:
- self.failUnlessEqual(msg, "(Phone) ValueError: too many initializers")
+ self.failUnlessEqual(msg, "(Phone) exceptions.ValueError: too many initializers")
def get_except(self, func, *args):
@@ -371,5 +371,15 @@ class PointerMemberTestCase(unittest.TestCase):
items = [s.array[i] for i in range(3)]
self.failUnlessEqual(items, [1, 2, 3])
+ def test_none_to_pointer_fields(self):
+ class S(Structure):
+ _fields_ = [("x", c_int),
+ ("p", POINTER(c_int))]
+
+ s = S()
+ s.x = 12345678
+ s.p = None
+ self.failUnlessEqual(s.x, 12345678)
+
if __name__ == '__main__':
unittest.main()
diff --git a/Lib/ctypes/test/test_varsize_struct.py b/Lib/ctypes/test/test_varsize_struct.py
new file mode 100644
index 0000000..06d2323
--- /dev/null
+++ b/Lib/ctypes/test/test_varsize_struct.py
@@ -0,0 +1,50 @@
+from ctypes import *
+import unittest
+
+class VarSizeTest(unittest.TestCase):
+ def test_resize(self):
+ class X(Structure):
+ _fields_ = [("item", c_int),
+ ("array", c_int * 1)]
+
+ self.failUnlessEqual(sizeof(X), sizeof(c_int) * 2)
+ x = X()
+ x.item = 42
+ x.array[0] = 100
+ self.failUnlessEqual(sizeof(x), sizeof(c_int) * 2)
+
+ # make room for one additional item
+ new_size = sizeof(X) + sizeof(c_int) * 1
+ resize(x, new_size)
+ self.failUnlessEqual(sizeof(x), new_size)
+ self.failUnlessEqual((x.item, x.array[0]), (42, 100))
+
+ # make room for 10 additional items
+ new_size = sizeof(X) + sizeof(c_int) * 9
+ resize(x, new_size)
+ self.failUnlessEqual(sizeof(x), new_size)
+ self.failUnlessEqual((x.item, x.array[0]), (42, 100))
+
+ # make room for one additional item
+ new_size = sizeof(X) + sizeof(c_int) * 1
+ resize(x, new_size)
+ self.failUnlessEqual(sizeof(x), new_size)
+ self.failUnlessEqual((x.item, x.array[0]), (42, 100))
+
+ def test_array_invalid_length(self):
+ # cannot create arrays with non-positive size
+ self.failUnlessRaises(ValueError, lambda: c_int * -1)
+ self.failUnlessRaises(ValueError, lambda: c_int * -3)
+
+ def test_zerosized_array(self):
+ array = (c_int * 0)()
+ # accessing elements of zero-sized arrays raise IndexError
+ self.failUnlessRaises(IndexError, array.__setitem__, 0, None)
+ self.failUnlessRaises(IndexError, array.__getitem__, 0)
+ self.failUnlessRaises(IndexError, array.__setitem__, 1, None)
+ self.failUnlessRaises(IndexError, array.__getitem__, 1)
+ self.failUnlessRaises(IndexError, array.__setitem__, -1, None)
+ self.failUnlessRaises(IndexError, array.__getitem__, -1)
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/Lib/ctypes/test/test_win32.py b/Lib/ctypes/test/test_win32.py
index 8247d37..db530d3 100644
--- a/Lib/ctypes/test/test_win32.py
+++ b/Lib/ctypes/test/test_win32.py
@@ -1,6 +1,7 @@
# Windows specific tests
from ctypes import *
+from ctypes.test import is_resource_enabled
import unittest, sys
import _ctypes_test
@@ -30,15 +31,10 @@ if sys.platform == "win32":
# or wrong calling convention
self.assertRaises(ValueError, IsWindow, None)
- def test_SEH(self):
- # Call functions with invalid arguments, and make sure that access violations
- # are trapped and raise an exception.
- #
- # Normally, in a debug build of the _ctypes extension
- # module, exceptions are not trapped, so we can only run
- # this test in a release build.
- import sys
- if not hasattr(sys, "getobjects"):
+ if is_resource_enabled("SEH"):
+ def test_SEH(self):
+ # Call functions with invalid arguments, and make sure that access violations
+ # are trapped and raise an exception.
self.assertRaises(WindowsError, windll.kernel32.GetModuleHandleA, 32)
class Structures(unittest.TestCase):
diff --git a/Lib/ctypes/util.py b/Lib/ctypes/util.py
index d756c1c..2ee2968 100644
--- a/Lib/ctypes/util.py
+++ b/Lib/ctypes/util.py
@@ -1,5 +1,7 @@
+######################################################################
+# This file should be kept compatible with Python 2.3, see PEP 291. #
+######################################################################
import sys, os
-import ctypes
# find_library(name) returns the pathname of a library, or None.
if os.name == "nt":
@@ -41,14 +43,17 @@ if os.name == "posix" and sys.platform == "darwin":
elif os.name == "posix":
# Andreas Degert's find functions, using gcc, /sbin/ldconfig, objdump
- import re, tempfile
+ import re, tempfile, errno
def _findLib_gcc(name):
expr = '[^\(\)\s]*lib%s\.[^\(\)\s]*' % name
+ fdout, ccout = tempfile.mkstemp()
+ os.close(fdout)
cmd = 'if type gcc &>/dev/null; then CC=gcc; else CC=cc; fi;' \
- '$CC -Wl,-t -o /dev/null 2>&1 -l' + name
+ '$CC -Wl,-t -o ' + ccout + ' 2>&1 -l' + name
try:
fdout, outfile = tempfile.mkstemp()
+ os.close(fdout)
fd = os.popen(cmd)
trace = fd.read()
err = fd.close()
@@ -58,6 +63,11 @@ elif os.name == "posix":
except OSError, e:
if e.errno != errno.ENOENT:
raise
+ try:
+ os.unlink(ccout)
+ except OSError, e:
+ if e.errno != errno.ENOENT:
+ raise
res = re.search(expr, trace)
if not res:
return None
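_findLib_gcc is the POSIX back end for ctypes.util.find_library, which is also what test_loading.py above switches to instead of parsing ldd output. Typical usage, hedged with a None check because the result varies by platform:

    from ctypes import CDLL
    from ctypes.util import find_library

    libc_name = find_library("c")     # e.g. 'libc.so.6' on Linux, None if nothing is found
    if libc_name is not None:
        libc = CDLL(libc_name)
        print "loaded", libc_name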
diff --git a/Lib/ctypes/wintypes.py b/Lib/ctypes/wintypes.py
index 92b79d2..9768233 100644
--- a/Lib/ctypes/wintypes.py
+++ b/Lib/ctypes/wintypes.py
@@ -1,60 +1,117 @@
-# XXX This module needs cleanup.
+######################################################################
+# This file should be kept compatible with Python 2.3, see PEP 291. #
+######################################################################
+# The most useful windows datatypes
from ctypes import *
-DWORD = c_ulong
-WORD = c_ushort
BYTE = c_byte
+WORD = c_ushort
+DWORD = c_ulong
+
+WCHAR = c_wchar
+UINT = c_uint
+
+DOUBLE = c_double
+
+BOOLEAN = BYTE
+BOOL = c_long
+
+from ctypes import _SimpleCData
+class VARIANT_BOOL(_SimpleCData):
+ _type_ = "v"
+ def __repr__(self):
+ return "%s(%r)" % (self.__class__.__name__, self.value)
ULONG = c_ulong
LONG = c_long
-LARGE_INTEGER = c_longlong
-ULARGE_INTEGER = c_ulonglong
+# in the windows header files, these are structures.
+_LARGE_INTEGER = LARGE_INTEGER = c_longlong
+_ULARGE_INTEGER = ULARGE_INTEGER = c_ulonglong
+LPCOLESTR = LPOLESTR = OLESTR = c_wchar_p
+LPCWSTR = LPWSTR = c_wchar_p
+LPCSTR = LPSTR = c_char_p
+WPARAM = c_uint
+LPARAM = c_long
+
+ATOM = WORD
+LANGID = WORD
+
+COLORREF = DWORD
+LGRPID = DWORD
+LCTYPE = DWORD
+
+LCID = DWORD
+
+################################################################
+# HANDLE types
HANDLE = c_ulong # in the header files: void *
-HWND = HANDLE
+HACCEL = HANDLE
+HBITMAP = HANDLE
+HBRUSH = HANDLE
+HCOLORSPACE = HANDLE
HDC = HANDLE
-HMODULE = HANDLE
+HDESK = HANDLE
+HDWP = HANDLE
+HENHMETAFILE = HANDLE
+HFONT = HANDLE
+HGDIOBJ = HANDLE
+HGLOBAL = HANDLE
+HHOOK = HANDLE
+HICON = HANDLE
HINSTANCE = HANDLE
-HRGN = HANDLE
-HTASK = HANDLE
HKEY = HANDLE
-HPEN = HANDLE
-HGDIOBJ = HANDLE
+HKL = HANDLE
+HLOCAL = HANDLE
HMENU = HANDLE
+HMETAFILE = HANDLE
+HMODULE = HANDLE
+HMONITOR = HANDLE
+HPALETTE = HANDLE
+HPEN = HANDLE
+HRGN = HANDLE
+HRSRC = HANDLE
+HSTR = HANDLE
+HTASK = HANDLE
+HWINSTA = HANDLE
+HWND = HANDLE
+SC_HANDLE = HANDLE
+SERVICE_STATUS_HANDLE = HANDLE
-LCID = DWORD
-
-WPARAM = c_uint
-LPARAM = c_long
-
-BOOL = c_long
-VARIANT_BOOL = c_short
-
-LPCOLESTR = LPOLESTR = OLESTR = c_wchar_p
-LPCWSTR = LPWSTR = c_wchar_p
-
-LPCSTR = LPSTR = c_char_p
+################################################################
+# Some important structure definitions
class RECT(Structure):
_fields_ = [("left", c_long),
("top", c_long),
("right", c_long),
("bottom", c_long)]
-RECTL = RECT
+tagRECT = _RECTL = RECTL = RECT
+
+class _SMALL_RECT(Structure):
+ _fields_ = [('Left', c_short),
+ ('Top', c_short),
+ ('Right', c_short),
+ ('Bottom', c_short)]
+SMALL_RECT = _SMALL_RECT
+
+class _COORD(Structure):
+ _fields_ = [('X', c_short),
+ ('Y', c_short)]
class POINT(Structure):
_fields_ = [("x", c_long),
("y", c_long)]
-POINTL = POINT
+tagPOINT = _POINTL = POINTL = POINT
class SIZE(Structure):
_fields_ = [("cx", c_long),
("cy", c_long)]
-SIZEL = SIZE
+tagSIZE = SIZEL = SIZE
def RGB(red, green, blue):
return red + (green << 8) + (blue << 16)
@@ -62,6 +119,7 @@ def RGB(red, green, blue):
class FILETIME(Structure):
_fields_ = [("dwLowDateTime", DWORD),
("dwHighDateTime", DWORD)]
+_FILETIME = FILETIME
class MSG(Structure):
_fields_ = [("hWnd", HWND),
@@ -70,6 +128,7 @@ class MSG(Structure):
("lParam", LPARAM),
("time", DWORD),
("pt", POINT)]
+tagMSG = MSG
MAX_PATH = 260
class WIN32_FIND_DATAA(Structure):
@@ -95,3 +154,19 @@ class WIN32_FIND_DATAW(Structure):
("dwReserved1", DWORD),
("cFileName", c_wchar * MAX_PATH),
("cAlternameFileName", c_wchar * 14)]
+
+__all__ = ['ATOM', 'BOOL', 'BOOLEAN', 'BYTE', 'COLORREF', 'DOUBLE',
+ 'DWORD', 'FILETIME', 'HACCEL', 'HANDLE', 'HBITMAP', 'HBRUSH',
+ 'HCOLORSPACE', 'HDC', 'HDESK', 'HDWP', 'HENHMETAFILE', 'HFONT',
+ 'HGDIOBJ', 'HGLOBAL', 'HHOOK', 'HICON', 'HINSTANCE', 'HKEY',
+ 'HKL', 'HLOCAL', 'HMENU', 'HMETAFILE', 'HMODULE', 'HMONITOR',
+ 'HPALETTE', 'HPEN', 'HRGN', 'HRSRC', 'HSTR', 'HTASK', 'HWINSTA',
+ 'HWND', 'LANGID', 'LARGE_INTEGER', 'LCID', 'LCTYPE', 'LGRPID',
+ 'LONG', 'LPARAM', 'LPCOLESTR', 'LPCSTR', 'LPCWSTR', 'LPOLESTR',
+ 'LPSTR', 'LPWSTR', 'MAX_PATH', 'MSG', 'OLESTR', 'POINT',
+ 'POINTL', 'RECT', 'RECTL', 'RGB', 'SC_HANDLE',
+ 'SERVICE_STATUS_HANDLE', 'SIZE', 'SIZEL', 'SMALL_RECT', 'UINT',
+ 'ULARGE_INTEGER', 'ULONG', 'VARIANT_BOOL', 'WCHAR',
+ 'WIN32_FIND_DATAA', 'WIN32_FIND_DATAW', 'WORD', 'WPARAM', '_COORD',
+ '_FILETIME', '_LARGE_INTEGER', '_POINTL', '_RECTL', '_SMALL_RECT',
+ '_ULARGE_INTEGER', 'tagMSG', 'tagPOINT', 'tagRECT', 'tagSIZE']
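
The rewritten module is purely declarative: fundamental integer aliases, a long list of HANDLE subtypes, a few ctypes Structures, and an explicit __all__. Typical use is just pulling names from it when declaring Windows API calls; a small hedged sketch (Windows only, and GetClientRect is merely an arbitrary example API):

from ctypes import windll, byref
from ctypes.wintypes import HWND, RECT, RGB

def client_rect(hwnd):
    # Let user32.GetClientRect fill in a RECT structure for the window.
    rect = RECT()
    windll.user32.GetClientRect(HWND(hwnd), byref(rect))
    return rect.left, rect.top, rect.right, rect.bottom

# RGB packs the three channels into a COLORREF-style integer,
# red in the low byte:
assert RGB(0xFF, 0x00, 0x00) == 0x0000FF
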
diff --git a/Lib/difflib.py b/Lib/difflib.py
index 55f69ba..3e28b18 100644
--- a/Lib/difflib.py
+++ b/Lib/difflib.py
@@ -86,8 +86,7 @@ class SequenceMatcher:
>>> for block in s.get_matching_blocks():
... print "a[%d] and b[%d] match for %d elements" % block
a[0] and b[0] match for 8 elements
- a[8] and b[17] match for 6 elements
- a[14] and b[23] match for 15 elements
+ a[8] and b[17] match for 21 elements
a[29] and b[38] match for 0 elements
Note that the last tuple returned by .get_matching_blocks() is always a
@@ -101,8 +100,7 @@ class SequenceMatcher:
... print "%6s a[%d:%d] b[%d:%d]" % opcode
equal a[0:8] b[0:8]
insert a[8:8] b[8:17]
- equal a[8:14] b[17:23]
- equal a[14:29] b[23:38]
+ equal a[8:29] b[17:38]
See the Differ class for a fancy human-friendly file differencer, which
uses SequenceMatcher both to compare sequences of lines, and to compare
@@ -461,7 +459,11 @@ class SequenceMatcher:
Each triple is of the form (i, j, n), and means that
a[i:i+n] == b[j:j+n]. The triples are monotonically increasing in
- i and in j.
+ i and in j. New in Python 2.5, it's also guaranteed that if
+ (i, j, n) and (i', j', n') are adjacent triples in the list, and
+ the second is not the last triple in the list, then i+n != i' or
+ j+n != j'. IOW, adjacent triples never describe adjacent equal
+ blocks.
The last triple is a dummy, (len(a), len(b), 0), and is the only
triple with n==0.
@@ -475,28 +477,52 @@ class SequenceMatcher:
return self.matching_blocks
la, lb = len(self.a), len(self.b)
- indexed_blocks = []
+ # This is most naturally expressed as a recursive algorithm, but
+ # at least one user bumped into extreme use cases that exceeded
+ # the recursion limit on their box. So, now we maintain a list
+ # (`queue`) of blocks we still need to look at, and append partial

+ # results to `matching_blocks` in a loop; the matches are sorted
+ # at the end.
queue = [(0, la, 0, lb)]
+ matching_blocks = []
while queue:
- # builds list of matching blocks covering a[alo:ahi] and
- # b[blo:bhi], appending them in increasing order to answer
alo, ahi, blo, bhi = queue.pop()
-
+ i, j, k = x = self.find_longest_match(alo, ahi, blo, bhi)
# a[alo:i] vs b[blo:j] unknown
# a[i:i+k] same as b[j:j+k]
# a[i+k:ahi] vs b[j+k:bhi] unknown
- i, j, k = x = self.find_longest_match(alo, ahi, blo, bhi)
-
- if k:
+ if k: # if k is 0, there was no matching block
+ matching_blocks.append(x)
if alo < i and blo < j:
queue.append((alo, i, blo, j))
- indexed_blocks.append((i, x))
if i+k < ahi and j+k < bhi:
queue.append((i+k, ahi, j+k, bhi))
- indexed_blocks.sort()
-
- self.matching_blocks = [elem[1] for elem in indexed_blocks]
- self.matching_blocks.append( (la, lb, 0) )
+ matching_blocks.sort()
+
+ # It's possible that we have adjacent equal blocks in the
+ # matching_blocks list now. Starting with 2.5, this code was added
+ # to collapse them.
+ i1 = j1 = k1 = 0
+ non_adjacent = []
+ for i2, j2, k2 in matching_blocks:
+ # Is this block adjacent to i1, j1, k1?
+ if i1 + k1 == i2 and j1 + k1 == j2:
+ # Yes, so collapse them -- this just increases the length of
+ # the first block by the length of the second, and the first
+ # block so lengthened remains the block to compare against.
+ k1 += k2
+ else:
+ # Not adjacent. Remember the first block (k1==0 means it's
+ # the dummy we started with), and make the second block the
+ # new block to compare against.
+ if k1:
+ non_adjacent.append((i1, j1, k1))
+ i1, j1, k1 = i2, j2, k2
+ if k1:
+ non_adjacent.append((i1, j1, k1))
+
+ non_adjacent.append( (la, lb, 0) )
+ self.matching_blocks = non_adjacent
return self.matching_blocks
def get_opcodes(self):
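
The visible effect of the rewrite is exactly what the updated doctest above pins down: the queue-based version can emit abutting matches, so a collapsing pass merges them before the list is returned. A quick check using the same inputs as the class docstring:

from difflib import SequenceMatcher

a = "private Thread currentThread;"
b = "private volatile Thread currentThread;"
s = SequenceMatcher(lambda x: x == " ", a, b)
# 2.4 could report the middle run as two abutting pieces, a[8:14]/b[17:23]
# and a[14:29]/b[23:38]; with the collapsing step they come back merged,
# followed by the (len(a), len(b), 0) sentinel.
assert s.get_matching_blocks() == [(0, 0, 8), (8, 17, 21), (29, 38, 0)]
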
@@ -1422,8 +1448,7 @@ def _mdiff(fromlines, tolines, context=None, linejunk=None,
num_blanks_pending -= 1
yield _make_line(lines,'-',0), None, True
continue
- elif s.startswith('--?+') or s.startswith('--+') or \
- s.startswith('- '):
+ elif s.startswith(('--?+', '--+', '- ')):
# in delete block and see an intraline change or unchanged line
# coming: yield the delete line and then blanks
from_line,to_line = _make_line(lines,'-',0), None
@@ -1447,7 +1472,7 @@ def _mdiff(fromlines, tolines, context=None, linejunk=None,
num_blanks_pending += 1
yield None, _make_line(lines,'+',1), True
continue
- elif s.startswith('+ ') or s.startswith('+-'):
+ elif s.startswith(('+ ', '+-')):
# will be leaving an add block: yield blanks then add line
from_line, to_line = None, _make_line(lines,'+',1)
num_blanks_to_yield,num_blanks_pending = num_blanks_pending+1,0
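
Both _mdiff hunks lean on str.startswith accepting a tuple of prefixes, which is new in Python 2.5 and returns True if any of the candidates match:

s = '--?+ changed line'
assert s.startswith(('--?+', '--+', '- '))    # any prefix matches
assert not s.startswith(('+ ', '+-'))
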
diff --git a/Lib/distutils/__init__.py b/Lib/distutils/__init__.py
index a1dbb4b..9c60e54 100644
--- a/Lib/distutils/__init__.py
+++ b/Lib/distutils/__init__.py
@@ -12,4 +12,6 @@ used from a setup script as
__revision__ = "$Id$"
-__version__ = "2.4.0"
+import sys
+__version__ = "%d.%d.%d" % sys.version_info[:3]
+del sys
diff --git a/Lib/distutils/command/bdist_rpm.py b/Lib/distutils/command/bdist_rpm.py
index 738e3f7..5b09965 100644
--- a/Lib/distutils/command/bdist_rpm.py
+++ b/Lib/distutils/command/bdist_rpm.py
@@ -467,7 +467,8 @@ class bdist_rpm (Command):
# rpm scripts
# figure out default build script
- def_build = "%s setup.py build" % self.python
+ def_setup_call = "%s %s" % (self.python,os.path.basename(sys.argv[0]))
+ def_build = "%s build" % def_setup_call
if self.use_rpm_opt_flags:
def_build = 'env CFLAGS="$RPM_OPT_FLAGS" ' + def_build
@@ -481,9 +482,9 @@ class bdist_rpm (Command):
('prep', 'prep_script', "%setup"),
('build', 'build_script', def_build),
('install', 'install_script',
- ("%s setup.py install "
+ ("%s install "
"--root=$RPM_BUILD_ROOT "
- "--record=INSTALLED_FILES") % self.python),
+ "--record=INSTALLED_FILES") % def_setup_call),
('clean', 'clean_script', "rm -rf $RPM_BUILD_ROOT"),
('verifyscript', 'verify_script', None),
('pre', 'pre_install', None),
diff --git a/Lib/distutils/command/upload.py b/Lib/distutils/command/upload.py
index 4a9ed39..67ba080 100644
--- a/Lib/distutils/command/upload.py
+++ b/Lib/distutils/command/upload.py
@@ -185,7 +185,7 @@ class upload(Command):
http.endheaders()
http.send(body)
except socket.error, e:
- self.announce(e.msg, log.ERROR)
+ self.announce(str(e), log.ERROR)
return
r = http.getresponse()
diff --git a/Lib/distutils/msvccompiler.py b/Lib/distutils/msvccompiler.py
index d24d0ac..0d72837 100644
--- a/Lib/distutils/msvccompiler.py
+++ b/Lib/distutils/msvccompiler.py
@@ -131,8 +131,10 @@ class MacroExpander:
self.set_macro("FrameworkSDKDir", net, "sdkinstallroot")
except KeyError, exc: #
raise DistutilsPlatformError, \
- ("The .NET Framework SDK needs to be installed before "
- "building extensions for Python.")
+ ("""Python was built with Visual Studio 2003;
+extensions must be built with a compiler that can generate compatible binaries.
+Visual Studio 2003 was not found on this system. If you have Cygwin installed,
+you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""")
p = r"Software\Microsoft\NET Framework Setup\Product"
for base in HKEYS:
@@ -237,7 +239,7 @@ class MSVCCompiler (CCompiler) :
def initialize(self):
self.__paths = []
- if os.environ.has_key("MSSdk") and self.find_exe("cl.exe"):
+ if os.environ.has_key("DISTUTILS_USE_SDK") and os.environ.has_key("MSSdk") and self.find_exe("cl.exe"):
# Assume that the SDK set up everything alright; don't try to be
# smarter
self.cc = "cl.exe"
diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py
index e1397a1..76fe256 100644
--- a/Lib/distutils/sysconfig.py
+++ b/Lib/distutils/sysconfig.py
@@ -512,7 +512,7 @@ def get_config_vars(*args):
for key in ('LDFLAGS', 'BASECFLAGS'):
flags = _config_vars[key]
flags = re.sub('-arch\s+\w+\s', ' ', flags)
- flags = re.sub('-isysroot [^ \t]* ', ' ', flags)
+ flags = re.sub('-isysroot [^ \t]*', ' ', flags)
_config_vars[key] = flags
if args:
diff --git a/Lib/distutils/unixccompiler.py b/Lib/distutils/unixccompiler.py
index 324819d..6cd14f7 100644
--- a/Lib/distutils/unixccompiler.py
+++ b/Lib/distutils/unixccompiler.py
@@ -78,7 +78,7 @@ def _darwin_compiler_fixup(compiler_so, cc_args):
try:
index = compiler_so.index('-isysroot')
# Strip this argument and the next one:
- del compiler_so[index:index+1]
+ del compiler_so[index:index+2]
except ValueError:
pass
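
The one-character slice fix matters because -isysroot takes a separate path argument: the old bound removed only the flag and left the orphaned SDK path on the command line. With an illustrative flags list:

compiler_so = ['gcc', '-isysroot', '/Developer/SDKs/MacOSX10.4u.sdk',
               '-fno-common']
index = compiler_so.index('-isysroot')
del compiler_so[index:index+2]    # drop the flag and its path argument
assert compiler_so == ['gcc', '-fno-common']
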
diff --git a/Lib/doctest.py b/Lib/doctest.py
index 47b3aae..fe734b3 100644
--- a/Lib/doctest.py
+++ b/Lib/doctest.py
@@ -95,7 +95,7 @@ __all__ = [
import __future__
-import sys, traceback, inspect, linecache, os, re, types
+import sys, traceback, inspect, linecache, os, re
import unittest, difflib, pdb, tempfile
import warnings
from StringIO import StringIO
@@ -821,6 +821,11 @@ class DocTestFinder:
# Recursively explore `obj`, extracting DocTests.
tests = []
self._find(tests, obj, name, module, source_lines, globs, {})
+ # Sort the tests by alpha order of names, for consistency in
+ # verbose-mode output. This was a feature of doctest in Pythons
+ # <= 2.3 that got lost by accident in 2.4. It was repaired in
+ # 2.4.4 and 2.5.
+ tests.sort()
return tests
def _from_module(self, module, object):
diff --git a/Lib/dummy_thread.py b/Lib/dummy_thread.py
index 21fd03f..a72c927 100644
--- a/Lib/dummy_thread.py
+++ b/Lib/dummy_thread.py
@@ -20,6 +20,7 @@ __all__ = ['error', 'start_new_thread', 'exit', 'get_ident', 'allocate_lock',
'interrupt_main', 'LockType']
import traceback as _traceback
+import warnings
class error(Exception):
"""Dummy implementation of thread.error."""
@@ -75,6 +76,12 @@ def allocate_lock():
"""Dummy implementation of thread.allocate_lock()."""
return LockType()
+def stack_size(size=None):
+ """Dummy implementation of thread.stack_size()."""
+ if size is not None:
+ raise error("setting thread stack size not supported")
+ return 0
+
class LockType(object):
"""Class implementing dummy implementation of thread.LockType.
diff --git a/Lib/email/__init__.py b/Lib/email/__init__.py
index f01260f..8d230fd 100644
--- a/Lib/email/__init__.py
+++ b/Lib/email/__init__.py
@@ -4,7 +4,7 @@
"""A package for parsing, handling, and generating email messages."""
-__version__ = '4.0a2'
+__version__ = '4.0.1'
__all__ = [
# Old names
diff --git a/Lib/email/message.py b/Lib/email/message.py
index 50d90b4..79c5c4c 100644
--- a/Lib/email/message.py
+++ b/Lib/email/message.py
@@ -747,7 +747,18 @@ class Message:
if isinstance(charset, tuple):
# RFC 2231 encoded, so decode it, and it better end up as ascii.
pcharset = charset[0] or 'us-ascii'
- charset = unicode(charset[2], pcharset).encode('us-ascii')
+ try:
+ # LookupError will be raised if the charset isn't known to
+ # Python. UnicodeError will be raised if the encoded text
+ # contains a character not in the charset.
+ charset = unicode(charset[2], pcharset).encode('us-ascii')
+ except (LookupError, UnicodeError):
+ charset = charset[2]
+ # charset characters must be in us-ascii range
+ try:
+ charset = unicode(charset, 'us-ascii').encode('us-ascii')
+ except UnicodeError:
+ return failobj
# RFC 2046, $4.1.2 says charsets are not case sensitive
return charset.lower()
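
Net effect: an RFC 2231 charset parameter that cannot be decoded to ASCII now yields the failobj (None by default) instead of raising. A sketch mirroring the test added in test_email.py below:

import email

msg = email.message_from_string(
    "Content-Type: text/plain; charset*=bogus''utf-8%E2%80%9D\n\n")
# 'bogus' is not a known codec and the %-escapes decode to non-ASCII
# bytes, so the charset is rejected rather than propagating an error.
assert msg.get_content_charset() is None
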
diff --git a/Lib/email/test/test_email.py b/Lib/email/test/test_email.py
index a197a36..13801dc 100644
--- a/Lib/email/test/test_email.py
+++ b/Lib/email/test/test_email.py
@@ -3005,14 +3005,29 @@ Content-Type: text/html; NAME*0=file____C__DOCUMENTS_20AND_20SETTINGS_FABIEN_LOC
'''
msg = email.message_from_string(m)
- self.assertEqual(msg.get_param('NAME'),
- (None, None, 'file____C__DOCUMENTS_20AND_20SETTINGS_FABIEN_LOCAL_20SETTINGS_TEMP_nsmail.htm'))
+ param = msg.get_param('NAME')
+ self.failIf(isinstance(param, tuple))
+ self.assertEqual(
+ param,
+ 'file____C__DOCUMENTS_20AND_20SETTINGS_FABIEN_LOCAL_20SETTINGS_TEMP_nsmail.htm')
def test_rfc2231_no_language_or_charset_in_filename(self):
m = '''\
Content-Disposition: inline;
-\tfilename*0="This%20is%20even%20more%20";
-\tfilename*1="%2A%2A%2Afun%2A%2A%2A%20";
+\tfilename*0*="''This%20is%20even%20more%20";
+\tfilename*1*="%2A%2A%2Afun%2A%2A%2A%20";
+\tfilename*2="is it not.pdf"
+
+'''
+ msg = email.message_from_string(m)
+ self.assertEqual(msg.get_filename(),
+ 'This is even more ***fun*** is it not.pdf')
+
+ def test_rfc2231_no_language_or_charset_in_filename_encoded(self):
+ m = '''\
+Content-Disposition: inline;
+\tfilename*0*="''This%20is%20even%20more%20";
+\tfilename*1*="%2A%2A%2Afun%2A%2A%2A%20";
\tfilename*2="is it not.pdf"
'''
@@ -3020,11 +3035,37 @@ Content-Disposition: inline;
self.assertEqual(msg.get_filename(),
'This is even more ***fun*** is it not.pdf')
+ def test_rfc2231_partly_encoded(self):
+ m = '''\
+Content-Disposition: inline;
+\tfilename*0="''This%20is%20even%20more%20";
+\tfilename*1*="%2A%2A%2Afun%2A%2A%2A%20";
+\tfilename*2="is it not.pdf"
+
+'''
+ msg = email.message_from_string(m)
+ self.assertEqual(
+ msg.get_filename(),
+ 'This%20is%20even%20more%20***fun*** is it not.pdf')
+
+ def test_rfc2231_partly_nonencoded(self):
+ m = '''\
+Content-Disposition: inline;
+\tfilename*0="This%20is%20even%20more%20";
+\tfilename*1="%2A%2A%2Afun%2A%2A%2A%20";
+\tfilename*2="is it not.pdf"
+
+'''
+ msg = email.message_from_string(m)
+ self.assertEqual(
+ msg.get_filename(),
+ 'This%20is%20even%20more%20%2A%2A%2Afun%2A%2A%2A%20is it not.pdf')
+
def test_rfc2231_no_language_or_charset_in_boundary(self):
m = '''\
Content-Type: multipart/alternative;
-\tboundary*0="This%20is%20even%20more%20";
-\tboundary*1="%2A%2A%2Afun%2A%2A%2A%20";
+\tboundary*0*="''This%20is%20even%20more%20";
+\tboundary*1*="%2A%2A%2Afun%2A%2A%2A%20";
\tboundary*2="is it not.pdf"
'''
@@ -3036,8 +3077,8 @@ Content-Type: multipart/alternative;
# This is a nonsensical charset value, but tests the code anyway
m = '''\
Content-Type: text/plain;
-\tcharset*0="This%20is%20even%20more%20";
-\tcharset*1="%2A%2A%2Afun%2A%2A%2A%20";
+\tcharset*0*="This%20is%20even%20more%20";
+\tcharset*1*="%2A%2A%2Afun%2A%2A%2A%20";
\tcharset*2="is it not.pdf"
'''
@@ -3045,15 +3086,145 @@ Content-Type: text/plain;
self.assertEqual(msg.get_content_charset(),
'this is even more ***fun*** is it not.pdf')
+ def test_rfc2231_bad_encoding_in_filename(self):
+ m = '''\
+Content-Disposition: inline;
+\tfilename*0*="bogus'xx'This%20is%20even%20more%20";
+\tfilename*1*="%2A%2A%2Afun%2A%2A%2A%20";
+\tfilename*2="is it not.pdf"
+
+'''
+ msg = email.message_from_string(m)
+ self.assertEqual(msg.get_filename(),
+ 'This is even more ***fun*** is it not.pdf')
+
+ def test_rfc2231_bad_encoding_in_charset(self):
+ m = """\
+Content-Type: text/plain; charset*=bogus''utf-8%E2%80%9D
+
+"""
+ msg = email.message_from_string(m)
+ # This should return None because non-ascii characters in the charset
+ # are not allowed.
+ self.assertEqual(msg.get_content_charset(), None)
+
+ def test_rfc2231_bad_character_in_charset(self):
+ m = """\
+Content-Type: text/plain; charset*=ascii''utf-8%E2%80%9D
+
+"""
+ msg = email.message_from_string(m)
+ # This should return None because non-ascii characters in the charset
+ # are not allowed.
+ self.assertEqual(msg.get_content_charset(), None)
+
+ def test_rfc2231_bad_character_in_filename(self):
+ m = '''\
+Content-Disposition: inline;
+\tfilename*0*="ascii'xx'This%20is%20even%20more%20";
+\tfilename*1*="%2A%2A%2Afun%2A%2A%2A%20";
+\tfilename*2*="is it not.pdf%E2"
+
+'''
+ msg = email.message_from_string(m)
+ self.assertEqual(msg.get_filename(),
+ u'This is even more ***fun*** is it not.pdf\ufffd')
+
def test_rfc2231_unknown_encoding(self):
m = """\
Content-Transfer-Encoding: 8bit
-Content-Disposition: inline; filename*0=X-UNKNOWN''myfile.txt
+Content-Disposition: inline; filename*=X-UNKNOWN''myfile.txt
"""
msg = email.message_from_string(m)
self.assertEqual(msg.get_filename(), 'myfile.txt')
+ def test_rfc2231_single_tick_in_filename_extended(self):
+ eq = self.assertEqual
+ m = """\
+Content-Type: application/x-foo;
+\tname*0*=\"Frank's\"; name*1*=\" Document\"
+
+"""
+ msg = email.message_from_string(m)
+ charset, language, s = msg.get_param('name')
+ eq(charset, None)
+ eq(language, None)
+ eq(s, "Frank's Document")
+
+ def test_rfc2231_single_tick_in_filename(self):
+ m = """\
+Content-Type: application/x-foo; name*0=\"Frank's\"; name*1=\" Document\"
+
+"""
+ msg = email.message_from_string(m)
+ param = msg.get_param('name')
+ self.failIf(isinstance(param, tuple))
+ self.assertEqual(param, "Frank's Document")
+
+ def test_rfc2231_tick_attack_extended(self):
+ eq = self.assertEqual
+ m = """\
+Content-Type: application/x-foo;
+\tname*0*=\"us-ascii'en-us'Frank's\"; name*1*=\" Document\"
+
+"""
+ msg = email.message_from_string(m)
+ charset, language, s = msg.get_param('name')
+ eq(charset, 'us-ascii')
+ eq(language, 'en-us')
+ eq(s, "Frank's Document")
+
+ def test_rfc2231_tick_attack(self):
+ m = """\
+Content-Type: application/x-foo;
+\tname*0=\"us-ascii'en-us'Frank's\"; name*1=\" Document\"
+
+"""
+ msg = email.message_from_string(m)
+ param = msg.get_param('name')
+ self.failIf(isinstance(param, tuple))
+ self.assertEqual(param, "us-ascii'en-us'Frank's Document")
+
+ def test_rfc2231_no_extended_values(self):
+ eq = self.assertEqual
+ m = """\
+Content-Type: application/x-foo; name=\"Frank's Document\"
+
+"""
+ msg = email.message_from_string(m)
+ eq(msg.get_param('name'), "Frank's Document")
+
+ def test_rfc2231_encoded_then_unencoded_segments(self):
+ eq = self.assertEqual
+ m = """\
+Content-Type: application/x-foo;
+\tname*0*=\"us-ascii'en-us'My\";
+\tname*1=\" Document\";
+\tname*2*=\" For You\"
+
+"""
+ msg = email.message_from_string(m)
+ charset, language, s = msg.get_param('name')
+ eq(charset, 'us-ascii')
+ eq(language, 'en-us')
+ eq(s, 'My Document For You')
+
+ def test_rfc2231_unencoded_then_encoded_segments(self):
+ eq = self.assertEqual
+ m = """\
+Content-Type: application/x-foo;
+\tname*0=\"us-ascii'en-us'My\";
+\tname*1*=\" Document\";
+\tname*2*=\" For You\"
+
+"""
+ msg = email.message_from_string(m)
+ charset, language, s = msg.get_param('name')
+ eq(charset, 'us-ascii')
+ eq(language, 'en-us')
+ eq(s, 'My Document For You')
+
def _testclasses():
diff --git a/Lib/email/test/test_email_renamed.py b/Lib/email/test/test_email_renamed.py
index 95d06cb..30f39b9 100644
--- a/Lib/email/test/test_email_renamed.py
+++ b/Lib/email/test/test_email_renamed.py
@@ -3011,14 +3011,29 @@ Content-Type: text/html; NAME*0=file____C__DOCUMENTS_20AND_20SETTINGS_FABIEN_LOC
'''
msg = email.message_from_string(m)
- self.assertEqual(msg.get_param('NAME'),
- (None, None, 'file____C__DOCUMENTS_20AND_20SETTINGS_FABIEN_LOCAL_20SETTINGS_TEMP_nsmail.htm'))
+ param = msg.get_param('NAME')
+ self.failIf(isinstance(param, tuple))
+ self.assertEqual(
+ param,
+ 'file____C__DOCUMENTS_20AND_20SETTINGS_FABIEN_LOCAL_20SETTINGS_TEMP_nsmail.htm')
def test_rfc2231_no_language_or_charset_in_filename(self):
m = '''\
Content-Disposition: inline;
-\tfilename*0="This%20is%20even%20more%20";
-\tfilename*1="%2A%2A%2Afun%2A%2A%2A%20";
+\tfilename*0*="''This%20is%20even%20more%20";
+\tfilename*1*="%2A%2A%2Afun%2A%2A%2A%20";
+\tfilename*2="is it not.pdf"
+
+'''
+ msg = email.message_from_string(m)
+ self.assertEqual(msg.get_filename(),
+ 'This is even more ***fun*** is it not.pdf')
+
+ def test_rfc2231_no_language_or_charset_in_filename_encoded(self):
+ m = '''\
+Content-Disposition: inline;
+\tfilename*0*="''This%20is%20even%20more%20";
+\tfilename*1*="%2A%2A%2Afun%2A%2A%2A%20";
\tfilename*2="is it not.pdf"
'''
@@ -3026,11 +3041,37 @@ Content-Disposition: inline;
self.assertEqual(msg.get_filename(),
'This is even more ***fun*** is it not.pdf')
+ def test_rfc2231_partly_encoded(self):
+ m = '''\
+Content-Disposition: inline;
+\tfilename*0="''This%20is%20even%20more%20";
+\tfilename*1*="%2A%2A%2Afun%2A%2A%2A%20";
+\tfilename*2="is it not.pdf"
+
+'''
+ msg = email.message_from_string(m)
+ self.assertEqual(
+ msg.get_filename(),
+ 'This%20is%20even%20more%20***fun*** is it not.pdf')
+
+ def test_rfc2231_partly_nonencoded(self):
+ m = '''\
+Content-Disposition: inline;
+\tfilename*0="This%20is%20even%20more%20";
+\tfilename*1="%2A%2A%2Afun%2A%2A%2A%20";
+\tfilename*2="is it not.pdf"
+
+'''
+ msg = email.message_from_string(m)
+ self.assertEqual(
+ msg.get_filename(),
+ 'This%20is%20even%20more%20%2A%2A%2Afun%2A%2A%2A%20is it not.pdf')
+
def test_rfc2231_no_language_or_charset_in_boundary(self):
m = '''\
Content-Type: multipart/alternative;
-\tboundary*0="This%20is%20even%20more%20";
-\tboundary*1="%2A%2A%2Afun%2A%2A%2A%20";
+\tboundary*0*="''This%20is%20even%20more%20";
+\tboundary*1*="%2A%2A%2Afun%2A%2A%2A%20";
\tboundary*2="is it not.pdf"
'''
@@ -3042,8 +3083,8 @@ Content-Type: multipart/alternative;
# This is a nonsensical charset value, but tests the code anyway
m = '''\
Content-Type: text/plain;
-\tcharset*0="This%20is%20even%20more%20";
-\tcharset*1="%2A%2A%2Afun%2A%2A%2A%20";
+\tcharset*0*="This%20is%20even%20more%20";
+\tcharset*1*="%2A%2A%2Afun%2A%2A%2A%20";
\tcharset*2="is it not.pdf"
'''
@@ -3051,15 +3092,145 @@ Content-Type: text/plain;
self.assertEqual(msg.get_content_charset(),
'this is even more ***fun*** is it not.pdf')
+ def test_rfc2231_bad_encoding_in_filename(self):
+ m = '''\
+Content-Disposition: inline;
+\tfilename*0*="bogus'xx'This%20is%20even%20more%20";
+\tfilename*1*="%2A%2A%2Afun%2A%2A%2A%20";
+\tfilename*2="is it not.pdf"
+
+'''
+ msg = email.message_from_string(m)
+ self.assertEqual(msg.get_filename(),
+ 'This is even more ***fun*** is it not.pdf')
+
+ def test_rfc2231_bad_encoding_in_charset(self):
+ m = """\
+Content-Type: text/plain; charset*=bogus''utf-8%E2%80%9D
+
+"""
+ msg = email.message_from_string(m)
+ # This should return None because non-ascii characters in the charset
+ # are not allowed.
+ self.assertEqual(msg.get_content_charset(), None)
+
+ def test_rfc2231_bad_character_in_charset(self):
+ m = """\
+Content-Type: text/plain; charset*=ascii''utf-8%E2%80%9D
+
+"""
+ msg = email.message_from_string(m)
+ # This should return None because non-ascii characters in the charset
+ # are not allowed.
+ self.assertEqual(msg.get_content_charset(), None)
+
+ def test_rfc2231_bad_character_in_filename(self):
+ m = '''\
+Content-Disposition: inline;
+\tfilename*0*="ascii'xx'This%20is%20even%20more%20";
+\tfilename*1*="%2A%2A%2Afun%2A%2A%2A%20";
+\tfilename*2*="is it not.pdf%E2"
+
+'''
+ msg = email.message_from_string(m)
+ self.assertEqual(msg.get_filename(),
+ u'This is even more ***fun*** is it not.pdf\ufffd')
+
def test_rfc2231_unknown_encoding(self):
m = """\
Content-Transfer-Encoding: 8bit
-Content-Disposition: inline; filename*0=X-UNKNOWN''myfile.txt
+Content-Disposition: inline; filename*=X-UNKNOWN''myfile.txt
"""
msg = email.message_from_string(m)
self.assertEqual(msg.get_filename(), 'myfile.txt')
+ def test_rfc2231_single_tick_in_filename_extended(self):
+ eq = self.assertEqual
+ m = """\
+Content-Type: application/x-foo;
+\tname*0*=\"Frank's\"; name*1*=\" Document\"
+
+"""
+ msg = email.message_from_string(m)
+ charset, language, s = msg.get_param('name')
+ eq(charset, None)
+ eq(language, None)
+ eq(s, "Frank's Document")
+
+ def test_rfc2231_single_tick_in_filename(self):
+ m = """\
+Content-Type: application/x-foo; name*0=\"Frank's\"; name*1=\" Document\"
+
+"""
+ msg = email.message_from_string(m)
+ param = msg.get_param('name')
+ self.failIf(isinstance(param, tuple))
+ self.assertEqual(param, "Frank's Document")
+
+ def test_rfc2231_tick_attack_extended(self):
+ eq = self.assertEqual
+ m = """\
+Content-Type: application/x-foo;
+\tname*0*=\"us-ascii'en-us'Frank's\"; name*1*=\" Document\"
+
+"""
+ msg = email.message_from_string(m)
+ charset, language, s = msg.get_param('name')
+ eq(charset, 'us-ascii')
+ eq(language, 'en-us')
+ eq(s, "Frank's Document")
+
+ def test_rfc2231_tick_attack(self):
+ m = """\
+Content-Type: application/x-foo;
+\tname*0=\"us-ascii'en-us'Frank's\"; name*1=\" Document\"
+
+"""
+ msg = email.message_from_string(m)
+ param = msg.get_param('name')
+ self.failIf(isinstance(param, tuple))
+ self.assertEqual(param, "us-ascii'en-us'Frank's Document")
+
+ def test_rfc2231_no_extended_values(self):
+ eq = self.assertEqual
+ m = """\
+Content-Type: application/x-foo; name=\"Frank's Document\"
+
+"""
+ msg = email.message_from_string(m)
+ eq(msg.get_param('name'), "Frank's Document")
+
+ def test_rfc2231_encoded_then_unencoded_segments(self):
+ eq = self.assertEqual
+ m = """\
+Content-Type: application/x-foo;
+\tname*0*=\"us-ascii'en-us'My\";
+\tname*1=\" Document\";
+\tname*2*=\" For You\"
+
+"""
+ msg = email.message_from_string(m)
+ charset, language, s = msg.get_param('name')
+ eq(charset, 'us-ascii')
+ eq(language, 'en-us')
+ eq(s, 'My Document For You')
+
+ def test_rfc2231_unencoded_then_encoded_segments(self):
+ eq = self.assertEqual
+ m = """\
+Content-Type: application/x-foo;
+\tname*0=\"us-ascii'en-us'My\";
+\tname*1*=\" Document\";
+\tname*2*=\" For You\"
+
+"""
+ msg = email.message_from_string(m)
+ charset, language, s = msg.get_param('name')
+ eq(charset, 'us-ascii')
+ eq(language, 'en-us')
+ eq(s, 'My Document For You')
+
def _testclasses():
diff --git a/Lib/email/utils.py b/Lib/email/utils.py
index 250eb19..26ebb0e 100644
--- a/Lib/email/utils.py
+++ b/Lib/email/utils.py
@@ -25,6 +25,7 @@ import time
import base64
import random
import socket
+import urllib
import warnings
from cStringIO import StringIO
@@ -45,6 +46,7 @@ COMMASPACE = ', '
EMPTYSTRING = ''
UEMPTYSTRING = u''
CRLF = '\r\n'
+TICK = "'"
specialsre = re.compile(r'[][\\()<>@,:;".]')
escapesre = re.compile(r'[][\\()"]')
@@ -230,12 +232,14 @@ def unquote(str):
# RFC2231-related functions - parameter encoding and decoding
def decode_rfc2231(s):
"""Decode string according to RFC 2231"""
- import urllib
- parts = s.split("'", 2)
- if len(parts) == 1:
- return None, None, urllib.unquote(s)
- charset, language, s = parts
- return charset, language, urllib.unquote(s)
+ parts = s.split(TICK, 2)
+ if len(parts) <= 2:
+ return None, None, s
+ if len(parts) > 3:
+ charset, language = parts[:2]
+ s = TICK.join(parts[2:])
+ return charset, language, s
+ return parts
def encode_rfc2231(s, charset=None, language=None):
@@ -259,37 +263,54 @@ rfc2231_continuation = re.compile(r'^(?P<name>\w+)\*((?P<num>[0-9]+)\*?)?$')
def decode_params(params):
"""Decode parameters list according to RFC 2231.
- params is a sequence of 2-tuples containing (content type, string value).
+ params is a sequence of 2-tuples containing (param name, string value).
"""
+ # Copy params so we don't mess with the original
+ params = params[:]
new_params = []
- # maps parameter's name to a list of continuations
+ # Map parameter's name to a list of continuations. The values are a
+ # 3-tuple of the continuation number, the string value, and a flag
+ # specifying whether a particular segment is %-encoded.
rfc2231_params = {}
- # params is a sequence of 2-tuples containing (content_type, string value)
- name, value = params[0]
+ name, value = params.pop(0)
new_params.append((name, value))
- # Cycle through each of the rest of the parameters.
- for name, value in params[1:]:
+ while params:
+ name, value = params.pop(0)
+ if name.endswith('*'):
+ encoded = True
+ else:
+ encoded = False
value = unquote(value)
mo = rfc2231_continuation.match(name)
if mo:
name, num = mo.group('name', 'num')
if num is not None:
num = int(num)
- rfc2231_param1 = rfc2231_params.setdefault(name, [])
- rfc2231_param1.append((num, value))
+ rfc2231_params.setdefault(name, []).append((num, value, encoded))
else:
new_params.append((name, '"%s"' % quote(value)))
if rfc2231_params:
for name, continuations in rfc2231_params.items():
value = []
+ extended = False
# Sort by number
continuations.sort()
- # And now append all values in num order
- for num, continuation in continuations:
- value.append(continuation)
- charset, language, value = decode_rfc2231(EMPTYSTRING.join(value))
- new_params.append(
- (name, (charset, language, '"%s"' % quote(value))))
+ # And now append all values in numerical order, converting
+ # %-encodings for the encoded segments. If any of the
+ # continuation names ends in a *, then the entire string, after
+ # decoding segments and concatenating, must have the charset and
+ # language specifiers at the beginning of the string.
+ for num, s, encoded in continuations:
+ if encoded:
+ s = urllib.unquote(s)
+ extended = True
+ value.append(s)
+ value = quote(EMPTYSTRING.join(value))
+ if extended:
+ charset, language, value = decode_rfc2231(value)
+ new_params.append((name, (charset, language, '"%s"' % value)))
+ else:
+ new_params.append((name, '"%s"' % value))
return new_params
def collapse_rfc2231_value(value, errors='replace',
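
Summarising the utils changes: decode_rfc2231 no longer %-unquotes (decode_params now does that per encoded segment), and it only splits off charset and language when both ticks are present. Roughly:

from email.utils import decode_rfc2231

# A fully extended value carries charset'language' before the payload;
# note the payload is returned still %-encoded.
charset, language, value = decode_rfc2231("us-ascii'en-us'Frank%27s")
assert (charset, language, value) == ('us-ascii', 'en-us', 'Frank%27s')

# A value without that prefix is passed through whole.
assert decode_rfc2231('myfile.txt') == (None, None, 'myfile.txt')
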
diff --git a/Lib/encodings/mbcs.py b/Lib/encodings/mbcs.py
index ff77fde..baf46cb 100644
--- a/Lib/encodings/mbcs.py
+++ b/Lib/encodings/mbcs.py
@@ -7,42 +7,39 @@ which was written by Marc-Andre Lemburg (mal@lemburg.com).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
"""
+# Import them explicitly to cause an ImportError
+# on non-Windows systems
+from codecs import mbcs_encode, mbcs_decode
+# for IncrementalDecoder, IncrementalEncoder, ...
import codecs
### Codec APIs
-class Codec(codecs.Codec):
+encode = mbcs_encode
- # Note: Binding these as C functions will result in the class not
- # converting them to methods. This is intended.
- encode = codecs.mbcs_encode
- decode = codecs.mbcs_decode
+def decode(input, errors='strict'):
+ return mbcs_decode(input, errors, True)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
- return codecs.mbcs_encode(input,self.errors)[0]
+ return mbcs_encode(input, self.errors)[0]
-class IncrementalDecoder(codecs.IncrementalDecoder):
- def decode(self, input, final=False):
- return codecs.mbcs_decode(input,self.errors)[0]
-class StreamWriter(Codec,codecs.StreamWriter):
- pass
+class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
+ _buffer_decode = mbcs_decode
-class StreamReader(Codec,codecs.StreamReader):
- pass
+class StreamWriter(codecs.StreamWriter):
+ encode = mbcs_encode
-class StreamConverter(StreamWriter,StreamReader):
-
- encode = codecs.mbcs_decode
- decode = codecs.mbcs_encode
+class StreamReader(codecs.StreamReader):
+ decode = mbcs_decode
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='mbcs',
- encode=Codec.encode,
- decode=Codec.decode,
+ encode=encode,
+ decode=decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
diff --git a/Lib/encodings/punycode.py b/Lib/encodings/punycode.py
index 2cde8b9..d97200f 100644
--- a/Lib/encodings/punycode.py
+++ b/Lib/encodings/punycode.py
@@ -214,9 +214,9 @@ class IncrementalEncoder(codecs.IncrementalEncoder):
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
- if errors not in ('strict', 'replace', 'ignore'):
- raise UnicodeError, "Unsupported error handling "+errors
- return punycode_decode(input, errors)
+ if self.errors not in ('strict', 'replace', 'ignore'):
+ raise UnicodeError, "Unsupported error handling "+self.errors
+ return punycode_decode(input, self.errors)
class StreamWriter(Codec,codecs.StreamWriter):
pass
diff --git a/Lib/encodings/utf_8_sig.py b/Lib/encodings/utf_8_sig.py
index cd14ab0..f05f6b8 100644
--- a/Lib/encodings/utf_8_sig.py
+++ b/Lib/encodings/utf_8_sig.py
@@ -30,9 +30,9 @@ class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
if self.first:
self.first = False
- return codecs.BOM_UTF8 + codecs.utf_8_encode(input, errors)[0]
+ return codecs.BOM_UTF8 + codecs.utf_8_encode(input, self.errors)[0]
else:
- return codecs.utf_8_encode(input, errors)[0]
+ return codecs.utf_8_encode(input, self.errors)[0]
def reset(self):
codecs.IncrementalEncoder.reset(self)
diff --git a/Lib/encodings/uu_codec.py b/Lib/encodings/uu_codec.py
index 0877fe1..43fb93c 100644
--- a/Lib/encodings/uu_codec.py
+++ b/Lib/encodings/uu_codec.py
@@ -102,11 +102,11 @@ class Codec(codecs.Codec):
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
- return uu_encode(input, errors)[0]
+ return uu_encode(input, self.errors)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
- return uu_decode(input, errors)[0]
+ return uu_decode(input, self.errors)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
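
The punycode, utf_8_sig and uu_codec fixes are all the same bug: the incremental classes referenced a bare errors name that does not exist in their scope, so any incremental use raised NameError; the error policy actually lives on the instance as self.errors. The incremental path can be exercised directly:

import codecs

# getincrementalencoder reaches the IncrementalEncoder registered by the
# codec; before the fix this call path died with a NameError.
enc = codecs.getincrementalencoder('uu_codec')('strict')
assert enc.encode('hello world').startswith('begin ')
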
diff --git a/Lib/gzip.py b/Lib/gzip.py
index 860accc..0bf29e8 100644
--- a/Lib/gzip.py
+++ b/Lib/gzip.py
@@ -315,7 +315,13 @@ class GzipFile:
def close(self):
if self.mode == WRITE:
self.fileobj.write(self.compress.flush())
- write32(self.fileobj, self.crc)
+ # The native zlib crc is an unsigned 32-bit integer, but
+ # the Python wrapper implicitly casts that to a signed C
+ # long. So, on a 32-bit box self.crc may "look negative",
+ # while the same crc on a 64-bit box may "look positive".
+ # To avoid irksome warnings from the `struct` module, force
+ # it to look positive on all boxes.
+ write32u(self.fileobj, LOWU32(self.crc))
# self.size may exceed 2GB, or even 4GB
write32u(self.fileobj, LOWU32(self.size))
self.fileobj = None
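
LOWU32, already applied to the size field just above, is essentially a mask into the unsigned 32-bit range (a sketch of the idea, not a quote of gzip.py):

import zlib

def LOWU32(i):
    # Mask a possibly negative 32-bit value into the range 0..2**32-1.
    return i & 0xFFFFFFFFL

crc = zlib.crc32('some data')     # signed in Python 2, may be negative
assert 0 <= LOWU32(crc) <= 0xFFFFFFFFL
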
diff --git a/Lib/httplib.py b/Lib/httplib.py
index 36381de..5ae5efc 100644
--- a/Lib/httplib.py
+++ b/Lib/httplib.py
@@ -3,7 +3,7 @@
<intro stuff goes here>
<other stuff, too>
-HTTPConnection go through a number of "states", which defines when a client
+HTTPConnection goes through a number of "states", which define when a client
may legally make another request or fetch the response for a particular
request. This diagram details these state transitions:
@@ -926,15 +926,15 @@ class HTTPConnection:
self.__state = _CS_IDLE
if response.will_close:
- # this effectively passes the connection to the response
- self.close()
+ # Pass the socket to the response
+ self.sock = None
else:
# remember this, so we can tell when it is complete
self.__response = response
return response
-# The next several classes are used to define FakeSocket,a socket-like
+# The next several classes are used to define FakeSocket, a socket-like
# interface to an SSL connection.
# The primary complexity comes from faking a makefile() method. The
diff --git a/Lib/idlelib/Bindings.py b/Lib/idlelib/Bindings.py
index b5e90b0..d24be3f 100644
--- a/Lib/idlelib/Bindings.py
+++ b/Lib/idlelib/Bindings.py
@@ -80,6 +80,32 @@ menudefs = [
]),
]
+import sys
+if sys.platform == 'darwin' and '.app' in sys.executable:
+ # Running as a proper MacOS application bundle. This block restructures
+ # the menus a little to make them conform better to the HIG.
+
+ quitItem = menudefs[0][1][-1]
+ closeItem = menudefs[0][1][-2]
+
+ # Remove the last 3 items of the file menu: a separator, close window and
+ # quit. Close window will be reinserted just above the save item, where
+ # it should be according to the HIG. Quit is in the application menu.
+ del menudefs[0][1][-3:]
+ menudefs[0][1].insert(6, closeItem)
+
+ # Remove the 'About' entry from the help menu, it is in the application
+ # menu
+ del menudefs[-1][1][0:2]
+
+ menudefs.insert(0,
+ ('application', [
+ ('About IDLE', '<<about-idle>>'),
+ None,
+ ('_Preferences....', '<<open-config-dialog>>'),
+ ]))
+
+
default_keydefs = idleConf.GetCurrentKeySet()
del sys
diff --git a/Lib/idlelib/CREDITS.txt b/Lib/idlelib/CREDITS.txt
index 6f4e95d..e838c03 100644
--- a/Lib/idlelib/CREDITS.txt
+++ b/Lib/idlelib/CREDITS.txt
@@ -19,17 +19,18 @@ the integration of the RPC and remote debugger, implemented the threaded
subprocess, and made a number of usability enhancements.
Other contributors include Raymond Hettinger, Tony Lownds (Mac integration),
-Neal Norwitz (code check and clean-up), and Chui Tey (RPC integration, debugger
-integration and persistent breakpoints).
+Neal Norwitz (code check and clean-up), Ronald Oussoren (Mac integration),
+Noam Raphael (Code Context, Call Tips, many other patches), and Chui Tey (RPC
+integration, debugger integration and persistent breakpoints).
-Scott David Daniels, Hernan Foffani, Christos Georgiou, Martin v. Löwis,
-Jason Orendorff, Noam Raphael, Josh Robb, Nigel Rowe, Bruce Sherwood, and
-Jeff Shute have submitted useful patches. Thanks, guys!
+Scott David Daniels, Tal Einat, Hernan Foffani, Christos Georgiou,
+Martin v. Löwis, Jason Orendorff, Josh Robb, Nigel Rowe, Bruce Sherwood,
+and Jeff Shute have submitted useful patches. Thanks, guys!
For additional details refer to NEWS.txt and Changelog.
-Please contact the IDLE maintainer to have yourself included here if you
-are one of those we missed!
+Please contact the IDLE maintainer (kbk@shore.net) to have yourself included
+here if you are one of those we missed!
diff --git a/Lib/idlelib/CallTipWindow.py b/Lib/idlelib/CallTipWindow.py
index afd4439..2223885 100644
--- a/Lib/idlelib/CallTipWindow.py
+++ b/Lib/idlelib/CallTipWindow.py
@@ -49,7 +49,11 @@ class CallTip:
"""
# truncate overly long calltip
if len(text) >= 79:
- text = text[:75] + ' ...'
+ textlines = text.splitlines()
+ for i, line in enumerate(textlines):
+ if len(line) > 79:
+ textlines[i] = line[:75] + ' ...'
+ text = '\n'.join(textlines)
self.text = text
if self.tipwindow or not self.text:
return
diff --git a/Lib/idlelib/CallTips.py b/Lib/idlelib/CallTips.py
index 47a1d55..997eb13 100644
--- a/Lib/idlelib/CallTips.py
+++ b/Lib/idlelib/CallTips.py
@@ -127,7 +127,7 @@ def get_arg_text(ob):
argText = ""
if ob is not None:
argOffset = 0
- if type(ob)==types.ClassType:
+ if type(ob) in (types.ClassType, types.TypeType):
# Look for the highest __init__ in the class chain.
fob = _find_constructor(ob)
if fob is None:
diff --git a/Lib/idlelib/CodeContext.py b/Lib/idlelib/CodeContext.py
index 5d55f77..63cc82c 100644
--- a/Lib/idlelib/CodeContext.py
+++ b/Lib/idlelib/CodeContext.py
@@ -11,11 +11,10 @@ not open blocks are not shown in the context hints pane.
"""
import Tkinter
from configHandler import idleConf
-from sets import Set
import re
from sys import maxint as INFINITY
-BLOCKOPENERS = Set(["class", "def", "elif", "else", "except", "finally", "for",
+BLOCKOPENERS = set(["class", "def", "elif", "else", "except", "finally", "for",
"if", "try", "while"])
UPDATEINTERVAL = 100 # millisec
FONTUPDATEINTERVAL = 1000 # millisec
diff --git a/Lib/idlelib/ColorDelegator.py b/Lib/idlelib/ColorDelegator.py
index f258b34..e55f9e6 100644
--- a/Lib/idlelib/ColorDelegator.py
+++ b/Lib/idlelib/ColorDelegator.py
@@ -8,28 +8,29 @@ from configHandler import idleConf
DEBUG = False
-def any(name, list):
- return "(?P<%s>" % name + "|".join(list) + ")"
+def any(name, alternates):
+ "Return a named group pattern matching list of alternates."
+ return "(?P<%s>" % name + "|".join(alternates) + ")"
def make_pat():
kw = r"\b" + any("KEYWORD", keyword.kwlist) + r"\b"
builtinlist = [str(name) for name in dir(__builtin__)
if not name.startswith('_')]
# self.file = file("file") :
- # 1st 'file' colorized normal, 2nd as builtin, 3rd as comment
- builtin = r"([^.'\"\\]\b|^)" + any("BUILTIN", builtinlist) + r"\b"
+ # 1st 'file' colorized normal, 2nd as builtin, 3rd as string
+ builtin = r"([^.'\"\\#]\b|^)" + any("BUILTIN", builtinlist) + r"\b"
comment = any("COMMENT", [r"#[^\n]*"])
- sqstring = r"(\b[rR])?'[^'\\\n]*(\\.[^'\\\n]*)*'?"
- dqstring = r'(\b[rR])?"[^"\\\n]*(\\.[^"\\\n]*)*"?'
- sq3string = r"(\b[rR])?'''[^'\\]*((\\.|'(?!''))[^'\\]*)*(''')?"
- dq3string = r'(\b[rR])?"""[^"\\]*((\\.|"(?!""))[^"\\]*)*(""")?'
+ sqstring = r"(\b[rRuU])?'[^'\\\n]*(\\.[^'\\\n]*)*'?"
+ dqstring = r'(\b[rRuU])?"[^"\\\n]*(\\.[^"\\\n]*)*"?'
+ sq3string = r"(\b[rRuU])?'''[^'\\]*((\\.|'(?!''))[^'\\]*)*(''')?"
+ dq3string = r'(\b[rRuU])?"""[^"\\]*((\\.|"(?!""))[^"\\]*)*(""")?'
string = any("STRING", [sq3string, dq3string, sqstring, dqstring])
return kw + "|" + builtin + "|" + comment + "|" + string +\
"|" + any("SYNC", [r"\n"])
prog = re.compile(make_pat(), re.S)
idprog = re.compile(r"\s+(\w+)", re.S)
-asprog = re.compile(r".*?\b(as)\b", re.S)
+asprog = re.compile(r".*?\b(as)\b")
class ColorDelegator(Delegator):
@@ -208,10 +209,15 @@ class ColorDelegator(Delegator):
head + "+%dc" % a,
head + "+%dc" % b)
elif value == "import":
- # color all the "as" words on same line;
- # cheap approximation to the truth
+ # color all the "as" words on same line, except
+ # if in a comment; cheap approximation to the
+ # truth
+ if '#' in chars:
+ endpos = chars.index('#')
+ else:
+ endpos = len(chars)
while True:
- m1 = self.asprog.match(chars, b)
+ m1 = self.asprog.match(chars, b, endpos)
if not m1:
break
a, b = m1.span(1)
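
The any helper just wraps a set of alternatives in one named group, and the colorizer later inspects which named group matched to pick a tag. A trimmed-down illustration, not IDLE's full pattern:

import re

def any(name, alternates):
    "Return a named group pattern matching any of the alternates."
    return "(?P<%s>" % name + "|".join(alternates) + ")"

pat = any("KEYWORD", [r"\bdef\b", r"\bclass\b"]) + "|" + \
      any("COMMENT", [r"#[^\n]*"])
prog = re.compile(pat)

assert prog.search("def f(): pass  # body").lastgroup == "KEYWORD"
assert prog.search("# just a comment").lastgroup == "COMMENT"
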
diff --git a/Lib/idlelib/Debugger.py b/Lib/idlelib/Debugger.py
index 7a9d02f..f56460a 100644
--- a/Lib/idlelib/Debugger.py
+++ b/Lib/idlelib/Debugger.py
@@ -4,6 +4,7 @@ import types
from Tkinter import *
from WindowList import ListedToplevel
from ScrolledList import ScrolledList
+import macosxSupport
class Idb(bdb.Bdb):
@@ -322,7 +323,13 @@ class Debugger:
class StackViewer(ScrolledList):
def __init__(self, master, flist, gui):
- ScrolledList.__init__(self, master, width=80)
+ if macosxSupport.runningAsOSXApp():
+ # At least with the stock AquaTk version on OSX 10.4 you'll
+ # get a shaking GUI that eventually kills IDLE if the width
+ # argument is specified.
+ ScrolledList.__init__(self, master)
+ else:
+ ScrolledList.__init__(self, master, width=80)
self.flist = flist
self.gui = gui
self.stack = []
diff --git a/Lib/idlelib/EditorWindow.py b/Lib/idlelib/EditorWindow.py
index 59440f0..6b8ab63 100644
--- a/Lib/idlelib/EditorWindow.py
+++ b/Lib/idlelib/EditorWindow.py
@@ -17,6 +17,7 @@ import ReplaceDialog
import PyParse
from configHandler import idleConf
import aboutDialog, textView, configDialog
+import macosxSupport
# The default tab setting for a Text widget, in average-width characters.
TK_TABWIDTH_DEFAULT = 8
@@ -66,26 +67,40 @@ class EditorWindow(object):
'Python%d%d.chm' % sys.version_info[:2])
if os.path.isfile(chmfile):
dochome = chmfile
+
+ elif macosxSupport.runningAsOSXApp():
+ # documentation is stored inside the python framework
+ dochome = os.path.join(sys.prefix,
+ 'Resources/English.lproj/Documentation/index.html')
+
dochome = os.path.normpath(dochome)
if os.path.isfile(dochome):
EditorWindow.help_url = dochome
+ if sys.platform == 'darwin':
+ # Safari requires real file:-URLs
+ EditorWindow.help_url = 'file://' + EditorWindow.help_url
else:
EditorWindow.help_url = "http://www.python.org/doc/current"
currentTheme=idleConf.CurrentTheme()
self.flist = flist
root = root or flist.root
self.root = root
+ try:
+ sys.ps1
+ except AttributeError:
+ sys.ps1 = '>>> '
self.menubar = Menu(root)
self.top = top = WindowList.ListedToplevel(root, menu=self.menubar)
if flist:
self.tkinter_vars = flist.vars
#self.top.instance_dict makes flist.inversedict available to
#configDialog.py so it can access all EditorWindow instances
- self.top.instance_dict=flist.inversedict
+ self.top.instance_dict = flist.inversedict
else:
self.tkinter_vars = {} # keys: Tkinter event names
# values: Tkinter variable instances
- self.recent_files_path=os.path.join(idleConf.GetUserCfgDir(),
+ self.top.instance_dict = {}
+ self.recent_files_path = os.path.join(idleConf.GetUserCfgDir(),
'recent-files.lst')
self.vbar = vbar = Scrollbar(top, name='vbar')
self.text_frame = text_frame = Frame(top)
@@ -111,6 +126,9 @@ class EditorWindow(object):
self.top.protocol("WM_DELETE_WINDOW", self.close)
self.top.bind("<<close-window>>", self.close_event)
+ if macosxSupport.runningAsOSXApp():
+ # Command-W on editorwindows doesn't work without this.
+ text.bind('<<close-window>>', self.close_event)
text.bind("<<cut>>", self.cut)
text.bind("<<copy>>", self.copy)
text.bind("<<paste>>", self.paste)
@@ -278,6 +296,10 @@ class EditorWindow(object):
def set_status_bar(self):
self.status_bar = self.MultiStatusBar(self.top)
+ if macosxSupport.runningAsOSXApp():
+ # Insert some padding to avoid obscuring some of the statusbar
+ # by the resize widget.
+ self.status_bar.set_label('_padding1', ' ', side=RIGHT)
self.status_bar.set_label('column', 'Col: ?', side=RIGHT)
self.status_bar.set_label('line', 'Ln: ?', side=RIGHT)
self.status_bar.pack(side=BOTTOM, fill=X)
@@ -301,6 +323,11 @@ class EditorWindow(object):
("help", "_Help"),
]
+ if macosxSupport.runningAsOSXApp():
+ del menu_specs[-3]
+ menu_specs[-2] = ("windows", "_Window")
+
+
def createmenubar(self):
mbar = self.menubar
self.menudict = menudict = {}
@@ -308,6 +335,12 @@ class EditorWindow(object):
underline, label = prepstr(label)
menudict[name] = menu = Menu(mbar, name=name)
mbar.add_cascade(label=label, menu=menu, underline=underline)
+
+ if sys.platform == 'darwin' and '.framework' in sys.executable:
+ # Insert the application menu
+ menudict['application'] = menu = Menu(mbar, name='apple')
+ mbar.add_cascade(label='IDLE', menu=menu)
+
self.fill_menus()
self.base_helpmenu_length = self.menudict['help'].index(END)
self.reset_help_menu_entries()
@@ -649,7 +682,7 @@ class EditorWindow(object):
def __extra_help_callback(self, helpfile):
"Create a callback with the helpfile value frozen at definition time"
def display_extra_help(helpfile=helpfile):
- if not (helpfile.startswith('www') or helpfile.startswith('http')):
+ if not helpfile.startswith(('www', 'http')):
url = os.path.normpath(helpfile)
if sys.platform[:3] == 'win':
os.startfile(helpfile)
@@ -1244,13 +1277,13 @@ class EditorWindow(object):
"Toggle tabs",
"Turn tabs " + ("on", "off")[self.usetabs] +
"?\nIndent width " +
- ("will be", "remains at")[self.usetabs] + " 8.",
+ ("will be", "remains at")[self.usetabs] + " 8." +
+ "\n Note: a tab is always 8 columns",
parent=self.text):
self.usetabs = not self.usetabs
- # Try to prevent mixed tabs/spaces.
- # User must reset indent width manually after using tabs
- # if he insists on getting into trouble.
- self.indentwidth = 8
+ # Try to prevent inconsistent indentation.
+ # User must change indent width manually after using tabs.
+ self.indentwidth = 8
return "break"
# XXX this isn't bound to anything -- see tabwidth comments
diff --git a/Lib/idlelib/NEWS.txt b/Lib/idlelib/NEWS.txt
index 25e5d40..235963e 100644
--- a/Lib/idlelib/NEWS.txt
+++ b/Lib/idlelib/NEWS.txt
@@ -1,3 +1,46 @@
+What's New in IDLE 1.2c1?
+=========================
+
+*Release date: XX-AUG-2006*
+
+- Changing tokenize (39046) to detect dedent broke tabnanny check (since 1.2a1)
+
+- ToggleTab dialog was setting indent to 8 even if cancelled (since 1.2a1).
+
+- When used w/o subprocess, all exceptions were preceded by an error
+ message claiming they were IDLE internal errors (since 1.2a1).
+
+What's New in IDLE 1.2b3?
+=========================
+
+*Release date: 03-AUG-2006*
+
+- EditorWindow.test() was failing. Bug 1417598
+
+- EditorWindow failed when used stand-alone if sys.ps1 not set.
+ Bug 1010370 Dave Florek
+
+- Tooltips failed on new-style class __init__ args. Bug 1027566 Loren Guthrie
+
+- Avoid occasional failure to detect closing paren properly.
+ Patch 1407280 Tal Einat
+
+- Rebinding Tab key was inserting 'tab' instead of 'Tab'. Bug 1179168.
+
+- Colorizer now handles #<builtin> correctly, also unicode strings and
+ 'as' keyword in comment directly following import command. Closes 1325071.
+ Patch 1479219 Tal Einat
+
+What's New in IDLE 1.2b2?
+=========================
+
+*Release date: 11-JUL-2006*
+
+What's New in IDLE 1.2b1?
+=========================
+
+*Release date: 20-JUN-2006*
+
What's New in IDLE 1.2a2?
=========================
diff --git a/Lib/idlelib/ParenMatch.py b/Lib/idlelib/ParenMatch.py
index 673aee2..250ae8b 100644
--- a/Lib/idlelib/ParenMatch.py
+++ b/Lib/idlelib/ParenMatch.py
@@ -8,7 +8,7 @@ parentheses, square brackets, and curly braces.
from HyperParser import HyperParser
from configHandler import idleConf
-keysym_opener = {"parenright":'(', "bracketright":'[', "braceright":'{'}
+_openers = {')':'(',']':'[','}':'{'}
CHECK_DELAY = 100 # miliseconds
class ParenMatch:
@@ -100,12 +100,13 @@ class ParenMatch:
def paren_closed_event(self, event):
# If it was a shortcut and not really a closing paren, quit.
- if self.text.get("insert-1c") not in (')',']','}'):
+ closer = self.text.get("insert-1c")
+ if closer not in _openers:
return
hp = HyperParser(self.editwin, "insert-1c")
if not hp.is_in_code():
return
- indices = hp.get_surrounding_brackets(keysym_opener[event.keysym], True)
+ indices = hp.get_surrounding_brackets(_openers[closer], True)
if indices is None:
self.warn_mismatched()
return
diff --git a/Lib/idlelib/PyShell.py b/Lib/idlelib/PyShell.py
index b6abe40..25eb446 100644
--- a/Lib/idlelib/PyShell.py
+++ b/Lib/idlelib/PyShell.py
@@ -11,6 +11,7 @@ import time
import threading
import traceback
import types
+import macosxSupport
import linecache
from code import InteractiveInterpreter
@@ -721,8 +722,12 @@ class ModifiedInterpreter(InteractiveInterpreter):
else:
self.showtraceback()
except:
- print>>sys.stderr, "IDLE internal error in runcode()"
+ if use_subprocess:
+ print >> self.tkconsole.stderr, \
+ "IDLE internal error in runcode()"
self.showtraceback()
+ if use_subprocess:
+ self.tkconsole.endexecuting()
finally:
if not use_subprocess:
self.tkconsole.endexecuting()
@@ -777,6 +782,11 @@ class PyShell(OutputWindow):
("help", "_Help"),
]
+ if macosxSupport.runningAsOSXApp():
+ del menu_specs[-3]
+ menu_specs[-2] = ("windows", "_Window")
+
+
# New classes
from IdleHistory import History
@@ -1300,10 +1310,6 @@ def main():
script = None
startup = False
try:
- sys.ps1
- except AttributeError:
- sys.ps1 = '>>> '
- try:
opts, args = getopt.getopt(sys.argv[1:], "c:deihnr:st:")
except getopt.error, msg:
sys.stderr.write("Error: %s\n" % str(msg))
@@ -1371,9 +1377,12 @@ def main():
enable_shell = enable_shell or not edit_start
# start editor and/or shell windows:
root = Tk(className="Idle")
+
fixwordbreaks(root)
root.withdraw()
flist = PyShellFileList(root)
+ macosxSupport.setupApp(root, flist)
+
if enable_edit:
if not (cmd or script):
for filename in args:
@@ -1381,8 +1390,17 @@ def main():
if not args:
flist.new()
if enable_shell:
- if not flist.open_shell():
+ shell = flist.open_shell()
+ if not shell:
return # couldn't open shell
+
+ if macosxSupport.runningAsOSXApp() and flist.dict:
+ # On OSX: when the user has double-clicked on a file that causes
+ # IDLE to be launched the shell window will open just in front of
+ # the file she wants to see. Lower the interpreter window when
+ # there are open files.
+ shell.top.lower()
+
shell = flist.pyshell
# handle remaining options:
if debug:
@@ -1403,6 +1421,7 @@ def main():
elif script:
shell.interp.prepend_syspath(script)
shell.interp.execfile(script)
+
root.mainloop()
root.destroy()
diff --git a/Lib/idlelib/ScriptBinding.py b/Lib/idlelib/ScriptBinding.py
index 084c607..f325ad1 100644
--- a/Lib/idlelib/ScriptBinding.py
+++ b/Lib/idlelib/ScriptBinding.py
@@ -51,7 +51,7 @@ class ScriptBinding:
# Provide instance variables referenced by Debugger
# XXX This should be done differently
self.flist = self.editwin.flist
- self.root = self.flist.root
+ self.root = self.editwin.root
def check_module_event(self, event):
filename = self.getfilename()
@@ -76,6 +76,9 @@ class ScriptBinding:
self.editwin.gotoline(nag.get_lineno())
self.errorbox("Tab/space error", indent_message)
return False
+ except IndentationError:
+ # From tokenize(), let compile() in checksyntax find it again.
+ pass
return True
def checksyntax(self, filename):
diff --git a/Lib/idlelib/ZoomHeight.py b/Lib/idlelib/ZoomHeight.py
index 2ab4656..83ca3a6 100644
--- a/Lib/idlelib/ZoomHeight.py
+++ b/Lib/idlelib/ZoomHeight.py
@@ -2,6 +2,7 @@
import re
import sys
+import macosxSupport
class ZoomHeight:
@@ -29,6 +30,14 @@ def zoom_height(top):
if sys.platform == 'win32':
newy = 0
newheight = newheight - 72
+
+ elif macosxSupport.runningAsOSXApp():
+ # The '88' below is a magic number that avoids placing the bottom
+ # of the window below the panel on my machine. I don't know how
+ # to calculate the correct value for this with tkinter.
+ newy = 22
+ newheight = newheight - newy - 88
+
else:
#newy = 24
newy = 0
diff --git a/Lib/idlelib/buildapp.py b/Lib/idlelib/buildapp.py
deleted file mode 100644
index 672eb1e..0000000
--- a/Lib/idlelib/buildapp.py
+++ /dev/null
@@ -1,17 +0,0 @@
-#
-# After running python setup.py install, run this program from the command
-# line like so:
-#
-# % python2.3 buildapp.py build
-#
-# A double-clickable IDLE application will be created in the build/ directory.
-#
-
-from bundlebuilder import buildapp
-
-buildapp(
- name="IDLE",
- mainprogram="idle.py",
- argv_emulation=1,
- iconfile="Icons/idle.icns",
-)
diff --git a/Lib/idlelib/config-keys.def b/Lib/idlelib/config-keys.def
index 0653746..fb0aaf4 100644
--- a/Lib/idlelib/config-keys.def
+++ b/Lib/idlelib/config-keys.def
@@ -159,3 +159,56 @@ toggle-tabs=<Control-Key-t>
change-indentwidth=<Control-Key-u>
del-word-left=<Control-Key-BackSpace>
del-word-right=<Control-Key-Delete>
+
+[IDLE Classic OSX]
+toggle-tabs = <Control-Key-t>
+interrupt-execution = <Control-Key-c>
+untabify-region = <Control-Key-6>
+remove-selection = <Key-Escape>
+print-window = <Command-Key-p>
+replace = <Command-Key-r>
+goto-line = <Command-Key-j>
+plain-newline-and-indent = <Control-Key-j>
+history-previous = <Control-Key-p>
+beginning-of-line = <Control-Key-Left>
+end-of-line = <Control-Key-Right>
+comment-region = <Control-Key-3>
+redo = <Shift-Command-Key-Z>
+close-window = <Command-Key-w>
+restart-shell = <Control-Key-F6>
+save-window-as-file = <Command-Key-S>
+close-all-windows = <Command-Key-q>
+view-restart = <Key-F6>
+tabify-region = <Control-Key-5>
+find-again = <Command-Key-g> <Key-F3>
+find = <Command-Key-f>
+toggle-auto-coloring = <Control-Key-slash>
+select-all = <Command-Key-a>
+smart-backspace = <Key-BackSpace>
+change-indentwidth = <Control-Key-u>
+do-nothing = <Control-Key-F12>
+smart-indent = <Key-Tab>
+center-insert = <Control-Key-l>
+history-next = <Control-Key-n>
+del-word-right = <Option-Key-Delete>
+undo = <Command-Key-z>
+save-window = <Command-Key-s>
+uncomment-region = <Control-Key-4>
+cut = <Command-Key-x>
+find-in-files = <Command-Key-F3>
+dedent-region = <Command-Key-bracketleft>
+copy = <Command-Key-c>
+paste = <Command-Key-v>
+indent-region = <Command-Key-bracketright>
+del-word-left = <Option-Key-BackSpace> <Option-Command-Key-BackSpace>
+newline-and-indent = <Key-Return> <Key-KP_Enter>
+end-of-file = <Control-Key-d>
+open-class-browser = <Command-Key-b>
+open-new-window = <Command-Key-n>
+open-module = <Command-Key-m>
+find-selection = <Shift-Command-Key-F3>
+python-context-help = <Shift-Key-F1>
+save-copy-of-window-as-file = <Shift-Command-Key-s>
+open-window-from-file = <Command-Key-o>
+python-docs = <Key-F1>
+
diff --git a/Lib/idlelib/configHandler.py b/Lib/idlelib/configHandler.py
index 191a87c..826fb5d 100644
--- a/Lib/idlelib/configHandler.py
+++ b/Lib/idlelib/configHandler.py
@@ -20,6 +20,7 @@ configuration problem notification and resolution.
import os
import sys
import string
+import macosxSupport
from ConfigParser import ConfigParser, NoOptionError, NoSectionError
class InvalidConfigType(Exception): pass
@@ -406,7 +407,7 @@ class IdleConf:
names=extnNameList
kbNameIndicies=[]
for name in names:
- if name.endswith('_bindings') or name.endswith('_cfgBindings'):
+ if name.endswith(('_bindings', '_cfgBindings')):
kbNameIndicies.append(names.index(name))
kbNameIndicies.sort()
kbNameIndicies.reverse()
@@ -495,7 +496,18 @@ class IdleConf:
return binding
def GetCurrentKeySet(self):
- return self.GetKeySet(self.CurrentKeys())
+ result = self.GetKeySet(self.CurrentKeys())
+
+ if macosxSupport.runningAsOSXApp():
+ # We're using AquaTk; replace all keybindings that use the
+ # Alt key by ones that use the Option key because the former
+ # don't work reliably.
+ for k, v in result.items():
+ v2 = [ x.replace('<Alt-', '<Option-') for x in v ]
+ if v != v2:
+ result[k] = v2
+
+ return result
def GetKeySet(self,keySetName):
"""
diff --git a/Lib/idlelib/configHelpSourceEdit.py b/Lib/idlelib/configHelpSourceEdit.py
index 8924f79..6611621 100644
--- a/Lib/idlelib/configHelpSourceEdit.py
+++ b/Lib/idlelib/configHelpSourceEdit.py
@@ -127,7 +127,7 @@ class GetHelpSourceDialog(Toplevel):
parent=self)
self.entryPath.focus_set()
pathOk = False
- elif path.startswith('www.') or path.startswith('http'):
+ elif path.startswith(('www.', 'http')):
pass
else:
if path[:5] == 'file:':
@@ -146,8 +146,7 @@ class GetHelpSourceDialog(Toplevel):
self.path.get().strip())
if sys.platform == 'darwin':
path = self.result[1]
- if (path.startswith('www') or path.startswith('file:')
- or path.startswith('http:')):
+ if path.startswith(('www', 'file:', 'http:')):
pass
else:
# Mac Safari insists on using the URI form for local files
diff --git a/Lib/idlelib/idlever.py b/Lib/idlelib/idlever.py
index b7deb3f..07d3d82 100644
--- a/Lib/idlelib/idlever.py
+++ b/Lib/idlelib/idlever.py
@@ -1 +1 @@
-IDLE_VERSION = "1.2a2"
+IDLE_VERSION = "1.2b3"
diff --git a/Lib/idlelib/keybindingDialog.py b/Lib/idlelib/keybindingDialog.py
index ea57958..aff9cac 100644
--- a/Lib/idlelib/keybindingDialog.py
+++ b/Lib/idlelib/keybindingDialog.py
@@ -133,7 +133,7 @@ class GetKeysDialog(Toplevel):
config-keys.def must use the same ordering.
"""
import sys
- if sys.platform == 'darwin' and sys.executable.count('.app'):
+ if sys.platform == 'darwin' and sys.argv[0].count('.app'):
self.modifiers = ['Shift', 'Control', 'Option', 'Command']
else:
self.modifiers = ['Control', 'Alt', 'Shift']
@@ -202,7 +202,7 @@ class GetKeysDialog(Toplevel):
':':'colon',',':'comma','.':'period','<':'less','>':'greater',
'/':'slash','?':'question','Page Up':'Prior','Page Down':'Next',
'Left Arrow':'Left','Right Arrow':'Right','Up Arrow':'Up',
- 'Down Arrow': 'Down', 'Tab':'tab'}
+ 'Down Arrow': 'Down', 'Tab':'Tab'}
if key in translateDict.keys():
key = translateDict[key]
if 'Shift' in modifiers and key in string.ascii_lowercase:
diff --git a/Lib/idlelib/macosxSupport.py b/Lib/idlelib/macosxSupport.py
new file mode 100644
index 0000000..ad61fff
--- /dev/null
+++ b/Lib/idlelib/macosxSupport.py
@@ -0,0 +1,112 @@
+"""
+A number of functions that enhance IDLE on MacOSX when it is used as a normal
+GUI application (as opposed to an X11 application).
+"""
+import sys
+
+def runningAsOSXApp():
+ """ Returns True iff running from the IDLE.app bundle on OSX """
+ return (sys.platform == 'darwin' and 'IDLE.app' in sys.argv[0])
+
+def addOpenEventSupport(root, flist):
+ """
+    This ensures that the application will respond to open AppleEvents, which
+    makes it feasible to use IDLE as the default application for Python files.
+ """
+ def doOpenFile(*args):
+ for fn in args:
+ flist.open(fn)
+
+ # The command below is a hook in aquatk that is called whenever the app
+ # receives a file open event. The callback can have multiple arguments,
+ # one for every file that should be opened.
+ root.createcommand("::tk::mac::OpenDocument", doOpenFile)
+
+def hideTkConsole(root):
+ root.tk.call('console', 'hide')
+
+def overrideRootMenu(root, flist):
+ """
+ Replace the Tk root menu by something that's more appropriate for
+ IDLE.
+ """
+ # The menu that is attached to the Tk root (".") is also used by AquaTk for
+ # all windows that don't specify a menu of their own. The default menubar
+ # contains a number of menus, none of which are appropriate for IDLE. The
+    # most annoying of those is an 'About Tcl/Tk...' menu in the application
+ # menu.
+ #
+    # This function replaces the default menubar with a mostly empty one; it
+ # should only contain the correct application menu and the window menu.
+ #
+ # Due to a (mis-)feature of TkAqua the user will also see an empty Help
+ # menu.
+    from Tkinter import Menu, Text
+ from EditorWindow import prepstr, get_accelerator
+ import Bindings
+ import WindowList
+ from MultiCall import MultiCallCreator
+
+ menubar = Menu(root)
+ root.configure(menu=menubar)
+ menudict = {}
+
+ menudict['windows'] = menu = Menu(menubar, name='windows')
+ menubar.add_cascade(label='Window', menu=menu, underline=0)
+
+ def postwindowsmenu(menu=menu):
+ end = menu.index('end')
+ if end is None:
+ end = -1
+
+ if end > 0:
+ menu.delete(0, end)
+ WindowList.add_windows_to_menu(menu)
+ WindowList.register_callback(postwindowsmenu)
+
+ menudict['application'] = menu = Menu(menubar, name='apple')
+ menubar.add_cascade(label='IDLE', menu=menu)
+
+ def about_dialog(event=None):
+ import aboutDialog
+ aboutDialog.AboutDialog(root, 'About IDLE')
+
+ def config_dialog(event=None):
+ import configDialog
+ configDialog.ConfigDialog(root, 'Settings')
+
+ root.bind('<<about-idle>>', about_dialog)
+ root.bind('<<open-config-dialog>>', config_dialog)
+ if flist:
+ root.bind('<<close-all-windows>>', flist.close_all_callback)
+
+ for mname, entrylist in Bindings.menudefs:
+ menu = menudict.get(mname)
+ if not menu:
+ continue
+ for entry in entrylist:
+ if not entry:
+ menu.add_separator()
+ else:
+ label, eventname = entry
+ underline, label = prepstr(label)
+ accelerator = get_accelerator(Bindings.default_keydefs,
+ eventname)
+ def command(text=root, eventname=eventname):
+ text.event_generate(eventname)
+ menu.add_command(label=label, underline=underline,
+ command=command, accelerator=accelerator)
+
+
+def setupApp(root, flist):
+ """
+ Perform setup for the OSX application bundle.
+ """
+ if not runningAsOSXApp(): return
+
+ hideTkConsole(root)
+ overrideRootMenu(root, flist)
+ addOpenEventSupport(root, flist)
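A minimal usage sketch, not part of the patch, showing how an application embedding Tk might call the new module; passing None for flist is an assumption here and simply skips the close-all-windows binding.

    import Tkinter
    import macosxSupport          # the new module above; assumes idlelib is importable

    root = Tkinter.Tk()
    macosxSupport.setupApp(root, None)   # no-op unless running from IDLE.app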
diff --git a/Lib/inspect.py b/Lib/inspect.py
index bf7f006..0b498b5 100644
--- a/Lib/inspect.py
+++ b/Lib/inspect.py
@@ -89,6 +89,40 @@ def isdatadescriptor(object):
is not guaranteed."""
return (hasattr(object, "__set__") and hasattr(object, "__get__"))
+if hasattr(types, 'MemberDescriptorType'):
+ # CPython and equivalent
+ def ismemberdescriptor(object):
+ """Return true if the object is a member descriptor.
+
+ Member descriptors are specialized descriptors defined in extension
+ modules."""
+ return isinstance(object, types.MemberDescriptorType)
+else:
+ # Other implementations
+ def ismemberdescriptor(object):
+ """Return true if the object is a member descriptor.
+
+ Member descriptors are specialized descriptors defined in extension
+ modules."""
+ return False
+
+if hasattr(types, 'GetSetDescriptorType'):
+ # CPython and equivalent
+ def isgetsetdescriptor(object):
+ """Return true if the object is a getset descriptor.
+
+ getset descriptors are specialized descriptors defined in extension
+ modules."""
+ return isinstance(object, types.GetSetDescriptorType)
+else:
+ # Other implementations
+ def isgetsetdescriptor(object):
+ """Return true if the object is a getset descriptor.
+
+ getset descriptors are specialized descriptors defined in extension
+ modules."""
+ return False
+
def isfunction(object):
"""Return true if the object is a user-defined function.
@@ -355,40 +389,38 @@ def getsourcefile(object):
return None
if os.path.exists(filename):
return filename
- # Ugly but necessary - '<stdin>' and '<string>' mean that getmodule()
- # would infinitely recurse, because they're not real files nor loadable
- # Note that this means that writing a PEP 302 loader that uses '<'
- # at the start of a filename is now not a good idea. :(
- if filename[:1]!='<' and hasattr(getmodule(object), '__loader__'):
+ # only return a non-existent filename if the module has a PEP 302 loader
+ if hasattr(getmodule(object, filename), '__loader__'):
return filename
-def getabsfile(object):
+def getabsfile(object, _filename=None):
"""Return an absolute path to the source or compiled file for an object.
The idea is for each object to have a unique origin, so this routine
normalizes the result as much as possible."""
- return os.path.normcase(
- os.path.abspath(getsourcefile(object) or getfile(object)))
+ if _filename is None:
+ _filename = getsourcefile(object) or getfile(object)
+ return os.path.normcase(os.path.abspath(_filename))
modulesbyfile = {}
-def getmodule(object):
+def getmodule(object, _filename=None):
"""Return the module an object was defined in, or None if not found."""
if ismodule(object):
return object
if hasattr(object, '__module__'):
return sys.modules.get(object.__module__)
try:
- file = getabsfile(object)
+ file = getabsfile(object, _filename)
except TypeError:
return None
if file in modulesbyfile:
return sys.modules.get(modulesbyfile[file])
for module in sys.modules.values():
if ismodule(module) and hasattr(module, '__file__'):
- modulesbyfile[
- os.path.realpath(
- getabsfile(module))] = module.__name__
+ f = getabsfile(module)
+ modulesbyfile[f] = modulesbyfile[
+ os.path.realpath(f)] = module.__name__
if file in modulesbyfile:
return sys.modules.get(modulesbyfile[file])
main = sys.modules['__main__']
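A quick check of the two new predicates; the Slotted class is invented for the example, and the True results assume CPython (other implementations fall back to the versions that always return False).

    import inspect

    class Slotted(object):
        __slots__ = ['x']

    print inspect.ismemberdescriptor(Slotted.__dict__['x'])      # True on CPython
    print inspect.isgetsetdescriptor(type.__dict__['__dict__'])  # True on CPython
    print inspect.isgetsetdescriptor(Slotted.__dict__['x'])      # False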
diff --git a/Lib/lib-tk/Tkinter.py b/Lib/lib-tk/Tkinter.py
index 0ba954e..b248031 100644
--- a/Lib/lib-tk/Tkinter.py
+++ b/Lib/lib-tk/Tkinter.py
@@ -168,18 +168,30 @@ class Variable:
Subclasses StringVar, IntVar, DoubleVar, BooleanVar are specializations
that constrain the type of the value returned from get()."""
_default = ""
- def __init__(self, master=None):
- """Construct a variable with an optional MASTER as master widget.
- The variable is named PY_VAR_number in Tcl.
+ def __init__(self, master=None, value=None, name=None):
+ """Construct a variable
+
+ MASTER can be given as master widget.
+ VALUE is an optional value (defaults to "")
+ NAME is an optional Tcl name (defaults to PY_VARnum).
+
+ If NAME matches an existing variable and VALUE is omitted
+ then the existing value is retained.
"""
global _varnum
if not master:
master = _default_root
self._master = master
self._tk = master.tk
- self._name = 'PY_VAR' + repr(_varnum)
- _varnum = _varnum + 1
- self.set(self._default)
+ if name:
+ self._name = name
+ else:
+ self._name = 'PY_VAR' + repr(_varnum)
+ _varnum += 1
+ if value != None:
+ self.set(value)
+ elif not self._tk.call("info", "exists", self._name):
+ self.set(self._default)
def __del__(self):
"""Unset the variable in Tcl."""
self._tk.globalunsetvar(self._name)
@@ -217,15 +229,29 @@ class Variable:
"""Return all trace callback information."""
return map(self._tk.split, self._tk.splitlist(
self._tk.call("trace", "vinfo", self._name)))
+ def __eq__(self, other):
+ """Comparison for equality (==).
+
+ Note: if the Variable's master matters to behavior
+ also compare self._master == other._master
+ """
+ return self.__class__.__name__ == other.__class__.__name__ \
+ and self._name == other._name
class StringVar(Variable):
"""Value holder for strings variables."""
_default = ""
- def __init__(self, master=None):
+ def __init__(self, master=None, value=None, name=None):
"""Construct a string variable.
- MASTER can be given as master widget."""
- Variable.__init__(self, master)
+ MASTER can be given as master widget.
+ VALUE is an optional value (defaults to "")
+ NAME is an optional Tcl name (defaults to PY_VARnum).
+
+ If NAME matches an existing variable and VALUE is omitted
+ then the existing value is retained.
+ """
+ Variable.__init__(self, master, value, name)
def get(self):
"""Return value of variable as string."""
@@ -237,11 +263,17 @@ class StringVar(Variable):
class IntVar(Variable):
"""Value holder for integer variables."""
_default = 0
- def __init__(self, master=None):
+ def __init__(self, master=None, value=None, name=None):
"""Construct an integer variable.
- MASTER can be given as master widget."""
- Variable.__init__(self, master)
+ MASTER can be given as master widget.
+ VALUE is an optional value (defaults to 0)
+ NAME is an optional Tcl name (defaults to PY_VARnum).
+
+ If NAME matches an existing variable and VALUE is omitted
+ then the existing value is retained.
+ """
+ Variable.__init__(self, master, value, name)
def set(self, value):
"""Set the variable to value, converting booleans to integers."""
@@ -256,11 +288,17 @@ class IntVar(Variable):
class DoubleVar(Variable):
"""Value holder for float variables."""
_default = 0.0
- def __init__(self, master=None):
+ def __init__(self, master=None, value=None, name=None):
"""Construct a float variable.
- MASTER can be given as a master widget."""
- Variable.__init__(self, master)
+ MASTER can be given as master widget.
+ VALUE is an optional value (defaults to 0.0)
+ NAME is an optional Tcl name (defaults to PY_VARnum).
+
+ If NAME matches an existing variable and VALUE is omitted
+ then the existing value is retained.
+ """
+ Variable.__init__(self, master, value, name)
def get(self):
"""Return the value of the variable as a float."""
@@ -268,12 +306,18 @@ class DoubleVar(Variable):
class BooleanVar(Variable):
"""Value holder for boolean variables."""
- _default = "false"
- def __init__(self, master=None):
+ _default = False
+ def __init__(self, master=None, value=None, name=None):
"""Construct a boolean variable.
- MASTER can be given as a master widget."""
- Variable.__init__(self, master)
+ MASTER can be given as master widget.
+ VALUE is an optional value (defaults to False)
+ NAME is an optional Tcl name (defaults to PY_VARnum).
+
+ If NAME matches an existing variable and VALUE is omitted
+ then the existing value is retained.
+ """
+ Variable.__init__(self, master, value, name)
def get(self):
"""Return the value of the variable as a bool."""
@@ -1456,10 +1500,19 @@ class Wm:
the group leader of this widget if None is given."""
return self.tk.call('wm', 'group', self._w, pathName)
group = wm_group
- def wm_iconbitmap(self, bitmap=None):
+ def wm_iconbitmap(self, bitmap=None, default=None):
"""Set bitmap for the iconified widget to BITMAP. Return
- the bitmap if None is given."""
- return self.tk.call('wm', 'iconbitmap', self._w, bitmap)
+ the bitmap if None is given.
+
+ Under Windows, the DEFAULT parameter can be used to set the icon
+ for the widget and any descendents that don't have an icon set
+ explicitly. DEFAULT can be the relative path to a .ico file
+        (example: root.iconbitmap(default='myicon.ico')). See Tk
+ documentation for more information."""
+ if default:
+ return self.tk.call('wm', 'iconbitmap', self._w, '-default', default)
+ else:
+ return self.tk.call('wm', 'iconbitmap', self._w, bitmap)
iconbitmap = wm_iconbitmap
def wm_iconify(self):
"""Display widget as icon."""
@@ -1880,9 +1933,9 @@ class BaseWidget(Misc):
def destroy(self):
"""Destroy this and all descendants widgets."""
for c in self.children.values(): c.destroy()
+ self.tk.call('destroy', self._w)
if self.master.children.has_key(self._name):
del self.master.children[self._name]
- self.tk.call('destroy', self._w)
Misc.destroy(self)
def _do(self, name, args=()):
# XXX Obsolete -- better use self.tk.call directly!
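A short sketch of the new Variable keyword arguments and __eq__ (needs a Tk display); the variable name MYVAR is arbitrary.

    import Tkinter

    root = Tkinter.Tk()
    a = Tkinter.StringVar(root, value='spam', name='MYVAR')
    b = Tkinter.StringVar(root, name='MYVAR')   # VALUE omitted: existing value kept
    print b.get()     # spam
    print a == b      # True: same class and same Tcl name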
diff --git a/Lib/lib-tk/tkMessageBox.py b/Lib/lib-tk/tkMessageBox.py
index 25071fe..aff069b 100644
--- a/Lib/lib-tk/tkMessageBox.py
+++ b/Lib/lib-tk/tkMessageBox.py
@@ -63,9 +63,10 @@ class Message(Dialog):
#
# convenience stuff
-def _show(title=None, message=None, icon=None, type=None, **options):
- if icon: options["icon"] = icon
- if type: options["type"] = type
+# Rename _icon and _type options to allow overriding them in options
+def _show(title=None, message=None, _icon=None, _type=None, **options):
+ if _icon and "icon" not in options: options["icon"] = _icon
+ if _type and "type" not in options: options["type"] = _type
if title: options["title"] = title
if message: options["message"] = message
res = Message(**options).show()
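With the internal parameters renamed, a caller can now pass icon or type through **options without a duplicate-keyword error; a hypothetical call (needs a display):

    import Tkinter, tkMessageBox

    root = Tkinter.Tk()
    root.withdraw()
    tkMessageBox.showinfo('Saved', 'File written', icon=tkMessageBox.WARNING)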
diff --git a/Lib/lib-tk/turtle.py b/Lib/lib-tk/turtle.py
index d68e405..01a55b1 100644
--- a/Lib/lib-tk/turtle.py
+++ b/Lib/lib-tk/turtle.py
@@ -30,6 +30,7 @@ class RawPen:
self._tracing = 1
self._arrow = 0
self._delay = 10 # default delay for drawing
+ self._angle = 0.0
self.degrees()
self.reset()
@@ -39,6 +40,10 @@ class RawPen:
Example:
>>> turtle.degrees()
"""
+ # Don't try to change _angle if it is 0, because
+ # _fullcircle might not be set, yet
+ if self._angle:
+ self._angle = (self._angle / self._fullcircle) * fullcircle
self._fullcircle = fullcircle
self._invradian = pi / (fullcircle * 0.5)
@@ -81,7 +86,6 @@ class RawPen:
self._color = "black"
self._filling = 0
self._path = []
- self._tofill = []
self.clear()
canvas._root().tkraise()
@@ -301,19 +305,15 @@ class RawPen:
{'fill': self._color,
'smooth': smooth})
self._items.append(item)
- if self._tofill:
- for item in self._tofill:
- self._canvas.itemconfigure(item, fill=self._color)
- self._items.append(item)
self._path = []
- self._tofill = []
self._filling = flag
if flag:
self._path.append(self._position)
- self.forward(0)
def begin_fill(self):
""" Called just before drawing a shape to be filled.
+ Must eventually be followed by a corresponding end_fill() call.
+ Otherwise it will be ignored.
Example:
>>> turtle.begin_fill()
@@ -326,7 +326,8 @@ class RawPen:
>>> turtle.forward(100)
>>> turtle.end_fill()
"""
- self.fill(1)
+ self._path = [self._position]
+ self._filling = 1
def end_fill(self):
""" Called after drawing a shape to be filled.
@@ -344,7 +345,7 @@ class RawPen:
"""
self.fill(0)
- def circle(self, radius, extent=None):
+ def circle(self, radius, extent = None):
""" Draw a circle with given radius.
The center is radius units left of the turtle; extent
determines which part of the circle is drawn. If not given,
@@ -361,52 +362,18 @@ class RawPen:
"""
if extent is None:
extent = self._fullcircle
- x0, y0 = self._position
- xc = x0 - radius * sin(self._angle * self._invradian)
- yc = y0 - radius * cos(self._angle * self._invradian)
- if radius >= 0.0:
- start = self._angle - (self._fullcircle / 4.0)
- else:
- start = self._angle + (self._fullcircle / 4.0)
- extent = -extent
- if self._filling:
- if abs(extent) >= self._fullcircle:
- item = self._canvas.create_oval(xc-radius, yc-radius,
- xc+radius, yc+radius,
- width=self._width,
- outline="")
- self._tofill.append(item)
- item = self._canvas.create_arc(xc-radius, yc-radius,
- xc+radius, yc+radius,
- style="chord",
- start=start,
- extent=extent,
- width=self._width,
- outline="")
- self._tofill.append(item)
- if self._drawing:
- if abs(extent) >= self._fullcircle:
- item = self._canvas.create_oval(xc-radius, yc-radius,
- xc+radius, yc+radius,
- width=self._width,
- outline=self._color)
- self._items.append(item)
- item = self._canvas.create_arc(xc-radius, yc-radius,
- xc+radius, yc+radius,
- style="arc",
- start=start,
- extent=extent,
- width=self._width,
- outline=self._color)
- self._items.append(item)
- angle = start + extent
- x1 = xc + abs(radius) * cos(angle * self._invradian)
- y1 = yc - abs(radius) * sin(angle * self._invradian)
- self._angle = (self._angle + extent) % self._fullcircle
- self._position = x1, y1
- if self._filling:
- self._path.append(self._position)
- self._draw_turtle()
+ frac = abs(extent)/self._fullcircle
+ steps = 1+int(min(11+abs(radius)/6.0, 59.0)*frac)
+ w = 1.0 * extent / steps
+ w2 = 0.5 * w
+ l = 2.0 * radius * sin(w2*self._invradian)
+ if radius < 0:
+ l, w, w2 = -l, -w, -w2
+ self.left(w2)
+ for i in range(steps):
+ self.forward(l)
+ self.left(w)
+ self.right(w2)
def heading(self):
""" Return the turtle's current heading.
@@ -634,6 +601,7 @@ class RawPen:
def _draw_turtle(self, position=[]):
if not self._tracing:
+ self._canvas.update()
return
if position == []:
position = self._position
@@ -678,7 +646,7 @@ class Pen(RawPen):
_canvas = Tkinter.Canvas(_root, background="white")
_canvas.pack(expand=1, fill="both")
- setup(width=_width, height= _height, startx=_startx, starty=_starty)
+ setup(width=_width, height= _height, startx=_startx, starty=_starty)
RawPen.__init__(self, _canvas)
@@ -720,7 +688,7 @@ def color(*args): _getpen().color(*args)
def write(arg, move=0): _getpen().write(arg, move)
def fill(flag): _getpen().fill(flag)
def begin_fill(): _getpen().begin_fill()
-def end_fill(): _getpen.end_fill()
+def end_fill(): _getpen().end_fill()
def circle(radius, extent=None): _getpen().circle(radius, extent)
def goto(*args): _getpen().goto(*args)
def heading(): return _getpen().heading()
@@ -745,7 +713,7 @@ for methodname in dir(RawPen):
def setup(**geometry):
""" Sets the size and position of the main window.
- Keywords are width, height, startx and starty
+ Keywords are width, height, startx and starty:
width: either a size in pixels or a fraction of the screen.
Default is 50% of screen.
@@ -820,7 +788,7 @@ def setup(**geometry):
_root.geometry("%dx%d+%d+%d" % (_width, _height, _startx, _starty))
def title(title):
- """ set the window title.
+ """Set the window title.
By default this is set to 'Turtle Graphics'
@@ -929,15 +897,30 @@ def demo2():
speed(speeds[sp])
color(0.25,0,0.75)
fill(0)
- color("green")
- left(130)
+ # draw and fill a concave shape
+ left(120)
up()
- forward(90)
+ forward(70)
+ right(30)
+ down()
color("red")
- speed('fastest')
+ speed("fastest")
+ fill(1)
+ for i in range(4):
+ circle(50,90)
+ right(90)
+ forward(30)
+ right(90)
+ color("yellow")
+ fill(0)
+ left(90)
+ up()
+ forward(30)
down();
+ color("red")
+
# create a second turtle and make the original pursue and catch it
turtle=Turtle()
turtle.reset()
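Because circle() is now drawn as a polyline of forward()/left() steps, it takes part in fill paths like any other move; a small check (opens a turtle window, geometry chosen only for illustration):

    import turtle

    turtle.begin_fill()
    turtle.circle(60, 180)   # half circle, approximated by short segments
    turtle.left(90)
    turtle.forward(120)      # straight edge back to the starting point
    turtle.end_fill()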
diff --git a/Lib/linecache.py b/Lib/linecache.py
index f49695a..4838625 100644
--- a/Lib/linecache.py
+++ b/Lib/linecache.py
@@ -94,6 +94,10 @@ def updatecache(filename, module_globals=None):
except (ImportError, IOError):
pass
else:
+ if data is None:
+ # No luck, the PEP302 loader cannot find the source
+ # for this module.
+ return []
cache[filename] = (
len(data), None,
[line+'\n' for line in data.splitlines()], fullname
diff --git a/Lib/logging/config.py b/Lib/logging/config.py
index 457ec5c..1d5f8c4 100644
--- a/Lib/logging/config.py
+++ b/Lib/logging/config.py
@@ -79,6 +79,7 @@ def fileConfig(fname, defaults=None):
logging._acquireLock()
try:
logging._handlers.clear()
+ logging._handlerList = []
# Handlers add themselves to logging._handlers
handlers = _install_handlers(cp, formatters)
_install_loggers(cp, handlers)
diff --git a/Lib/logging/handlers.py b/Lib/logging/handlers.py
index e0da254..3552950 100644
--- a/Lib/logging/handlers.py
+++ b/Lib/logging/handlers.py
@@ -128,12 +128,7 @@ class RotatingFileHandler(BaseRotatingHandler):
dfn = self.baseFilename + ".1"
if os.path.exists(dfn):
os.remove(dfn)
- try:
- os.rename(self.baseFilename, dfn)
- except (KeyboardInterrupt, SystemExit):
- raise
- except:
- self.handleError(record)
+ os.rename(self.baseFilename, dfn)
#print "%s -> %s" % (self.baseFilename, dfn)
if self.encoding:
self.stream = codecs.open(self.baseFilename, 'w', self.encoding)
@@ -273,12 +268,7 @@ class TimedRotatingFileHandler(BaseRotatingHandler):
dfn = self.baseFilename + "." + time.strftime(self.suffix, timeTuple)
if os.path.exists(dfn):
os.remove(dfn)
- try:
- os.rename(self.baseFilename, dfn)
- except (KeyboardInterrupt, SystemExit):
- raise
- except:
- self.handleError(record)
+ os.rename(self.baseFilename, dfn)
if self.backupCount > 0:
# find the oldest log file and delete it
s = glob.glob(self.baseFilename + ".20*")
@@ -572,6 +562,18 @@ class SysLogHandler(logging.Handler):
"local7": LOG_LOCAL7,
}
+ #The map below appears to be trivially lowercasing the key. However,
+ #there's more to it than meets the eye - in some locales, lowercasing
+ #gives unexpected results. See SF #1524081: in the Turkish locale,
+ #"INFO".lower() != "info"
+ priority_map = {
+ "DEBUG" : "debug",
+ "INFO" : "info",
+ "WARNING" : "warning",
+ "ERROR" : "error",
+ "CRITICAL" : "critical"
+ }
+
def __init__(self, address=('localhost', SYSLOG_UDP_PORT), facility=LOG_USER):
"""
Initialize a handler.
@@ -608,7 +610,7 @@ class SysLogHandler(logging.Handler):
# necessary.
log_format_string = '<%d>%s\000'
- def encodePriority (self, facility, priority):
+ def encodePriority(self, facility, priority):
"""
Encode the facility and priority. You can pass in strings or
integers - if strings are passed, the facility_names and
@@ -629,6 +631,16 @@ class SysLogHandler(logging.Handler):
self.socket.close()
logging.Handler.close(self)
+ def mapPriority(self, levelName):
+ """
+ Map a logging level name to a key in the priority_names map.
+ This is useful in two scenarios: when custom levels are being
+ used, and in the case where you can't do a straightforward
+ mapping by lowercasing the logging level name because of locale-
+ specific issues (see SF #1524081).
+ """
+ return self.priority_map.get(levelName, "warning")
+
def emit(self, record):
"""
Emit a record.
@@ -643,8 +655,8 @@ class SysLogHandler(logging.Handler):
"""
msg = self.log_format_string % (
self.encodePriority(self.facility,
- string.lower(record.levelname)),
- msg)
+ self.mapPriority(record.levelname)),
+ msg)
try:
if self.unixsocket:
try:
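A tiny sketch of the new mapPriority() fallback; the level name VERBOSE is made up and only demonstrates the default mapping.

    from logging.handlers import SysLogHandler

    h = SysLogHandler()              # UDP socket to localhost:514 by default
    print h.mapPriority('INFO')      # 'info'
    print h.mapPriority('VERBOSE')   # unknown names fall back to 'warning'
    h.close()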
diff --git a/Lib/mailbox.py b/Lib/mailbox.py
index bb115e1..b72128b 100755
--- a/Lib/mailbox.py
+++ b/Lib/mailbox.py
@@ -15,7 +15,10 @@ import email.Generator
import rfc822
import StringIO
try:
- import fnctl
+ if sys.platform == 'os2emx':
+ # OS/2 EMX fcntl() not adequate
+ raise ImportError
+ import fcntl
except ImportError:
fcntl = None
@@ -565,7 +568,8 @@ class _singlefileMailbox(Mailbox):
try:
os.rename(new_file.name, self._path)
except OSError, e:
- if e.errno == errno.EEXIST:
+ if e.errno == errno.EEXIST or \
+ (os.name == 'os2' and e.errno == errno.EACCES):
os.remove(self._path)
os.rename(new_file.name, self._path)
else:
@@ -1030,6 +1034,9 @@ class MH(Mailbox):
if hasattr(os, 'link'):
os.link(os.path.join(self._path, str(key)),
os.path.join(self._path, str(prev + 1)))
+ if sys.platform == 'os2emx':
+ # cannot unlink an open file on OS/2
+ f.close()
os.unlink(os.path.join(self._path, str(key)))
else:
f.close()
@@ -1798,26 +1805,18 @@ class _PartialFile(_ProxyFile):
def _lock_file(f, dotlock=True):
- """Lock file f using lockf, flock, and dot locking."""
+ """Lock file f using lockf and dot locking."""
dotlock_done = False
try:
if fcntl:
try:
fcntl.lockf(f, fcntl.LOCK_EX | fcntl.LOCK_NB)
except IOError, e:
- if e.errno == errno.EAGAIN:
+ if e.errno in (errno.EAGAIN, errno.EACCES):
raise ExternalClashError('lockf: lock unavailable: %s' %
f.name)
else:
raise
- try:
- fcntl.flock(f, fcntl.LOCK_EX | fcntl.LOCK_NB)
- except IOError, e:
- if e.errno == errno.EWOULDBLOCK:
- raise ExternalClashError('flock: lock unavailable: %s' %
- f.name)
- else:
- raise
if dotlock:
try:
pre_lock = _create_temporary(f.name + '.lock')
@@ -1836,7 +1835,8 @@ def _lock_file(f, dotlock=True):
os.rename(pre_lock.name, f.name + '.lock')
dotlock_done = True
except OSError, e:
- if e.errno == errno.EEXIST:
+ if e.errno == errno.EEXIST or \
+ (os.name == 'os2' and e.errno == errno.EACCES):
os.remove(pre_lock.name)
raise ExternalClashError('dot lock unavailable: %s' %
f.name)
@@ -1845,16 +1845,14 @@ def _lock_file(f, dotlock=True):
except:
if fcntl:
fcntl.lockf(f, fcntl.LOCK_UN)
- fcntl.flock(f, fcntl.LOCK_UN)
if dotlock_done:
os.remove(f.name + '.lock')
raise
def _unlock_file(f):
- """Unlock file f using lockf, flock, and dot locking."""
+ """Unlock file f using lockf and dot locking."""
if fcntl:
fcntl.lockf(f, fcntl.LOCK_UN)
- fcntl.flock(f, fcntl.LOCK_UN)
if os.path.exists(f.name + '.lock'):
os.remove(f.name + '.lock')
diff --git a/Lib/mimetypes.py b/Lib/mimetypes.py
index bee2ff7..b0d2f18 100644
--- a/Lib/mimetypes.py
+++ b/Lib/mimetypes.py
@@ -33,6 +33,10 @@ __all__ = [
knownfiles = [
"/etc/mime.types",
+ "/etc/httpd/mime.types", # Mac OS X
+ "/etc/httpd/conf/mime.types", # Apache
+ "/etc/apache/mime.types", # Apache 1
+ "/etc/apache2/mime.types", # Apache 2
"/usr/local/etc/httpd/conf/mime.types",
"/usr/local/lib/netscape/mime.types",
"/usr/local/etc/httpd/conf/mime.types", # Apache 1.2
diff --git a/Lib/msilib/__init__.py b/Lib/msilib/__init__.py
index 0881409..4be82b0 100644
--- a/Lib/msilib/__init__.py
+++ b/Lib/msilib/__init__.py
@@ -187,7 +187,7 @@ class CAB:
self.filenames = sets.Set()
self.index = 0
- def gen_id(self, dir, file):
+ def gen_id(self, file):
logical = _logical = make_id(file)
pos = 1
while logical in self.filenames:
@@ -196,9 +196,11 @@ class CAB:
self.filenames.add(logical)
return logical
- def append(self, full, logical):
+ def append(self, full, file, logical):
if os.path.isdir(full):
return
+ if not logical:
+ logical = self.gen_id(file)
self.index += 1
self.files.append((full, logical))
return self.index, logical
@@ -328,7 +330,7 @@ class Directory:
logical = self.keyfiles[file]
else:
logical = None
- sequence, logical = self.cab.append(absolute, logical)
+ sequence, logical = self.cab.append(absolute, file, logical)
assert logical not in self.ids
self.ids.add(logical)
short = self.make_short(file)
@@ -403,7 +405,7 @@ class Control:
[(self.dlg.name, self.name, event, argument,
condition, ordering)])
- def mapping(self, mapping, attribute):
+ def mapping(self, event, attribute):
add_data(self.dlg.db, "EventMapping",
[(self.dlg.name, self.name, event, attribute)])
diff --git a/Lib/optparse.py b/Lib/optparse.py
index 6b8f5d1..62d2f7e 100644
--- a/Lib/optparse.py
+++ b/Lib/optparse.py
@@ -16,7 +16,7 @@ For support, use the optik-users@lists.sourceforge.net mailing list
# Python developers: please do not make changes to this file, since
# it is automatically generated from the Optik source code.
-__version__ = "1.5.1"
+__version__ = "1.5.3"
__all__ = ['Option',
'SUPPRESS_HELP',
@@ -75,9 +75,9 @@ def _repr(self):
# This file was generated from:
-# Id: option_parser.py 509 2006-04-20 00:58:24Z gward
-# Id: option.py 509 2006-04-20 00:58:24Z gward
-# Id: help.py 509 2006-04-20 00:58:24Z gward
+# Id: option_parser.py 527 2006-07-23 15:21:30Z greg
+# Id: option.py 522 2006-06-11 16:22:03Z gward
+# Id: help.py 527 2006-07-23 15:21:30Z greg
# Id: errors.py 509 2006-04-20 00:58:24Z gward
try:
@@ -1629,6 +1629,13 @@ class OptionParser (OptionContainer):
result.append(self.format_epilog(formatter))
return "".join(result)
+ # used by test suite
+ def _get_encoding(self, file):
+ encoding = getattr(file, "encoding", None)
+ if not encoding:
+ encoding = sys.getdefaultencoding()
+ return encoding
+
def print_help(self, file=None):
"""print_help(file : file = stdout)
@@ -1637,7 +1644,8 @@ class OptionParser (OptionContainer):
"""
if file is None:
file = sys.stdout
- file.write(self.format_help())
+ encoding = self._get_encoding(file)
+ file.write(self.format_help().encode(encoding, "replace"))
# class OptionParser
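A sketch of the effect of the encoding change; the option and its help text are invented, and the exact output depends on the encoding of the stream print_help() writes to.

    import optparse

    parser = optparse.OptionParser()
    parser.add_option('--cafe', help=u'serve caf\xe9')
    parser.print_help()   # non-ASCII help is encoded with errors='replace'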
diff --git a/Lib/os.py b/Lib/os.py
index 31002ac..2d1b29b 100644
--- a/Lib/os.py
+++ b/Lib/os.py
@@ -723,7 +723,7 @@ if not _exists("urandom"):
"""
try:
_urandomfd = open("/dev/urandom", O_RDONLY)
- except:
+ except (OSError, IOError):
raise NotImplementedError("/dev/urandom (or equivalent) not found")
bytes = ""
while len(bytes) < n:
diff --git a/Lib/pdb.py b/Lib/pdb.py
index 94f61f7..06181e7 100755
--- a/Lib/pdb.py
+++ b/Lib/pdb.py
@@ -235,7 +235,8 @@ class Pdb(bdb.Bdb, cmd.Cmd):
"""Interpret the argument as though it had been typed in response
to the prompt.
- Checks wether this line is typed in the normal prompt or in a breakpoint command list definition
+ Checks whether this line is typed at the normal prompt or in
+ a breakpoint command list definition.
"""
if not self.commands_defining:
return cmd.Cmd.onecmd(self, line)
diff --git a/Lib/pkgutil.py b/Lib/pkgutil.py
index 26c797f..37738e4 100644
--- a/Lib/pkgutil.py
+++ b/Lib/pkgutil.py
@@ -69,7 +69,33 @@ def simplegeneric(func):
def walk_packages(path=None, prefix='', onerror=None):
- """Yield submodule names+loaders recursively, for path or sys.path"""
+ """Yields (module_loader, name, ispkg) for all modules recursively
+ on path, or, if path is None, all accessible modules.
+
+ 'path' should be either None or a list of paths to look for
+ modules in.
+
+    'prefix' is a string to be prepended to every module name
+    in the output.
+
+ Note that this function must import all *packages* (NOT all
+ modules!) on the given path, in order to access the __path__
+ attribute to find submodules.
+
+ 'onerror' is a function which gets called with one argument (the
+ name of the package which was being imported) if any exception
+ occurs while trying to import a package. If no onerror function is
+ supplied, ImportErrors are caught and ignored, while all other
+ exceptions are propagated, terminating the search.
+
+ Examples:
+
+ # list all modules python can access
+ walk_packages()
+
+ # list all submodules of ctypes
+ walk_packages(ctypes.__path__, ctypes.__name__+'.')
+ """
def seen(p, m={}):
if p in m:
@@ -84,19 +110,33 @@ def walk_packages(path=None, prefix='', onerror=None):
__import__(name)
except ImportError:
if onerror is not None:
- onerror()
+ onerror(name)
+ except Exception:
+ if onerror is not None:
+ onerror(name)
+ else:
+ raise
else:
path = getattr(sys.modules[name], '__path__', None) or []
# don't traverse path items we've seen before
path = [p for p in path if not seen(p)]
- for item in walk_packages(path, name+'.'):
+ for item in walk_packages(path, name+'.', onerror):
yield item
def iter_modules(path=None, prefix=''):
- """Yield submodule names+loaders for path or sys.path"""
+ """Yields (module_loader, name, ispkg) for all submodules on path,
+ or, if path is None, all top-level modules on sys.path.
+
+ 'path' should be either None or a list of paths to look for
+ modules in.
+
+    'prefix' is a string to be prepended to every module name
+    in the output.
+ """
+
if path is None:
importers = iter_importers()
else:
@@ -208,6 +248,7 @@ class ImpLoader:
def _reopen(self):
if self.file and self.file.closed:
+ mod_type = self.etc[2]
if mod_type==imp.PY_SOURCE:
self.file = open(self.filename, 'rU')
elif mod_type in (imp.PY_COMPILED, imp.C_EXTENSION):
@@ -340,9 +381,7 @@ def get_importer(path_item):
importer = None
sys.path_importer_cache.setdefault(path_item, importer)
- # The boolean values are used for caching valid and invalid
- # file paths for the built-in import machinery
- if importer in (None, True, False):
+ if importer is None:
try:
importer = ImpImporter(path_item)
except ImportError:
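A usage sketch matching the new docstring; logging is used here only because it is a package shipped with the standard library, and the onerror callback now receives the failing package name.

    import pkgutil
    import logging

    def onerror(name):
        print 'skipping %s (import failed)' % name

    for loader, name, ispkg in pkgutil.walk_packages(logging.__path__,
                                                     logging.__name__ + '.',
                                                     onerror):
        print name, ispkg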
diff --git a/Lib/popen2.py b/Lib/popen2.py
index b966d4c..694979e 100644
--- a/Lib/popen2.py
+++ b/Lib/popen2.py
@@ -72,14 +72,14 @@ class Popen3:
# In case the child hasn't been waited on, check if it's done.
self.poll(_deadstate=sys.maxint)
if self.sts < 0:
- if _active:
+ if _active is not None:
# Child is still running, keep us alive until we can wait on it.
_active.append(self)
def _run_child(self, cmd):
if isinstance(cmd, basestring):
cmd = ['/bin/sh', '-c', cmd]
- for i in range(3, MAXFD):
+ for i in xrange(3, MAXFD):
try:
os.close(i)
except OSError:
diff --git a/Lib/pstats.py b/Lib/pstats.py
index c3a8828..4e94b0c 100644
--- a/Lib/pstats.py
+++ b/Lib/pstats.py
@@ -548,8 +548,10 @@ if __name__ == '__main__':
self.prompt = "% "
if profile is not None:
self.stats = Stats(profile)
+ self.stream = self.stats.stream
else:
self.stats = None
+ self.stream = sys.stdout
def generic(self, fn, line):
args = line.split()
@@ -667,14 +669,15 @@ if __name__ == '__main__':
return None
import sys
- print >> self.stream, "Welcome to the profile statistics browser."
if len(sys.argv) > 1:
initprofile = sys.argv[1]
else:
initprofile = None
try:
- ProfileBrowser(initprofile).cmdloop()
- print >> self.stream, "Goodbye."
+ browser = ProfileBrowser(initprofile)
+ print >> browser.stream, "Welcome to the profile statistics browser."
+ browser.cmdloop()
+ print >> browser.stream, "Goodbye."
except KeyboardInterrupt:
pass
diff --git a/Lib/pydoc.py b/Lib/pydoc.py
index cf38630..29c6cc4 100755
--- a/Lib/pydoc.py
+++ b/Lib/pydoc.py
@@ -318,6 +318,8 @@ class Doc:
# identifies something in a way that pydoc itself has issues handling;
# think 'super' and how it is a descriptor (which raises the exception
# by lacking a __name__ attribute) and an instance.
+ if inspect.isgetsetdescriptor(object): return self.docdata(*args)
+ if inspect.ismemberdescriptor(object): return self.docdata(*args)
try:
if inspect.ismodule(object): return self.docmodule(*args)
if inspect.isclass(object): return self.docclass(*args)
@@ -333,7 +335,7 @@ class Doc:
name and ' ' + repr(name), type(object).__name__)
raise TypeError, message
- docmodule = docclass = docroutine = docother = fail
+ docmodule = docclass = docroutine = docother = docproperty = docdata = fail
def getdocloc(self, object):
"""Return the location of module docs or None"""
@@ -915,6 +917,10 @@ class HTMLDoc(Doc):
lhs = name and '<strong>%s</strong> = ' % name or ''
return lhs + self.repr(object)
+ def docdata(self, object, name=None, mod=None, cl=None):
+ """Produce html documentation for a data descriptor."""
+ return self._docdescriptor(name, object, mod)
+
def index(self, dir, shadowed=None):
"""Generate an HTML index for a directory of modules."""
modpkgs = []
@@ -1268,6 +1274,10 @@ class TextDoc(Doc):
"""Produce text documentation for a property."""
return self._docdescriptor(name, object, mod)
+ def docdata(self, object, name=None, mod=None, cl=None):
+ """Produce text documentation for a data descriptor."""
+ return self._docdescriptor(name, object, mod)
+
def docother(self, object, name=None, mod=None, parent=None, maxlen=None, doc=None):
"""Produce text documentation for a data object."""
repr = self.repr(object)
@@ -1397,6 +1407,14 @@ def describe(thing):
return 'module ' + thing.__name__
if inspect.isbuiltin(thing):
return 'built-in function ' + thing.__name__
+ if inspect.isgetsetdescriptor(thing):
+ return 'getset descriptor %s.%s.%s' % (
+ thing.__objclass__.__module__, thing.__objclass__.__name__,
+ thing.__name__)
+ if inspect.ismemberdescriptor(thing):
+ return 'member descriptor %s.%s.%s' % (
+ thing.__objclass__.__module__, thing.__objclass__.__name__,
+ thing.__name__)
if inspect.isclass(thing):
return 'class ' + thing.__name__
if inspect.isfunction(thing):
@@ -1453,6 +1471,8 @@ def doc(thing, title='Python Library Documentation: %s', forceload=0):
if not (inspect.ismodule(object) or
inspect.isclass(object) or
inspect.isroutine(object) or
+ inspect.isgetsetdescriptor(object) or
+ inspect.ismemberdescriptor(object) or
isinstance(object, property)):
# If the passed object is a piece of data or an instance,
# document its available methods instead of its value.
diff --git a/Lib/random.py b/Lib/random.py
index 465f477..ae2d434 100644
--- a/Lib/random.py
+++ b/Lib/random.py
@@ -29,13 +29,12 @@
General notes on the underlying Mersenne Twister core generator:
* The period is 2**19937-1.
-* It is one of the most extensively tested generators in existence
-* Without a direct way to compute N steps forward, the
- semantics of jumpahead(n) are weakened to simply jump
- to another distant state and rely on the large period
- to avoid overlapping sequences.
-* The random() method is implemented in C, executes in
- a single Python step, and is, therefore, threadsafe.
+* It is one of the most extensively tested generators in existence.
+* Without a direct way to compute N steps forward, the semantics of
+ jumpahead(n) are weakened to simply jump to another distant state and rely
+ on the large period to avoid overlapping sequences.
+* The random() method is implemented in C, executes in a single Python step,
+ and is, therefore, threadsafe.
"""
@@ -253,11 +252,6 @@ class Random(_random.Random):
Optional arg random is a 0-argument function returning a random
float in [0.0, 1.0); by default, the standard random.random.
-
- Note that for even rather small len(x), the total number of
- permutations of x is larger than the period of most random number
- generators; this implies that "most" permutations of a long
- sequence can never be generated.
"""
if random is None:
diff --git a/Lib/sgmllib.py b/Lib/sgmllib.py
index 3e85a91..3020d11 100644
--- a/Lib/sgmllib.py
+++ b/Lib/sgmllib.py
@@ -29,11 +29,16 @@ starttagopen = re.compile('<[>a-zA-Z]')
shorttagopen = re.compile('<[a-zA-Z][-.a-zA-Z0-9]*/')
shorttag = re.compile('<([a-zA-Z][-.a-zA-Z0-9]*)/([^/]*)/')
piclose = re.compile('>')
-endbracket = re.compile('[<>]')
+starttag = re.compile(r'<[a-zA-Z][-_.:a-zA-Z0-9]*\s*('
+ r'\s*([a-zA-Z_][-:.a-zA-Z_0-9]*)(\s*=\s*'
+ r'(\'[^\']*\'|"[^"]*"|[-a-zA-Z0-9./,:;+*%?!&$\(\)_#=~@]'
+ r'[][\-a-zA-Z0-9./,:;+*%?!&$\(\)_#=~\'"@]*(?=[\s>/<])))?'
+ r')*\s*/?\s*(?=[<>])')
+endtag = re.compile(r'</?[a-zA-Z][-_.:a-zA-Z0-9]*\s*/?\s*(?=[<>])')
tagfind = re.compile('[a-zA-Z][-_.a-zA-Z0-9]*')
attrfind = re.compile(
r'\s*([a-zA-Z_][-:.a-zA-Z_0-9]*)(\s*=\s*'
- r'(\'[^\']*\'|"[^"]*"|[-a-zA-Z0-9./,:;+*%?!&$\(\)_#=~\'"@]*))?')
+ r'(\'[^\']*\'|"[^"]*"|[][\-a-zA-Z0-9./,:;+*%?!&$\(\)_#=~\'"@]*))?')
class SGMLParseError(RuntimeError):
@@ -53,6 +58,10 @@ class SGMLParseError(RuntimeError):
# self.handle_entityref() with the entity reference as argument.
class SGMLParser(markupbase.ParserBase):
+ # Definition of entities -- derived classes may override
+ entity_or_charref = re.compile('&(?:'
+ '([a-zA-Z][-.a-zA-Z0-9]*)|#([0-9]+)'
+ ')(;?)')
def __init__(self, verbose=0):
"""Initialize and reset this instance."""
@@ -245,11 +254,10 @@ class SGMLParser(markupbase.ParserBase):
self.finish_shorttag(tag, data)
self.__starttag_text = rawdata[start_pos:match.end(1) + 1]
return k
- # XXX The following should skip matching quotes (' or ")
- match = endbracket.search(rawdata, i+1)
+ match = starttag.match(rawdata, i)
if not match:
return -1
- j = match.start(0)
+ j = match.end(0)
# Now parse the data between i+1 and j into a tag and attrs
attrs = []
if rawdata[i:i+2] == '<>':
@@ -274,32 +282,8 @@ class SGMLParser(markupbase.ParserBase):
attrvalue[:1] == '"' == attrvalue[-1:]):
# strip quotes
attrvalue = attrvalue[1:-1]
- l = 0
- new_attrvalue = ''
- while l < len(attrvalue):
- av_match = entityref.match(attrvalue, l)
- if (av_match and av_match.group(1) in self.entitydefs and
- attrvalue[av_match.end(1)] == ';'):
- # only substitute entityrefs ending in ';' since
- # otherwise we may break <a href='?p=x&q=y'>
- # which is very common
- new_attrvalue += self.entitydefs[av_match.group(1)]
- l = av_match.end(0)
- continue
- ch_match = charref.match(attrvalue, l)
- if ch_match:
- try:
- char = chr(int(ch_match.group(1)))
- new_attrvalue += char
- l = ch_match.end(0)
- continue
- except ValueError:
- # invalid character reference, don't substitute
- pass
- # all other cases
- new_attrvalue += attrvalue[l]
- l += 1
- attrvalue = new_attrvalue
+ attrvalue = self.entity_or_charref.sub(
+ self._convert_ref, attrvalue)
attrs.append((attrname.lower(), attrvalue))
k = match.end(0)
if rawdata[j] == '>':
@@ -308,13 +292,24 @@ class SGMLParser(markupbase.ParserBase):
self.finish_starttag(tag, attrs)
return j
+ # Internal -- convert entity or character reference
+ def _convert_ref(self, match):
+ if match.group(2):
+ return self.convert_charref(match.group(2)) or \
+ '&#%s%s' % match.groups()[1:]
+ elif match.group(3):
+ return self.convert_entityref(match.group(1)) or \
+ '&%s;' % match.group(1)
+ else:
+ return '&%s' % match.group(1)
+
# Internal -- parse endtag
def parse_endtag(self, i):
rawdata = self.rawdata
- match = endbracket.search(rawdata, i+1)
+ match = endtag.match(rawdata, i)
if not match:
return -1
- j = match.start(0)
+ j = match.end(0)
tag = rawdata[i+2:j].strip().lower()
if rawdata[j] == '>':
j = j+1
@@ -391,35 +386,51 @@ class SGMLParser(markupbase.ParserBase):
print '*** Unbalanced </' + tag + '>'
print '*** Stack:', self.stack
- def handle_charref(self, name):
- """Handle character reference, no need to override."""
+ def convert_charref(self, name):
+ """Convert character reference, may be overridden."""
try:
n = int(name)
except ValueError:
- self.unknown_charref(name)
return
if not 0 <= n <= 255:
- self.unknown_charref(name)
return
- self.handle_data(chr(n))
+ return self.convert_codepoint(n)
+
+ def convert_codepoint(self, codepoint):
+ return chr(codepoint)
+
+ def handle_charref(self, name):
+ """Handle character reference, no need to override."""
+ replacement = self.convert_charref(name)
+ if replacement is None:
+ self.unknown_charref(name)
+ else:
+ self.handle_data(replacement)
# Definition of entities -- derived classes may override
entitydefs = \
{'lt': '<', 'gt': '>', 'amp': '&', 'quot': '"', 'apos': '\''}
- def handle_entityref(self, name):
- """Handle entity references.
+ def convert_entityref(self, name):
+ """Convert entity references.
- There should be no need to override this method; it can be
- tailored by setting up the self.entitydefs mapping appropriately.
+        As an alternative to overriding this method, one can tailor the
+ results by setting up the self.entitydefs mapping appropriately.
"""
table = self.entitydefs
if name in table:
- self.handle_data(table[name])
+ return table[name]
else:
- self.unknown_entityref(name)
return
+ def handle_entityref(self, name):
+ """Handle entity references, no need to override."""
+ replacement = self.convert_entityref(name)
+ if replacement is None:
+ self.unknown_entityref(name)
+ else:
+            self.handle_data(replacement)
+
# Example -- handle data, should be overridden
def handle_data(self, data):
pass
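A sketch of the new convert_* hooks; the subclass below is hypothetical and returns Unicode for numeric character references instead of the default chr().

    import sgmllib

    class UnicodeRefParser(sgmllib.SGMLParser):
        def convert_codepoint(self, codepoint):
            return unichr(codepoint)
        def handle_data(self, data):
            print repr(data)

    p = UnicodeRefParser()
    p.feed('<p>caf&#233;</p>')   # prints 'caf' and then u'\xe9'
    p.close()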
diff --git a/Lib/shelve.py b/Lib/shelve.py
index 4959c26..7a75445 100644
--- a/Lib/shelve.py
+++ b/Lib/shelve.py
@@ -139,6 +139,9 @@ class Shelf(UserDict.DictMixin):
self.dict = 0
def __del__(self):
+ if not hasattr(self, 'writeback'):
+ # __init__ didn't succeed, so don't bother closing
+ return
self.close()
def sync(self):
diff --git a/Lib/shutil.py b/Lib/shutil.py
index c50184c..c3ff687 100644
--- a/Lib/shutil.py
+++ b/Lib/shutil.py
@@ -127,7 +127,13 @@ def copytree(src, dst, symlinks=False):
# continue with other files
except Error, err:
errors.extend(err.args[0])
- copystat(src, dst)
+ try:
+ copystat(src, dst)
+ except WindowsError:
+ # can't copy file access times on Windows
+ pass
+ except OSError, why:
+ errors.extend((src, dst, str(why)))
if errors:
raise Error, errors
diff --git a/Lib/site.py b/Lib/site.py
index 47eda24..01086b7 100644
--- a/Lib/site.py
+++ b/Lib/site.py
@@ -11,10 +11,11 @@ import, this is no longer necessary (but code that does it still
works).
This will append site-specific paths to the module search path. On
-Unix, it starts with sys.prefix and sys.exec_prefix (if different) and
-appends lib/python<version>/site-packages as well as lib/site-python.
-On other platforms (mainly Mac and Windows), it uses just sys.prefix
-(and sys.exec_prefix, if different, but this is unlikely). The
+Unix (including Mac OSX), it starts with sys.prefix and
+sys.exec_prefix (if different) and appends
+lib/python<version>/site-packages as well as lib/site-python.
+On other platforms (such as Windows), it tries each of the
+prefixes directly, as well as with lib/site-packages appended. The
resulting directories, if they exist, are appended to sys.path, and
also inspected for path configuration files.
diff --git a/Lib/socket.py b/Lib/socket.py
index fa0e663..52fb8e3 100644
--- a/Lib/socket.py
+++ b/Lib/socket.py
@@ -130,35 +130,40 @@ _socketmethods = (
if sys.platform == "riscos":
_socketmethods = _socketmethods + ('sleeptaskw',)
+# All the method names that must be delegated to either the real socket
+# object or the _closedsocket object.
+_delegate_methods = ("recv", "recvfrom", "recv_into", "recvfrom_into",
+ "send", "sendto")
+
class _closedsocket(object):
__slots__ = []
def _dummy(*args):
raise error(EBADF, 'Bad file descriptor')
- send = recv = sendto = recvfrom = __getattr__ = _dummy
+ def close(self):
+ pass
+ # All _delegate_methods must also be initialized here.
+ send = recv = recv_into = sendto = recvfrom = recvfrom_into = _dummy
+ __getattr__ = _dummy
class _socketobject(object):
__doc__ = _realsocket.__doc__
- __slots__ = ["_sock",
- "recv", "recv_into", "recvfrom_into",
- "send", "sendto", "recvfrom",
- "__weakref__"]
+ __slots__ = ["_sock", "__weakref__"] + list(_delegate_methods)
def __init__(self, family=AF_INET, type=SOCK_STREAM, proto=0, _sock=None):
if _sock is None:
_sock = _realsocket(family, type, proto)
self._sock = _sock
- self.send = self._sock.send
- self.recv = self._sock.recv
- self.recv_into = self._sock.recv_into
- self.sendto = self._sock.sendto
- self.recvfrom = self._sock.recvfrom
- self.recvfrom_into = self._sock.recvfrom_into
+ for method in _delegate_methods:
+ setattr(self, method, getattr(_sock, method))
def close(self):
+ self._sock.close()
self._sock = _closedsocket()
- self.send = self.recv = self.sendto = self.recvfrom = self._sock._dummy
+ dummy = self._sock._dummy
+ for method in _delegate_methods:
+ setattr(self, method, dummy)
close.__doc__ = _realsocket.close.__doc__
def accept(self):
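A small check of the new close() behaviour: the delegated methods are rebound to _closedsocket._dummy, so they raise EBADF afterwards (error number shown for POSIX systems).

    import socket

    s = socket.socket()
    s.close()            # now also closes the underlying real socket
    try:
        s.recv(16)
    except socket.error, e:
        print e          # (9, 'Bad file descriptor') on POSIX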
diff --git a/Lib/sqlite3/test/hooks.py b/Lib/sqlite3/test/hooks.py
index b10b3ef..761bdaa 100644
--- a/Lib/sqlite3/test/hooks.py
+++ b/Lib/sqlite3/test/hooks.py
@@ -48,6 +48,8 @@ class CollationTests(unittest.TestCase):
pass
def CheckCollationIsUsed(self):
+ if sqlite.version_info < (3, 2, 1): # old SQLite versions crash on this test
+ return
def mycoll(x, y):
# reverse order
return -cmp(x, y)
diff --git a/Lib/sqlite3/test/regression.py b/Lib/sqlite3/test/regression.py
index 25e4b63..c8733b9 100644
--- a/Lib/sqlite3/test/regression.py
+++ b/Lib/sqlite3/test/regression.py
@@ -61,6 +61,14 @@ class RegressionTests(unittest.TestCase):
con.rollback()
+ def CheckColumnNameWithSpaces(self):
+ cur = self.con.cursor()
+ cur.execute('select 1 as "foo bar [datetime]"')
+ self.failUnlessEqual(cur.description[0][0], "foo bar")
+
+ cur.execute('select 1 as "foo baz"')
+ self.failUnlessEqual(cur.description[0][0], "foo baz")
+
def suite():
regression_suite = unittest.makeSuite(RegressionTests, "Check")
return unittest.TestSuite((regression_suite,))
diff --git a/Lib/sqlite3/test/types.py b/Lib/sqlite3/test/types.py
index e49f7dd..8da5722 100644
--- a/Lib/sqlite3/test/types.py
+++ b/Lib/sqlite3/test/types.py
@@ -21,7 +21,7 @@
# misrepresented as being the original software.
# 3. This notice may not be removed or altered from any source distribution.
-import datetime
+import bz2, datetime
import unittest
import sqlite3 as sqlite
@@ -101,16 +101,16 @@ class DeclTypesTests(unittest.TestCase):
self.cur.execute("create table test(i int, s str, f float, b bool, u unicode, foo foo, bin blob)")
# override float, make them always return the same number
- sqlite.converters["float"] = lambda x: 47.2
+ sqlite.converters["FLOAT"] = lambda x: 47.2
# and implement two custom ones
- sqlite.converters["bool"] = lambda x: bool(int(x))
- sqlite.converters["foo"] = DeclTypesTests.Foo
+ sqlite.converters["BOOL"] = lambda x: bool(int(x))
+ sqlite.converters["FOO"] = DeclTypesTests.Foo
def tearDown(self):
- del sqlite.converters["float"]
- del sqlite.converters["bool"]
- del sqlite.converters["foo"]
+ del sqlite.converters["FLOAT"]
+ del sqlite.converters["BOOL"]
+ del sqlite.converters["FOO"]
self.cur.close()
self.con.close()
@@ -208,14 +208,14 @@ class ColNamesTests(unittest.TestCase):
self.cur = self.con.cursor()
self.cur.execute("create table test(x foo)")
- sqlite.converters["foo"] = lambda x: "[%s]" % x
- sqlite.converters["bar"] = lambda x: "<%s>" % x
- sqlite.converters["exc"] = lambda x: 5/0
+ sqlite.converters["FOO"] = lambda x: "[%s]" % x
+ sqlite.converters["BAR"] = lambda x: "<%s>" % x
+ sqlite.converters["EXC"] = lambda x: 5/0
def tearDown(self):
- del sqlite.converters["foo"]
- del sqlite.converters["bar"]
- del sqlite.converters["exc"]
+ del sqlite.converters["FOO"]
+ del sqlite.converters["BAR"]
+ del sqlite.converters["EXC"]
self.cur.close()
self.con.close()
@@ -231,12 +231,6 @@ class ColNamesTests(unittest.TestCase):
val = self.cur.fetchone()[0]
self.failUnlessEqual(val, None)
- def CheckExc(self):
- # Exceptions in type converters result in returned Nones
- self.cur.execute('select 5 as "x [exc]"')
- val = self.cur.fetchone()[0]
- self.failUnlessEqual(val, None)
-
def CheckColName(self):
self.cur.execute("insert into test(x) values (?)", ("xxx",))
self.cur.execute('select x as "x [bar]" from test')
@@ -279,6 +273,23 @@ class ObjectAdaptationTests(unittest.TestCase):
val = self.cur.fetchone()[0]
self.failUnlessEqual(type(val), float)
+class BinaryConverterTests(unittest.TestCase):
+ def convert(s):
+ return bz2.decompress(s)
+ convert = staticmethod(convert)
+
+ def setUp(self):
+ self.con = sqlite.connect(":memory:", detect_types=sqlite.PARSE_COLNAMES)
+ sqlite.register_converter("bin", BinaryConverterTests.convert)
+
+ def tearDown(self):
+ self.con.close()
+
+ def CheckBinaryInputForConverter(self):
+ testdata = "abcdefg" * 10
+ result = self.con.execute('select ? as "x [bin]"', (buffer(bz2.compress(testdata)),)).fetchone()[0]
+ self.failUnlessEqual(testdata, result)
+
class DateTimeTests(unittest.TestCase):
def setUp(self):
self.con = sqlite.connect(":memory:", detect_types=sqlite.PARSE_DECLTYPES)
@@ -328,8 +339,9 @@ def suite():
decltypes_type_suite = unittest.makeSuite(DeclTypesTests, "Check")
colnames_type_suite = unittest.makeSuite(ColNamesTests, "Check")
adaptation_suite = unittest.makeSuite(ObjectAdaptationTests, "Check")
+ bin_suite = unittest.makeSuite(BinaryConverterTests, "Check")
date_suite = unittest.makeSuite(DateTimeTests, "Check")
- return unittest.TestSuite((sqlite_type_suite, decltypes_type_suite, colnames_type_suite, adaptation_suite, date_suite))
+ return unittest.TestSuite((sqlite_type_suite, decltypes_type_suite, colnames_type_suite, adaptation_suite, bin_suite, date_suite))
def test():
runner = unittest.TextTestRunner()
diff --git a/Lib/sqlite3/test/userfunctions.py b/Lib/sqlite3/test/userfunctions.py
index 78656e7..31bf289 100644
--- a/Lib/sqlite3/test/userfunctions.py
+++ b/Lib/sqlite3/test/userfunctions.py
@@ -55,6 +55,9 @@ class AggrNoStep:
def __init__(self):
pass
+ def finalize(self):
+ return 1
+
class AggrNoFinalize:
def __init__(self):
pass
@@ -144,9 +147,12 @@ class FunctionTests(unittest.TestCase):
def CheckFuncRefCount(self):
def getfunc():
def f():
- return val
+ return 1
return f
- self.con.create_function("reftest", 0, getfunc())
+ f = getfunc()
+ globals()["foo"] = f
+ # self.con.create_function("reftest", 0, getfunc())
+ self.con.create_function("reftest", 0, f)
cur = self.con.cursor()
cur.execute("select reftest()")
@@ -195,9 +201,12 @@ class FunctionTests(unittest.TestCase):
def CheckFuncException(self):
cur = self.con.cursor()
- cur.execute("select raiseexception()")
- val = cur.fetchone()[0]
- self.failUnlessEqual(val, None)
+ try:
+ cur.execute("select raiseexception()")
+ cur.fetchone()
+ self.fail("should have raised OperationalError")
+ except sqlite.OperationalError, e:
+ self.failUnlessEqual(e.args[0], 'user-defined function raised exception')
def CheckParamString(self):
cur = self.con.cursor()
@@ -267,31 +276,47 @@ class AggregateTests(unittest.TestCase):
def CheckAggrNoStep(self):
cur = self.con.cursor()
- cur.execute("select nostep(t) from test")
+ try:
+ cur.execute("select nostep(t) from test")
+ self.fail("should have raised an AttributeError")
+ except AttributeError, e:
+ self.failUnlessEqual(e.args[0], "AggrNoStep instance has no attribute 'step'")
def CheckAggrNoFinalize(self):
cur = self.con.cursor()
- cur.execute("select nofinalize(t) from test")
- val = cur.fetchone()[0]
- self.failUnlessEqual(val, None)
+ try:
+ cur.execute("select nofinalize(t) from test")
+ val = cur.fetchone()[0]
+ self.fail("should have raised an OperationalError")
+ except sqlite.OperationalError, e:
+ self.failUnlessEqual(e.args[0], "user-defined aggregate's 'finalize' method raised error")
def CheckAggrExceptionInInit(self):
cur = self.con.cursor()
- cur.execute("select excInit(t) from test")
- val = cur.fetchone()[0]
- self.failUnlessEqual(val, None)
+ try:
+ cur.execute("select excInit(t) from test")
+ val = cur.fetchone()[0]
+ self.fail("should have raised an OperationalError")
+ except sqlite.OperationalError, e:
+ self.failUnlessEqual(e.args[0], "user-defined aggregate's '__init__' method raised error")
def CheckAggrExceptionInStep(self):
cur = self.con.cursor()
- cur.execute("select excStep(t) from test")
- val = cur.fetchone()[0]
- self.failUnlessEqual(val, 42)
+ try:
+ cur.execute("select excStep(t) from test")
+ val = cur.fetchone()[0]
+ self.fail("should have raised an OperationalError")
+ except sqlite.OperationalError, e:
+ self.failUnlessEqual(e.args[0], "user-defined aggregate's 'step' method raised error")
def CheckAggrExceptionInFinalize(self):
cur = self.con.cursor()
- cur.execute("select excFinalize(t) from test")
- val = cur.fetchone()[0]
- self.failUnlessEqual(val, None)
+ try:
+ cur.execute("select excFinalize(t) from test")
+ val = cur.fetchone()[0]
+ self.fail("should have raised an OperationalError")
+ except sqlite.OperationalError, e:
+ self.failUnlessEqual(e.args[0], "user-defined aggregate's 'finalize' method raised error")
def CheckAggrCheckParamStr(self):
cur = self.con.cursor()
@@ -331,10 +356,54 @@ class AggregateTests(unittest.TestCase):
val = cur.fetchone()[0]
self.failUnlessEqual(val, 60)
+def authorizer_cb(action, arg1, arg2, dbname, source):
+ if action != sqlite.SQLITE_SELECT:
+ return sqlite.SQLITE_DENY
+ if arg2 == 'c2' or arg1 == 't2':
+ return sqlite.SQLITE_DENY
+ return sqlite.SQLITE_OK
+
+class AuthorizerTests(unittest.TestCase):
+ def setUp(self):
+ self.con = sqlite.connect(":memory:")
+ self.con.executescript("""
+ create table t1 (c1, c2);
+ create table t2 (c1, c2);
+ insert into t1 (c1, c2) values (1, 2);
+ insert into t2 (c1, c2) values (4, 5);
+ """)
+
+ # For our security test:
+ self.con.execute("select c2 from t2")
+
+ self.con.set_authorizer(authorizer_cb)
+
+ def tearDown(self):
+ pass
+
+ def CheckTableAccess(self):
+ try:
+ self.con.execute("select * from t2")
+ except sqlite.DatabaseError, e:
+ if not e.args[0].endswith("prohibited"):
+ self.fail("wrong exception text: %s" % e.args[0])
+ return
+ self.fail("should have raised an exception due to missing privileges")
+
+ def CheckColumnAccess(self):
+ try:
+ self.con.execute("select c2 from t1")
+ except sqlite.DatabaseError, e:
+ if not e.args[0].endswith("prohibited"):
+ self.fail("wrong exception text: %s" % e.args[0])
+ return
+ self.fail("should have raised an exception due to missing privileges")
+
def suite():
function_suite = unittest.makeSuite(FunctionTests, "Check")
aggregate_suite = unittest.makeSuite(AggregateTests, "Check")
- return unittest.TestSuite((function_suite, aggregate_suite))
+ authorizer_suite = unittest.makeSuite(AuthorizerTests, "Check")
+ return unittest.TestSuite((function_suite, aggregate_suite, authorizer_suite))
def test():
runner = unittest.TextTestRunner()
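The AuthorizerTests added above exercise sqlite3's set_authorizer() hook. As a rough standalone sketch (the table and column names here are illustrative, not taken from the patch), denying reads of one column looks like this:
import sqlite3 as sqlite
def deny_c2(action, arg1, arg2, dbname, source):
    # Deny any read of column 'c2'; allow everything else.
    if action == sqlite.SQLITE_READ and arg2 == "c2":
        return sqlite.SQLITE_DENY
    return sqlite.SQLITE_OK
con = sqlite.connect(":memory:")
con.execute("create table t1 (c1, c2)")
con.execute("insert into t1 values (1, 2)")
con.set_authorizer(deny_c2)
print con.execute("select c1 from t1").fetchall()   # allowed: [(1,)]
try:
    con.execute("select c2 from t1")
except sqlite.DatabaseError, e:
    print e.args[0]   # message typically ends with "prohibited"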
diff --git a/Lib/string.py b/Lib/string.py
index ba85a49..a5837e9 100644
--- a/Lib/string.py
+++ b/Lib/string.py
@@ -161,7 +161,7 @@ class Template:
val = mapping[named]
# We use this idiom instead of str() because the latter will
# fail if val is a Unicode containing non-ASCII characters.
- return '%s' % val
+ return '%s' % (val,)
if mo.group('escaped') is not None:
return self.delimiter
if mo.group('invalid') is not None:
@@ -186,13 +186,13 @@ class Template:
try:
# We use this idiom instead of str() because the latter
# will fail if val is a Unicode containing non-ASCII
- return '%s' % mapping[named]
+ return '%s' % (mapping[named],)
except KeyError:
return self.delimiter + named
braced = mo.group('braced')
if braced is not None:
try:
- return '%s' % mapping[braced]
+ return '%s' % (mapping[braced],)
except KeyError:
return self.delimiter + '{' + braced + '}'
if mo.group('escaped') is not None:
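The switch to '%s' % (val,) above matters when a mapping value is itself a tuple; a quick illustration (not part of the patch):
val = (1, 2)
print '%s' % (val,)    # '(1, 2)' -- the tuple is formatted as a single value
try:
    print '%s' % val   # the tuple is unpacked as separate format arguments
except TypeError, e:
    print e            # not all arguments converted during string formatting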
diff --git a/Lib/struct.py b/Lib/struct.py
index 9113e71..07c21bf 100644
--- a/Lib/struct.py
+++ b/Lib/struct.py
@@ -64,7 +64,7 @@ def pack(fmt, *args):
def pack_into(fmt, buf, offset, *args):
"""
- Pack the values v2, v2, ... according to fmt, write
+ Pack the values v1, v2, ... according to fmt, write
the packed bytes into the writable buffer buf starting at offset.
See struct.__doc__ for more on format strings.
"""
diff --git a/Lib/subprocess.py b/Lib/subprocess.py
index a6af7e7..0d19129 100644
--- a/Lib/subprocess.py
+++ b/Lib/subprocess.py
@@ -121,7 +121,7 @@ check_call(*popenargs, **kwargs):
Run command with arguments. Wait for command to complete. If the
exit code was zero then return, otherwise raise
CalledProcessError. The CalledProcessError object will have the
- return code in the errno attribute.
+ return code in the returncode attribute.
The arguments are the same as for the Popen constructor. Example:
@@ -141,8 +141,8 @@ should prepare for OSErrors.
A ValueError will be raised if Popen is called with invalid arguments.
-check_call() will raise CalledProcessError, which is a subclass of
-OSError, if the called process returns a non-zero return code.
+check_call() will raise CalledProcessError, if the called process
+returns a non-zero return code.
Security
@@ -234,7 +234,7 @@ Replacing os.system()
sts = os.system("mycmd" + " myarg")
==>
p = Popen("mycmd" + " myarg", shell=True)
-sts = os.waitpid(p.pid, 0)
+pid, sts = os.waitpid(p.pid, 0)
Note:
@@ -360,11 +360,16 @@ import types
import traceback
# Exception classes used by this module.
-class CalledProcessError(OSError):
+class CalledProcessError(Exception):
"""This exception is raised when a process run by check_call() returns
a non-zero exit status. The exit status will be stored in the
- errno attribute. This exception is a subclass of
- OSError."""
+ returncode attribute."""
+ def __init__(self, returncode, cmd):
+ self.returncode = returncode
+ self.cmd = cmd
+ def __str__(self):
+ return "Command '%s' returned non-zero exit status %d" % (self.cmd, self.returncode)
+
if mswindows:
import threading
@@ -442,7 +447,7 @@ def check_call(*popenargs, **kwargs):
"""Run command with arguments. Wait for command to complete. If
the exit code was zero then return, otherwise raise
CalledProcessError. The CalledProcessError object will have the
- return code in the errno attribute.
+ return code in the returncode attribute.
The arguments are the same as for the Popen constructor. Example:
@@ -453,7 +458,7 @@ def check_call(*popenargs, **kwargs):
if cmd is None:
cmd = popenargs[0]
if retcode:
- raise CalledProcessError(retcode, "Command %s returned non-zero exit status" % cmd)
+ raise CalledProcessError(retcode, cmd)
return retcode
@@ -613,7 +618,7 @@ class Popen(object):
return
# In case the child hasn't been waited on, check if it's done.
self.poll(_deadstate=sys.maxint)
- if self.returncode is None:
+ if self.returncode is None and _active is not None:
# Child is still running, keep us alive until we can wait on it.
_active.append(self)
@@ -941,7 +946,7 @@ class Popen(object):
def _close_fds(self, but):
- for i in range(3, MAXFD):
+ for i in xrange(3, MAXFD):
if i == but:
continue
try:
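With the change above, a check_call() failure now carries the command and exit status on the exception itself. A minimal sketch of the new usage (assumes a POSIX 'false' command is on PATH):
import subprocess
try:
    subprocess.check_call(["false"])
except subprocess.CalledProcessError, e:
    print "command %r exited with status %d" % (e.cmd, e.returncode)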
diff --git a/Lib/tarfile.py b/Lib/tarfile.py
index 061d0f5..c185fbd 100644
--- a/Lib/tarfile.py
+++ b/Lib/tarfile.py
@@ -417,7 +417,13 @@ class _Stream:
self.fileobj.write(self.buf)
self.buf = ""
if self.comptype == "gz":
- self.fileobj.write(struct.pack("<l", self.crc))
+ # The native zlib crc is an unsigned 32-bit integer, but
+ # the Python wrapper implicitly casts that to a signed C
+ # long. So, on a 32-bit box self.crc may "look negative",
+ # while the same crc on a 64-bit box may "look positive".
+ # To avoid irksome warnings from the `struct` module, force
+ # it to look positive on all boxes.
+ self.fileobj.write(struct.pack("<L", self.crc & 0xffffffffL))
self.fileobj.write(struct.pack("<L", self.pos & 0xffffFFFFL))
if not self._extfileobj:
@@ -1750,13 +1756,6 @@ class TarFile(object):
try:
tarinfo = TarInfo.frombuf(buf)
- # We shouldn't rely on this checksum, because some tar programs
- # calculate it differently and it is merely validating the
- # header block. We could just as well skip this part, which would
- # have a slight effect on performance...
- if tarinfo.chksum not in calc_chksums(buf):
- self._dbg(1, "tarfile: Bad Checksum %r" % tarinfo.name)
-
# Set the TarInfo object's offset to the current position of the
# TarFile and set self.offset to the position where the data blocks
# should begin.
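The masking above works around zlib reporting the crc as a signed C long. Shown in isolation (the input string is illustrative):
import zlib, struct
crc = zlib.crc32("some data")                  # may be negative on 32-bit builds
packed = struct.pack("<L", crc & 0xffffffffL)  # always packs cleanly as unsigned
print repr(packed)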
diff --git a/Lib/telnetlib.py b/Lib/telnetlib.py
index 3523037..a13e85c 100644
--- a/Lib/telnetlib.py
+++ b/Lib/telnetlib.py
@@ -311,6 +311,8 @@ class Telnet:
s_args = s_reply
if timeout is not None:
s_args = s_args + (timeout,)
+ from time import time
+ time_start = time()
while not self.eof and select.select(*s_args) == s_reply:
i = max(0, len(self.cookedq)-n)
self.fill_rawq()
@@ -321,6 +323,11 @@ class Telnet:
buf = self.cookedq[:i]
self.cookedq = self.cookedq[i:]
return buf
+ if timeout is not None:
+ elapsed = time() - time_start
+ if elapsed >= timeout:
+ break
+ s_args = s_reply + (timeout-elapsed,)
return self.read_very_lazy()
def read_all(self):
@@ -601,6 +608,9 @@ class Telnet:
if not hasattr(list[i], "search"):
if not re: import re
list[i] = re.compile(list[i])
+ if timeout is not None:
+ from time import time
+ time_start = time()
while 1:
self.process_rawq()
for i in indices:
@@ -613,7 +623,11 @@ class Telnet:
if self.eof:
break
if timeout is not None:
- r, w, x = select.select([self.fileno()], [], [], timeout)
+ elapsed = time() - time_start
+ if elapsed >= timeout:
+ break
+ s_args = ([self.fileno()], [], [], timeout-elapsed)
+ r, w, x = select.select(*s_args)
if not r:
break
self.fill_rawq()
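Both telnetlib hunks above apply the same pattern: record when waiting started and hand select() only the remaining budget, so the caller's overall timeout holds across repeated polls. A standalone sketch of that pattern (the helper name and arguments are illustrative):
import select, time
def wait_readable(fd, timeout):
    start = time.time()
    while True:
        remaining = timeout - (time.time() - start)
        if remaining <= 0:
            return False                 # overall timeout exhausted
        r, w, x = select.select([fd], [], [], remaining)
        if r:
            return True                  # fd became readable in time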
diff --git a/Lib/tempfile.py b/Lib/tempfile.py
index dd7e864..2e8cd6d 100644
--- a/Lib/tempfile.py
+++ b/Lib/tempfile.py
@@ -446,7 +446,7 @@ else:
prefix=template, dir=None):
"""Create and return a temporary file.
Arguments:
- 'prefix', 'suffix', 'directory' -- as for mkstemp.
+ 'prefix', 'suffix', 'dir' -- as for mkstemp.
'mode' -- the mode argument to os.fdopen (default "w+b").
'bufsize' -- the buffer size argument to os.fdopen (default -1).
The file is created as mkstemp() would do it.
diff --git a/Lib/test/crashers/bogus_code_obj.py b/Lib/test/crashers/bogus_code_obj.py
new file mode 100644
index 0000000..613ae51
--- /dev/null
+++ b/Lib/test/crashers/bogus_code_obj.py
@@ -0,0 +1,19 @@
+"""
+Broken bytecode objects can easily crash the interpreter.
+
+This is not going to be fixed. It is generally agreed that there is no
+point in writing a bytecode verifier and putting it in CPython just for
+this. Moreover, a verifier is bound to accept only a subset of all safe
+bytecodes, so it could lead to unnecessary breakage.
+
+For security purposes, "restricted" interpreters are not going to let
+the user build or load random bytecodes anyway. Otherwise, this is a
+"won't fix" case.
+
+"""
+
+import types
+
+co = types.CodeType(0, 0, 0, 0, '\x04\x71\x00\x00', (),
+ (), (), '', '', 1, '')
+exec co
diff --git a/Lib/test/crashers/borrowed_ref_1.py b/Lib/test/crashers/borrowed_ref_1.py
new file mode 100644
index 0000000..d16ede2
--- /dev/null
+++ b/Lib/test/crashers/borrowed_ref_1.py
@@ -0,0 +1,29 @@
+"""
+_PyType_Lookup() returns a borrowed reference.
+This attacks the call in dictobject.c.
+"""
+
+class A(object):
+ pass
+
+class B(object):
+ def __del__(self):
+ print 'hi'
+ del D.__missing__
+
+class D(dict):
+ class __missing__:
+ def __init__(self, *args):
+ pass
+
+
+d = D()
+a = A()
+a.cycle = a
+a.other = B()
+del a
+
+prev = None
+while 1:
+ d[5]
+ prev = (prev,)
diff --git a/Lib/test/crashers/borrowed_ref_2.py b/Lib/test/crashers/borrowed_ref_2.py
new file mode 100644
index 0000000..1a7b3ff
--- /dev/null
+++ b/Lib/test/crashers/borrowed_ref_2.py
@@ -0,0 +1,38 @@
+"""
+_PyType_Lookup() returns a borrowed reference.
+This attacks PyObject_GenericSetAttr().
+
+NB. on my machine this crashes in 2.5 debug but not release.
+"""
+
+class A(object):
+ pass
+
+class B(object):
+ def __del__(self):
+ print "hi"
+ del C.d
+
+class D(object):
+ def __set__(self, obj, value):
+ self.hello = 42
+
+class C(object):
+ d = D()
+
+ def g():
+ pass
+
+
+c = C()
+a = A()
+a.cycle = a
+a.other = B()
+
+lst = [None] * 1000000
+i = 0
+del a
+while 1:
+ c.d = 42 # segfaults in PyMethod_New(im_func=D.__set__, im_self=d)
+ lst[i] = c.g # consume the free list of instancemethod objects
+ i += 1
diff --git a/Lib/test/crashers/coerce.py b/Lib/test/crashers/coerce.py
deleted file mode 100644
index 574956b..0000000
--- a/Lib/test/crashers/coerce.py
+++ /dev/null
@@ -1,9 +0,0 @@
-
-# http://python.org/sf/992017
-
-class foo:
- def __coerce__(self, other):
- return other, self
-
-if __name__ == '__main__':
- foo()+1 # segfault: infinite recursion in C
diff --git a/Lib/test/crashers/gc_inspection.py b/Lib/test/crashers/gc_inspection.py
new file mode 100644
index 0000000..10caa79
--- /dev/null
+++ b/Lib/test/crashers/gc_inspection.py
@@ -0,0 +1,32 @@
+"""
+gc.get_referrers() can be used to see objects before they are fully built.
+
+Note that this is only an example. There are many ways to crash Python
+by using gc.get_referrers(), as well as many extension modules (even
+when they are using perfectly documented patterns to build objects).
+
+Identifying and removing all places that expose to the GC a
+partially-built object is a long-term project. A patch was proposed on
+SF specifically for this example but I consider fixing just this single
+example a bit pointless (#1517042).
+
+A fix would include a whole-scale code review, possibly with an API
+change to decouple object creation and GC registration, and according
+fixes to the documentation for extension module writers. It's unlikely
+to happen, though. So this is currently classified as
+"gc.get_referrers() is dangerous, use only for debugging".
+"""
+
+import gc
+
+
+def g():
+ marker = object()
+ yield marker
+ # now the marker is in the tuple being constructed
+ [tup] = [x for x in gc.get_referrers(marker) if type(x) is tuple]
+ print tup
+ print tup[1]
+
+
+tuple(g())
diff --git a/Lib/test/crashers/infinite_rec_3.py b/Lib/test/crashers/infinite_rec_3.py
deleted file mode 100644
index 0b04e4c..0000000
--- a/Lib/test/crashers/infinite_rec_3.py
+++ /dev/null
@@ -1,9 +0,0 @@
-
-# http://python.org/sf/1202533
-
-class A(object):
- pass
-A.__call__ = A()
-
-if __name__ == '__main__':
- A()() # segfault: infinite recursion in C
diff --git a/Lib/test/crashers/recursion_limit_too_high.py b/Lib/test/crashers/recursion_limit_too_high.py
new file mode 100644
index 0000000..1fa4d32
--- /dev/null
+++ b/Lib/test/crashers/recursion_limit_too_high.py
@@ -0,0 +1,16 @@
+# The following example may crash or not depending on the platform.
+# E.g. on 32-bit Intel Linux in a "standard" configuration it seems to
+# crash on Python 2.5 (but not 2.4 nor 2.3). On Windows the import
+# eventually fails to find the module, possibly because we run out of
+# file handles.
+
+# The point of this example is to show that sys.setrecursionlimit() is a
+# hack, and not a robust solution. This example simply exercises a path
+# where it takes many C-level recursions, consuming a lot of stack
+# space, for each Python-level recursion. So 1000 times this amount of
+# stack space may be too much for standard platforms already.
+
+import sys
+if 'recursion_limit_too_high' in sys.modules:
+ del sys.modules['recursion_limit_too_high']
+import recursion_limit_too_high
diff --git a/Lib/test/crashers/recursive_call.py b/Lib/test/crashers/recursive_call.py
index 0776479..31c8963 100644
--- a/Lib/test/crashers/recursive_call.py
+++ b/Lib/test/crashers/recursive_call.py
@@ -1,6 +1,11 @@
#!/usr/bin/env python
# No bug report AFAIK, mail on python-dev on 2006-01-10
+
+# This is a "won't fix" case. It is known that setting a high enough
+# recursion limit crashes by overflowing the stack. Unless this is
+# redesigned somehow, it won't go away.
+
import sys
sys.setrecursionlimit(1 << 30)
diff --git a/Lib/test/crashers/xml_parsers.py b/Lib/test/crashers/xml_parsers.py
deleted file mode 100644
index e6b5727..0000000
--- a/Lib/test/crashers/xml_parsers.py
+++ /dev/null
@@ -1,56 +0,0 @@
-from xml.parsers import expat
-
-# http://python.org/sf/1296433
-
-def test_parse_only_xml_data():
- #
- xml = "<?xml version='1.0' encoding='iso8859'?><s>%s</s>" % ('a' * 1025)
- # this one doesn't crash
- #xml = "<?xml version='1.0'?><s>%s</s>" % ('a' * 10000)
-
- def handler(text):
- raise Exception
-
- parser = expat.ParserCreate()
- parser.CharacterDataHandler = handler
-
- try:
- parser.Parse(xml)
- except:
- pass
-
-if __name__ == '__main__':
- test_parse_only_xml_data()
-
-# Invalid read of size 4
-# at 0x43F936: PyObject_Free (obmalloc.c:735)
-# by 0x45A7C7: unicode_dealloc (unicodeobject.c:246)
-# by 0x1299021D: PyUnknownEncodingHandler (pyexpat.c:1314)
-# by 0x12993A66: processXmlDecl (xmlparse.c:3330)
-# by 0x12999211: doProlog (xmlparse.c:3678)
-# by 0x1299C3F0: prologInitProcessor (xmlparse.c:3550)
-# by 0x12991EA3: XML_ParseBuffer (xmlparse.c:1562)
-# by 0x1298F8EC: xmlparse_Parse (pyexpat.c:895)
-# by 0x47B3A1: PyEval_EvalFrameEx (ceval.c:3565)
-# by 0x47CCAC: PyEval_EvalCodeEx (ceval.c:2739)
-# by 0x47CDE1: PyEval_EvalCode (ceval.c:490)
-# by 0x499820: PyRun_SimpleFileExFlags (pythonrun.c:1198)
-# by 0x4117F1: Py_Main (main.c:492)
-# by 0x12476D1F: __libc_start_main (in /lib/libc-2.3.5.so)
-# by 0x410DC9: (within /home/neal/build/python/svn/clean/python)
-# Address 0x12704020 is 264 bytes inside a block of size 592 free'd
-# at 0x11B1BA8A: free (vg_replace_malloc.c:235)
-# by 0x124B5F18: (within /lib/libc-2.3.5.so)
-# by 0x48DE43: find_module (import.c:1320)
-# by 0x48E997: import_submodule (import.c:2249)
-# by 0x48EC15: load_next (import.c:2083)
-# by 0x48F091: import_module_ex (import.c:1914)
-# by 0x48F385: PyImport_ImportModuleEx (import.c:1955)
-# by 0x46D070: builtin___import__ (bltinmodule.c:44)
-# by 0x4186CF: PyObject_Call (abstract.c:1777)
-# by 0x474E9B: PyEval_CallObjectWithKeywords (ceval.c:3432)
-# by 0x47928E: PyEval_EvalFrameEx (ceval.c:2038)
-# by 0x47CCAC: PyEval_EvalCodeEx (ceval.c:2739)
-# by 0x47CDE1: PyEval_EvalCode (ceval.c:490)
-# by 0x48D0F7: PyImport_ExecCodeModuleEx (import.c:635)
-# by 0x48D4F4: load_source_module (import.c:913)
diff --git a/Lib/test/fork_wait.py b/Lib/test/fork_wait.py
index 5600bdb..7eb55f6 100644
--- a/Lib/test/fork_wait.py
+++ b/Lib/test/fork_wait.py
@@ -34,7 +34,14 @@ class ForkWait(unittest.TestCase):
pass
def wait_impl(self, cpid):
- spid, status = os.waitpid(cpid, 0)
+ for i in range(10):
+ # waitpid() shouldn't hang, but some of the buildbots seem to hang
+ # in the forking tests. This is an attempt to fix the problem.
+ spid, status = os.waitpid(cpid, os.WNOHANG)
+ if spid == cpid:
+ break
+ time.sleep(2 * SHORTSLEEP)
+
self.assertEquals(spid, cpid)
self.assertEquals(status, 0, "cause = %d, exit = %d" % (status&0xff, status>>8))
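The wait_impl change above polls with os.WNOHANG instead of blocking, so a wedged child cannot hang the whole test run. The pattern on its own (helper name, retry count and delay are illustrative):
import os, time
def wait_with_retries(cpid, retries=10, delay=0.1):
    for i in range(retries):
        spid, status = os.waitpid(cpid, os.WNOHANG)   # returns (0, 0) while the child runs
        if spid == cpid:
            return status
        time.sleep(delay)
    raise RuntimeError("child %d did not exit in time" % cpid)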
diff --git a/Lib/test/output/test_ossaudiodev b/Lib/test/output/test_ossaudiodev
index 9f55afa..f0df5d2 100644
--- a/Lib/test/output/test_ossaudiodev
+++ b/Lib/test/output/test_ossaudiodev
@@ -1,3 +1,2 @@
test_ossaudiodev
-playing test sound file...
-elapsed time: 3.1 sec
+playing test sound file (expected running time: 2.93 sec)
diff --git a/Lib/test/output/test_thread b/Lib/test/output/test_thread
index d49651d..68c6a92 100644
--- a/Lib/test/output/test_thread
+++ b/Lib/test/output/test_thread
@@ -4,3 +4,15 @@ all tasks done
*** Barrier Test ***
all tasks done
+
+*** Changing thread stack size ***
+caught expected ValueError setting stack_size(4096)
+successfully set stack_size(262144)
+successfully set stack_size(1048576)
+successfully set stack_size(0)
+trying stack_size = 262144
+waiting for all tasks to complete
+all tasks done
+trying stack_size = 1048576
+waiting for all tasks to complete
+all tasks done
diff --git a/Lib/test/regrtest.py b/Lib/test/regrtest.py
index ca4a3b5..4553838 100755
--- a/Lib/test/regrtest.py
+++ b/Lib/test/regrtest.py
@@ -66,7 +66,9 @@ reports are written to. These parameters all have defaults (5, 4 and
-M runs tests that require an exorbitant amount of memory. These tests
typically try to ascertain containers keep working when containing more than
-2 bilion objects, and only work on 64-bit systems. The passed-in memlimit,
+2 billion objects, which only works on 64-bit systems. There are also some
+tests that try to exhaust the address space of the process, which only makes
+sense on 32-bit systems with at least 2Gb of memory. The passed-in memlimit,
which is a string in the form of '2.5Gb', determines how much memory the
tests will limit themselves to (but they may go slightly over.) The number
shouldn't be more memory than the machine has (including swap memory). You
@@ -496,14 +498,30 @@ def findtests(testdir=None, stdtests=STDTESTS, nottests=NOTTESTS):
def runtest(test, generate, verbose, quiet, testdir=None, huntrleaks=False):
"""Run a single test.
+
test -- the name of the test
generate -- if true, generate output, instead of running the test
- and comparing it to a previously created output file
+ and comparing it to a previously created output file
verbose -- if true, print more messages
quiet -- if true, don't print 'skipped' messages (probably redundant)
testdir -- test directory
+ huntrleaks -- run multiple times to test for leaks; requires a debug
+ build; a triple corresponding to -R's three arguments
+ Return:
+ -2 test skipped because resource denied
+ -1 test skipped for some other reason
+ 0 test failed
+ 1 test passed
"""
+ try:
+ return runtest_inner(test, generate, verbose, quiet, testdir,
+ huntrleaks)
+ finally:
+ cleanup_test_droppings(test, verbose)
+
+def runtest_inner(test, generate, verbose, quiet,
+ testdir=None, huntrleaks=False):
test_support.unload(test)
if not testdir:
testdir = findtestdir()
@@ -595,6 +613,37 @@ def runtest(test, generate, verbose, quiet, testdir=None, huntrleaks=False):
sys.stdout.flush()
return 0
+def cleanup_test_droppings(testname, verbose):
+ import shutil
+
+ # Try to clean up junk commonly left behind. While tests shouldn't leave
+ # any files or directories behind, when a test fails that can be tedious
+ # for it to arrange. The consequences can be especially nasty on Windows,
+ # since if a test leaves a file open, it cannot be deleted by name (while
+ # there's nothing we can do about that here either, we can display the
+ # name of the offending test, which is a real help).
+ for name in (test_support.TESTFN,
+ "db_home",
+ ):
+ if not os.path.exists(name):
+ continue
+
+ if os.path.isdir(name):
+ kind, nuker = "directory", shutil.rmtree
+ elif os.path.isfile(name):
+ kind, nuker = "file", os.unlink
+ else:
+ raise SystemError("os.path says %r exists but is neither "
+ "directory nor file" % name)
+
+ if verbose:
+ print "%r left behind %s %r" % (testname, kind, name)
+ try:
+ nuker(name)
+ except Exception, msg:
+ print >> sys.stderr, ("%r left behind %s %r and it couldn't be "
+ "removed: %s" % (testname, kind, name, msg))
+
def dash_R(the_module, test, indirect_test, huntrleaks):
# This code is hackish and inelegant, but it seems to do the job.
import copy_reg
@@ -637,7 +686,7 @@ def dash_R(the_module, test, indirect_test, huntrleaks):
def dash_R_cleanup(fs, ps, pic):
import gc, copy_reg
- import _strptime, linecache, warnings, dircache
+ import _strptime, linecache, dircache
import urlparse, urllib, urllib2, mimetypes, doctest
import struct, filecmp
from distutils.dir_util import _path_created
@@ -1227,6 +1276,37 @@ _expectations = {
test_winreg
test_winsound
""",
+ 'netbsd3':
+ """
+ test_aepack
+ test_al
+ test_applesingle
+ test_bsddb
+ test_bsddb185
+ test_bsddb3
+ test_cd
+ test_cl
+ test_ctypes
+ test_curses
+ test_dl
+ test_gdbm
+ test_gl
+ test_imgfile
+ test_linuxaudiodev
+ test_locale
+ test_macfs
+ test_macostools
+ test_nis
+ test_ossaudiodev
+ test_pep277
+ test_sqlite
+ test_startfile
+ test_sunaudiodev
+ test_tcl
+ test_unicode_file
+ test_winreg
+ test_winsound
+ """,
}
_expectations['freebsd5'] = _expectations['freebsd4']
_expectations['freebsd6'] = _expectations['freebsd4']
diff --git a/Lib/test/string_tests.py b/Lib/test/string_tests.py
index aaa2dc2..73447ad 100644
--- a/Lib/test/string_tests.py
+++ b/Lib/test/string_tests.py
@@ -147,8 +147,8 @@ class CommonTest(unittest.TestCase):
else:
r2, rem = len(i)+1, 0
if rem or r1 != r2:
- self.assertEqual(rem, 0)
- self.assertEqual(r1, r2)
+ self.assertEqual(rem, 0, '%s != 0 for %s' % (rem, i))
+ self.assertEqual(r1, r2, '%s != %s for %s' % (r1, r2, i))
def test_find(self):
self.checkequal(0, 'abcdefghiabc', 'find', 'abc')
@@ -636,6 +636,11 @@ class CommonTest(unittest.TestCase):
EQ("bobobXbobob", "bobobobXbobobob", "replace", "bobob", "bob")
EQ("BOBOBOB", "BOBOBOB", "replace", "bob", "bobby")
+ ba = buffer('a')
+ bb = buffer('b')
+ EQ("bbc", "abc", "replace", ba, bb)
+ EQ("aac", "abc", "replace", bb, ba)
+
#
self.checkequal('one@two!three!', 'one!two!three!', 'replace', '!', '@', 1)
self.checkequal('onetwothree', 'one!two!three!', 'replace', '!', '')
@@ -819,6 +824,21 @@ class MixinStrUnicodeUserStringTest:
self.checkraises(TypeError, 'hello', 'startswith')
self.checkraises(TypeError, 'hello', 'startswith', 42)
+ # test tuple arguments
+ self.checkequal(True, 'hello', 'startswith', ('he', 'ha'))
+ self.checkequal(False, 'hello', 'startswith', ('lo', 'llo'))
+ self.checkequal(True, 'hello', 'startswith', ('hellox', 'hello'))
+ self.checkequal(False, 'hello', 'startswith', ())
+ self.checkequal(True, 'helloworld', 'startswith', ('hellowo',
+ 'rld', 'lowo'), 3)
+ self.checkequal(False, 'helloworld', 'startswith', ('hellowo', 'ello',
+ 'rld'), 3)
+ self.checkequal(True, 'hello', 'startswith', ('lo', 'he'), 0, -1)
+ self.checkequal(False, 'hello', 'startswith', ('he', 'hel'), 0, 1)
+ self.checkequal(True, 'hello', 'startswith', ('he', 'hel'), 0, 2)
+
+ self.checkraises(TypeError, 'hello', 'startswith', (42,))
+
def test_endswith(self):
self.checkequal(True, 'hello', 'endswith', 'lo')
self.checkequal(False, 'hello', 'endswith', 'he')
@@ -853,6 +873,21 @@ class MixinStrUnicodeUserStringTest:
self.checkraises(TypeError, 'hello', 'endswith')
self.checkraises(TypeError, 'hello', 'endswith', 42)
+ # test tuple arguments
+ self.checkequal(False, 'hello', 'endswith', ('he', 'ha'))
+ self.checkequal(True, 'hello', 'endswith', ('lo', 'llo'))
+ self.checkequal(True, 'hello', 'endswith', ('hellox', 'hello'))
+ self.checkequal(False, 'hello', 'endswith', ())
+ self.checkequal(True, 'helloworld', 'endswith', ('hellowo',
+ 'rld', 'lowo'), 3)
+ self.checkequal(False, 'helloworld', 'endswith', ('hellowo', 'ello',
+ 'rld'), 3, -1)
+ self.checkequal(True, 'hello', 'endswith', ('hell', 'ell'), 0, -1)
+ self.checkequal(False, 'hello', 'endswith', ('he', 'hel'), 0, 1)
+ self.checkequal(True, 'hello', 'endswith', ('he', 'hell'), 0, 4)
+
+ self.checkraises(TypeError, 'hello', 'endswith', (42,))
+
def test___contains__(self):
self.checkequal(True, '', '__contains__', '') # vereq('' in '', True)
self.checkequal(True, 'abc', '__contains__', '') # vereq('' in 'abc', True)
@@ -872,7 +907,7 @@ class MixinStrUnicodeUserStringTest:
self.checkequal(u'abc', 'abc', '__getitem__', slice(0, 1000))
self.checkequal(u'a', 'abc', '__getitem__', slice(0, 1))
self.checkequal(u'', 'abc', '__getitem__', slice(0, 0))
- # FIXME What about negative indizes? This is handled differently by [] and __getitem__(slice)
+ # FIXME What about negative indices? This is handled differently by [] and __getitem__(slice)
self.checkraises(TypeError, 'abc', '__getitem__', 'def')
@@ -908,6 +943,8 @@ class MixinStrUnicodeUserStringTest:
# test.test_string.StringTest.test_join)
self.checkequal('a b c d', ' ', 'join', ['a', 'b', 'c', 'd'])
self.checkequal('abcd', '', 'join', ('a', 'b', 'c', 'd'))
+ self.checkequal('bd', '', 'join', ('', 'b', '', 'd'))
+ self.checkequal('ac', '', 'join', ('a', '', 'c', ''))
self.checkequal('w x y z', ' ', 'join', Sequence())
self.checkequal('abc', 'a', 'join', ('abc',))
self.checkequal('z', 'a', 'join', UserList(['z']))
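The tuple arguments exercised above reflect the new startswith()/endswith() behaviour; directly on a string it looks like this:
s = "helloworld"
print s.startswith(("he", "ha"))            # True: matches "he"
print s.endswith(("rld", "xyz"))            # True: matches "rld"
print s.endswith(("wor", "xyz"), 0, 8)      # True: only "hellowor" is considered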
diff --git a/Lib/test/test__locale.py b/Lib/test/test__locale.py
index 9799f89..ec59d71 100644
--- a/Lib/test/test__locale.py
+++ b/Lib/test/test__locale.py
@@ -113,6 +113,9 @@ class _LocaleTests(unittest.TestCase):
"using eval('3.14') failed for %s" % loc)
self.assertEquals(int(float('3.14') * 100), 314,
"using float('3.14') failed for %s" % loc)
+ if localeconv()['decimal_point'] != '.':
+ self.assertRaises(ValueError, float,
+ localeconv()['decimal_point'].join(['1', '23']))
def test_main():
run_unittest(_LocaleTests)
diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast.py
index c64ad28..14fc010 100644
--- a/Lib/test/test_ast.py
+++ b/Lib/test/test_ast.py
@@ -160,7 +160,7 @@ exec_results = [
('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [], None, None, []), [('Return', (1, 8), ('Num', (1, 15), 1))], [])]),
('Module', [('Delete', (1, 0), [('Name', (1, 4), 'v', ('Del',))])]),
('Module', [('Assign', (1, 0), [('Name', (1, 0), 'v', ('Store',))], ('Num', (1, 4), 1))]),
-('Module', [('AugAssign', (1, 0), ('Name', (1, 0), 'v', ('Load',)), ('Add',), ('Num', (1, 5), 1))]),
+('Module', [('AugAssign', (1, 0), ('Name', (1, 0), 'v', ('Store',)), ('Add',), ('Num', (1, 5), 1))]),
('Module', [('Print', (1, 0), ('Name', (1, 8), 'f', ('Load',)), [('Num', (1, 11), 1)], False)]),
('Module', [('For', (1, 0), ('Name', (1, 4), 'v', ('Store',)), ('Name', (1, 9), 'v', ('Load',)), [('Pass', (1, 11))], [])]),
('Module', [('While', (1, 0), ('Name', (1, 6), 'v', ('Load',)), [('Pass', (1, 8))], [])]),
diff --git a/Lib/test/test_asynchat.py b/Lib/test/test_asynchat.py
index f93587a..9926167 100644
--- a/Lib/test/test_asynchat.py
+++ b/Lib/test/test_asynchat.py
@@ -13,7 +13,8 @@ class echo_server(threading.Thread):
def run(self):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
- sock.bind((HOST, PORT))
+ global PORT
+ PORT = test_support.bind_port(sock, HOST, PORT)
sock.listen(1)
conn, client = sock.accept()
buffer = ""
diff --git a/Lib/test/test_bigaddrspace.py b/Lib/test/test_bigaddrspace.py
new file mode 100644
index 0000000..8c215fe
--- /dev/null
+++ b/Lib/test/test_bigaddrspace.py
@@ -0,0 +1,46 @@
+from test import test_support
+from test.test_support import bigaddrspacetest, MAX_Py_ssize_t
+
+import unittest
+import operator
+import sys
+
+
+class StrTest(unittest.TestCase):
+
+ @bigaddrspacetest
+ def test_concat(self):
+ s1 = 'x' * MAX_Py_ssize_t
+ self.assertRaises(OverflowError, operator.add, s1, '?')
+
+ @bigaddrspacetest
+ def test_optimized_concat(self):
+ x = 'x' * MAX_Py_ssize_t
+ try:
+ x = x + '?' # this statement uses a fast path in ceval.c
+ except OverflowError:
+ pass
+ else:
+ self.fail("should have raised OverflowError")
+ try:
+ x += '?' # this statement uses a fast path in ceval.c
+ except OverflowError:
+ pass
+ else:
+ self.fail("should have raised OverflowError")
+ self.assertEquals(len(x), MAX_Py_ssize_t)
+
+ ### the following test is pending a patch
+ # (http://mail.python.org/pipermail/python-dev/2006-July/067774.html)
+ #@bigaddrspacetest
+ #def test_repeat(self):
+ # self.assertRaises(OverflowError, operator.mul, 'x', MAX_Py_ssize_t + 1)
+
+
+def test_main():
+ test_support.run_unittest(StrTest)
+
+if __name__ == '__main__':
+ if len(sys.argv) > 1:
+ test_support.set_memlimit(sys.argv[1])
+ test_main()
diff --git a/Lib/test/test_bigmem.py b/Lib/test/test_bigmem.py
index 255428f..6d6c37c 100644
--- a/Lib/test/test_bigmem.py
+++ b/Lib/test/test_bigmem.py
@@ -28,7 +28,7 @@ import sys
# - While the bigmemtest decorator speaks of 'minsize', all tests will
# actually be called with a much smaller number too, in the normal
# test run (5Kb currently.) This is so the tests themselves get frequent
-# testing Consequently, always make all large allocations based on the
+# testing. Consequently, always make all large allocations based on the
# passed-in 'size', and don't rely on the size being very large. Also,
# memuse-per-size should remain sane (less than a few thousand); if your
# test uses more, adjust 'size' upward, instead.
diff --git a/Lib/test/test_bsddb.py b/Lib/test/test_bsddb.py
index 513e541..474f3da 100755
--- a/Lib/test/test_bsddb.py
+++ b/Lib/test/test_bsddb.py
@@ -8,7 +8,6 @@ import bsddb
import dbhash # Just so we know it's imported
import unittest
from test import test_support
-from sets import Set
class TestBSDDB(unittest.TestCase):
openflag = 'c'
@@ -53,7 +52,7 @@ class TestBSDDB(unittest.TestCase):
self.assertEqual(self.f[k], v)
def assertSetEquals(self, seqn1, seqn2):
- self.assertEqual(Set(seqn1), Set(seqn2))
+ self.assertEqual(set(seqn1), set(seqn2))
def test_mapping_iteration_methods(self):
f = self.f
diff --git a/Lib/test/test_builtin.py b/Lib/test/test_builtin.py
index e6e4440..c7e4394 100644
--- a/Lib/test/test_builtin.py
+++ b/Lib/test/test_builtin.py
@@ -532,13 +532,24 @@ class BuiltinTest(unittest.TestCase):
@run_with_locale('LC_NUMERIC', 'fr_FR', 'de_DE')
def test_float_with_comma(self):
# set locale to something that doesn't use '.' for the decimal point
+ # float must not accept the locale-specific decimal point but
+ # it still has to accept the normal Python syntax
import locale
if not locale.localeconv()['decimal_point'] == ',':
return
- self.assertEqual(float(" 3,14 "), 3.14)
- self.assertEqual(float(" +3,14 "), 3.14)
- self.assertEqual(float(" -3,14 "), -3.14)
+ self.assertEqual(float(" 3.14 "), 3.14)
+ self.assertEqual(float("+3.14 "), 3.14)
+ self.assertEqual(float("-3.14 "), -3.14)
+ self.assertEqual(float(".14 "), .14)
+ self.assertEqual(float("3. "), 3.0)
+ self.assertEqual(float("3.e3 "), 3000.0)
+ self.assertEqual(float("3.2e3 "), 3200.0)
+ self.assertEqual(float("2.5e-1 "), 0.25)
+ self.assertEqual(float("5e-1"), 0.5)
+ self.assertRaises(ValueError, float, " 3,14 ")
+ self.assertRaises(ValueError, float, " +3,14 ")
+ self.assertRaises(ValueError, float, " -3,14 ")
self.assertRaises(ValueError, float, " 0x3.1 ")
self.assertRaises(ValueError, float, " -0x3.p-1 ")
self.assertEqual(float(" 25.e-1 "), 2.5)
@@ -603,6 +614,19 @@ class BuiltinTest(unittest.TestCase):
def f(): pass
self.assertRaises(TypeError, hash, [])
self.assertRaises(TypeError, hash, {})
+ # Bug 1536021: Allow hash to return long objects
+ class X:
+ def __hash__(self):
+ return 2**100
+ self.assertEquals(type(hash(X())), int)
+ class Y(object):
+ def __hash__(self):
+ return 2**100
+ self.assertEquals(type(hash(Y())), int)
+ class Z(long):
+ def __hash__(self):
+ return self
+ self.assertEquals(hash(Z(42)), hash(42L))
def test_hex(self):
self.assertEqual(hex(16), '0x10')
diff --git a/Lib/test/test_bz2.py b/Lib/test/test_bz2.py
index 356c2e3..f198116 100644
--- a/Lib/test/test_bz2.py
+++ b/Lib/test/test_bz2.py
@@ -250,7 +250,7 @@ class BZ2FileTest(BaseTest):
bz2f = BZ2File(self.filename)
xlines = list(bz2f.readlines())
bz2f.close()
- self.assertEqual(lines, ['Test'])
+ self.assertEqual(xlines, ['Test'])
class BZ2CompressorTest(BaseTest):
@@ -344,6 +344,7 @@ def test_main():
BZ2DecompressorTest,
FuncTest
)
+ test_support.reap_children()
if __name__ == '__main__':
test_main()
diff --git a/Lib/test/test_cmd_line.py b/Lib/test/test_cmd_line.py
index ec860d1..5e89863 100644
--- a/Lib/test/test_cmd_line.py
+++ b/Lib/test/test_cmd_line.py
@@ -87,6 +87,7 @@ class CmdLineTest(unittest.TestCase):
def test_main():
test.test_support.run_unittest(CmdLineTest)
+ test.test_support.reap_children()
if __name__ == "__main__":
test_main()
diff --git a/Lib/test/test_code.py b/Lib/test/test_code.py
index 52bc894..4e68638 100644
--- a/Lib/test/test_code.py
+++ b/Lib/test/test_code.py
@@ -61,6 +61,23 @@ nlocals: 1
flags: 67
consts: ('None',)
+>>> def optimize_away():
+... 'doc string'
+... 'not a docstring'
+... 53
+... 53L
+
+>>> dump(optimize_away.func_code)
+name: optimize_away
+argcount: 0
+names: ()
+varnames: ()
+cellvars: ()
+freevars: ()
+nlocals: 0
+flags: 67
+consts: ("'doc string'", 'None')
+
"""
def consts(t):
diff --git a/Lib/test/test_codecs.py b/Lib/test/test_codecs.py
index 6ea49cc..8153979 100644
--- a/Lib/test/test_codecs.py
+++ b/Lib/test/test_codecs.py
@@ -1166,6 +1166,12 @@ class BasicUnicodeTest(unittest.TestCase):
encoder = codecs.getencoder(encoding)
self.assertRaises(TypeError, encoder)
+ def test_encoding_map_type_initialized(self):
+ from encodings import cp1140
+ # This used to crash, we are only verifying there's no crash.
+ table_type = type(cp1140.encoding_table)
+ self.assertEqual(table_type, table_type)
+
class BasicStrTest(unittest.TestCase):
def test_basics(self):
s = "abc123"
diff --git a/Lib/test/test_commands.py b/Lib/test/test_commands.py
index 0f7d15f..b72a1b9 100644
--- a/Lib/test/test_commands.py
+++ b/Lib/test/test_commands.py
@@ -5,7 +5,7 @@
import unittest
import os, tempfile, re
-from test.test_support import TestSkipped, run_unittest
+from test.test_support import TestSkipped, run_unittest, reap_children
from commands import *
# The module says:
@@ -58,6 +58,7 @@ class CommandTests(unittest.TestCase):
def test_main():
run_unittest(CommandTests)
+ reap_children()
if __name__ == "__main__":
diff --git a/Lib/test/test_compile.py b/Lib/test/test_compile.py
index 72c4f7e..a3f15bf 100644
--- a/Lib/test/test_compile.py
+++ b/Lib/test/test_compile.py
@@ -166,6 +166,16 @@ if 1:
pass"""
compile(s, "<string>", "exec")
+ # This test is probably specific to CPython and may not generalize
+ # to other implementations. We are trying to ensure that when
+ # the first line of code starts after 256, correct line numbers
+ # in tracebacks are still produced.
+ def test_leading_newlines(self):
+ s256 = "".join(["\n"] * 256 + ["spam"])
+ co = compile(s256, 'fn', 'exec')
+ self.assertEqual(co.co_firstlineno, 257)
+ self.assertEqual(co.co_lnotab, '')
+
def test_literals_with_leading_zeroes(self):
for arg in ["077787", "0xj", "0x.", "0e", "090000000000000",
"080000000000000", "000000000000009", "000000000000008"]:
@@ -211,6 +221,25 @@ if 1:
self.assertEqual(eval("-" + all_one_bits), -18446744073709551615L)
else:
self.fail("How many bits *does* this machine have???")
+ # Verify treatment of constant folding on -(sys.maxint+1)
+ # i.e. -2147483648 on 32-bit platforms. Should return int, not long.
+ self.assertTrue(isinstance(eval("%s" % (-sys.maxint - 1)), int))
+ self.assertTrue(isinstance(eval("%s" % (-sys.maxint - 2)), long))
+
+ if sys.maxint == 9223372036854775807:
+ def test_32_63_bit_values(self):
+ a = +4294967296 # 1 << 32
+ b = -4294967296 # 1 << 32
+ c = +281474976710656 # 1 << 48
+ d = -281474976710656 # 1 << 48
+ e = +4611686018427387904 # 1 << 62
+ f = -4611686018427387904 # 1 << 62
+ g = +9223372036854775807 # 1 << 63 - 1
+ h = -9223372036854775807 # 1 << 63 - 1
+
+ for variable in self.test_32_63_bit_values.func_code.co_consts:
+ if variable is not None:
+ self.assertTrue(isinstance(variable, int))
def test_sequence_unpacking_error(self):
# Verify sequence packing/unpacking with "or". SF bug #757818
@@ -238,6 +267,8 @@ if 1:
succeed = [
'import sys',
'import os, sys',
+ 'import os as bar',
+ 'import os.path as bar',
'from __future__ import nested_scopes, generators',
'from __future__ import (nested_scopes,\ngenerators)',
'from __future__ import (nested_scopes,\ngenerators,)',
@@ -257,6 +288,10 @@ if 1:
'import (sys',
'import sys)',
'import (os,)',
+ 'import os As bar',
+ 'import os.path a bar',
+ 'from sys import stdin As stdout',
+ 'from sys import stdin a stdout',
'from (sys) import stdin',
'from __future__ import (nested_scopes',
'from __future__ import nested_scopes)',
diff --git a/Lib/test/test_compiler.py b/Lib/test/test_compiler.py
index 48f1643..1efb6a6 100644
--- a/Lib/test/test_compiler.py
+++ b/Lib/test/test_compiler.py
@@ -56,13 +56,30 @@ class CompilerTest(unittest.TestCase):
def testYieldExpr(self):
compiler.compile("def g(): yield\n\n", "<string>", "exec")
+ def testTryExceptFinally(self):
+ # Test that except and finally clauses in one try stmt are recognized
+ c = compiler.compile("try:\n 1/0\nexcept:\n e = 1\nfinally:\n f = 1",
+ "<string>", "exec")
+ dct = {}
+ exec c in dct
+ self.assertEquals(dct.get('e'), 1)
+ self.assertEquals(dct.get('f'), 1)
+
def testDefaultArgs(self):
self.assertRaises(SyntaxError, compiler.parse, "def foo(a=1, b): pass")
+ def testDocstrings(self):
+ c = compiler.compile('"doc"', '<string>', 'exec')
+ self.assert_('__doc__' in c.co_names)
+ c = compiler.compile('def f():\n "doc"', '<string>', 'exec')
+ g = {}
+ exec c in g
+ self.assertEquals(g['f'].__doc__, "doc")
+
def testLineNo(self):
# Test that all nodes except Module have a correct lineno attribute.
filename = __file__
- if filename.endswith(".pyc") or filename.endswith(".pyo"):
+ if filename.endswith((".pyc", ".pyo")):
filename = filename[:-1]
tree = compiler.parseFile(filename)
self.check_lineno(tree)
@@ -87,6 +104,19 @@ class CompilerTest(unittest.TestCase):
self.assertEquals(flatten([1, [2]]), [1, 2])
self.assertEquals(flatten((1, (2,))), [1, 2])
+ def testNestedScope(self):
+ c = compiler.compile('def g():\n'
+ ' a = 1\n'
+ ' def f(): return a + 2\n'
+ ' return f()\n'
+ 'result = g()',
+ '<string>',
+ 'exec')
+ dct = {}
+ exec c in dct
+ self.assertEquals(dct.get('result'), 3)
+
+
NOLINENO = (compiler.ast.Module, compiler.ast.Stmt, compiler.ast.Discard)
###############################################################################
@@ -103,6 +133,12 @@ a, b = 2, 3
l = [(x, y) for x, y in zip(range(5), range(5,10))]
l[0]
l[3:4]
+d = {'a': 2}
+d = {}
+t = ()
+t = (1, 2)
+l = []
+l = [1, 2]
if l:
pass
else:
diff --git a/Lib/test/test_curses.py b/Lib/test/test_curses.py
index dc2f20b..4022149 100644
--- a/Lib/test/test_curses.py
+++ b/Lib/test/test_curses.py
@@ -212,6 +212,13 @@ def module_funcs(stdscr):
m = curses.getmouse()
curses.ungetmouse(*m)
+ if hasattr(curses, 'is_term_resized'):
+ curses.is_term_resized(*stdscr.getmaxyx())
+ if hasattr(curses, 'resizeterm'):
+ curses.resizeterm(*stdscr.getmaxyx())
+ if hasattr(curses, 'resize_term'):
+ curses.resize_term(*stdscr.getmaxyx())
+
def unit_tests():
from curses import ascii
for ch, expected in [('a', 'a'), ('A', 'A'),
diff --git a/Lib/test/test_defaultdict.py b/Lib/test/test_defaultdict.py
index b5a6628..134b5a8 100644
--- a/Lib/test/test_defaultdict.py
+++ b/Lib/test/test_defaultdict.py
@@ -4,6 +4,7 @@ import os
import copy
import tempfile
import unittest
+from test import test_support
from collections import defaultdict
@@ -131,5 +132,8 @@ class TestDefaultDict(unittest.TestCase):
self.assertEqual(d2, d1)
+def test_main():
+ test_support.run_unittest(TestDefaultDict)
+
if __name__ == "__main__":
- unittest.main()
+ test_main()
diff --git a/Lib/test/test_descr.py b/Lib/test/test_descr.py
index 8ee431b..4a39be5 100644
--- a/Lib/test/test_descr.py
+++ b/Lib/test/test_descr.py
@@ -1899,6 +1899,16 @@ def properties():
prop2 = property(fset=setter)
vereq(prop2.__doc__, None)
+ # this segfaulted in 2.5b2
+ try:
+ import _testcapi
+ except ImportError:
+ pass
+ else:
+ class X(object):
+ p = property(_testcapi.test_with_docstring)
+
+
def supers():
if verbose: print "Testing super..."
@@ -3046,6 +3056,21 @@ def kwdargs():
list.__init__(a, sequence=[0, 1, 2])
vereq(a, [0, 1, 2])
+def recursive__call__():
+ if verbose: print ("Testing recursive __call__() by setting to instance of "
+ "class ...")
+ class A(object):
+ pass
+
+ A.__call__ = A()
+ try:
+ A()()
+ except RuntimeError:
+ pass
+ else:
+ raise TestFailed("Recursion limit should have been reached for "
+ "__call__()")
+
def delhook():
if verbose: print "Testing __del__ hook..."
log = []
@@ -3803,6 +3828,13 @@ def weakref_segfault():
o.whatever = Provoker(o)
del o
+def wrapper_segfault():
+ # SF 927248: deeply nested wrappers could cause stack overflow
+ f = lambda:None
+ for i in xrange(1000000):
+ f = f.__call__
+ f = None
+
# Fix SF #762455, segfault when sys.stdout is changed in getattr
def filefault():
if verbose:
@@ -3957,6 +3989,7 @@ def notimplemented():
def test_main():
weakref_segfault() # Must be first, somehow
+ wrapper_segfault()
do_this_first()
class_docstrings()
lists()
@@ -4015,6 +4048,7 @@ def test_main():
buffer_inherit()
str_of_str_subclass()
kwdargs()
+ recursive__call__()
delhook()
hashinherit()
strops()
diff --git a/Lib/test/test_dis.py b/Lib/test/test_dis.py
index 081941d..c31092c 100644
--- a/Lib/test/test_dis.py
+++ b/Lib/test/test_dis.py
@@ -81,6 +81,13 @@ dis_bug1333982 = """\
bug1333982.func_code.co_firstlineno + 2,
bug1333982.func_code.co_firstlineno + 3)
+_BIG_LINENO_FORMAT = """\
+%3d 0 LOAD_GLOBAL 0 (spam)
+ 3 POP_TOP
+ 4 LOAD_CONST 0 (None)
+ 7 RETURN_VALUE
+"""
+
class DisTests(unittest.TestCase):
def do_disassembly_test(self, func, expected):
s = StringIO.StringIO()
@@ -124,6 +131,23 @@ class DisTests(unittest.TestCase):
if __debug__:
self.do_disassembly_test(bug1333982, dis_bug1333982)
+ def test_big_linenos(self):
+ def func(count):
+ namespace = {}
+ func = "def foo():\n " + "".join(["\n "] * count + ["spam\n"])
+ exec func in namespace
+ return namespace['foo']
+
+ # Test all small ranges
+ for i in xrange(1, 300):
+ expected = _BIG_LINENO_FORMAT % (i + 2)
+ self.do_disassembly_test(func(i), expected)
+
+ # Test some larger ranges too
+ for i in xrange(300, 5000, 10):
+ expected = _BIG_LINENO_FORMAT % (i + 2)
+ self.do_disassembly_test(func(i), expected)
+
def test_main():
run_unittest(DisTests)
diff --git a/Lib/test/test_doctest.py b/Lib/test/test_doctest.py
index 01f7acd..e8379c5 100644
--- a/Lib/test/test_doctest.py
+++ b/Lib/test/test_doctest.py
@@ -419,7 +419,6 @@ methods, classmethods, staticmethods, properties, and nested classes.
>>> finder = doctest.DocTestFinder()
>>> tests = finder.find(SampleClass)
- >>> tests.sort()
>>> for t in tests:
... print '%2s %s' % (len(t.examples), t.name)
3 SampleClass
@@ -435,7 +434,6 @@ methods, classmethods, staticmethods, properties, and nested classes.
New-style classes are also supported:
>>> tests = finder.find(SampleNewStyleClass)
- >>> tests.sort()
>>> for t in tests:
... print '%2s %s' % (len(t.examples), t.name)
1 SampleNewStyleClass
@@ -475,7 +473,6 @@ functions, classes, and the `__test__` dictionary, if it exists:
>>> # ignoring the objects since they weren't defined in m.
>>> import test.test_doctest
>>> tests = finder.find(m, module=test.test_doctest)
- >>> tests.sort()
>>> for t in tests:
... print '%2s %s' % (len(t.examples), t.name)
1 some_module
@@ -499,7 +496,6 @@ will only be generated for it once:
>>> from test import doctest_aliases
>>> tests = excl_empty_finder.find(doctest_aliases)
- >>> tests.sort()
>>> print len(tests)
2
>>> print tests[0].name
@@ -517,7 +513,6 @@ Empty Tests
By default, an object with no doctests doesn't create any tests:
>>> tests = doctest.DocTestFinder().find(SampleClass)
- >>> tests.sort()
>>> for t in tests:
... print '%2s %s' % (len(t.examples), t.name)
3 SampleClass
@@ -536,7 +531,6 @@ is really to support backward compatibility in what doctest.master.summarize()
displays.
>>> tests = doctest.DocTestFinder(exclude_empty=False).find(SampleClass)
- >>> tests.sort()
>>> for t in tests:
... print '%2s %s' % (len(t.examples), t.name)
3 SampleClass
@@ -557,7 +551,6 @@ DocTestFinder can be told not to look for tests in contained objects
using the `recurse` flag:
>>> tests = doctest.DocTestFinder(recurse=False).find(SampleClass)
- >>> tests.sort()
>>> for t in tests:
... print '%2s %s' % (len(t.examples), t.name)
3 SampleClass
diff --git a/Lib/test/test_email_codecs.py b/Lib/test/test_email_codecs.py
index aadd537..c550a6f 100644
--- a/Lib/test/test_email_codecs.py
+++ b/Lib/test/test_email_codecs.py
@@ -1,11 +1,15 @@
# Copyright (C) 2002 Python Software Foundation
# email package unit tests for (optional) Asian codecs
-import unittest
# The specific tests now live in Lib/email/test
-from email.test.test_email_codecs import suite
+from email.test import test_email_codecs
+from email.test import test_email_codecs_renamed
+from test import test_support
+def test_main():
+ suite = test_email_codecs.suite()
+ suite.addTest(test_email_codecs_renamed.suite())
+ test_support.run_suite(suite)
-
if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+ test_main()
diff --git a/Lib/test/test_exceptions.py b/Lib/test/test_exceptions.py
index ebe60c1..be2cca1 100644
--- a/Lib/test/test_exceptions.py
+++ b/Lib/test/test_exceptions.py
@@ -314,6 +314,18 @@ class ExceptionTests(unittest.TestCase):
x = DerivedException(fancy_arg=42)
self.assertEquals(x.fancy_arg, 42)
+ def testInfiniteRecursion(self):
+ def f():
+ return f()
+ self.assertRaises(RuntimeError, f)
+
+ def g():
+ try:
+ return g()
+ except ValueError:
+ return -1
+ self.assertRaises(RuntimeError, g)
+
def test_main():
run_unittest(ExceptionTests)
diff --git a/Lib/test/test_fcntl.py b/Lib/test/test_fcntl.py
index f53b13a..58a57b5 100755
--- a/Lib/test/test_fcntl.py
+++ b/Lib/test/test_fcntl.py
@@ -20,9 +20,10 @@ else:
if sys.platform.startswith('atheos'):
start_len = "qq"
-if sys.platform in ('netbsd1', 'netbsd2', 'Darwin1.2', 'darwin',
- 'freebsd2', 'freebsd3', 'freebsd4', 'freebsd5', 'freebsd6',
- 'freebsd7',
+if sys.platform in ('netbsd1', 'netbsd2', 'netbsd3',
+ 'Darwin1.2', 'darwin',
+ 'freebsd2', 'freebsd3', 'freebsd4', 'freebsd5',
+ 'freebsd6', 'freebsd7',
'bsdos2', 'bsdos3', 'bsdos4',
'openbsd', 'openbsd2', 'openbsd3'):
if struct.calcsize('l') == 8:
diff --git a/Lib/test/test_file.py b/Lib/test/test_file.py
index dcfa265..234920d 100644
--- a/Lib/test/test_file.py
+++ b/Lib/test/test_file.py
@@ -11,14 +11,12 @@ class AutoFileTests(unittest.TestCase):
# file tests for which a test file is automatically set up
def setUp(self):
- self.f = file(TESTFN, 'wb')
+ self.f = open(TESTFN, 'wb')
def tearDown(self):
- try:
- if self.f:
- self.f.close()
- except IOError:
- pass
+ if self.f:
+ self.f.close()
+ os.remove(TESTFN)
def testWeakRefs(self):
# verify weak references
@@ -80,9 +78,11 @@ class AutoFileTests(unittest.TestCase):
def testWritelinesNonString(self):
# verify writelines with non-string object
- class NonString: pass
+ class NonString:
+ pass
- self.assertRaises(TypeError, self.f.writelines, [NonString(), NonString()])
+ self.assertRaises(TypeError, self.f.writelines,
+ [NonString(), NonString()])
def testRepr(self):
# verify repr works
@@ -93,19 +93,21 @@ class AutoFileTests(unittest.TestCase):
self.assertEquals(f.name, TESTFN)
self.assert_(not f.isatty())
self.assert_(not f.closed)
-
+
self.assertRaises(TypeError, f.readinto, "")
f.close()
self.assert_(f.closed)
def testMethods(self):
methods = ['fileno', 'flush', 'isatty', 'next', 'read', 'readinto',
- 'readline', 'readlines', 'seek', 'tell', 'truncate', 'write',
- '__iter__']
+ 'readline', 'readlines', 'seek', 'tell', 'truncate',
+ 'write', '__iter__']
if sys.platform.startswith('atheos'):
methods.remove('truncate')
- self.f.close()
+ # __exit__ should close the file
+ self.f.__exit__(None, None, None)
+ self.assert_(self.f.closed)
for methodname in methods:
method = getattr(self.f, methodname)
@@ -113,6 +115,14 @@ class AutoFileTests(unittest.TestCase):
self.assertRaises(ValueError, method)
self.assertRaises(ValueError, self.f.writelines, [])
+ # file is closed, __exit__ shouldn't do anything
+ self.assertEquals(self.f.__exit__(None, None, None), None)
+ # it must also return None if an exception was given
+ try:
+ 1/0
+ except:
+ self.assertEquals(self.f.__exit__(*sys.exc_info()), None)
+
class OtherFileTests(unittest.TestCase):
@@ -120,7 +130,7 @@ class OtherFileTests(unittest.TestCase):
# check invalid mode strings
for mode in ("", "aU", "wU+"):
try:
- f = file(TESTFN, mode)
+ f = open(TESTFN, mode)
except ValueError:
pass
else:
@@ -142,6 +152,7 @@ class OtherFileTests(unittest.TestCase):
f = open(unicode(TESTFN), "w")
self.assert_(repr(f).startswith("<open file u'" + TESTFN))
f.close()
+ os.unlink(TESTFN)
def testBadModeArgument(self):
# verify that we get a sensible error message for bad mode argument
@@ -182,11 +193,11 @@ class OtherFileTests(unittest.TestCase):
def bug801631():
# SF bug <http://www.python.org/sf/801631>
# "file.truncate fault on windows"
- f = file(TESTFN, 'wb')
+ f = open(TESTFN, 'wb')
f.write('12345678901') # 11 bytes
f.close()
- f = file(TESTFN,'rb+')
+ f = open(TESTFN,'rb+')
data = f.read(5)
if data != '12345':
self.fail("Read on file opened for update failed %r" % data)
@@ -208,14 +219,14 @@ class OtherFileTests(unittest.TestCase):
os.unlink(TESTFN)
def testIteration(self):
- # Test the complex interaction when mixing file-iteration and the various
- # read* methods. Ostensibly, the mixture could just be tested to work
- # when it should work according to the Python language, instead of fail
- # when it should fail according to the current CPython implementation.
- # People don't always program Python the way they should, though, and the
- # implemenation might change in subtle ways, so we explicitly test for
- # errors, too; the test will just have to be updated when the
- # implementation changes.
+ # Test the complex interaction when mixing file-iteration and the
+ # various read* methods. Ostensibly, the mixture could just be tested
+ # to work when it should work according to the Python language,
+ # instead of fail when it should fail according to the current CPython
+ # implementation. People don't always program Python the way they
+ # should, though, and the implementation might change in subtle ways,
+ # so we explicitly test for errors, too; the test will just have to
+ # be updated when the implementation changes.
dataoffset = 16384
filler = "ham\n"
assert not dataoffset % len(filler), \
@@ -253,12 +264,13 @@ class OtherFileTests(unittest.TestCase):
(methodname, args))
f.close()
- # Test to see if harmless (by accident) mixing of read* and iteration
- # still works. This depends on the size of the internal iteration
- # buffer (currently 8192,) but we can test it in a flexible manner.
- # Each line in the bag o' ham is 4 bytes ("h", "a", "m", "\n"), so
- # 4096 lines of that should get us exactly on the buffer boundary for
- # any power-of-2 buffersize between 4 and 16384 (inclusive).
+ # Test to see if harmless (by accident) mixing of read* and
+ # iteration still works. This depends on the size of the internal
+ # iteration buffer (currently 8192,) but we can test it in a
+ # flexible manner. Each line in the bag o' ham is 4 bytes
+ # ("h", "a", "m", "\n"), so 4096 lines of that should get us
+ # exactly on the buffer boundary for any power-of-2 buffersize
+ # between 4 and 16384 (inclusive).
f = open(TESTFN, 'rb')
for i in range(nchunks):
f.next()
@@ -319,7 +331,13 @@ class OtherFileTests(unittest.TestCase):
def test_main():
- run_unittest(AutoFileTests, OtherFileTests)
+ # Historically, these tests have been sloppy about removing TESTFN.
+ # So get rid of it no matter what.
+ try:
+ run_unittest(AutoFileTests, OtherFileTests)
+ finally:
+ if os.path.exists(TESTFN):
+ os.unlink(TESTFN)
if __name__ == '__main__':
test_main()
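The __exit__ checks added above go with file objects being usable as context managers; on 2.5 that looks roughly like this (the filename is illustrative):
from __future__ import with_statement    # needed on Python 2.5
with open("spam.txt", "w") as f:          # __enter__ returns the file itself
    f.write("eggs\n")
print f.closed                            # True: __exit__ closed it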
diff --git a/Lib/test/test_filecmp.py b/Lib/test/test_filecmp.py
index c54119c..503562b 100644
--- a/Lib/test/test_filecmp.py
+++ b/Lib/test/test_filecmp.py
@@ -1,5 +1,5 @@
-import os, filecmp, shutil, tempfile
+import os, filecmp, shutil, tempfile, shutil
import unittest
from test import test_support
@@ -49,6 +49,7 @@ class DirCompareTestCase(unittest.TestCase):
self.caseinsensitive = os.path.normcase('A') == os.path.normcase('a')
data = 'Contents of file go here.\n'
for dir in [self.dir, self.dir_same, self.dir_diff]:
+ shutil.rmtree(dir, True)
os.mkdir(dir)
if self.caseinsensitive and dir is self.dir_same:
fn = 'FiLe' # Verify case-insensitive comparison
diff --git a/Lib/test/test_fork1.py b/Lib/test/test_fork1.py
index cba5fc7..e64e398 100644
--- a/Lib/test/test_fork1.py
+++ b/Lib/test/test_fork1.py
@@ -2,8 +2,9 @@
"""
import os
+import time
from test.fork_wait import ForkWait
-from test.test_support import TestSkipped, run_unittest
+from test.test_support import TestSkipped, run_unittest, reap_children
try:
os.fork
@@ -12,12 +13,20 @@ except AttributeError:
class ForkTest(ForkWait):
def wait_impl(self, cpid):
- spid, status = os.waitpid(cpid, 0)
+ for i in range(10):
+ # waitpid() shouldn't hang, but some of the buildbots seem to hang
+ # in the forking tests. This is an attempt to fix the problem.
+ spid, status = os.waitpid(cpid, os.WNOHANG)
+ if spid == cpid:
+ break
+ time.sleep(1.0)
+
self.assertEqual(spid, cpid)
self.assertEqual(status, 0, "cause = %d, exit = %d" % (status&0xff, status>>8))
def test_main():
run_unittest(ForkTest)
+ reap_children()
if __name__ == "__main__":
test_main()
diff --git a/Lib/test/test_generators.py b/Lib/test/test_generators.py
index a184a8b..ee36413 100644
--- a/Lib/test/test_generators.py
+++ b/Lib/test/test_generators.py
@@ -1497,22 +1497,55 @@ And a more sane, but still weird usage:
<type 'generator'>
+A yield expression with augmented assignment.
+
+>>> def coroutine(seq):
+... count = 0
+... while count < 200:
+... count += yield
+... seq.append(count)
+>>> seq = []
+>>> c = coroutine(seq)
+>>> c.next()
+>>> print seq
+[]
+>>> c.send(10)
+>>> print seq
+[10]
+>>> c.send(10)
+>>> print seq
+[10, 20]
+>>> c.send(10)
+>>> print seq
+[10, 20, 30]
+
+
Check some syntax errors for yield expressions:
>>> f=lambda: (yield 1),(yield 2)
Traceback (most recent call last):
...
-SyntaxError: 'yield' outside function (<doctest test.test_generators.__test__.coroutine[10]>, line 1)
+SyntaxError: 'yield' outside function (<doctest test.test_generators.__test__.coroutine[21]>, line 1)
>>> def f(): return lambda x=(yield): 1
Traceback (most recent call last):
...
-SyntaxError: 'return' with argument inside generator (<doctest test.test_generators.__test__.coroutine[11]>, line 1)
+SyntaxError: 'return' with argument inside generator (<doctest test.test_generators.__test__.coroutine[22]>, line 1)
>>> def f(): x = yield = y
Traceback (most recent call last):
...
-SyntaxError: assignment to yield expression not possible (<doctest test.test_generators.__test__.coroutine[12]>, line 1)
+SyntaxError: assignment to yield expression not possible (<doctest test.test_generators.__test__.coroutine[23]>, line 1)
+
+>>> def f(): (yield bar) = y
+Traceback (most recent call last):
+ ...
+SyntaxError: can't assign to yield expression (<doctest test.test_generators.__test__.coroutine[24]>, line 1)
+
+>>> def f(): (yield bar) += y
+Traceback (most recent call last):
+ ...
+SyntaxError: augmented assignment to yield expression not possible (<doctest test.test_generators.__test__.coroutine[25]>, line 1)
Now check some throw() conditions:
diff --git a/Lib/test/test_genexps.py b/Lib/test/test_genexps.py
index e414757..2598a79 100644
--- a/Lib/test/test_genexps.py
+++ b/Lib/test/test_genexps.py
@@ -109,7 +109,7 @@ for iterability
Traceback (most recent call last):
File "<pyshell#4>", line 1, in -toplevel-
(i for i in 6)
- TypeError: iteration over non-sequence
+ TypeError: 'int' object is not iterable
Verify late binding for the outermost if-expression
diff --git a/Lib/test/test_getargs2.py b/Lib/test/test_getargs2.py
index 8864e8e..c428f45 100644
--- a/Lib/test/test_getargs2.py
+++ b/Lib/test/test_getargs2.py
@@ -233,8 +233,25 @@ class LongLong_TestCase(unittest.TestCase):
self.failUnlessEqual(VERY_LARGE & ULLONG_MAX, getargs_K(VERY_LARGE))
+
+class Tuple_TestCase(unittest.TestCase):
+ def test_tuple(self):
+ from _testcapi import getargs_tuple
+
+ ret = getargs_tuple(1, (2, 3))
+ self.assertEquals(ret, (1,2,3))
+
+ # make sure invalid tuple arguments are handled correctly
+ class seq:
+ def __len__(self):
+ return 2
+ def __getitem__(self, n):
+ raise ValueError
+ self.assertRaises(TypeError, getargs_tuple, 1, seq())
+
+
def test_main():
- tests = [Signed_TestCase, Unsigned_TestCase]
+ tests = [Signed_TestCase, Unsigned_TestCase, Tuple_TestCase]
try:
from _testcapi import getargs_L, getargs_K
except ImportError:
diff --git a/Lib/test/test_grammar.py b/Lib/test/test_grammar.py
index 4bb4e45..f160867 100644
--- a/Lib/test/test_grammar.py
+++ b/Lib/test/test_grammar.py
@@ -531,6 +531,11 @@ n = 0
for x in Squares(10): n = n+x
if n != 285: raise TestFailed, 'for over growing sequence'
+result = []
+for x, in [(1,), (2,), (3,)]:
+ result.append(x)
+vereq(result, [1, 2, 3])
+
print 'try_stmt'
### try_stmt: 'try' ':' suite (except_clause ':' suite)+ ['else' ':' suite]
### | 'try' ':' suite 'finally' ':' suite
diff --git a/Lib/test/test_inspect.py b/Lib/test/test_inspect.py
index d9fd93d..99140d2 100644
--- a/Lib/test/test_inspect.py
+++ b/Lib/test/test_inspect.py
@@ -1,6 +1,8 @@
import sys
+import types
import unittest
import inspect
+import datetime
from test.test_support import TESTFN, run_unittest
@@ -15,7 +17,7 @@ from test import inspect_fodder2 as mod2
# isdatadescriptor
modfile = mod.__file__
-if modfile.endswith('c') or modfile.endswith('o'):
+if modfile.endswith(('c', 'o')):
modfile = modfile[:-1]
import __builtin__
@@ -40,10 +42,12 @@ class IsTestBase(unittest.TestCase):
self.failIf(other(obj), 'not %s(%s)' % (other.__name__, exp))
class TestPredicates(IsTestBase):
- def test_eleven(self):
- # Doc/lib/libinspect.tex claims there are 11 such functions
+ def test_thirteen(self):
count = len(filter(lambda x:x.startswith('is'), dir(inspect)))
- self.assertEqual(count, 11, "There are %d (not 11) is* functions" % count)
+ # Doc/lib/libinspect.tex claims there are 13 such functions
+ expected = 13
+ err_msg = "There are %d (not %d) is* functions" % (count, expected)
+ self.assertEqual(count, expected, err_msg)
def test_excluding_predicates(self):
self.istest(inspect.isbuiltin, 'sys.exit')
@@ -58,6 +62,15 @@ class TestPredicates(IsTestBase):
self.istest(inspect.istraceback, 'tb')
self.istest(inspect.isdatadescriptor, '__builtin__.file.closed')
self.istest(inspect.isdatadescriptor, '__builtin__.file.softspace')
+ if hasattr(types, 'GetSetDescriptorType'):
+ self.istest(inspect.isgetsetdescriptor,
+ 'type(tb.tb_frame).f_locals')
+ else:
+ self.failIf(inspect.isgetsetdescriptor(type(tb.tb_frame).f_locals))
+ if hasattr(types, 'MemberDescriptorType'):
+ self.istest(inspect.ismemberdescriptor, 'datetime.timedelta.days')
+ else:
+ self.failIf(inspect.ismemberdescriptor(datetime.timedelta.days))
def test_isroutine(self):
self.assert_(inspect.isroutine(mod.spam))
@@ -180,6 +193,17 @@ class TestRetrievingSourceCode(GetSourceBase):
def test_getfile(self):
self.assertEqual(inspect.getfile(mod.StupidGit), mod.__file__)
+ def test_getmodule_recursion(self):
+ from new import module
+ name = '__inspect_dummy'
+ m = sys.modules[name] = module(name)
+ m.__file__ = "<string>" # hopefully not a real filename...
+ m.__loader__ = "dummy" # pretend the filename is understood by a loader
+ exec "def x(): pass" in m.__dict__
+ self.assertEqual(inspect.getsourcefile(m.x.func_code), '<string>')
+ del sys.modules[name]
+ inspect.getmodule(compile('a=10','','single'))
+
class TestDecorators(GetSourceBase):
fodderFile = mod2
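The new hasattr-guarded checks above exercise inspect.isgetsetdescriptor() and inspect.ismemberdescriptor() against the same objects the test names, on builds whose types module exposes the corresponding descriptor types. A rough illustrative probe (expected to print True twice on such CPython builds):

    import datetime, inspect, sys

    def _some_traceback():
        # Manufacture a traceback so a live frame object is at hand.
        try:
            raise RuntimeError
        except RuntimeError:
            return sys.exc_info()[2]

    tb = _some_traceback()
    print inspect.isgetsetdescriptor(type(tb.tb_frame).f_locals)
    print inspect.ismemberdescriptor(datetime.timedelta.days)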
diff --git a/Lib/test/test_iterlen.py b/Lib/test/test_iterlen.py
index bcd0a6f..af4467e 100644
--- a/Lib/test/test_iterlen.py
+++ b/Lib/test/test_iterlen.py
@@ -235,9 +235,7 @@ class TestSeqIterReversed(TestInvariantWithoutMutations):
self.assertEqual(len(it), 0)
-
-if __name__ == "__main__":
-
+def test_main():
unittests = [
TestRepeat,
TestXrange,
@@ -255,3 +253,6 @@ if __name__ == "__main__":
TestSeqIterReversed,
]
test_support.run_unittest(*unittests)
+
+if __name__ == "__main__":
+ test_main()
diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py
index 73f8288..68c23c2 100644
--- a/Lib/test/test_logging.py
+++ b/Lib/test/test_logging.py
@@ -480,6 +480,8 @@ def test4():
f.close()
try:
logging.config.fileConfig(fn)
+ #call again to make sure cleanup is correct
+ logging.config.fileConfig(fn)
except:
t = sys.exc_info()[0]
message(str(t))
diff --git a/Lib/test/test_mailbox.py b/Lib/test/test_mailbox.py
index 914a20c..45dd118 100644
--- a/Lib/test/test_mailbox.py
+++ b/Lib/test/test_mailbox.py
@@ -461,7 +461,7 @@ class TestMaildir(TestMailbox):
def setUp(self):
TestMailbox.setUp(self)
- if os.name == 'nt':
+ if os.name in ('nt', 'os2'):
self._box.colon = '!'
def test_add_MM(self):
@@ -520,7 +520,7 @@ class TestMaildir(TestMailbox):
# Initialize an existing mailbox
self.tearDown()
for subdir in '', 'tmp', 'new', 'cur':
- os.mkdir(os.path.join(self._path, subdir))
+ os.mkdir(os.path.normpath(os.path.join(self._path, subdir)))
self._box = mailbox.Maildir(self._path)
self._check_basics(factory=rfc822.Message)
self._box = mailbox.Maildir(self._path, factory=None)
@@ -720,6 +720,30 @@ class _TestMboxMMDF(TestMailbox):
self.assert_(contents == open(self._path, 'rb').read())
self._box = self._factory(self._path)
+ def test_lock_conflict(self):
+ # Fork off a subprocess that will lock the file for 2 seconds,
+ # unlock it, and then exit.
+ if not hasattr(os, 'fork'):
+ return
+ pid = os.fork()
+ if pid == 0:
+ # In the child, lock the mailbox.
+ self._box.lock()
+ time.sleep(2)
+ self._box.unlock()
+ os._exit(0)
+
+ # In the parent, sleep a bit to give the child time to acquire
+ # the lock.
+ time.sleep(0.5)
+ self.assertRaises(mailbox.ExternalClashError,
+ self._box.lock)
+
+ # Wait for child to exit. Locking should now succeed.
+ exited_pid, status = os.waitpid(pid, 0)
+ self._box.lock()
+ self._box.unlock()
+
class TestMbox(_TestMboxMMDF):
@@ -1761,6 +1785,7 @@ def test_main():
TestMessageConversion, TestProxyFile, TestPartialFile,
MaildirTestCase)
test_support.run_unittest(*tests)
+ test_support.reap_children()
if __name__ == '__main__':
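The new test_lock_conflict checks that a second process trying to lock an mbox/MMDF mailbox gets mailbox.ExternalClashError. For reference, the locking API it leans on looks roughly like this in isolation (the path is illustrative):

    import mailbox

    box = mailbox.mbox('/tmp/example.mbox')
    box.lock()          # another process calling lock() now would get
    try:                # mailbox.ExternalClashError, as the test asserts
        pass            # modify the mailbox while it is locked
    finally:
        box.unlock()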
diff --git a/Lib/test/test_mimetools.py b/Lib/test/test_mimetools.py
index 96bbb36..b0b5b01 100644
--- a/Lib/test/test_mimetools.py
+++ b/Lib/test/test_mimetools.py
@@ -1,7 +1,7 @@
import unittest
from test import test_support
-import string, StringIO, mimetools, sets
+import string, StringIO, mimetools
msgtext1 = mimetools.Message(StringIO.StringIO(
"""Content-Type: text/plain; charset=iso-8859-1; format=flowed
@@ -25,7 +25,7 @@ class MimeToolsTest(unittest.TestCase):
self.assertEqual(o.getvalue(), start)
def test_boundary(self):
- s = sets.Set([""])
+ s = set([""])
for i in xrange(100):
nb = mimetools.choose_boundary()
self.assert_(nb not in s)
diff --git a/Lib/test/test_mimetypes.py b/Lib/test/test_mimetypes.py
index 8c584ad..0190c2f 100644
--- a/Lib/test/test_mimetypes.py
+++ b/Lib/test/test_mimetypes.py
@@ -1,7 +1,6 @@
import mimetypes
import StringIO
import unittest
-from sets import Set
from test import test_support
@@ -52,8 +51,8 @@ class MimeTypesTestCase(unittest.TestCase):
# First try strict. Use a set here for testing the results because if
# test_urllib2 is run before test_mimetypes, global state is modified
# such that the 'all' set will have more items in it.
- all = Set(self.db.guess_all_extensions('text/plain', strict=True))
- unless(all >= Set(['.bat', '.c', '.h', '.ksh', '.pl', '.txt']))
+ all = set(self.db.guess_all_extensions('text/plain', strict=True))
+ unless(all >= set(['.bat', '.c', '.h', '.ksh', '.pl', '.txt']))
# And now non-strict
all = self.db.guess_all_extensions('image/jpg', strict=False)
all.sort()
diff --git a/Lib/test/test_minidom.py b/Lib/test/test_minidom.py
index b9377ae..a6d309f 100644
--- a/Lib/test/test_minidom.py
+++ b/Lib/test/test_minidom.py
@@ -1,4 +1,4 @@
-# test for xmlcore.dom.minidom
+# test for xml.dom.minidom
import os
import sys
@@ -7,12 +7,12 @@ import traceback
from StringIO import StringIO
from test.test_support import verbose
-import xmlcore.dom
-import xmlcore.dom.minidom
-import xmlcore.parsers.expat
+import xml.dom
+import xml.dom.minidom
+import xml.parsers.expat
-from xmlcore.dom.minidom import parse, Node, Document, parseString
-from xmlcore.dom.minidom import getDOMImplementation
+from xml.dom.minidom import parse, Node, Document, parseString
+from xml.dom.minidom import getDOMImplementation
if __name__ == "__main__":
@@ -138,29 +138,29 @@ def testLegalChildren():
text = dom.createTextNode('text')
try: dom.appendChild(text)
- except xmlcore.dom.HierarchyRequestErr: pass
+ except xml.dom.HierarchyRequestErr: pass
else:
print "dom.appendChild didn't raise HierarchyRequestErr"
dom.appendChild(elem)
try: dom.insertBefore(text, elem)
- except xmlcore.dom.HierarchyRequestErr: pass
+ except xml.dom.HierarchyRequestErr: pass
else:
print "dom.appendChild didn't raise HierarchyRequestErr"
try: dom.replaceChild(text, elem)
- except xmlcore.dom.HierarchyRequestErr: pass
+ except xml.dom.HierarchyRequestErr: pass
else:
print "dom.appendChild didn't raise HierarchyRequestErr"
nodemap = elem.attributes
try: nodemap.setNamedItem(text)
- except xmlcore.dom.HierarchyRequestErr: pass
+ except xml.dom.HierarchyRequestErr: pass
else:
print "NamedNodeMap.setNamedItem didn't raise HierarchyRequestErr"
try: nodemap.setNamedItemNS(text)
- except xmlcore.dom.HierarchyRequestErr: pass
+ except xml.dom.HierarchyRequestErr: pass
else:
print "NamedNodeMap.setNamedItemNS didn't raise HierarchyRequestErr"
@@ -439,7 +439,7 @@ def testProcessingInstruction():
and pi.firstChild is None
and pi.lastChild is None
and pi.localName is None
- and pi.namespaceURI == xmlcore.dom.EMPTY_NAMESPACE)
+ and pi.namespaceURI == xml.dom.EMPTY_NAMESPACE)
def testProcessingInstructionRepr(): pass
@@ -454,7 +454,7 @@ def testTooManyDocumentElements():
elem = doc.createElement("extra")
try:
doc.appendChild(elem)
- except xmlcore.dom.HierarchyRequestErr:
+ except xml.dom.HierarchyRequestErr:
pass
else:
print "Failed to catch expected exception when" \
@@ -491,7 +491,7 @@ def testRemoveNamedItem():
confirm(a1.isSameNode(a2))
try:
attrs.removeNamedItem("a")
- except xmlcore.dom.NotFoundErr:
+ except xml.dom.NotFoundErr:
pass
def testRemoveNamedItemNS():
@@ -503,7 +503,7 @@ def testRemoveNamedItemNS():
confirm(a1.isSameNode(a2))
try:
attrs.removeNamedItemNS("http://xml.python.org/", "b")
- except xmlcore.dom.NotFoundErr:
+ except xml.dom.NotFoundErr:
pass
def testAttrListValues(): pass
@@ -682,7 +682,7 @@ def check_import_document(deep, testName):
doc2 = parseString("<doc/>")
try:
doc1.importNode(doc2, deep)
- except xmlcore.dom.NotSupportedErr:
+ except xml.dom.NotSupportedErr:
pass
else:
raise Exception(testName +
@@ -705,14 +705,12 @@ def create_nonempty_doctype():
doctype = getDOMImplementation().createDocumentType("doc", None, None)
doctype.entities._seq = []
doctype.notations._seq = []
- notation = xmlcore.dom.minidom.Notation(
- "my-notation", None,
- "http://xml.python.org/notations/my")
+ notation = xml.dom.minidom.Notation("my-notation", None,
+ "http://xml.python.org/notations/my")
doctype.notations._seq.append(notation)
- entity = xmlcore.dom.minidom.Entity(
- "my-entity", None,
- "http://xml.python.org/entities/my",
- "my-notation")
+ entity = xml.dom.minidom.Entity("my-entity", None,
+ "http://xml.python.org/entities/my",
+ "my-notation")
entity.version = "1.0"
entity.encoding = "utf-8"
entity.actualEncoding = "us-ascii"
@@ -731,7 +729,7 @@ def testImportDocumentTypeShallow():
target = create_doc_without_doctype()
try:
imported = target.importNode(src.doctype, 0)
- except xmlcore.dom.NotSupportedErr:
+ except xml.dom.NotSupportedErr:
pass
else:
raise Exception(
@@ -742,7 +740,7 @@ def testImportDocumentTypeDeep():
target = create_doc_without_doctype()
try:
imported = target.importNode(src.doctype, 1)
- except xmlcore.dom.NotSupportedErr:
+ except xml.dom.NotSupportedErr:
pass
else:
raise Exception(
@@ -850,7 +848,7 @@ def testNodeListItem():
doc.unlink()
def testSAX2DOM():
- from xmlcore.dom import pulldom
+ from xml.dom import pulldom
sax2dom = pulldom.SAX2DOM()
sax2dom.startDocument()
@@ -940,11 +938,11 @@ def testRenameAttribute():
attr = elem.attributes['a']
# Simple renaming
- attr = doc.renameNode(attr, xmlcore.dom.EMPTY_NAMESPACE, "b")
+ attr = doc.renameNode(attr, xml.dom.EMPTY_NAMESPACE, "b")
confirm(attr.name == "b"
and attr.nodeName == "b"
and attr.localName is None
- and attr.namespaceURI == xmlcore.dom.EMPTY_NAMESPACE
+ and attr.namespaceURI == xml.dom.EMPTY_NAMESPACE
and attr.prefix is None
and attr.value == "v"
and elem.getAttributeNode("a") is None
@@ -989,11 +987,11 @@ def testRenameAttribute():
and attrmap[("http://xml.python.org/ns2", "d")].isSameNode(attr))
# Rename back to a simple non-NS node
- attr = doc.renameNode(attr, xmlcore.dom.EMPTY_NAMESPACE, "e")
+ attr = doc.renameNode(attr, xml.dom.EMPTY_NAMESPACE, "e")
confirm(attr.name == "e"
and attr.nodeName == "e"
and attr.localName is None
- and attr.namespaceURI == xmlcore.dom.EMPTY_NAMESPACE
+ and attr.namespaceURI == xml.dom.EMPTY_NAMESPACE
and attr.prefix is None
and attr.value == "v"
and elem.getAttributeNode("a") is None
@@ -1007,7 +1005,7 @@ def testRenameAttribute():
try:
doc.renameNode(attr, "http://xml.python.org/ns", "xmlns")
- except xmlcore.dom.NamespaceErr:
+ except xml.dom.NamespaceErr:
pass
else:
print "expected NamespaceErr"
@@ -1020,11 +1018,11 @@ def testRenameElement():
elem = doc.documentElement
# Simple renaming
- elem = doc.renameNode(elem, xmlcore.dom.EMPTY_NAMESPACE, "a")
+ elem = doc.renameNode(elem, xml.dom.EMPTY_NAMESPACE, "a")
confirm(elem.tagName == "a"
and elem.nodeName == "a"
and elem.localName is None
- and elem.namespaceURI == xmlcore.dom.EMPTY_NAMESPACE
+ and elem.namespaceURI == xml.dom.EMPTY_NAMESPACE
and elem.prefix is None
and elem.ownerDocument.isSameNode(doc))
@@ -1047,11 +1045,11 @@ def testRenameElement():
and elem.ownerDocument.isSameNode(doc))
# Rename back to a simple non-NS node
- elem = doc.renameNode(elem, xmlcore.dom.EMPTY_NAMESPACE, "d")
+ elem = doc.renameNode(elem, xml.dom.EMPTY_NAMESPACE, "d")
confirm(elem.tagName == "d"
and elem.nodeName == "d"
and elem.localName is None
- and elem.namespaceURI == xmlcore.dom.EMPTY_NAMESPACE
+ and elem.namespaceURI == xml.dom.EMPTY_NAMESPACE
and elem.prefix is None
and elem.ownerDocument.isSameNode(doc))
@@ -1062,15 +1060,15 @@ def checkRenameNodeSharedConstraints(doc, node):
# Make sure illegal NS usage is detected:
try:
doc.renameNode(node, "http://xml.python.org/ns", "xmlns:foo")
- except xmlcore.dom.NamespaceErr:
+ except xml.dom.NamespaceErr:
pass
else:
print "expected NamespaceErr"
doc2 = parseString("<doc/>")
try:
- doc2.renameNode(node, xmlcore.dom.EMPTY_NAMESPACE, "foo")
- except xmlcore.dom.WrongDocumentErr:
+ doc2.renameNode(node, xml.dom.EMPTY_NAMESPACE, "foo")
+ except xml.dom.WrongDocumentErr:
pass
else:
print "expected WrongDocumentErr"
@@ -1078,12 +1076,12 @@ def checkRenameNodeSharedConstraints(doc, node):
def testRenameOther():
# We have to create a comment node explicitly since not all DOM
# builders used with minidom add comments to the DOM.
- doc = xmlcore.dom.minidom.getDOMImplementation().createDocument(
- xmlcore.dom.EMPTY_NAMESPACE, "e", None)
+ doc = xml.dom.minidom.getDOMImplementation().createDocument(
+ xml.dom.EMPTY_NAMESPACE, "e", None)
node = doc.createComment("comment")
try:
- doc.renameNode(node, xmlcore.dom.EMPTY_NAMESPACE, "foo")
- except xmlcore.dom.NotSupportedErr:
+ doc.renameNode(node, xml.dom.EMPTY_NAMESPACE, "foo")
+ except xml.dom.NotSupportedErr:
pass
else:
print "expected NotSupportedErr when renaming comment node"
@@ -1194,13 +1192,13 @@ def testSchemaType():
# since each supports a different level of DTD information.
t = elem.schemaType
confirm(t.name is None
- and t.namespace == xmlcore.dom.EMPTY_NAMESPACE)
+ and t.namespace == xml.dom.EMPTY_NAMESPACE)
names = "id notid text enum ref refs ent ents nm nms".split()
for name in names:
a = elem.getAttributeNode(name)
t = a.schemaType
confirm(hasattr(t, "name")
- and t.namespace == xmlcore.dom.EMPTY_NAMESPACE)
+ and t.namespace == xml.dom.EMPTY_NAMESPACE)
def testSetIdAttribute():
doc = parseString("<doc a1='v' a2='w'/>")
@@ -1229,7 +1227,7 @@ def testSetIdAttribute():
and a2.isId
and not a3.isId)
# renaming an attribute should not affect its ID-ness:
- doc.renameNode(a2, xmlcore.dom.EMPTY_NAMESPACE, "an")
+ doc.renameNode(a2, xml.dom.EMPTY_NAMESPACE, "an")
confirm(e.isSameNode(doc.getElementById("w"))
and a2.isId)
@@ -1265,7 +1263,7 @@ def testSetIdAttributeNS():
confirm(not a3.isId)
confirm(doc.getElementById("v") is None)
# renaming an attribute should not affect its ID-ness:
- doc.renameNode(a2, xmlcore.dom.EMPTY_NAMESPACE, "an")
+ doc.renameNode(a2, xml.dom.EMPTY_NAMESPACE, "an")
confirm(e.isSameNode(doc.getElementById("w"))
and a2.isId)
@@ -1301,7 +1299,7 @@ def testSetIdAttributeNode():
confirm(not a3.isId)
confirm(doc.getElementById("v") is None)
# renaming an attribute should not affect its ID-ness:
- doc.renameNode(a2, xmlcore.dom.EMPTY_NAMESPACE, "an")
+ doc.renameNode(a2, xml.dom.EMPTY_NAMESPACE, "an")
confirm(e.isSameNode(doc.getElementById("w"))
and a2.isId)
diff --git a/Lib/test/test_multibytecodec.py b/Lib/test/test_multibytecodec.py
index 276b9af..397ebeb 100644
--- a/Lib/test/test_multibytecodec.py
+++ b/Lib/test/test_multibytecodec.py
@@ -6,17 +6,37 @@
from test import test_support
from test import test_multibytecodec_support
-import unittest, StringIO, codecs, sys
+from test.test_support import TESTFN
+import unittest, StringIO, codecs, sys, os
+
+ALL_CJKENCODINGS = [
+# _codecs_cn
+ 'gb2312', 'gbk', 'gb18030', 'hz',
+# _codecs_hk
+ 'big5hkscs',
+# _codecs_jp
+ 'cp932', 'shift_jis', 'euc_jp', 'euc_jisx0213', 'shift_jisx0213',
+ 'euc_jis_2004', 'shift_jis_2004',
+# _codecs_kr
+ 'cp949', 'euc_kr', 'johab',
+# _codecs_tw
+ 'big5', 'cp950',
+# _codecs_iso2022
+ 'iso2022_jp', 'iso2022_jp_1', 'iso2022_jp_2', 'iso2022_jp_2004',
+ 'iso2022_jp_3', 'iso2022_jp_ext', 'iso2022_kr',
+]
class Test_MultibyteCodec(unittest.TestCase):
def test_nullcoding(self):
- self.assertEqual(''.decode('gb18030'), u'')
- self.assertEqual(unicode('', 'gb18030'), u'')
- self.assertEqual(u''.encode('gb18030'), '')
+ for enc in ALL_CJKENCODINGS:
+ self.assertEqual(''.decode(enc), u'')
+ self.assertEqual(unicode('', enc), u'')
+ self.assertEqual(u''.encode(enc), '')
def test_str_decode(self):
- self.assertEqual('abcd'.encode('gb18030'), 'abcd')
+ for enc in ALL_CJKENCODINGS:
+ self.assertEqual('abcd'.encode(enc), 'abcd')
def test_errorcallback_longindex(self):
dec = codecs.getdecoder('euc-kr')
@@ -25,6 +45,14 @@ class Test_MultibyteCodec(unittest.TestCase):
self.assertRaises(IndexError, dec,
'apple\x92ham\x93spam', 'test.cjktest')
+ def test_codingspec(self):
+ try:
+ for enc in ALL_CJKENCODINGS:
+ print >> open(TESTFN, 'w'), '# coding:', enc
+ exec open(TESTFN)
+ finally:
+ os.unlink(TESTFN)
+
class Test_IncrementalEncoder(unittest.TestCase):
def test_stateless(self):
diff --git a/Lib/test/test_optparse.py b/Lib/test/test_optparse.py
index 79df906..4582fa7 100644
--- a/Lib/test/test_optparse.py
+++ b/Lib/test/test_optparse.py
@@ -15,7 +15,7 @@ import copy
import types
import unittest
-from cStringIO import StringIO
+from StringIO import StringIO
from pprint import pprint
from test import test_support
@@ -164,15 +164,23 @@ and kwargs %(kwargs)r
expected_error=None):
"""Assert the parser prints the expected output on stdout."""
save_stdout = sys.stdout
+ encoding = getattr(save_stdout, 'encoding', None)
try:
try:
sys.stdout = StringIO()
+ if encoding:
+ sys.stdout.encoding = encoding
self.parser.parse_args(cmdline_args)
finally:
output = sys.stdout.getvalue()
sys.stdout = save_stdout
except InterceptedError, err:
+ self.assert_(
+ type(output) is types.StringType,
+ "expected output to be an ordinary string, not %r"
+ % type(output))
+
if output != expected_output:
self.fail("expected: \n'''\n" + expected_output +
"'''\nbut got \n'''\n" + output + "'''")
@@ -1452,10 +1460,26 @@ class TestHelp(BaseTest):
make_option("--foo", action="append", type="string", dest='foo',
help="store FOO in the foo list for later fooing"),
]
+
+ # We need to set COLUMNS for the OptionParser constructor, but
+ # we must restore its original value -- otherwise, this test
+ # screws things up for other tests when it's part of the Python
+ # test suite.
+ orig_columns = os.environ.get('COLUMNS')
os.environ['COLUMNS'] = str(columns)
- return InterceptingOptionParser(option_list=options)
+ try:
+ return InterceptingOptionParser(option_list=options)
+ finally:
+ if orig_columns is None:
+ del os.environ['COLUMNS']
+ else:
+ os.environ['COLUMNS'] = orig_columns
def assertHelpEquals(self, expected_output):
+ if type(expected_output) is types.UnicodeType:
+ encoding = self.parser._get_encoding(sys.stdout)
+ expected_output = expected_output.encode(encoding, "replace")
+
save_argv = sys.argv[:]
try:
# Make optparse believe bar.py is being executed.
@@ -1486,6 +1510,27 @@ class TestHelp(BaseTest):
self.parser = self.make_parser(60)
self.assertHelpEquals(_expected_help_short_lines)
+ def test_help_unicode(self):
+ self.parser = InterceptingOptionParser(usage=SUPPRESS_USAGE)
+ self.parser.add_option("-a", action="store_true", help=u"ol\u00E9!")
+ expect = u"""\
+Options:
+ -h, --help show this help message and exit
+ -a ol\u00E9!
+"""
+ self.assertHelpEquals(expect)
+
+ def test_help_unicode_description(self):
+ self.parser = InterceptingOptionParser(usage=SUPPRESS_USAGE,
+ description=u"ol\u00E9!")
+ expect = u"""\
+ol\u00E9!
+
+Options:
+ -h, --help show this help message and exit
+"""
+ self.assertHelpEquals(expect)
+
def test_help_description_groups(self):
self.parser.set_description(
"This is the program description for %prog. %prog has "
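The COLUMNS handling above saves the variable, sets it for the OptionParser constructor, and restores (or removes) it in a finally block. The same save/restore idiom as a generic sketch (the helper name restoring_env is made up for illustration):

    import os

    def restoring_env(name, value, func, *args, **kw):
        # Set an environment variable around one call, then put the old
        # value back, or delete the variable if it was unset before.
        saved = os.environ.get(name)
        os.environ[name] = value
        try:
            return func(*args, **kw)
        finally:
            if saved is None:
                del os.environ[name]
            else:
                os.environ[name] = saved

    # e.g.  parser = restoring_env('COLUMNS', '60', make_parser_somehow)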
diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py
index ffc9420..9497777 100644
--- a/Lib/test/test_os.py
+++ b/Lib/test/test_os.py
@@ -11,6 +11,19 @@ from test import test_support
warnings.filterwarnings("ignore", "tempnam", RuntimeWarning, __name__)
warnings.filterwarnings("ignore", "tmpnam", RuntimeWarning, __name__)
+# Tests creating TESTFN
+class FileTests(unittest.TestCase):
+ def setUp(self):
+ if os.path.exists(test_support.TESTFN):
+ os.unlink(test_support.TESTFN)
+ tearDown = setUp
+
+ def test_access(self):
+ f = os.open(test_support.TESTFN, os.O_CREAT|os.O_RDWR)
+ os.close(f)
+ self.assert_(os.access(test_support.TESTFN, os.W_OK))
+
+
class TemporaryFileTests(unittest.TestCase):
def setUp(self):
self.files = []
@@ -393,6 +406,7 @@ if sys.platform != 'win32':
def test_main():
test_support.run_unittest(
+ FileTests,
TemporaryFileTests,
StatAttributeTests,
EnvironTests,
diff --git a/Lib/test/test_ossaudiodev.py b/Lib/test/test_ossaudiodev.py
index 8810516..5868ea7 100644
--- a/Lib/test/test_ossaudiodev.py
+++ b/Lib/test/test_ossaudiodev.py
@@ -40,6 +40,10 @@ def read_sound_file(path):
data = audioop.ulaw2lin(data, 2)
return (data, rate, 16, nchannels)
+# version of assert that still works with -O
+def _assert(expr, message=None):
+ if not expr:
+ raise AssertionError(message or "assertion failed")
def play_sound_file(data, rate, ssize, nchannels):
try:
@@ -57,9 +61,9 @@ def play_sound_file(data, rate, ssize, nchannels):
dsp.fileno()
# Make sure the read-only attributes work.
- assert dsp.closed is False, "dsp.closed is not False"
- assert dsp.name == "/dev/dsp"
- assert dsp.mode == 'w', "bad dsp.mode: %r" % dsp.mode
+ _assert(dsp.closed is False, "dsp.closed is not False")
+ _assert(dsp.name == "/dev/dsp")
+ _assert(dsp.mode == 'w', "bad dsp.mode: %r" % dsp.mode)
# And make sure they're really read-only.
for attr in ('closed', 'name', 'mode'):
@@ -69,14 +73,23 @@ def play_sound_file(data, rate, ssize, nchannels):
except TypeError:
pass
+ # Compute expected running time of sound sample (in seconds).
+ expected_time = float(len(data)) / (ssize/8) / nchannels / rate
+
# set parameters based on .au file headers
dsp.setparameters(AFMT_S16_NE, nchannels, rate)
+ print ("playing test sound file (expected running time: %.2f sec)"
+ % expected_time)
t1 = time.time()
- print "playing test sound file..."
dsp.write(data)
dsp.close()
t2 = time.time()
- print "elapsed time: %.1f sec" % (t2-t1)
+ elapsed_time = t2 - t1
+
+ percent_diff = (abs(elapsed_time - expected_time) / expected_time) * 100
+ _assert(percent_diff <= 10.0, \
+ ("elapsed time (%.2f sec) > 10%% off of expected time (%.2f sec)"
+ % (elapsed_time, expected_time)))
def test_setparameters(dsp):
# Two configurations for testing:
@@ -101,11 +114,11 @@ def test_setparameters(dsp):
# setparameters() should be able to set this configuration in
# either strict or non-strict mode.
result = dsp.setparameters(fmt, channels, rate, False)
- assert result == (fmt, channels, rate), \
- "setparameters%r: returned %r" % (config + result)
+ _assert(result == (fmt, channels, rate),
+ "setparameters%r: returned %r" % (config, result))
result = dsp.setparameters(fmt, channels, rate, True)
- assert result == (fmt, channels, rate), \
- "setparameters%r: returned %r" % (config + result)
+ _assert(result == (fmt, channels, rate),
+ "setparameters%r: returned %r" % (config, result))
def test_bad_setparameters(dsp):
@@ -123,8 +136,8 @@ def test_bad_setparameters(dsp):
]:
(fmt, channels, rate) = config
result = dsp.setparameters(fmt, channels, rate, False)
- assert result != config, \
- "setparameters: unexpectedly got requested configuration"
+ _assert(result != config,
+ "setparameters: unexpectedly got requested configuration")
try:
result = dsp.setparameters(fmt, channels, rate, True)
@@ -145,6 +158,6 @@ def test():
#test_bad_setparameters(dsp)
finally:
dsp.close()
- assert dsp.closed is True, "dsp.closed is not True"
+ _assert(dsp.closed is True, "dsp.closed is not True")
test()
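The _assert helper introduced above exists because "python -O" strips assert statements entirely, so a bare assert silently stops checking anything under optimization. A minimal illustration (the failing calls are left commented out so the snippet runs cleanly):

    def _assert(expr, message=None):
        # Unlike the assert statement, this check survives "python -O".
        if not expr:
            raise AssertionError(message or "assertion failed")

    # assert 2 + 2 == 5, "silently skipped under -O"
    # _assert(2 + 2 == 5, "still raises, with or without -O")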
diff --git a/Lib/test/test_pep292.py b/Lib/test/test_pep292.py
index 2a4353a..d1100ea 100644
--- a/Lib/test/test_pep292.py
+++ b/Lib/test/test_pep292.py
@@ -58,6 +58,13 @@ class TestTemplate(unittest.TestCase):
s = Template('tim has eaten ${count} bags of ham today')
eq(s.substitute(d), 'tim has eaten 7 bags of ham today')
+ def test_tupleargs(self):
+ eq = self.assertEqual
+ s = Template('$who ate ${meal}')
+ d = dict(who=('tim', 'fred'), meal=('ham', 'kung pao'))
+ eq(s.substitute(d), "('tim', 'fred') ate ('ham', 'kung pao')")
+ eq(s.safe_substitute(d), "('tim', 'fred') ate ('ham', 'kung pao')")
+
def test_SafeTemplate(self):
eq = self.assertEqual
s = Template('$who likes ${what} for ${meal}')
diff --git a/Lib/test/test_popen.py b/Lib/test/test_popen.py
index 2b687ad..fbf5e05 100644
--- a/Lib/test/test_popen.py
+++ b/Lib/test/test_popen.py
@@ -6,7 +6,7 @@
import os
import sys
-from test.test_support import TestSkipped
+from test.test_support import TestSkipped, reap_children
from os import popen
# Test that command-lines get down as we expect.
@@ -35,5 +35,6 @@ def _test_commandline():
def main():
print "Test popen:"
_test_commandline()
+ reap_children()
main()
diff --git a/Lib/test/test_popen2.py b/Lib/test/test_popen2.py
index 4db3cd1..2d54eb0 100644
--- a/Lib/test/test_popen2.py
+++ b/Lib/test/test_popen2.py
@@ -5,7 +5,7 @@
import os
import sys
-from test.test_support import TestSkipped
+from test.test_support import TestSkipped, reap_children
# popen2 contains its own testing routine
# which is especially useful to see if open files
@@ -75,3 +75,4 @@ def _test():
main()
_test()
+reap_children()
diff --git a/Lib/test/test_pyexpat.py b/Lib/test/test_pyexpat.py
index a9a5e8f..0698818 100644
--- a/Lib/test/test_pyexpat.py
+++ b/Lib/test/test_pyexpat.py
@@ -365,3 +365,24 @@ parser.Parse('''<a>
<c/>
</b>
</a>''', 1)
+
+
+def test_parse_only_xml_data():
+ # http://python.org/sf/1296433
+ #
+ xml = "<?xml version='1.0' encoding='iso8859'?><s>%s</s>" % ('a' * 1025)
+ # this one doesn't crash
+ #xml = "<?xml version='1.0'?><s>%s</s>" % ('a' * 10000)
+
+ def handler(text):
+ raise Exception
+
+ parser = expat.ParserCreate()
+ parser.CharacterDataHandler = handler
+
+ try:
+ parser.Parse(xml)
+ except:
+ pass
+
+test_parse_only_xml_data()
diff --git a/Lib/test/test_sax.py b/Lib/test/test_sax.py
index ded81fb..af4c7dd 100644
--- a/Lib/test/test_sax.py
+++ b/Lib/test/test_sax.py
@@ -1,17 +1,17 @@
# regression test for SAX 2.0 -*- coding: iso-8859-1 -*-
# $Id$
-from xmlcore.sax import make_parser, ContentHandler, \
- SAXException, SAXReaderNotAvailable, SAXParseException
+from xml.sax import make_parser, ContentHandler, \
+ SAXException, SAXReaderNotAvailable, SAXParseException
try:
make_parser()
except SAXReaderNotAvailable:
# don't try to test this module if we cannot create a parser
raise ImportError("no XML parsers available")
-from xmlcore.sax.saxutils import XMLGenerator, escape, unescape, quoteattr, \
- XMLFilterBase
-from xmlcore.sax.expatreader import create_parser
-from xmlcore.sax.xmlreader import InputSource, AttributesImpl, AttributesNSImpl
+from xml.sax.saxutils import XMLGenerator, escape, unescape, quoteattr, \
+ XMLFilterBase
+from xml.sax.expatreader import create_parser
+from xml.sax.xmlreader import InputSource, AttributesImpl, AttributesNSImpl
from cStringIO import StringIO
from test.test_support import verify, verbose, TestFailed, findfile
import os
@@ -36,17 +36,17 @@ def test_make_parser2():
# Creating parsers several times in a row should succeed.
# Testing this because there have been failures of this kind
# before.
- from xmlcore.sax import make_parser
+ from xml.sax import make_parser
p = make_parser()
- from xmlcore.sax import make_parser
+ from xml.sax import make_parser
p = make_parser()
- from xmlcore.sax import make_parser
+ from xml.sax import make_parser
p = make_parser()
- from xmlcore.sax import make_parser
+ from xml.sax import make_parser
p = make_parser()
- from xmlcore.sax import make_parser
+ from xml.sax import make_parser
p = make_parser()
- from xmlcore.sax import make_parser
+ from xml.sax import make_parser
p = make_parser()
except:
return 0
@@ -108,7 +108,7 @@ def test_make_parser():
try:
# Creating a parser should succeed - it should fall back
# to the expatreader
- p = make_parser(['xmlcore.parsers.no_such_parser'])
+ p = make_parser(['xml.parsers.no_such_parser'])
except:
return 0
else:
@@ -175,11 +175,14 @@ def test_xmlgen_attr_escape():
gen.endElement("e")
gen.startElement("e", {"a": "'\""})
gen.endElement("e")
+ gen.startElement("e", {"a": "\n\r\t"})
+ gen.endElement("e")
gen.endElement("doc")
gen.endDocument()
- return result.getvalue() == start \
- + "<doc a='\"'><e a=\"'\"></e><e a=\"'&quot;\"></e></doc>"
+ return result.getvalue() == start + ("<doc a='\"'><e a=\"'\"></e>"
+ "<e a=\"'&quot;\"></e>"
+ "<e a=\"&#10;&#13;&#9;\"></e></doc>")
def test_xmlgen_ignorable():
result = StringIO()
@@ -668,6 +671,55 @@ def test_nsattrs_wattr():
attrs.getQNameByName((ns_uri, "attr")) == "ns:attr"
+# During the development of Python 2.5, an attempt to move the "xml"
+# package implementation to a new package ("xmlcore") proved painful.
+# The goal of this change was to allow applications to be able to
+# obtain and rely on behavior in the standard library implementation
+# of the XML support without needing to be concerned about the
+# availability of the PyXML implementation.
+#
+# While the existing import hackery in Lib/xml/__init__.py can cause
+# PyXML's _xmlpus package to supplant the "xml" package, that only
+# works because either implementation uses the "xml" package name for
+# imports.
+#
+# The move resulted in a number of problems related to the fact that
+# the import machinery's "package context" is based on the name that's
+# being imported rather than the __name__ of the actual package
+# containment; it wasn't possible for the "xml" package to be replaced
+# by a simple module that indirected imports to the "xmlcore" package.
+#
+# The following two tests exercised bugs that were introduced in that
+# attempt. Keeping these tests around will help detect problems with
+# other attempts to provide reliable access to the standard library's
+# implementation of the XML support.
+
+def test_sf_1511497():
+ # Bug report: http://www.python.org/sf/1511497
+ import sys
+ old_modules = sys.modules.copy()
+ for modname in sys.modules.keys():
+ if modname.startswith("xml."):
+ del sys.modules[modname]
+ try:
+ import xml.sax.expatreader
+ module = xml.sax.expatreader
+ return module.__name__ == "xml.sax.expatreader"
+ finally:
+ sys.modules.update(old_modules)
+
+def test_sf_1513611():
+ # Bug report: http://www.python.org/sf/1513611
+ sio = StringIO("invalid")
+ parser = make_parser()
+ from xml.sax import SAXParseException
+ try:
+ parser.parse(sio)
+ except SAXParseException:
+ return True
+ else:
+ return False
+
# ===== Main program
def make_test_output():
diff --git a/Lib/test/test_scope.py b/Lib/test/test_scope.py
index f37254c..239745c 100644
--- a/Lib/test/test_scope.py
+++ b/Lib/test/test_scope.py
@@ -299,6 +299,17 @@ except NameError:
else:
raise TestFailed
+# test for bug #1501934: incorrect LOAD/STORE_GLOBAL generation
+global_x = 1
+def f():
+ global_x += 1
+try:
+ f()
+except UnboundLocalError:
+ pass
+else:
+ raise TestFailed, 'scope of global_x not correctly determined'
+
print "14. complex definitions"
def makeReturner(*lst):
diff --git a/Lib/test/test_select.py b/Lib/test/test_select.py
index eaec52b..d341324 100644
--- a/Lib/test/test_select.py
+++ b/Lib/test/test_select.py
@@ -1,5 +1,5 @@
# Testing select module
-from test.test_support import verbose
+from test.test_support import verbose, reap_children
import select
import os
@@ -65,5 +65,6 @@ def test():
continue
print 'Unexpected return values from select():', rfd, wfd, xfd
p.close()
+ reap_children()
test()
diff --git a/Lib/test/test_sgmllib.py b/Lib/test/test_sgmllib.py
index 8e8b02f..28a21a4 100644
--- a/Lib/test/test_sgmllib.py
+++ b/Lib/test/test_sgmllib.py
@@ -1,4 +1,6 @@
+import htmlentitydefs
import pprint
+import re
import sgmllib
import unittest
from test import test_support
@@ -64,6 +66,37 @@ class CDATAEventCollector(EventCollector):
self.setliteral()
+class HTMLEntityCollector(EventCollector):
+
+ entity_or_charref = re.compile('(?:&([a-zA-Z][-.a-zA-Z0-9]*)'
+ '|&#(x[0-9a-zA-Z]+|[0-9]+))(;?)')
+
+ def convert_charref(self, name):
+ self.append(("charref", "convert", name))
+ if name[0] != "x":
+ return EventCollector.convert_charref(self, name)
+
+ def convert_codepoint(self, codepoint):
+ self.append(("codepoint", "convert", codepoint))
+ EventCollector.convert_codepoint(self, codepoint)
+
+ def convert_entityref(self, name):
+ self.append(("entityref", "convert", name))
+ return EventCollector.convert_entityref(self, name)
+
+ # These methods record that they were called, then pass the call
+ # along to the default implementation so that its actions can be
+ # recorded.
+
+ def handle_charref(self, data):
+ self.append(("charref", data))
+ sgmllib.SGMLParser.handle_charref(self, data)
+
+ def handle_entityref(self, data):
+ self.append(("entityref", data))
+ sgmllib.SGMLParser.handle_entityref(self, data)
+
+
class SGMLParserTestCase(unittest.TestCase):
collector = EventCollector
@@ -218,7 +251,9 @@ DOCTYPE html PUBLIC '-//W3C//DTD HTML 4.01//EN'
"""Substitution of entities and charrefs in attribute values"""
# SF bug #1452246
self.check_events("""<a b=&lt; c=&lt;&gt; d=&lt-&gt; e='&lt; '
- f="&xxx;" g='&#32;&#33;' h='&#500;' i='x?a=b&c=d;'>""",
+ f="&xxx;" g='&#32;&#33;' h='&#500;'
+ i='x?a=b&c=d;'
+ j='&amp;#42;' k='&#38;#42;'>""",
[("starttag", "a", [("b", "<"),
("c", "<>"),
("d", "&lt->"),
@@ -226,13 +261,59 @@ DOCTYPE html PUBLIC '-//W3C//DTD HTML 4.01//EN'
("f", "&xxx;"),
("g", " !"),
("h", "&#500;"),
- ("i", "x?a=b&c=d;"), ])])
+ ("i", "x?a=b&c=d;"),
+ ("j", "&#42;"),
+ ("k", "&#42;"),
+ ])])
+
+ def test_convert_overrides(self):
+ # This checks that the character and entity reference
+ # conversion helpers are called at the documented times. No
+ # attempt is made to really change what the parser accepts.
+ #
+ self.collector = HTMLEntityCollector
+ self.check_events(('<a title="&ldquo;test&#x201d;">foo</a>'
+ '&foobar;&#42;'), [
+ ('entityref', 'convert', 'ldquo'),
+ ('charref', 'convert', 'x201d'),
+ ('starttag', 'a', [('title', '&ldquo;test&#x201d;')]),
+ ('data', 'foo'),
+ ('endtag', 'a'),
+ ('entityref', 'foobar'),
+ ('entityref', 'convert', 'foobar'),
+ ('charref', '42'),
+ ('charref', 'convert', '42'),
+ ('codepoint', 'convert', 42),
+ ])
+
+ def test_attr_values_quoted_markup(self):
+ """Multi-line and markup in attribute values"""
+ self.check_events("""<a title='foo\n<br>bar'>text</a>""",
+ [("starttag", "a", [("title", "foo\n<br>bar")]),
+ ("data", "text"),
+ ("endtag", "a")])
+ self.check_events("""<a title='less < than'>text</a>""",
+ [("starttag", "a", [("title", "less < than")]),
+ ("data", "text"),
+ ("endtag", "a")])
+ self.check_events("""<a title='greater > than'>text</a>""",
+ [("starttag", "a", [("title", "greater > than")]),
+ ("data", "text"),
+ ("endtag", "a")])
def test_attr_funky_names(self):
self.check_events("""<a a.b='v' c:d=v e-f=v>""", [
("starttag", "a", [("a.b", "v"), ("c:d", "v"), ("e-f", "v")]),
])
+ def test_attr_value_ip6_url(self):
+ # http://www.python.org/sf/853506
+ self.check_events(("<a href='http://[1080::8:800:200C:417A]/'>"
+ "<a href=http://[1080::8:800:200C:417A]/>"), [
+ ("starttag", "a", [("href", "http://[1080::8:800:200C:417A]/")]),
+ ("starttag", "a", [("href", "http://[1080::8:800:200C:417A]/")]),
+ ])
+
def test_illegal_declarations(self):
s = 'abc<!spacer type="block" height="25">def'
self.check_events(s, [
@@ -301,8 +382,8 @@ DOCTYPE html PUBLIC '-//W3C//DTD HTML 4.01//EN'
# that needs to be carefully considered before changing it.
def _test_starttag_end_boundary(self):
- self.check_events("""<a b='<'>""", [("starttag", "a", [("b", "<")])])
- self.check_events("""<a b='>'>""", [("starttag", "a", [("b", ">")])])
+ self.check_events("<a b='<'>", [("starttag", "a", [("b", "<")])])
+ self.check_events("<a b='>'>", [("starttag", "a", [("b", ">")])])
def _test_buffer_artefacts(self):
output = [("starttag", "a", [("b", "<")])]
@@ -322,17 +403,17 @@ DOCTYPE html PUBLIC '-//W3C//DTD HTML 4.01//EN'
self.check_events(["<a b='>'", ">"], output)
output = [("comment", "abc")]
- self._run_check(["", "<!--abc-->"], output)
- self._run_check(["<", "!--abc-->"], output)
- self._run_check(["<!", "--abc-->"], output)
- self._run_check(["<!-", "-abc-->"], output)
- self._run_check(["<!--", "abc-->"], output)
- self._run_check(["<!--a", "bc-->"], output)
- self._run_check(["<!--ab", "c-->"], output)
- self._run_check(["<!--abc", "-->"], output)
- self._run_check(["<!--abc-", "->"], output)
- self._run_check(["<!--abc--", ">"], output)
- self._run_check(["<!--abc-->", ""], output)
+ self.check_events(["", "<!--abc-->"], output)
+ self.check_events(["<", "!--abc-->"], output)
+ self.check_events(["<!", "--abc-->"], output)
+ self.check_events(["<!-", "-abc-->"], output)
+ self.check_events(["<!--", "abc-->"], output)
+ self.check_events(["<!--a", "bc-->"], output)
+ self.check_events(["<!--ab", "c-->"], output)
+ self.check_events(["<!--abc", "-->"], output)
+ self.check_events(["<!--abc-", "->"], output)
+ self.check_events(["<!--abc--", ">"], output)
+ self.check_events(["<!--abc-->", ""], output)
def _test_starttag_junk_chars(self):
self.check_parse_error("<")
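The HTMLEntityCollector above records each call to the charref/entityref hooks and then defers to sgmllib's default handling so its conversions still run. A stripped-down sketch of the same override-and-delegate pattern (class name and sample input are illustrative):

    import sgmllib

    class EntityLogger(sgmllib.SGMLParser):
        def handle_entityref(self, name):
            # Record the reference, then let the stock handler convert it.
            print 'entityref:', name
            sgmllib.SGMLParser.handle_entityref(self, name)

        def handle_charref(self, name):
            print 'charref:', name
            sgmllib.SGMLParser.handle_charref(self, name)

    p = EntityLogger()
    p.feed('<p>&amp; &#65;</p>')   # prints "entityref: amp" then "charref: 65"
    p.close()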
diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py
index 6ab5a35..da71fa8 100644
--- a/Lib/test/test_shutil.py
+++ b/Lib/test/test_shutil.py
@@ -74,6 +74,53 @@ class TestShutil(unittest.TestCase):
except:
pass
+ def test_copytree_simple(self):
+ def write_data(path, data):
+ f = open(path, "w")
+ f.write(data)
+ f.close()
+
+ def read_data(path):
+ f = open(path)
+ data = f.read()
+ f.close()
+ return data
+
+ src_dir = tempfile.mkdtemp()
+ dst_dir = os.path.join(tempfile.mkdtemp(), 'destination')
+
+ write_data(os.path.join(src_dir, 'test.txt'), '123')
+
+ os.mkdir(os.path.join(src_dir, 'test_dir'))
+ write_data(os.path.join(src_dir, 'test_dir', 'test.txt'), '456')
+
+ try:
+ shutil.copytree(src_dir, dst_dir)
+ self.assertTrue(os.path.isfile(os.path.join(dst_dir, 'test.txt')))
+ self.assertTrue(os.path.isdir(os.path.join(dst_dir, 'test_dir')))
+ self.assertTrue(os.path.isfile(os.path.join(dst_dir, 'test_dir',
+ 'test.txt')))
+ actual = read_data(os.path.join(dst_dir, 'test.txt'))
+ self.assertEqual(actual, '123')
+ actual = read_data(os.path.join(dst_dir, 'test_dir', 'test.txt'))
+ self.assertEqual(actual, '456')
+ finally:
+ for path in (
+ os.path.join(src_dir, 'test.txt'),
+ os.path.join(dst_dir, 'test.txt'),
+ os.path.join(src_dir, 'test_dir', 'test.txt'),
+ os.path.join(dst_dir, 'test_dir', 'test.txt'),
+ ):
+ if os.path.exists(path):
+ os.remove(path)
+ for path in (
+ os.path.join(src_dir, 'test_dir'),
+ os.path.join(dst_dir, 'test_dir'),
+ ):
+ if os.path.exists(path):
+ os.removedirs(path)
+
+
if hasattr(os, "symlink"):
def test_dont_copy_file_onto_link_to_itself(self):
# bug 851123.
diff --git a/Lib/test/test_signal.py b/Lib/test/test_signal.py
index f7fcb04..a6267d2 100644
--- a/Lib/test/test_signal.py
+++ b/Lib/test/test_signal.py
@@ -25,7 +25,11 @@ script = """
) &
""" % vars()
+a_called = b_called = False
+
def handlerA(*args):
+ global a_called
+ a_called = True
if verbose:
print "handlerA", args
@@ -33,11 +37,14 @@ class HandlerBCalled(Exception):
pass
def handlerB(*args):
+ global b_called
+ b_called = True
if verbose:
print "handlerB", args
raise HandlerBCalled, args
-signal.alarm(20) # Entire test lasts at most 20 sec.
+MAX_DURATION = 20
+signal.alarm(MAX_DURATION) # Entire test should last at most 20 sec.
hup = signal.signal(signal.SIGHUP, handlerA)
usr1 = signal.signal(signal.SIGUSR1, handlerB)
usr2 = signal.signal(signal.SIGUSR2, signal.SIG_IGN)
@@ -65,9 +72,35 @@ try:
except TypeError:
pass
+# Set up a child to send an alarm signal to us (the parent) after waiting
+# long enough to receive the alarm. It seems we miss the alarm for some
+# reason. This will hopefully stop the hangs on Tru64/Alpha.
+def force_test_exit():
+ # Sigh, both imports seem necessary to avoid errors.
+ import os
+ fork_pid = os.fork()
+ if fork_pid == 0:
+ # In child
+ import os, time
+ try:
+ # Wait 5 seconds longer than the expected alarm to give enough
+ # time for the normal sequence of events to occur. This is
+ # just a stop-gap to prevent the test from hanging.
+ time.sleep(MAX_DURATION + 5)
+ print >> sys.__stdout__, ' child should not have to kill parent'
+ for i in range(3):
+ os.kill(pid, signal.SIGALRM)
+ finally:
+ os._exit(0)
+ # In parent (or error)
+ return fork_pid
+
try:
os.system(script)
+ # Try to ensure this test exits even if there is some problem with alarm.
+ # Tru64/Alpha sometimes hangs and is ultimately killed by the buildbot.
+ fork_pid = force_test_exit()
print "starting pause() loop..."
try:
@@ -88,6 +121,22 @@ try:
if verbose:
print "KeyboardInterrupt (assume the alarm() went off)"
+ # Forcibly kill the child we created to ping us if there was a test error.
+ try:
+ # Make sure we don't kill ourself if there was a fork error.
+ if fork_pid > 0:
+ os.kill(fork_pid, signal.SIGKILL)
+ except:
+ # If the child killed us, it has probably exited. Killing a
+ # non-existent process will raise an error which we don't care about.
+ pass
+
+ if not a_called:
+ print 'HandlerA not called'
+
+ if not b_called:
+ print 'HandlerB not called'
+
finally:
signal.signal(signal.SIGHUP, hup)
signal.signal(signal.SIGUSR1, usr1)
diff --git a/Lib/test/test_socket.py b/Lib/test/test_socket.py
index 01b9b5b..356b801 100644
--- a/Lib/test/test_socket.py
+++ b/Lib/test/test_socket.py
@@ -11,6 +11,7 @@ import Queue
import sys
import array
from weakref import proxy
+import signal
PORT = 50007
HOST = 'localhost'
@@ -21,7 +22,8 @@ class SocketTCPTest(unittest.TestCase):
def setUp(self):
self.serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.serv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
- self.serv.bind((HOST, PORT))
+ global PORT
+ PORT = test_support.bind_port(self.serv, HOST, PORT)
self.serv.listen(1)
def tearDown(self):
@@ -33,7 +35,8 @@ class SocketUDPTest(unittest.TestCase):
def setUp(self):
self.serv = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self.serv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
- self.serv.bind((HOST, PORT))
+ global PORT
+ PORT = test_support.bind_port(self.serv, HOST, PORT)
def tearDown(self):
self.serv.close()
@@ -447,7 +450,12 @@ class GeneralModuleTests(unittest.TestCase):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.bind(("0.0.0.0", PORT+1))
name = sock.getsockname()
- self.assertEqual(name, ("0.0.0.0", PORT+1))
+ # XXX(nnorwitz): http://tinyurl.com/os5jz seems to indicate
+ # it is reasonable to get the host's addr in addition to 0.0.0.0.
+ # At least for eCos. This is required for the S/390 to pass.
+ my_ip_addr = socket.gethostbyname(socket.gethostname())
+ self.assert_(name[0] in ("0.0.0.0", my_ip_addr), '%s invalid' % name[0])
+ self.assertEqual(name[1], PORT+1)
def testGetSockOpt(self):
# Testing getsockopt()
@@ -575,6 +583,21 @@ class BasicUDPTest(ThreadedUDPSocketTest):
def _testRecvFrom(self):
self.cli.sendto(MSG, 0, (HOST, PORT))
+class TCPCloserTest(ThreadedTCPSocketTest):
+
+ def testClose(self):
+ conn, addr = self.serv.accept()
+ conn.close()
+
+ sd = self.cli
+ read, write, err = select.select([sd], [], [], 1.0)
+ self.assertEqual(read, [sd])
+ self.assertEqual(sd.recv(1), '')
+
+ def _testClose(self):
+ self.cli.connect((HOST, PORT))
+ time.sleep(1.0)
+
class BasicSocketPairTest(SocketPairTest):
def __init__(self, methodName='runTest'):
@@ -795,6 +818,37 @@ class TCPTimeoutTest(SocketTCPTest):
if not ok:
self.fail("accept() returned success when we did not expect it")
+ def testInterruptedTimeout(self):
+ # XXX I don't know how to do this test on MSWindows or any other
+ # platform that doesn't support signal.alarm() or os.kill(), though
+ # the bug should have existed on all platforms.
+ if not hasattr(signal, "alarm"):
+ return # can only test on *nix
+ self.serv.settimeout(5.0) # must be longer than alarm
+ class Alarm(Exception):
+ pass
+ def alarm_handler(signal, frame):
+ raise Alarm
+ old_alarm = signal.signal(signal.SIGALRM, alarm_handler)
+ try:
+ signal.alarm(2) # POSIX allows alarm to be up to 1 second early
+ try:
+ foo = self.serv.accept()
+ except socket.timeout:
+ self.fail("caught timeout instead of Alarm")
+ except Alarm:
+ pass
+ except:
+ self.fail("caught other exception instead of Alarm")
+ else:
+ self.fail("nothing caught")
+ signal.alarm(0) # shut off alarm
+ except Alarm:
+ self.fail("got Alarm in wrong place")
+ finally:
+ # no alarm can be pending. Safe to restore old handler.
+ signal.signal(signal.SIGALRM, old_alarm)
+
class UDPTimeoutTest(SocketTCPTest):
def testUDPTimeout(self):
@@ -883,8 +937,8 @@ class BufferIOTest(SocketConnectedTest):
self.serv_conn.send(buf)
def test_main():
- tests = [GeneralModuleTests, BasicTCPTest, TCPTimeoutTest, TestExceptions,
- BufferIOTest]
+ tests = [GeneralModuleTests, BasicTCPTest, TCPCloserTest, TCPTimeoutTest,
+ TestExceptions, BufferIOTest]
if sys.platform != 'mac':
tests.extend([ BasicUDPTest, UDPTimeoutTest ])
@@ -899,7 +953,10 @@ def test_main():
tests.append(BasicSocketPairTest)
if sys.platform == 'linux2':
tests.append(TestLinuxAbstractNamespace)
+
+ thread_info = test_support.threading_setup()
test_support.run_unittest(*tests)
+ test_support.threading_cleanup(*thread_info)
if __name__ == "__main__":
test_main()
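testInterruptedTimeout above verifies that a SIGALRM delivered while accept() is blocking surfaces as the handler's exception rather than as socket.timeout. A POSIX-only sketch of that setup, assuming a correctly behaving interpreter (no client ever connects, so accept() just waits for the alarm):

    import signal, socket

    class Alarm(Exception):
        pass

    def raise_alarm(signum, frame):
        raise Alarm

    serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    serv.bind(('127.0.0.1', 0))        # any free port
    serv.listen(1)
    serv.settimeout(5.0)               # socket timeout must outlast the alarm

    old_handler = signal.signal(signal.SIGALRM, raise_alarm)
    signal.alarm(2)
    try:
        try:
            serv.accept()              # interrupted by the alarm
        except Alarm:
            pass                       # this, not socket.timeout, is expected
    finally:
        signal.alarm(0)                # cancel any pending alarm
        signal.signal(signal.SIGALRM, old_handler)
        serv.close()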
diff --git a/Lib/test/test_socket_ssl.py b/Lib/test/test_socket_ssl.py
index 1091383..3c9c9f0 100644
--- a/Lib/test/test_socket_ssl.py
+++ b/Lib/test/test_socket_ssl.py
@@ -3,6 +3,7 @@
import sys
from test import test_support
import socket
+import errno
# Optionally test SSL support. This requires the 'network' resource as given
# on the regrtest command line.
@@ -33,6 +34,13 @@ def test_basic():
def test_timeout():
test_support.requires('network')
+ def error_msg(extra_msg):
+ print >> sys.stderr, """\
+ WARNING: an attempt to connect to %r %s, in
+ test_timeout. That may be legitimate, but is not the outcome we hoped
+ for. If this message is seen often, test_timeout should be changed to
+ use a more reliable address.""" % (ADDR, extra_msg)
+
if test_support.verbose:
print "test_timeout ..."
@@ -48,12 +56,14 @@ def test_timeout():
try:
s.connect(ADDR)
except socket.timeout:
- print >> sys.stderr, """\
- WARNING: an attempt to connect to %r timed out, in
- test_timeout. That may be legitimate, but is not the outcome we hoped
- for. If this message is seen often, test_timeout should be changed to
- use a more reliable address.""" % (ADDR,)
+ error_msg('timed out')
return
+ except socket.error, exc: # In case connection is refused.
+ if exc.args[0] == errno.ECONNREFUSED:
+ error_msg('was refused')
+ return
+ else:
+ raise
ss = socket.ssl(s)
# Read part of return welcome banner twice.
@@ -71,7 +81,7 @@ def test_rude_shutdown():
return
# Some random port to connect to.
- PORT = 9934
+ PORT = [9934]
listener_ready = threading.Event()
listener_gone = threading.Event()
@@ -82,7 +92,7 @@ def test_rude_shutdown():
# know the socket is gone.
def listener():
s = socket.socket()
- s.bind(('', PORT))
+ PORT[0] = test_support.bind_port(s, '', PORT[0])
s.listen(5)
listener_ready.set()
s.accept()
@@ -92,7 +102,7 @@ def test_rude_shutdown():
def connector():
listener_ready.wait()
s = socket.socket()
- s.connect(('localhost', PORT))
+ s.connect(('localhost', PORT[0]))
listener_gone.wait()
try:
ssl_sock = socket.ssl(s)
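Turning PORT into a one-element list above lets the nested listener() publish the port it actually bound; Python 2 has no nonlocal statement, so mutating a shared list is the usual workaround. The idiom in miniature:

    # A one-element list acts as a rebindable cell shared between a
    # function and its nested helpers.
    port = [9934]

    def pick_port():
        port[0] = 10243    # visible to every other user of the list

    pick_port()
    print port[0]          # -> 10243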
diff --git a/Lib/test/test_socketserver.py b/Lib/test/test_socketserver.py
index 1245ba5..dd4532f 100644
--- a/Lib/test/test_socketserver.py
+++ b/Lib/test/test_socketserver.py
@@ -1,11 +1,13 @@
# Test suite for SocketServer.py
from test import test_support
-from test.test_support import verbose, verify, TESTFN, TestSkipped
+from test.test_support import (verbose, verify, TESTFN, TestSkipped,
+ reap_children)
test_support.requires('network')
from SocketServer import *
import socket
+import errno
import select
import time
import threading
@@ -77,6 +79,11 @@ class ServerThread(threading.Thread):
pass
if verbose: print "thread: creating server"
svr = svrcls(self.__addr, self.__hdlrcls)
+ # pull the address out of the server in case it changed
+ # this can happen if another process is using the port
+ addr = getattr(svr, 'server_address')
+ if addr:
+ self.__addr = addr
if verbose: print "thread: serving three times"
svr.serve_a_few()
if verbose: print "thread: done"
@@ -136,7 +143,25 @@ def testloop(proto, servers, hdlrcls, testfunc):
t.join()
if verbose: print "done"
-tcpservers = [TCPServer, ThreadingTCPServer]
+class ForgivingTCPServer(TCPServer):
+ # prevent errors if another process is using the port we want
+ def server_bind(self):
+ host, default_port = self.server_address
+ # this code shamelessly stolen from test.test_support
+ # the ports were changed to protect the innocent
+ import sys
+ for port in [default_port, 3434, 8798, 23833]:
+ try:
+ self.server_address = host, port
+ TCPServer.server_bind(self)
+ break
+ except socket.error, (err, msg):
+ if err != errno.EADDRINUSE:
+ raise
+ print >>sys.__stderr__, \
+ ' WARNING: failed to listen on port %d, trying another' % port
+
+tcpservers = [ForgivingTCPServer, ThreadingTCPServer]
if hasattr(os, 'fork') and os.name not in ('os2',):
tcpservers.append(ForkingTCPServer)
udpservers = [UDPServer, ThreadingUDPServer]
@@ -175,6 +200,7 @@ def test_main():
testall()
finally:
cleanup()
+ reap_children()
if __name__ == "__main__":
test_main()
diff --git a/Lib/test/test_struct.py b/Lib/test/test_struct.py
index aa458e6..66fd667 100644
--- a/Lib/test/test_struct.py
+++ b/Lib/test/test_struct.py
@@ -15,9 +15,11 @@ try:
except ImportError:
PY_STRUCT_RANGE_CHECKING = 0
PY_STRUCT_OVERFLOW_MASKING = 1
+ PY_STRUCT_FLOAT_COERCE = 2
else:
- PY_STRUCT_RANGE_CHECKING = _struct._PY_STRUCT_RANGE_CHECKING
- PY_STRUCT_OVERFLOW_MASKING = _struct._PY_STRUCT_OVERFLOW_MASKING
+ PY_STRUCT_RANGE_CHECKING = getattr(_struct, '_PY_STRUCT_RANGE_CHECKING', 0)
+ PY_STRUCT_OVERFLOW_MASKING = getattr(_struct, '_PY_STRUCT_OVERFLOW_MASKING', 0)
+ PY_STRUCT_FLOAT_COERCE = getattr(_struct, '_PY_STRUCT_FLOAT_COERCE', 0)
def string_reverse(s):
return "".join(reversed(s))
@@ -46,33 +48,40 @@ def any_err(func, *args):
raise TestFailed, "%s%s did not raise error" % (
func.__name__, args)
+def with_warning_restore(func):
+ def _with_warning_restore(*args, **kw):
+ # The `warnings` module doesn't have an advertised way to restore
+ # its filter list. Cheat.
+ save_warnings_filters = warnings.filters[:]
+ # Grrr, we need this function to warn every time. Without removing
+ # the warningregistry, running test_tarfile then test_struct would fail
+ # on 64-bit platforms.
+ globals = func.func_globals
+ if '__warningregistry__' in globals:
+ del globals['__warningregistry__']
+ warnings.filterwarnings("error", r"""^struct.*""", DeprecationWarning)
+ warnings.filterwarnings("error", r""".*format requires.*""",
+ DeprecationWarning)
+ try:
+ return func(*args, **kw)
+ finally:
+ warnings.filters[:] = save_warnings_filters[:]
+ return _with_warning_restore
+
def deprecated_err(func, *args):
- # The `warnings` module doesn't have an advertised way to restore
- # its filter list. Cheat.
- save_warnings_filters = warnings.filters[:]
- # Grrr, we need this function to warn every time. Without removing
- # the warningregistry, running test_tarfile then test_struct would fail
- # on 64-bit platforms.
- globals = func.func_globals
- if '__warningregistry__' in globals:
- del globals['__warningregistry__']
- warnings.filterwarnings("error", r"""^struct.*""", DeprecationWarning)
- warnings.filterwarnings("error", r""".*format requires.*""",
- DeprecationWarning)
try:
- try:
- func(*args)
- except (struct.error, TypeError):
- pass
- except DeprecationWarning:
- if not PY_STRUCT_OVERFLOW_MASKING:
- raise TestFailed, "%s%s expected to raise struct.error" % (
- func.__name__, args)
- else:
- raise TestFailed, "%s%s did not raise error" % (
+ func(*args)
+ except (struct.error, TypeError):
+ pass
+ except DeprecationWarning:
+ if not PY_STRUCT_OVERFLOW_MASKING:
+ raise TestFailed, "%s%s expected to raise struct.error" % (
func.__name__, args)
- finally:
- warnings.filters[:] = save_warnings_filters[:]
+ else:
+ raise TestFailed, "%s%s did not raise error" % (
+ func.__name__, args)
+deprecated_err = with_warning_restore(deprecated_err)
+
simple_err(struct.calcsize, 'Z')
@@ -475,6 +484,9 @@ def test_705836():
test_705836()
+###########################################################################
+# SF bug 1229380. No struct.pack exception for some out of range integers
+
def test_1229380():
import sys
for endian in ('', '>', '<'):
@@ -491,6 +503,37 @@ def test_1229380():
if PY_STRUCT_RANGE_CHECKING:
test_1229380()
+###########################################################################
+# SF bug 1530559. struct.pack raises TypeError where it used to convert.
+
+def check_float_coerce(format, number):
+ if PY_STRUCT_FLOAT_COERCE == 2:
+ # Test for pre-2.5 struct module
+ packed = struct.pack(format, number)
+ floored = struct.unpack(format, packed)[0]
+ if floored != int(number):
+ raise TestFailed("did not correctly coerce float to int")
+ return
+ try:
+ struct.pack(format, number)
+ except (struct.error, TypeError):
+ if PY_STRUCT_FLOAT_COERCE:
+ raise TestFailed("expected DeprecationWarning for float coerce")
+ except DeprecationWarning:
+ if not PY_STRUCT_FLOAT_COERCE:
+ raise TestFailed("expected to raise struct.error for float coerce")
+ else:
+ raise TestFailed("did not raise error for float coerce")
+
+check_float_coerce = with_warning_restore(check_float_coerce)
+
+def test_1530559():
+ for endian in ('', '>', '<'):
+ for fmt in ('B', 'H', 'I', 'L', 'b', 'h', 'i', 'l'):
+ check_float_coerce(endian + fmt, 1.0)
+ check_float_coerce(endian + fmt, 1.5)
+
+test_1530559()
###########################################################################
# Packing and unpacking to/from buffers.
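The with_warning_restore decorator above factors the warnings-filter save/restore out of deprecated_err so other checks can reuse it. A simplified sketch of the same decorator idea (it omits the __warningregistry__ purge the patch also performs; the name restore_warning_filters is illustrative):

    import warnings

    def restore_warning_filters(func):
        # Undo any warnings-filter changes the wrapped callable makes.
        def wrapper(*args, **kw):
            saved = warnings.filters[:]
            try:
                return func(*args, **kw)
            finally:
                warnings.filters[:] = saved
        return wrapper

    @restore_warning_filters
    def provoke():
        warnings.filterwarnings("error", category=DeprecationWarning)
        # ... exercise code that is expected to trip the warning ...

    provoke()
    # the global filter list is unchanged at this point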
diff --git a/Lib/test/test_subprocess.py b/Lib/test/test_subprocess.py
index edf5bd0..8c8ac40 100644
--- a/Lib/test/test_subprocess.py
+++ b/Lib/test/test_subprocess.py
@@ -27,6 +27,18 @@ def remove_stderr_debug_decorations(stderr):
return re.sub(r"\[\d+ refs\]\r?\n?$", "", stderr)
class ProcessTestCase(unittest.TestCase):
+ def setUp(self):
+ # Try to minimize the number of children we have so this test
+ # doesn't crash on some buildbots (Alphas in particular).
+ if hasattr(test_support, "reap_children"):
+ test_support.reap_children()
+
+ def tearDown(self):
+ # Try to minimize the number of children we have so this test
+ # doesn't crash on some buildbots (Alphas in particular).
+ if hasattr(test_support, "reap_children"):
+ test_support.reap_children()
+
def mkstemp(self):
"""wrapper for mkstemp, calling mktemp if mkstemp is not available"""
if hasattr(tempfile, "mkstemp"):
@@ -56,7 +68,7 @@ class ProcessTestCase(unittest.TestCase):
subprocess.check_call([sys.executable, "-c",
"import sys; sys.exit(47)"])
except subprocess.CalledProcessError, e:
- self.assertEqual(e.errno, 47)
+ self.assertEqual(e.returncode, 47)
else:
self.fail("Expected CalledProcessError")
@@ -384,7 +396,8 @@ class ProcessTestCase(unittest.TestCase):
def test_no_leaking(self):
# Make sure we leak no resources
- if test_support.is_resource_enabled("subprocess") and not mswindows:
+ if not hasattr(test_support, "is_resource_enabled") \
+ or test_support.is_resource_enabled("subprocess") and not mswindows:
max_handles = 1026 # too much for most UNIX systems
else:
max_handles = 65
@@ -463,10 +476,36 @@ class ProcessTestCase(unittest.TestCase):
else:
self.fail("Expected OSError")
+ def _suppress_core_files(self):
+ """Try to prevent core files from being created.
+ Returns previous ulimit if successful, else None.
+ """
+ try:
+ import resource
+ old_limit = resource.getrlimit(resource.RLIMIT_CORE)
+ resource.setrlimit(resource.RLIMIT_CORE, (0,0))
+ return old_limit
+ except (ImportError, ValueError, resource.error):
+ return None
+
+ def _unsuppress_core_files(self, old_limit):
+ """Return core file behavior to default."""
+ if old_limit is None:
+ return
+ try:
+ import resource
+ resource.setrlimit(resource.RLIMIT_CORE, old_limit)
+ except (ImportError, ValueError, resource.error):
+ return
+
def test_run_abort(self):
# returncode handles signal termination
- p = subprocess.Popen([sys.executable,
- "-c", "import os; os.abort()"])
+ old_limit = self._suppress_core_files()
+ try:
+ p = subprocess.Popen([sys.executable,
+ "-c", "import os; os.abort()"])
+ finally:
+ self._unsuppress_core_files(old_limit)
p.wait()
self.assertEqual(-p.returncode, signal.SIGABRT)
@@ -599,6 +638,8 @@ class ProcessTestCase(unittest.TestCase):
def test_main():
test_support.run_unittest(ProcessTestCase)
+ if hasattr(test_support, "reap_children"):
+ test_support.reap_children()
if __name__ == "__main__":
test_main()
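
The _suppress_core_files()/_unsuppress_core_files() pair above is built on the resource module. A sketch of the underlying calls, assuming a POSIX build where resource is importable; unlike the patch it leaves the hard limit alone, which keeps the restore step valid for unprivileged processes:

    import resource

    soft, hard = resource.getrlimit(resource.RLIMIT_CORE)   # current (soft, hard) limits
    resource.setrlimit(resource.RLIMIT_CORE, (0, hard))     # soft limit 0: no core dumps
    try:
        pass   # run the code that may call os.abort() here
    finally:
        resource.setrlimit(resource.RLIMIT_CORE, (soft, hard))  # put the old limits back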
diff --git a/Lib/test/test_support.py b/Lib/test/test_support.py
index 2d08f4d..a9d5dab 100644
--- a/Lib/test/test_support.py
+++ b/Lib/test/test_support.py
@@ -89,6 +89,24 @@ def requires(resource, msg=None):
msg = "Use of the `%s' resource not enabled" % resource
raise ResourceDenied(msg)
+def bind_port(sock, host='', preferred_port=54321):
+ """Try to bind the sock to a port. If we are running multiple
+    tests and we don't try multiple ports, the test can fail. This
+ makes the test more robust."""
+
+ import socket, errno
+ # some random ports that hopefully no one is listening on.
+ for port in [preferred_port, 9907, 10243, 32999]:
+ try:
+ sock.bind((host, port))
+ return port
+ except socket.error, (err, msg):
+ if err != errno.EADDRINUSE:
+ raise
+ print >>sys.__stderr__, \
+ ' WARNING: failed to listen on port %d, trying another' % port
+ raise TestFailed, 'unable to find port to listen on'
+
FUZZ = 1e-6
def fcmp(x, y): # fuzzy comparison function
@@ -296,6 +314,12 @@ _1M = 1024*1024
_1G = 1024 * _1M
_2G = 2 * _1G
+# Hack to get at the maximum value an internal index can take.
+class _Dummy:
+ def __getslice__(self, i, j):
+ return j
+MAX_Py_ssize_t = _Dummy()[:]
+
def set_memlimit(limit):
import re
global max_memuse
@@ -310,7 +334,9 @@ def set_memlimit(limit):
if m is None:
raise ValueError('Invalid memory limit %r' % (limit,))
memlimit = int(float(m.group(1)) * sizes[m.group(3).lower()])
- if memlimit < 2.5*_1G:
+ if memlimit > MAX_Py_ssize_t:
+ memlimit = MAX_Py_ssize_t
+ if memlimit < _2G - 1:
raise ValueError('Memory limit %r too low to be useful' % (limit,))
max_memuse = memlimit
@@ -353,6 +379,17 @@ def bigmemtest(minsize, memuse, overhead=5*_1M):
return wrapper
return decorator
+def bigaddrspacetest(f):
+ """Decorator for tests that fill the address space."""
+ def wrapper(self):
+ if max_memuse < MAX_Py_ssize_t:
+ if verbose:
+ sys.stderr.write("Skipping %s because of memory "
+ "constraint\n" % (f.__name__,))
+ else:
+ return f(self)
+ return wrapper
+
#=======================================================================
# Preliminary PyUNIT integration.
@@ -435,3 +472,46 @@ def run_doctest(module, verbosity=None):
if verbose:
print 'doctest (%s) ... %d tests with zero failures' % (module.__name__, t)
return f, t
+
+#=======================================================================
+# Threading support to prevent reporting refleaks when running regrtest.py -R
+
+def threading_setup():
+ import threading
+ return len(threading._active), len(threading._limbo)
+
+def threading_cleanup(num_active, num_limbo):
+ import threading
+ import time
+
+ _MAX_COUNT = 10
+ count = 0
+ while len(threading._active) != num_active and count < _MAX_COUNT:
+ count += 1
+ time.sleep(0.1)
+
+ count = 0
+ while len(threading._limbo) != num_limbo and count < _MAX_COUNT:
+ count += 1
+ time.sleep(0.1)
+
+def reap_children():
+ """Use this function at the end of test_main() whenever sub-processes
+ are started. This will help ensure that no extra children (zombies)
+ stick around to hog resources and create problems when looking
+ for refleaks.
+ """
+
+ # Reap all our dead child processes so we don't leave zombies around.
+ # These hog resources and might be causing some of the buildbots to die.
+ import os
+ if hasattr(os, 'waitpid'):
+ any_process = -1
+ while True:
+ try:
+ # This will raise an exception on Windows. That's ok.
+ pid, status = os.waitpid(any_process, os.WNOHANG)
+ if pid == 0:
+ break
+ except:
+ break
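
The _Dummy()[:] hack above exploits a detail of the old __getslice__ protocol: when the upper bound of a slice is omitted, CPython 2 fills it in with the largest value a Py_ssize_t index can hold, which need not equal sys.maxint (on 64-bit Windows, for instance, sys.maxint stays at 2**31-1). A quick probe of that behaviour, for illustration only:

    import sys

    class _Probe:
        def __getslice__(self, i, j):
            return j             # j is the default upper bound CPython supplied

    print _Probe()[:]            # the interpreter's PY_SSIZE_T_MAX
    print sys.maxint             # may be smaller than the value above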
diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py
index ae2a1c8..f1f1524 100644
--- a/Lib/test/test_sys.py
+++ b/Lib/test/test_sys.py
@@ -237,6 +237,90 @@ class SysModuleTest(unittest.TestCase):
is sys._getframe().f_code
)
+ # sys._current_frames() is a CPython-only gimmick.
+ def test_current_frames(self):
+ have_threads = True
+ try:
+ import thread
+ except ImportError:
+ have_threads = False
+
+ if have_threads:
+ self.current_frames_with_threads()
+ else:
+ self.current_frames_without_threads()
+
+ # Test sys._current_frames() in a WITH_THREADS build.
+ def current_frames_with_threads(self):
+ import threading, thread
+ import traceback
+
+ # Spawn a thread that blocks at a known place. Then the main
+ # thread does sys._current_frames(), and verifies that the frames
+ # returned make sense.
+ entered_g = threading.Event()
+ leave_g = threading.Event()
+ thread_info = [] # the thread's id
+
+ def f123():
+ g456()
+
+ def g456():
+ thread_info.append(thread.get_ident())
+ entered_g.set()
+ leave_g.wait()
+
+ t = threading.Thread(target=f123)
+ t.start()
+ entered_g.wait()
+
+ # At this point, t has finished its entered_g.set(), although it's
+ # impossible to guess whether it's still on that line or has moved on
+ # to its leave_g.wait().
+ self.assertEqual(len(thread_info), 1)
+ thread_id = thread_info[0]
+
+ d = sys._current_frames()
+
+ main_id = thread.get_ident()
+ self.assert_(main_id in d)
+ self.assert_(thread_id in d)
+
+ # Verify that the captured main-thread frame is _this_ frame.
+ frame = d.pop(main_id)
+ self.assert_(frame is sys._getframe())
+
+ # Verify that the captured thread frame is blocked in g456, called
+        # from f123. This is a little tricky, since various bits of
+ # threading.py are also in the thread's call stack.
+ frame = d.pop(thread_id)
+ stack = traceback.extract_stack(frame)
+ for i, (filename, lineno, funcname, sourceline) in enumerate(stack):
+ if funcname == "f123":
+ break
+ else:
+ self.fail("didn't find f123() on thread's call stack")
+
+ self.assertEqual(sourceline, "g456()")
+
+ # And the next record must be for g456().
+ filename, lineno, funcname, sourceline = stack[i+1]
+ self.assertEqual(funcname, "g456")
+ self.assert_(sourceline in ["leave_g.wait()", "entered_g.set()"])
+
+ # Reap the spawned thread.
+ leave_g.set()
+ t.join()
+
+ # Test sys._current_frames() when thread support doesn't exist.
+ def current_frames_without_threads(self):
+ # Not much happens here: there is only one thread, with artificial
+ # "thread id" 0.
+ d = sys._current_frames()
+ self.assertEqual(len(d), 1)
+ self.assert_(0 in d)
+ self.assert_(d[0] is sys._getframe())
+
def test_attributes(self):
self.assert_(isinstance(sys.api_version, int))
self.assert_(isinstance(sys.argv, list))
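
Outside the test, the usual application of sys._current_frames() is dumping the stack of every live thread, for example from a debugging hook. A small CPython-only sketch; the worker function is just something for the snapshot to catch:

    import sys, threading, traceback, time

    def worker():
        time.sleep(2)            # keep the thread alive while we look at it

    t = threading.Thread(target=worker)
    t.start()

    # Mapping of thread id -> topmost frame, captured at a single instant.
    for thread_id, frame in sys._current_frames().items():
        print "Thread %d:" % thread_id
        traceback.print_stack(frame)

    t.join()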
diff --git a/Lib/test/test_tcl.py b/Lib/test/test_tcl.py
index e3fbf98..fa170ef 100644
--- a/Lib/test/test_tcl.py
+++ b/Lib/test/test_tcl.py
@@ -130,10 +130,8 @@ class TclTest(unittest.TestCase):
import os
old_display = None
import sys
- if (sys.platform.startswith('win') or
- sys.platform.startswith('darwin') or
- sys.platform.startswith('cygwin')):
- return # no failure possible on windows?
+ if sys.platform.startswith(('win', 'darwin', 'cygwin')):
+ return # no failure possible on windows?
if 'DISPLAY' in os.environ:
old_display = os.environ['DISPLAY']
del os.environ['DISPLAY']
diff --git a/Lib/test/test_textwrap.py b/Lib/test/test_textwrap.py
index 68e4d6d..500eceb 100644
--- a/Lib/test/test_textwrap.py
+++ b/Lib/test/test_textwrap.py
@@ -460,38 +460,42 @@ some (including a hanging indent).'''
# of IndentTestCase!
class DedentTestCase(unittest.TestCase):
+ def assertUnchanged(self, text):
+ """assert that dedent() has no effect on 'text'"""
+ self.assertEquals(text, dedent(text))
+
def test_dedent_nomargin(self):
# No lines indented.
text = "Hello there.\nHow are you?\nOh good, I'm glad."
- self.assertEquals(dedent(text), text)
+ self.assertUnchanged(text)
# Similar, with a blank line.
text = "Hello there.\n\nBoo!"
- self.assertEquals(dedent(text), text)
+ self.assertUnchanged(text)
# Some lines indented, but overall margin is still zero.
text = "Hello there.\n This is indented."
- self.assertEquals(dedent(text), text)
+ self.assertUnchanged(text)
# Again, add a blank line.
text = "Hello there.\n\n Boo!\n"
- self.assertEquals(dedent(text), text)
+ self.assertUnchanged(text)
def test_dedent_even(self):
# All lines indented by two spaces.
text = " Hello there.\n How are ya?\n Oh good."
expect = "Hello there.\nHow are ya?\nOh good."
- self.assertEquals(dedent(text), expect)
+ self.assertEquals(expect, dedent(text))
# Same, with blank lines.
text = " Hello there.\n\n How are ya?\n Oh good.\n"
expect = "Hello there.\n\nHow are ya?\nOh good.\n"
- self.assertEquals(dedent(text), expect)
+ self.assertEquals(expect, dedent(text))
# Now indent one of the blank lines.
text = " Hello there.\n \n How are ya?\n Oh good.\n"
expect = "Hello there.\n\nHow are ya?\nOh good.\n"
- self.assertEquals(dedent(text), expect)
+ self.assertEquals(expect, dedent(text))
def test_dedent_uneven(self):
# Lines indented unevenly.
@@ -505,18 +509,53 @@ def foo():
while 1:
return foo
'''
- self.assertEquals(dedent(text), expect)
+ self.assertEquals(expect, dedent(text))
# Uneven indentation with a blank line.
text = " Foo\n Bar\n\n Baz\n"
expect = "Foo\n Bar\n\n Baz\n"
- self.assertEquals(dedent(text), expect)
+ self.assertEquals(expect, dedent(text))
# Uneven indentation with a whitespace-only line.
text = " Foo\n Bar\n \n Baz\n"
expect = "Foo\n Bar\n\n Baz\n"
- self.assertEquals(dedent(text), expect)
-
+ self.assertEquals(expect, dedent(text))
+
+ # dedent() should not mangle internal tabs
+ def test_dedent_preserve_internal_tabs(self):
+ text = " hello\tthere\n how are\tyou?"
+ expect = "hello\tthere\nhow are\tyou?"
+ self.assertEquals(expect, dedent(text))
+
+ # make sure that it preserves tabs when it's not making any
+ # changes at all
+ self.assertEquals(expect, dedent(expect))
+
+ # dedent() should not mangle tabs in the margin (i.e.
+ # tabs and spaces both count as margin, but are *not*
+ # considered equivalent)
+ def test_dedent_preserve_margin_tabs(self):
+ text = " hello there\n\thow are you?"
+ self.assertUnchanged(text)
+
+ # same effect even if we have 8 spaces
+ text = " hello there\n\thow are you?"
+ self.assertUnchanged(text)
+
+ # dedent() only removes whitespace that can be uniformly removed!
+ text = "\thello there\n\thow are you?"
+ expect = "hello there\nhow are you?"
+ self.assertEquals(expect, dedent(text))
+
+ text = " \thello there\n \thow are you?"
+ self.assertEquals(expect, dedent(text))
+
+ text = " \t hello there\n \t how are you?"
+ self.assertEquals(expect, dedent(text))
+
+ text = " \thello there\n \t how are you?"
+ expect = "hello there\n how are you?"
+ self.assertEquals(expect, dedent(text))
def test_main():
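
The tab cases added above all follow one rule: dedent() strips only the leading whitespace that every line shares character for character, so a tab and eight spaces never cancel out. A quick interpreter-style illustration of the same rule:

    from textwrap import dedent

    print repr(dedent("    hello\n\thow are you?"))    # margins differ -> unchanged
    print repr(dedent("\thello\n\thow are you?"))      # shared "\t" margin -> stripped
    print repr(dedent("  \thello\n  \thow are you?"))  # shared "  \t" margin -> stripped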
diff --git a/Lib/test/test_thread.py b/Lib/test/test_thread.py
index ea345b6..c4c21fe 100644
--- a/Lib/test/test_thread.py
+++ b/Lib/test/test_thread.py
@@ -115,3 +115,46 @@ for i in range(numtasks):
thread.start_new_thread(task2, (i,))
done.acquire()
print 'all tasks done'
+
+# not all platforms support changing thread stack size
+print '\n*** Changing thread stack size ***'
+if thread.stack_size() != 0:
+ raise ValueError, "initial stack_size not 0"
+
+thread.stack_size(0)
+if thread.stack_size() != 0:
+ raise ValueError, "stack_size not reset to default"
+
+from os import name as os_name
+if os_name in ("nt", "os2", "posix"):
+
+ tss_supported = 1
+ try:
+ thread.stack_size(4096)
+ except ValueError:
+ print 'caught expected ValueError setting stack_size(4096)'
+ except thread.error:
+ tss_supported = 0
+ print 'platform does not support changing thread stack size'
+
+ if tss_supported:
+ failed = lambda s, e: s != e
+ fail_msg = "stack_size(%d) failed - should succeed"
+ for tss in (262144, 0x100000, 0):
+ thread.stack_size(tss)
+ if failed(thread.stack_size(), tss):
+ raise ValueError, fail_msg % tss
+ print 'successfully set stack_size(%d)' % tss
+
+ for tss in (262144, 0x100000):
+ print 'trying stack_size = %d' % tss
+ next_ident = 0
+ for i in range(numtasks):
+ newtask()
+
+ print 'waiting for all tasks to complete'
+ done.acquire()
+ print 'all tasks done'
+
+ # reset stack size to default
+ thread.stack_size(0)
diff --git a/Lib/test/test_threaded_import.py b/Lib/test/test_threaded_import.py
index 0642d25..602ad2a 100644
--- a/Lib/test/test_threaded_import.py
+++ b/Lib/test/test_threaded_import.py
@@ -30,11 +30,10 @@ def test_import_hangers():
if verbose:
print "testing import hangers ...",
- from test import threaded_import_hangers
-
+ import test.threaded_import_hangers
try:
- if threaded_import_hangers.errors:
- raise TestFailed(threaded_import_hangers.errors)
+ if test.threaded_import_hangers.errors:
+ raise TestFailed(test.threaded_import_hangers.errors)
elif verbose:
print "OK."
finally:
diff --git a/Lib/test/test_threadedtempfile.py b/Lib/test/test_threadedtempfile.py
index 459ba3a..974333b 100644
--- a/Lib/test/test_threadedtempfile.py
+++ b/Lib/test/test_threadedtempfile.py
@@ -22,7 +22,7 @@ FILES_PER_THREAD = 50 # change w/ -f option
import thread # If this fails, we can't test this module
import threading
-from test.test_support import TestFailed
+from test.test_support import TestFailed, threading_setup, threading_cleanup
import StringIO
from traceback import print_exc
import tempfile
@@ -48,6 +48,7 @@ class TempFileGreedy(threading.Thread):
def test_main():
threads = []
+ thread_info = threading_setup()
print "Creating"
for i in range(NUM_THREADS):
@@ -72,6 +73,7 @@ def test_main():
if errors:
raise TestFailed(msg)
+ threading_cleanup(*thread_info)
if __name__ == "__main__":
import sys, getopt
diff --git a/Lib/test/test_threading.py b/Lib/test/test_threading.py
index 7eb9758..79335ea 100644
--- a/Lib/test/test_threading.py
+++ b/Lib/test/test_threading.py
@@ -85,6 +85,32 @@ class ThreadTests(unittest.TestCase):
print 'all tasks done'
self.assertEqual(numrunning.get(), 0)
+ # run with a small(ish) thread stack size (256kB)
+ def test_various_ops_small_stack(self):
+ if verbose:
+ print 'with 256kB thread stack size...'
+ try:
+ threading.stack_size(262144)
+ except thread.error:
+ if verbose:
+ print 'platform does not support changing thread stack size'
+ return
+ self.test_various_ops()
+ threading.stack_size(0)
+
+ # run with a large thread stack size (1MB)
+ def test_various_ops_large_stack(self):
+ if verbose:
+ print 'with 1MB thread stack size...'
+ try:
+ threading.stack_size(0x100000)
+ except thread.error:
+ if verbose:
+ print 'platform does not support changing thread stack size'
+ return
+ self.test_various_ops()
+ threading.stack_size(0)
+
def test_foreign_thread(self):
# Check that a "foreign" thread can use the threading module.
def f(mutex):
diff --git a/Lib/test/test_time.py b/Lib/test/test_time.py
index 768e7a0..f4be759 100644
--- a/Lib/test/test_time.py
+++ b/Lib/test/test_time.py
@@ -39,9 +39,9 @@ class TimeTestCase(unittest.TestCase):
def test_strftime_bounds_checking(self):
# Make sure that strftime() checks the bounds of the various parts
- #of the time tuple.
+ #of the time tuple (0 is valid for *all* values).
- # Check year
+ # Check year [1900, max(int)]
self.assertRaises(ValueError, time.strftime, '',
(1899, 1, 1, 0, 0, 0, 0, 1, -1))
if time.accept2dyear:
@@ -49,27 +49,27 @@ class TimeTestCase(unittest.TestCase):
(-1, 1, 1, 0, 0, 0, 0, 1, -1))
self.assertRaises(ValueError, time.strftime, '',
(100, 1, 1, 0, 0, 0, 0, 1, -1))
- # Check month
+ # Check month [1, 12] + zero support
self.assertRaises(ValueError, time.strftime, '',
- (1900, 0, 1, 0, 0, 0, 0, 1, -1))
+ (1900, -1, 1, 0, 0, 0, 0, 1, -1))
self.assertRaises(ValueError, time.strftime, '',
(1900, 13, 1, 0, 0, 0, 0, 1, -1))
- # Check day of month
+ # Check day of month [1, 31] + zero support
self.assertRaises(ValueError, time.strftime, '',
- (1900, 1, 0, 0, 0, 0, 0, 1, -1))
+ (1900, 1, -1, 0, 0, 0, 0, 1, -1))
self.assertRaises(ValueError, time.strftime, '',
(1900, 1, 32, 0, 0, 0, 0, 1, -1))
- # Check hour
+ # Check hour [0, 23]
self.assertRaises(ValueError, time.strftime, '',
(1900, 1, 1, -1, 0, 0, 0, 1, -1))
self.assertRaises(ValueError, time.strftime, '',
(1900, 1, 1, 24, 0, 0, 0, 1, -1))
- # Check minute
+ # Check minute [0, 59]
self.assertRaises(ValueError, time.strftime, '',
(1900, 1, 1, 0, -1, 0, 0, 1, -1))
self.assertRaises(ValueError, time.strftime, '',
(1900, 1, 1, 0, 60, 0, 0, 1, -1))
- # Check second
+ # Check second [0, 61]
self.assertRaises(ValueError, time.strftime, '',
(1900, 1, 1, 0, 0, -1, 0, 1, -1))
# C99 only requires allowing for one leap second, but Python's docs say
@@ -82,17 +82,25 @@ class TimeTestCase(unittest.TestCase):
# modulo.
self.assertRaises(ValueError, time.strftime, '',
(1900, 1, 1, 0, 0, 0, -2, 1, -1))
- # Check day of the year
+ # Check day of the year [1, 366] + zero support
self.assertRaises(ValueError, time.strftime, '',
- (1900, 1, 1, 0, 0, 0, 0, 0, -1))
+ (1900, 1, 1, 0, 0, 0, 0, -1, -1))
self.assertRaises(ValueError, time.strftime, '',
(1900, 1, 1, 0, 0, 0, 0, 367, -1))
- # Check daylight savings flag
+ # Check daylight savings flag [-1, 1]
self.assertRaises(ValueError, time.strftime, '',
(1900, 1, 1, 0, 0, 0, 0, 1, -2))
self.assertRaises(ValueError, time.strftime, '',
(1900, 1, 1, 0, 0, 0, 0, 1, 2))
+ def test_default_values_for_zero(self):
+ # Make sure that using all zeros uses the proper default values.
+ # No test for daylight savings since strftime() does not change output
+ # based on its value.
+ expected = "2000 01 01 00 00 00 1 001"
+ result = time.strftime("%Y %m %d %H %M %S %w %j", (0,)*9)
+ self.assertEquals(expected, result)
+
def test_strptime(self):
tt = time.gmtime(self.t)
for directive in ('a', 'A', 'b', 'B', 'c', 'd', 'H', 'I',
@@ -193,13 +201,17 @@ class TimeTestCase(unittest.TestCase):
time.ctime(None)
def test_gmtime_without_arg(self):
- t0 = time.mktime(time.gmtime())
- t1 = time.mktime(time.gmtime(None))
+ gt0 = time.gmtime()
+ gt1 = time.gmtime(None)
+ t0 = time.mktime(gt0)
+ t1 = time.mktime(gt1)
self.assert_(0 <= (t1-t0) < 0.2)
def test_localtime_without_arg(self):
- t0 = time.mktime(time.localtime())
- t1 = time.mktime(time.localtime(None))
+ lt0 = time.localtime()
+ lt1 = time.localtime(None)
+ t0 = time.mktime(lt0)
+ t1 = time.mktime(lt1)
self.assert_(0 <= (t1-t0) < 0.2)
def test_main():
diff --git a/Lib/test/test_timeout.py b/Lib/test/test_timeout.py
index 4309e8c..2b32b92 100644
--- a/Lib/test/test_timeout.py
+++ b/Lib/test/test_timeout.py
@@ -100,7 +100,7 @@ class TimeoutTestCase(unittest.TestCase):
def setUp(self):
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- self.addr_remote = ('www.python.org', 80)
+ self.addr_remote = ('www.python.org.', 80)
self.addr_local = ('127.0.0.1', 25339)
def tearDown(self):
diff --git a/Lib/test/test_trace.py b/Lib/test/test_trace.py
index 4f946f7..08aec8e 100644
--- a/Lib/test/test_trace.py
+++ b/Lib/test/test_trace.py
@@ -244,8 +244,8 @@ class TraceTestCase(unittest.TestCase):
self.run_test(one_instr_line)
def test_04_no_pop_blocks(self):
self.run_test(no_pop_blocks)
-## def test_05_no_pop_tops(self):
-## self.run_test(no_pop_tops)
+ def test_05_no_pop_tops(self):
+ self.run_test(no_pop_tops)
def test_06_call(self):
self.run_test(call)
def test_07_raise(self):
diff --git a/Lib/test/test_traceback.py b/Lib/test/test_traceback.py
index 1b59f98..b3c5a50 100644
--- a/Lib/test/test_traceback.py
+++ b/Lib/test/test_traceback.py
@@ -31,8 +31,9 @@ class TracebackCases(unittest.TestCase):
err = self.get_exception_format(self.syntax_error_with_caret,
SyntaxError)
self.assert_(len(err) == 4)
- self.assert_("^" in err[2]) # third line has caret
self.assert_(err[1].strip() == "return x!")
+ self.assert_("^" in err[2]) # third line has caret
+ self.assert_(err[1].find("!") == err[2].find("^")) # in the right place
def test_nocaret(self):
if is_jython:
@@ -47,8 +48,9 @@ class TracebackCases(unittest.TestCase):
err = self.get_exception_format(self.syntax_error_bad_indentation,
IndentationError)
self.assert_(len(err) == 4)
- self.assert_("^" in err[2])
self.assert_(err[1].strip() == "print 2")
+ self.assert_("^" in err[2])
+ self.assert_(err[1].find("2") == err[2].find("^"))
def test_bug737473(self):
import sys, os, tempfile, time
@@ -109,6 +111,45 @@ def test():
lst = traceback.format_exception_only(e.__class__, e)
self.assertEqual(lst, ['KeyboardInterrupt\n'])
+ # String exceptions are deprecated, but legal. The quirky form with
+ # separate "type" and "value" tends to break things, because
+ # not isinstance(value, type)
+ # and a string cannot be the first argument to issubclass.
+ #
+ # Note that sys.last_type and sys.last_value do not get set if an
+ # exception is caught, so we sort of cheat and just emulate them.
+ #
+ # test_string_exception1 is equivalent to
+ #
+ # >>> raise "String Exception"
+ #
+ # test_string_exception2 is equivalent to
+ #
+ # >>> raise "String Exception", "String Value"
+ #
+ def test_string_exception1(self):
+ str_type = "String Exception"
+ err = traceback.format_exception_only(str_type, None)
+ self.assertEqual(len(err), 1)
+ self.assertEqual(err[0], str_type + '\n')
+
+ def test_string_exception2(self):
+ str_type = "String Exception"
+ str_value = "String Value"
+ err = traceback.format_exception_only(str_type, str_value)
+ self.assertEqual(len(err), 1)
+ self.assertEqual(err[0], str_type + ': ' + str_value + '\n')
+
+ def test_format_exception_only_bad__str__(self):
+ class X(Exception):
+ def __str__(self):
+ 1/0
+ err = traceback.format_exception_only(X, X())
+ self.assertEqual(len(err), 1)
+ str_value = '<unprintable %s object>' % X.__name__
+ self.assertEqual(err[0], X.__name__ + ': ' + str_value + '\n')
+
+
def test_main():
run_unittest(TracebackCases)
diff --git a/Lib/test/test_types.py b/Lib/test/test_types.py
index c575c0c..2d299c3 100644
--- a/Lib/test/test_types.py
+++ b/Lib/test/test_types.py
@@ -233,6 +233,7 @@ print 'Buffers'
try: buffer('asdf', -1)
except ValueError: pass
else: raise TestFailed, "buffer('asdf', -1) should raise ValueError"
+cmp(buffer("abc"), buffer("def")) # used to raise a warning: tp_compare didn't return -1, 0, or 1
try: buffer(None)
except TypeError: pass
@@ -276,3 +277,10 @@ else: raise TestFailed, "buffer assignment should raise TypeError"
try: a[0:1] = 'g'
except TypeError: pass
else: raise TestFailed, "buffer slice assignment should raise TypeError"
+
+# array.array() returns an object that does not implement a char buffer,
+# something which int() uses for conversion.
+import array
+try: int(buffer(array.array('c')))
+except TypeError: pass
+else: raise TestFailed, "char buffer (at C level) not working"
diff --git a/Lib/test/test_urllib2.py b/Lib/test/test_urllib2.py
index 034b9d0..67218b8 100644
--- a/Lib/test/test_urllib2.py
+++ b/Lib/test/test_urllib2.py
@@ -676,11 +676,11 @@ class HandlerTests(unittest.TestCase):
r = MockResponse(200, "OK", {}, "")
newreq = h.do_request_(req)
if data is None: # GET
- self.assert_("Content-length" not in req.unredirected_hdrs)
- self.assert_("Content-type" not in req.unredirected_hdrs)
+ self.assert_("Content-Length" not in req.unredirected_hdrs)
+ self.assert_("Content-Type" not in req.unredirected_hdrs)
else: # POST
- self.assertEqual(req.unredirected_hdrs["Content-length"], "0")
- self.assertEqual(req.unredirected_hdrs["Content-type"],
+ self.assertEqual(req.unredirected_hdrs["Content-Length"], "0")
+ self.assertEqual(req.unredirected_hdrs["Content-Type"],
"application/x-www-form-urlencoded")
# XXX the details of Host could be better tested
self.assertEqual(req.unredirected_hdrs["Host"], "example.com")
@@ -692,8 +692,8 @@ class HandlerTests(unittest.TestCase):
req.add_unredirected_header("Host", "baz")
req.add_unredirected_header("Spam", "foo")
newreq = h.do_request_(req)
- self.assertEqual(req.unredirected_hdrs["Content-length"], "foo")
- self.assertEqual(req.unredirected_hdrs["Content-type"], "bar")
+ self.assertEqual(req.unredirected_hdrs["Content-Length"], "foo")
+ self.assertEqual(req.unredirected_hdrs["Content-Type"], "bar")
self.assertEqual(req.unredirected_hdrs["Host"], "baz")
self.assertEqual(req.unredirected_hdrs["Spam"], "foo")
@@ -847,7 +847,7 @@ class HandlerTests(unittest.TestCase):
407, 'Proxy-Authenticate: Basic realm="%s"\r\n\r\n' % realm)
opener.add_handler(auth_handler)
opener.add_handler(http_handler)
- self._test_basic_auth(opener, auth_handler, "Proxy-authorization",
+ self._test_basic_auth(opener, auth_handler, "Proxy-Authorization",
realm, http_handler, password_manager,
"http://acme.example.com:3128/protected",
"proxy.example.com:3128",
diff --git a/Lib/test/test_urllib2net.py b/Lib/test/test_urllib2net.py
index dc3d36d..00cf202 100644
--- a/Lib/test/test_urllib2net.py
+++ b/Lib/test/test_urllib2net.py
@@ -123,7 +123,7 @@ class urlopenNetworkTests(unittest.TestCase):
# domain will be spared to serve its defined
# purpose.
# urllib2.urlopen, "http://www.sadflkjsasadf.com/")
- urllib2.urlopen, "http://www.python.invalid/")
+ urllib2.urlopen, "http://www.python.invalid./")
class OtherNetworkTests(unittest.TestCase):
@@ -160,8 +160,8 @@ class OtherNetworkTests(unittest.TestCase):
"urllib2$")
urls = [
# Thanks to Fred for finding these!
- 'gopher://gopher.lib.ncsu.edu/11/library/stacks/Alex',
- 'gopher://gopher.vt.edu:10010/10/33',
+ 'gopher://gopher.lib.ncsu.edu./11/library/stacks/Alex',
+ 'gopher://gopher.vt.edu.:10010/10/33',
]
self._test_urls(urls, self._extra_handlers())
@@ -176,7 +176,7 @@ class OtherNetworkTests(unittest.TestCase):
# XXX bug, should raise URLError
#('file://nonsensename/etc/passwd', None, urllib2.URLError)
- ('file://nonsensename/etc/passwd', None, (OSError, socket.error))
+ ('file://nonsensename/etc/passwd', None, (EnvironmentError, socket.error))
]
self._test_urls(urls, self._extra_handlers())
finally:
@@ -239,7 +239,9 @@ class OtherNetworkTests(unittest.TestCase):
except (IOError, socket.error, OSError), err:
debug(err)
if expected_err:
- self.assert_(isinstance(err, expected_err))
+ msg = ("Didn't get expected error(s) %s for %s %s, got %s" %
+ (expected_err, url, req, err))
+ self.assert_(isinstance(err, expected_err), msg)
else:
buf = f.read()
f.close()
@@ -259,7 +261,6 @@ class OtherNetworkTests(unittest.TestCase):
return handlers
-
def test_main():
test_support.requires("network")
test_support.run_unittest(URLTimeoutTest, urlopenNetworkTests,
diff --git a/Lib/test/test_urllibnet.py b/Lib/test/test_urllibnet.py
index 80761df..9105afe 100644
--- a/Lib/test/test_urllibnet.py
+++ b/Lib/test/test_urllibnet.py
@@ -110,7 +110,7 @@ class urlopenNetworkTests(unittest.TestCase):
# domain will be spared to serve its defined
# purpose.
# urllib.urlopen, "http://www.sadflkjsasadf.com/")
- urllib.urlopen, "http://www.python.invalid/")
+ urllib.urlopen, "http://www.python.invalid./")
class urlretrieveNetworkTests(unittest.TestCase):
"""Tests urllib.urlretrieve using the network."""
diff --git a/Lib/test/test_uuid.py b/Lib/test/test_uuid.py
new file mode 100644
index 0000000..0586cfd
--- /dev/null
+++ b/Lib/test/test_uuid.py
@@ -0,0 +1,434 @@
+from unittest import TestCase
+from test import test_support
+import uuid
+
+def importable(name):
+ try:
+ __import__(name)
+ return True
+ except:
+ return False
+
+class TestUUID(TestCase):
+ last_node = None
+ source2node = {}
+
+ def test_UUID(self):
+ equal = self.assertEqual
+ ascending = []
+ for (string, curly, hex, bytes, fields, integer, urn,
+ time, clock_seq, variant, version) in [
+ ('00000000-0000-0000-0000-000000000000',
+ '{00000000-0000-0000-0000-000000000000}',
+ '00000000000000000000000000000000',
+ '\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0',
+ (0, 0, 0, 0, 0, 0),
+ 0,
+ 'urn:uuid:00000000-0000-0000-0000-000000000000',
+ 0, 0, uuid.RESERVED_NCS, None),
+ ('00010203-0405-0607-0809-0a0b0c0d0e0f',
+ '{00010203-0405-0607-0809-0a0b0c0d0e0f}',
+ '000102030405060708090a0b0c0d0e0f',
+ '\0\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\x0d\x0e\x0f',
+ (0x00010203L, 0x0405, 0x0607, 8, 9, 0x0a0b0c0d0e0fL),
+ 0x000102030405060708090a0b0c0d0e0fL,
+ 'urn:uuid:00010203-0405-0607-0809-0a0b0c0d0e0f',
+ 0x607040500010203L, 0x809, uuid.RESERVED_NCS, None),
+ ('02d9e6d5-9467-382e-8f9b-9300a64ac3cd',
+ '{02d9e6d5-9467-382e-8f9b-9300a64ac3cd}',
+ '02d9e6d59467382e8f9b9300a64ac3cd',
+ '\x02\xd9\xe6\xd5\x94\x67\x38\x2e\x8f\x9b\x93\x00\xa6\x4a\xc3\xcd',
+ (0x02d9e6d5L, 0x9467, 0x382e, 0x8f, 0x9b, 0x9300a64ac3cdL),
+ 0x02d9e6d59467382e8f9b9300a64ac3cdL,
+ 'urn:uuid:02d9e6d5-9467-382e-8f9b-9300a64ac3cd',
+ 0x82e946702d9e6d5L, 0xf9b, uuid.RFC_4122, 3),
+ ('12345678-1234-5678-1234-567812345678',
+ '{12345678-1234-5678-1234-567812345678}',
+ '12345678123456781234567812345678',
+ '\x12\x34\x56\x78'*4,
+ (0x12345678, 0x1234, 0x5678, 0x12, 0x34, 0x567812345678),
+ 0x12345678123456781234567812345678,
+ 'urn:uuid:12345678-1234-5678-1234-567812345678',
+ 0x678123412345678L, 0x1234, uuid.RESERVED_NCS, None),
+ ('6ba7b810-9dad-11d1-80b4-00c04fd430c8',
+ '{6ba7b810-9dad-11d1-80b4-00c04fd430c8}',
+ '6ba7b8109dad11d180b400c04fd430c8',
+ '\x6b\xa7\xb8\x10\x9d\xad\x11\xd1\x80\xb4\x00\xc0\x4f\xd4\x30\xc8',
+ (0x6ba7b810L, 0x9dad, 0x11d1, 0x80, 0xb4, 0x00c04fd430c8L),
+ 0x6ba7b8109dad11d180b400c04fd430c8L,
+ 'urn:uuid:6ba7b810-9dad-11d1-80b4-00c04fd430c8',
+ 0x1d19dad6ba7b810L, 0xb4, uuid.RFC_4122, 1),
+ ('6ba7b811-9dad-11d1-80b4-00c04fd430c8',
+ '{6ba7b811-9dad-11d1-80b4-00c04fd430c8}',
+ '6ba7b8119dad11d180b400c04fd430c8',
+ '\x6b\xa7\xb8\x11\x9d\xad\x11\xd1\x80\xb4\x00\xc0\x4f\xd4\x30\xc8',
+ (0x6ba7b811L, 0x9dad, 0x11d1, 0x80, 0xb4, 0x00c04fd430c8L),
+ 0x6ba7b8119dad11d180b400c04fd430c8L,
+ 'urn:uuid:6ba7b811-9dad-11d1-80b4-00c04fd430c8',
+ 0x1d19dad6ba7b811L, 0xb4, uuid.RFC_4122, 1),
+ ('6ba7b812-9dad-11d1-80b4-00c04fd430c8',
+ '{6ba7b812-9dad-11d1-80b4-00c04fd430c8}',
+ '6ba7b8129dad11d180b400c04fd430c8',
+ '\x6b\xa7\xb8\x12\x9d\xad\x11\xd1\x80\xb4\x00\xc0\x4f\xd4\x30\xc8',
+ (0x6ba7b812L, 0x9dad, 0x11d1, 0x80, 0xb4, 0x00c04fd430c8L),
+ 0x6ba7b8129dad11d180b400c04fd430c8L,
+ 'urn:uuid:6ba7b812-9dad-11d1-80b4-00c04fd430c8',
+ 0x1d19dad6ba7b812L, 0xb4, uuid.RFC_4122, 1),
+ ('6ba7b814-9dad-11d1-80b4-00c04fd430c8',
+ '{6ba7b814-9dad-11d1-80b4-00c04fd430c8}',
+ '6ba7b8149dad11d180b400c04fd430c8',
+ '\x6b\xa7\xb8\x14\x9d\xad\x11\xd1\x80\xb4\x00\xc0\x4f\xd4\x30\xc8',
+ (0x6ba7b814L, 0x9dad, 0x11d1, 0x80, 0xb4, 0x00c04fd430c8L),
+ 0x6ba7b8149dad11d180b400c04fd430c8L,
+ 'urn:uuid:6ba7b814-9dad-11d1-80b4-00c04fd430c8',
+ 0x1d19dad6ba7b814L, 0xb4, uuid.RFC_4122, 1),
+ ('7d444840-9dc0-11d1-b245-5ffdce74fad2',
+ '{7d444840-9dc0-11d1-b245-5ffdce74fad2}',
+ '7d4448409dc011d1b2455ffdce74fad2',
+ '\x7d\x44\x48\x40\x9d\xc0\x11\xd1\xb2\x45\x5f\xfd\xce\x74\xfa\xd2',
+ (0x7d444840L, 0x9dc0, 0x11d1, 0xb2, 0x45, 0x5ffdce74fad2L),
+ 0x7d4448409dc011d1b2455ffdce74fad2L,
+ 'urn:uuid:7d444840-9dc0-11d1-b245-5ffdce74fad2',
+ 0x1d19dc07d444840L, 0x3245, uuid.RFC_4122, 1),
+ ('e902893a-9d22-3c7e-a7b8-d6e313b71d9f',
+ '{e902893a-9d22-3c7e-a7b8-d6e313b71d9f}',
+ 'e902893a9d223c7ea7b8d6e313b71d9f',
+ '\xe9\x02\x89\x3a\x9d\x22\x3c\x7e\xa7\xb8\xd6\xe3\x13\xb7\x1d\x9f',
+ (0xe902893aL, 0x9d22, 0x3c7e, 0xa7, 0xb8, 0xd6e313b71d9fL),
+ 0xe902893a9d223c7ea7b8d6e313b71d9fL,
+ 'urn:uuid:e902893a-9d22-3c7e-a7b8-d6e313b71d9f',
+ 0xc7e9d22e902893aL, 0x27b8, uuid.RFC_4122, 3),
+ ('eb424026-6f54-4ef8-a4d0-bb658a1fc6cf',
+ '{eb424026-6f54-4ef8-a4d0-bb658a1fc6cf}',
+ 'eb4240266f544ef8a4d0bb658a1fc6cf',
+ '\xeb\x42\x40\x26\x6f\x54\x4e\xf8\xa4\xd0\xbb\x65\x8a\x1f\xc6\xcf',
+ (0xeb424026L, 0x6f54, 0x4ef8, 0xa4, 0xd0, 0xbb658a1fc6cfL),
+ 0xeb4240266f544ef8a4d0bb658a1fc6cfL,
+ 'urn:uuid:eb424026-6f54-4ef8-a4d0-bb658a1fc6cf',
+ 0xef86f54eb424026L, 0x24d0, uuid.RFC_4122, 4),
+ ('f81d4fae-7dec-11d0-a765-00a0c91e6bf6',
+ '{f81d4fae-7dec-11d0-a765-00a0c91e6bf6}',
+ 'f81d4fae7dec11d0a76500a0c91e6bf6',
+ '\xf8\x1d\x4f\xae\x7d\xec\x11\xd0\xa7\x65\x00\xa0\xc9\x1e\x6b\xf6',
+ (0xf81d4faeL, 0x7dec, 0x11d0, 0xa7, 0x65, 0x00a0c91e6bf6L),
+ 0xf81d4fae7dec11d0a76500a0c91e6bf6L,
+ 'urn:uuid:f81d4fae-7dec-11d0-a765-00a0c91e6bf6',
+ 0x1d07decf81d4faeL, 0x2765, uuid.RFC_4122, 1),
+ ('fffefdfc-fffe-fffe-fffe-fffefdfcfbfa',
+ '{fffefdfc-fffe-fffe-fffe-fffefdfcfbfa}',
+ 'fffefdfcfffefffefffefffefdfcfbfa',
+ '\xff\xfe\xfd\xfc\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xfd\xfc\xfb\xfa',
+ (0xfffefdfcL, 0xfffe, 0xfffe, 0xff, 0xfe, 0xfffefdfcfbfaL),
+ 0xfffefdfcfffefffefffefffefdfcfbfaL,
+ 'urn:uuid:fffefdfc-fffe-fffe-fffe-fffefdfcfbfa',
+ 0xffefffefffefdfcL, 0x3ffe, uuid.RESERVED_FUTURE, None),
+ ('ffffffff-ffff-ffff-ffff-ffffffffffff',
+ '{ffffffff-ffff-ffff-ffff-ffffffffffff}',
+ 'ffffffffffffffffffffffffffffffff',
+ '\xff'*16,
+ (0xffffffffL, 0xffffL, 0xffffL, 0xff, 0xff, 0xffffffffffffL),
+ 0xffffffffffffffffffffffffffffffffL,
+ 'urn:uuid:ffffffff-ffff-ffff-ffff-ffffffffffff',
+ 0xfffffffffffffffL, 0x3fff, uuid.RESERVED_FUTURE, None),
+ ]:
+ equivalents = []
+ # Construct each UUID in several different ways.
+ for u in [uuid.UUID(string), uuid.UUID(curly), uuid.UUID(hex),
+ uuid.UUID(bytes=bytes), uuid.UUID(fields=fields),
+ uuid.UUID(int=integer), uuid.UUID(urn)]:
+ # Test all conversions and properties of the UUID object.
+ equal(str(u), string)
+ equal(int(u), integer)
+ equal(u.bytes, bytes)
+ equal(u.fields, fields)
+ equal(u.time_low, fields[0])
+ equal(u.time_mid, fields[1])
+ equal(u.time_hi_version, fields[2])
+ equal(u.clock_seq_hi_variant, fields[3])
+ equal(u.clock_seq_low, fields[4])
+ equal(u.node, fields[5])
+ equal(u.hex, hex)
+ equal(u.int, integer)
+ equal(u.urn, urn)
+ equal(u.time, time)
+ equal(u.clock_seq, clock_seq)
+ equal(u.variant, variant)
+ equal(u.version, version)
+ equivalents.append(u)
+
+ # Different construction methods should give the same UUID.
+ for u in equivalents:
+ for v in equivalents:
+ equal(u, v)
+ ascending.append(u)
+
+ # Test comparison of UUIDs.
+ for i in range(len(ascending)):
+ for j in range(len(ascending)):
+ equal(cmp(i, j), cmp(ascending[i], ascending[j]))
+
+ # Test sorting of UUIDs (above list is in ascending order).
+ resorted = ascending[:]
+ resorted.reverse()
+ resorted.sort()
+ equal(ascending, resorted)
+
+ def test_exceptions(self):
+ badvalue = lambda f: self.assertRaises(ValueError, f)
+ badtype = lambda f: self.assertRaises(TypeError, f)
+
+ # Badly formed hex strings.
+ badvalue(lambda: uuid.UUID(''))
+ badvalue(lambda: uuid.UUID('abc'))
+ badvalue(lambda: uuid.UUID('1234567812345678123456781234567'))
+ badvalue(lambda: uuid.UUID('123456781234567812345678123456789'))
+ badvalue(lambda: uuid.UUID('123456781234567812345678z2345678'))
+
+ # Badly formed bytes.
+ badvalue(lambda: uuid.UUID(bytes='abc'))
+ badvalue(lambda: uuid.UUID(bytes='\0'*15))
+ badvalue(lambda: uuid.UUID(bytes='\0'*17))
+
+ # Badly formed fields.
+ badvalue(lambda: uuid.UUID(fields=(1,)))
+ badvalue(lambda: uuid.UUID(fields=(1, 2, 3, 4, 5)))
+ badvalue(lambda: uuid.UUID(fields=(1, 2, 3, 4, 5, 6, 7)))
+
+ # Field values out of range.
+ badvalue(lambda: uuid.UUID(fields=(-1, 0, 0, 0, 0, 0)))
+ badvalue(lambda: uuid.UUID(fields=(0x100000000L, 0, 0, 0, 0, 0)))
+ badvalue(lambda: uuid.UUID(fields=(0, -1, 0, 0, 0, 0)))
+ badvalue(lambda: uuid.UUID(fields=(0, 0x10000L, 0, 0, 0, 0)))
+ badvalue(lambda: uuid.UUID(fields=(0, 0, -1, 0, 0, 0)))
+ badvalue(lambda: uuid.UUID(fields=(0, 0, 0x10000L, 0, 0, 0)))
+ badvalue(lambda: uuid.UUID(fields=(0, 0, 0, -1, 0, 0)))
+ badvalue(lambda: uuid.UUID(fields=(0, 0, 0, 0x100L, 0, 0)))
+ badvalue(lambda: uuid.UUID(fields=(0, 0, 0, 0, -1, 0)))
+ badvalue(lambda: uuid.UUID(fields=(0, 0, 0, 0, 0x100L, 0)))
+ badvalue(lambda: uuid.UUID(fields=(0, 0, 0, 0, 0, -1)))
+ badvalue(lambda: uuid.UUID(fields=(0, 0, 0, 0, 0, 0x1000000000000L)))
+
+ # Version number out of range.
+ badvalue(lambda: uuid.UUID('00'*16, version=0))
+ badvalue(lambda: uuid.UUID('00'*16, version=6))
+
+ # Integer value out of range.
+ badvalue(lambda: uuid.UUID(int=-1))
+ badvalue(lambda: uuid.UUID(int=1<<128L))
+
+ # Must supply exactly one of hex, bytes, fields, int.
+ h, b, f, i = '00'*16, '\0'*16, (0, 0, 0, 0, 0, 0), 0
+ uuid.UUID(h)
+ uuid.UUID(hex=h)
+ uuid.UUID(bytes=b)
+ uuid.UUID(fields=f)
+ uuid.UUID(int=i)
+
+ # Wrong number of arguments (positional).
+ badtype(lambda: uuid.UUID())
+ badtype(lambda: uuid.UUID(h, b))
+ badtype(lambda: uuid.UUID(h, b, f))
+ badtype(lambda: uuid.UUID(h, b, f, i))
+
+ # Duplicate arguments (named).
+ badtype(lambda: uuid.UUID(hex=h, bytes=b))
+ badtype(lambda: uuid.UUID(hex=h, fields=f))
+ badtype(lambda: uuid.UUID(hex=h, int=i))
+ badtype(lambda: uuid.UUID(bytes=b, fields=f))
+ badtype(lambda: uuid.UUID(bytes=b, int=i))
+ badtype(lambda: uuid.UUID(fields=f, int=i))
+ badtype(lambda: uuid.UUID(hex=h, bytes=b, fields=f))
+ badtype(lambda: uuid.UUID(hex=h, bytes=b, int=i))
+ badtype(lambda: uuid.UUID(hex=h, fields=f, int=i))
+ badtype(lambda: uuid.UUID(bytes=b, int=i, fields=f))
+ badtype(lambda: uuid.UUID(hex=h, bytes=b, int=i, fields=f))
+
+ # Duplicate arguments (positional and named).
+ badtype(lambda: uuid.UUID(h, hex=h))
+ badtype(lambda: uuid.UUID(h, bytes=b))
+ badtype(lambda: uuid.UUID(h, fields=f))
+ badtype(lambda: uuid.UUID(h, int=i))
+ badtype(lambda: uuid.UUID(h, hex=h, bytes=b))
+ badtype(lambda: uuid.UUID(h, hex=h, fields=f))
+ badtype(lambda: uuid.UUID(h, hex=h, int=i))
+ badtype(lambda: uuid.UUID(h, bytes=b, fields=f))
+ badtype(lambda: uuid.UUID(h, bytes=b, int=i))
+ badtype(lambda: uuid.UUID(h, fields=f, int=i))
+ badtype(lambda: uuid.UUID(h, hex=h, bytes=b, fields=f))
+ badtype(lambda: uuid.UUID(h, hex=h, bytes=b, int=i))
+ badtype(lambda: uuid.UUID(h, hex=h, fields=f, int=i))
+ badtype(lambda: uuid.UUID(h, bytes=b, int=i, fields=f))
+ badtype(lambda: uuid.UUID(h, hex=h, bytes=b, int=i, fields=f))
+
+ # Immutability.
+ u = uuid.UUID(h)
+ badtype(lambda: setattr(u, 'hex', h))
+ badtype(lambda: setattr(u, 'bytes', b))
+ badtype(lambda: setattr(u, 'fields', f))
+ badtype(lambda: setattr(u, 'int', i))
+
+ def check_node(self, node, source):
+ individual_group_bit = (node >> 40L) & 1
+ universal_local_bit = (node >> 40L) & 2
+ message = "%012x doesn't look like a real MAC address" % node
+ self.assertEqual(individual_group_bit, 0, message)
+ self.assertEqual(universal_local_bit, 0, message)
+ self.assertNotEqual(node, 0, message)
+ self.assertNotEqual(node, 0xffffffffffffL, message)
+ self.assert_(0 <= node, message)
+ self.assert_(node < (1L << 48), message)
+
+ TestUUID.source2node[source] = node
+ if TestUUID.last_node:
+ if TestUUID.last_node != node:
+ msg = "different sources disagree on node:\n"
+ for s, n in TestUUID.source2node.iteritems():
+ msg += " from source %r, node was %012x\n" % (s, n)
+ # There's actually no reason to expect the MAC addresses
+ # to agree across various methods -- e.g., a box may have
+ # multiple network interfaces, and different ways of getting
+ # a MAC address may favor different HW.
+ ##self.fail(msg)
+ else:
+ TestUUID.last_node = node
+
+ def test_ifconfig_getnode(self):
+ import sys
+ print >>sys.__stdout__, \
+""" WARNING: uuid._ifconfig_getnode is unreliable on many platforms.
+ It is disabled until the code and/or test can be fixed properly."""
+ return
+
+ import os
+ if os.name == 'posix':
+ node = uuid._ifconfig_getnode()
+ if node is not None:
+ self.check_node(node, 'ifconfig')
+
+ def test_ipconfig_getnode(self):
+ import os
+ if os.name == 'nt':
+ node = uuid._ipconfig_getnode()
+ if node is not None:
+ self.check_node(node, 'ipconfig')
+
+ def test_netbios_getnode(self):
+ if importable('win32wnet') and importable('netbios'):
+ self.check_node(uuid._netbios_getnode(), 'netbios')
+
+ def test_random_getnode(self):
+ node = uuid._random_getnode()
+ self.assert_(0 <= node)
+        self.assert_(node < (1L << 48))
+
+ def test_unixdll_getnode(self):
+ import sys
+ print >>sys.__stdout__, \
+""" WARNING: uuid._unixdll_getnode is unreliable on many platforms.
+ It is disabled until the code and/or test can be fixed properly."""
+ return
+
+ import os
+ if importable('ctypes') and os.name == 'posix':
+ self.check_node(uuid._unixdll_getnode(), 'unixdll')
+
+ def test_windll_getnode(self):
+ import os
+ if importable('ctypes') and os.name == 'nt':
+ self.check_node(uuid._windll_getnode(), 'windll')
+
+ def test_getnode(self):
+ import sys
+ print >>sys.__stdout__, \
+""" WARNING: uuid.getnode is unreliable on many platforms.
+ It is disabled until the code and/or test can be fixed properly."""
+ return
+
+ node1 = uuid.getnode()
+ self.check_node(node1, "getnode1")
+
+ # Test it again to ensure consistency.
+ node2 = uuid.getnode()
+ self.check_node(node2, "getnode2")
+
+ self.assertEqual(node1, node2)
+
+ def test_uuid1(self):
+ equal = self.assertEqual
+
+        # Make sure uuid1() generates UUIDs that are actually version 1.
+ for u in [uuid.uuid1() for i in range(10)]:
+ equal(u.variant, uuid.RFC_4122)
+ equal(u.version, 1)
+
+ # Make sure the supplied node ID appears in the UUID.
+ u = uuid.uuid1(0)
+ equal(u.node, 0)
+ u = uuid.uuid1(0x123456789abc)
+ equal(u.node, 0x123456789abc)
+ u = uuid.uuid1(0xffffffffffff)
+ equal(u.node, 0xffffffffffff)
+
+ # Make sure the supplied clock sequence appears in the UUID.
+ u = uuid.uuid1(0x123456789abc, 0)
+ equal(u.node, 0x123456789abc)
+ equal(((u.clock_seq_hi_variant & 0x3f) << 8) | u.clock_seq_low, 0)
+ u = uuid.uuid1(0x123456789abc, 0x1234)
+ equal(u.node, 0x123456789abc)
+ equal(((u.clock_seq_hi_variant & 0x3f) << 8) |
+ u.clock_seq_low, 0x1234)
+ u = uuid.uuid1(0x123456789abc, 0x3fff)
+ equal(u.node, 0x123456789abc)
+ equal(((u.clock_seq_hi_variant & 0x3f) << 8) |
+ u.clock_seq_low, 0x3fff)
+
+ def test_uuid3(self):
+ equal = self.assertEqual
+
+ # Test some known version-3 UUIDs.
+ for u, v in [(uuid.uuid3(uuid.NAMESPACE_DNS, 'python.org'),
+ '6fa459ea-ee8a-3ca4-894e-db77e160355e'),
+ (uuid.uuid3(uuid.NAMESPACE_URL, 'http://python.org/'),
+ '9fe8e8c4-aaa8-32a9-a55c-4535a88b748d'),
+ (uuid.uuid3(uuid.NAMESPACE_OID, '1.3.6.1'),
+ 'dd1a1cef-13d5-368a-ad82-eca71acd4cd1'),
+ (uuid.uuid3(uuid.NAMESPACE_X500, 'c=ca'),
+ '658d3002-db6b-3040-a1d1-8ddd7d189a4d'),
+ ]:
+ equal(u.variant, uuid.RFC_4122)
+ equal(u.version, 3)
+ equal(u, uuid.UUID(v))
+ equal(str(u), v)
+
+ def test_uuid4(self):
+ equal = self.assertEqual
+
+ # Make sure uuid4() generates UUIDs that are actually version 4.
+ for u in [uuid.uuid4() for i in range(10)]:
+ equal(u.variant, uuid.RFC_4122)
+ equal(u.version, 4)
+
+ def test_uuid5(self):
+ equal = self.assertEqual
+
+ # Test some known version-5 UUIDs.
+ for u, v in [(uuid.uuid5(uuid.NAMESPACE_DNS, 'python.org'),
+ '886313e1-3b8a-5372-9b90-0c9aee199e5d'),
+ (uuid.uuid5(uuid.NAMESPACE_URL, 'http://python.org/'),
+ '4c565f0d-3f5a-5890-b41b-20cf47701c5e'),
+ (uuid.uuid5(uuid.NAMESPACE_OID, '1.3.6.1'),
+ '1447fa61-5277-5fef-a9b3-fbc6e44f4af3'),
+ (uuid.uuid5(uuid.NAMESPACE_X500, 'c=ca'),
+ 'cc957dd1-a972-5349-98cd-874190002798'),
+ ]:
+ equal(u.variant, uuid.RFC_4122)
+ equal(u.version, 5)
+ equal(u, uuid.UUID(v))
+ equal(str(u), v)
+
+
+def test_main():
+ test_support.run_unittest(TestUUID)
+
+if __name__ == '__main__':
+ test_main()
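
The table driving test_UUID exercises one property over and over: a UUID can be rebuilt from its string, bytes, fields or integer form, and the name-based constructors are deterministic. A compact sketch using values taken from the table above:

    import uuid

    u = uuid.UUID('12345678-1234-5678-1234-567812345678')
    assert u == uuid.UUID(bytes='\x12\x34\x56\x78' * 4)
    assert u == uuid.UUID(fields=(0x12345678, 0x1234, 0x5678,
                                  0x12, 0x34, 0x567812345678))
    assert u == uuid.UUID(int=0x12345678123456781234567812345678)

    # Name-based UUIDs (versions 3 and 5) always map the same namespace/name
    # pair to the same value.
    assert uuid.uuid3(uuid.NAMESPACE_DNS, 'python.org') == \
           uuid.UUID('6fa459ea-ee8a-3ca4-894e-db77e160355e')
    assert uuid.uuid5(uuid.NAMESPACE_DNS, 'python.org') == \
           uuid.UUID('886313e1-3b8a-5372-9b90-0c9aee199e5d')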
diff --git a/Lib/test/test_wait3.py b/Lib/test/test_wait3.py
index f6a41a6..9de64b2 100644
--- a/Lib/test/test_wait3.py
+++ b/Lib/test/test_wait3.py
@@ -2,8 +2,9 @@
"""
import os
+import time
from test.fork_wait import ForkWait
-from test.test_support import TestSkipped, run_unittest
+from test.test_support import TestSkipped, run_unittest, reap_children
try:
os.fork
@@ -17,16 +18,21 @@ except AttributeError:
class Wait3Test(ForkWait):
def wait_impl(self, cpid):
- while 1:
- spid, status, rusage = os.wait3(0)
+ for i in range(10):
+ # wait3() shouldn't hang, but some of the buildbots seem to hang
+ # in the forking tests. This is an attempt to fix the problem.
+ spid, status, rusage = os.wait3(os.WNOHANG)
if spid == cpid:
break
+ time.sleep(1.0)
+
self.assertEqual(spid, cpid)
self.assertEqual(status, 0, "cause = %d, exit = %d" % (status&0xff, status>>8))
self.assertTrue(rusage)
def test_main():
run_unittest(Wait3Test)
+ reap_children()
if __name__ == "__main__":
test_main()
diff --git a/Lib/test/test_wait4.py b/Lib/test/test_wait4.py
index 027e5c3..9f7fc14 100644
--- a/Lib/test/test_wait4.py
+++ b/Lib/test/test_wait4.py
@@ -2,8 +2,9 @@
"""
import os
+import time
from test.fork_wait import ForkWait
-from test.test_support import TestSkipped, run_unittest
+from test.test_support import TestSkipped, run_unittest, reap_children
try:
os.fork
@@ -17,13 +18,20 @@ except AttributeError:
class Wait4Test(ForkWait):
def wait_impl(self, cpid):
- spid, status, rusage = os.wait4(cpid, 0)
+ for i in range(10):
+ # wait4() shouldn't hang, but some of the buildbots seem to hang
+ # in the forking tests. This is an attempt to fix the problem.
+ spid, status, rusage = os.wait4(cpid, os.WNOHANG)
+ if spid == cpid:
+ break
+ time.sleep(1.0)
self.assertEqual(spid, cpid)
self.assertEqual(status, 0, "cause = %d, exit = %d" % (status&0xff, status>>8))
self.assertTrue(rusage)
def test_main():
run_unittest(Wait4Test)
+ reap_children()
if __name__ == "__main__":
test_main()
diff --git a/Lib/test/test_warnings.py b/Lib/test/test_warnings.py
index 5d051a5..a7ccb6b 100644
--- a/Lib/test/test_warnings.py
+++ b/Lib/test/test_warnings.py
@@ -81,6 +81,19 @@ class TestModule(unittest.TestCase):
self.assertEqual(msg.message, text)
self.assertEqual(msg.category, 'UserWarning')
+ def test_options(self):
+ # Uses the private _setoption() function to test the parsing
+ # of command-line warning arguments
+ self.assertRaises(warnings._OptionError,
+ warnings._setoption, '1:2:3:4:5:6')
+ self.assertRaises(warnings._OptionError,
+ warnings._setoption, 'bogus::Warning')
+ self.assertRaises(warnings._OptionError,
+ warnings._setoption, 'ignore:2::4:-5')
+ warnings._setoption('error::Warning::0')
+ self.assertRaises(UserWarning, warnings.warn, 'convert to error')
+
+
def test_main(verbose=None):
# Obscure hack so that this test passes after reloads or repeated calls
# to test_main (regrtest -R).
diff --git a/Lib/test/test_winreg.py b/Lib/test/test_winreg.py
index a9bc962..5830fd6 100644
--- a/Lib/test/test_winreg.py
+++ b/Lib/test/test_winreg.py
@@ -151,3 +151,6 @@ if remote_name is not None:
else:
print "Remote registry calls can be tested using",
print "'test_winreg.py --remote \\\\machine_name'"
+ # perform minimal ConnectRegistry test which just invokes it
+ h = ConnectRegistry(None, HKEY_LOCAL_MACHINE)
+ h.Close()
diff --git a/Lib/test/test_wsgiref.py b/Lib/test/test_wsgiref.py
new file mode 100755
index 0000000..1ec271b
--- /dev/null
+++ b/Lib/test/test_wsgiref.py
@@ -0,0 +1,615 @@
+from __future__ import nested_scopes # Backward compat for 2.1
+from unittest import TestSuite, TestCase, makeSuite
+from wsgiref.util import setup_testing_defaults
+from wsgiref.headers import Headers
+from wsgiref.handlers import BaseHandler, BaseCGIHandler
+from wsgiref import util
+from wsgiref.validate import validator
+from wsgiref.simple_server import WSGIServer, WSGIRequestHandler, demo_app
+from wsgiref.simple_server import make_server
+from StringIO import StringIO
+from SocketServer import BaseServer
+import re, sys
+
+
+class MockServer(WSGIServer):
+ """Non-socket HTTP server"""
+
+ def __init__(self, server_address, RequestHandlerClass):
+ BaseServer.__init__(self, server_address, RequestHandlerClass)
+ self.server_bind()
+
+ def server_bind(self):
+ host, port = self.server_address
+ self.server_name = host
+ self.server_port = port
+ self.setup_environ()
+
+
+class MockHandler(WSGIRequestHandler):
+ """Non-socket HTTP handler"""
+ def setup(self):
+ self.connection = self.request
+ self.rfile, self.wfile = self.connection
+
+ def finish(self):
+ pass
+
+
+
+
+
+def hello_app(environ,start_response):
+ start_response("200 OK", [
+ ('Content-Type','text/plain'),
+ ('Date','Mon, 05 Jun 2006 18:49:54 GMT')
+ ])
+ return ["Hello, world!"]
+
+def run_amock(app=hello_app, data="GET / HTTP/1.0\n\n"):
+ server = make_server("", 80, app, MockServer, MockHandler)
+ inp, out, err, olderr = StringIO(data), StringIO(), StringIO(), sys.stderr
+ sys.stderr = err
+
+ try:
+ server.finish_request((inp,out), ("127.0.0.1",8888))
+ finally:
+ sys.stderr = olderr
+
+ return out.getvalue(), err.getvalue()
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+def compare_generic_iter(make_it,match):
+ """Utility to compare a generic 2.1/2.2+ iterator with an iterable
+
+ If running under Python 2.2+, this tests the iterator using iter()/next(),
+ as well as __getitem__. 'make_it' must be a function returning a fresh
+ iterator to be tested (since this may test the iterator twice)."""
+
+ it = make_it()
+ n = 0
+ for item in match:
+ if not it[n]==item: raise AssertionError
+ n+=1
+ try:
+ it[n]
+ except IndexError:
+ pass
+ else:
+ raise AssertionError("Too many items from __getitem__",it)
+
+ try:
+ iter, StopIteration
+ except NameError:
+ pass
+ else:
+ # Only test iter mode under 2.2+
+ it = make_it()
+ if not iter(it) is it: raise AssertionError
+ for item in match:
+ if not it.next()==item: raise AssertionError
+ try:
+ it.next()
+ except StopIteration:
+ pass
+ else:
+ raise AssertionError("Too many items from .next()",it)
+
+
+
+
+
+
+class IntegrationTests(TestCase):
+
+ def check_hello(self, out, has_length=True):
+ self.assertEqual(out,
+ "HTTP/1.0 200 OK\r\n"
+ "Server: WSGIServer/0.1 Python/"+sys.version.split()[0]+"\r\n"
+ "Content-Type: text/plain\r\n"
+ "Date: Mon, 05 Jun 2006 18:49:54 GMT\r\n" +
+ (has_length and "Content-Length: 13\r\n" or "") +
+ "\r\n"
+ "Hello, world!"
+ )
+
+ def test_plain_hello(self):
+ out, err = run_amock()
+ self.check_hello(out)
+
+ def test_validated_hello(self):
+ out, err = run_amock(validator(hello_app))
+ # the middleware doesn't support len(), so content-length isn't there
+ self.check_hello(out, has_length=False)
+
+ def test_simple_validation_error(self):
+ def bad_app(environ,start_response):
+ start_response("200 OK", ('Content-Type','text/plain'))
+ return ["Hello, world!"]
+ out, err = run_amock(validator(bad_app))
+ self.failUnless(out.endswith(
+ "A server error occurred. Please contact the administrator."
+ ))
+ self.assertEqual(
+ err.splitlines()[-2],
+ "AssertionError: Headers (('Content-Type', 'text/plain')) must"
+ " be of type list: <type 'tuple'>"
+ )
+
+
+
+
+
+
+class UtilityTests(TestCase):
+
+ def checkShift(self,sn_in,pi_in,part,sn_out,pi_out):
+ env = {'SCRIPT_NAME':sn_in,'PATH_INFO':pi_in}
+ util.setup_testing_defaults(env)
+ self.assertEqual(util.shift_path_info(env),part)
+ self.assertEqual(env['PATH_INFO'],pi_out)
+ self.assertEqual(env['SCRIPT_NAME'],sn_out)
+ return env
+
+ def checkDefault(self, key, value, alt=None):
+ # Check defaulting when empty
+ env = {}
+ util.setup_testing_defaults(env)
+ if isinstance(value,StringIO):
+ self.failUnless(isinstance(env[key],StringIO))
+ else:
+ self.assertEqual(env[key],value)
+
+ # Check existing value
+ env = {key:alt}
+ util.setup_testing_defaults(env)
+ self.failUnless(env[key] is alt)
+
+ def checkCrossDefault(self,key,value,**kw):
+ util.setup_testing_defaults(kw)
+ self.assertEqual(kw[key],value)
+
+ def checkAppURI(self,uri,**kw):
+ util.setup_testing_defaults(kw)
+ self.assertEqual(util.application_uri(kw),uri)
+
+ def checkReqURI(self,uri,query=1,**kw):
+ util.setup_testing_defaults(kw)
+ self.assertEqual(util.request_uri(kw,query),uri)
+
+
+
+
+
+
+ def checkFW(self,text,size,match):
+
+ def make_it(text=text,size=size):
+ return util.FileWrapper(StringIO(text),size)
+
+ compare_generic_iter(make_it,match)
+
+ it = make_it()
+ self.failIf(it.filelike.closed)
+
+ for item in it:
+ pass
+
+ self.failIf(it.filelike.closed)
+
+ it.close()
+ self.failUnless(it.filelike.closed)
+
+
+ def testSimpleShifts(self):
+ self.checkShift('','/', '', '/', '')
+ self.checkShift('','/x', 'x', '/x', '')
+ self.checkShift('/','', None, '/', '')
+ self.checkShift('/a','/x/y', 'x', '/a/x', '/y')
+ self.checkShift('/a','/x/', 'x', '/a/x', '/')
+
+
+ def testNormalizedShifts(self):
+ self.checkShift('/a/b', '/../y', '..', '/a', '/y')
+ self.checkShift('', '/../y', '..', '', '/y')
+ self.checkShift('/a/b', '//y', 'y', '/a/b/y', '')
+ self.checkShift('/a/b', '//y/', 'y', '/a/b/y', '/')
+ self.checkShift('/a/b', '/./y', 'y', '/a/b/y', '')
+ self.checkShift('/a/b', '/./y/', 'y', '/a/b/y', '/')
+ self.checkShift('/a/b', '///./..//y/.//', '..', '/a', '/y/')
+ self.checkShift('/a/b', '///', '', '/a/b/', '')
+ self.checkShift('/a/b', '/.//', '', '/a/b/', '')
+ self.checkShift('/a/b', '/x//', 'x', '/a/b/x', '/')
+ self.checkShift('/a/b', '/.', None, '/a/b', '')
+
+
+ def testDefaults(self):
+ for key, value in [
+ ('SERVER_NAME','127.0.0.1'),
+ ('SERVER_PORT', '80'),
+ ('SERVER_PROTOCOL','HTTP/1.0'),
+ ('HTTP_HOST','127.0.0.1'),
+ ('REQUEST_METHOD','GET'),
+ ('SCRIPT_NAME',''),
+ ('PATH_INFO','/'),
+ ('wsgi.version', (1,0)),
+ ('wsgi.run_once', 0),
+ ('wsgi.multithread', 0),
+ ('wsgi.multiprocess', 0),
+ ('wsgi.input', StringIO("")),
+ ('wsgi.errors', StringIO()),
+ ('wsgi.url_scheme','http'),
+ ]:
+ self.checkDefault(key,value)
+
+
+ def testCrossDefaults(self):
+ self.checkCrossDefault('HTTP_HOST',"foo.bar",SERVER_NAME="foo.bar")
+ self.checkCrossDefault('wsgi.url_scheme',"https",HTTPS="on")
+ self.checkCrossDefault('wsgi.url_scheme',"https",HTTPS="1")
+ self.checkCrossDefault('wsgi.url_scheme',"https",HTTPS="yes")
+ self.checkCrossDefault('wsgi.url_scheme',"http",HTTPS="foo")
+ self.checkCrossDefault('SERVER_PORT',"80",HTTPS="foo")
+ self.checkCrossDefault('SERVER_PORT',"443",HTTPS="on")
+
+
+ def testGuessScheme(self):
+ self.assertEqual(util.guess_scheme({}), "http")
+ self.assertEqual(util.guess_scheme({'HTTPS':"foo"}), "http")
+ self.assertEqual(util.guess_scheme({'HTTPS':"on"}), "https")
+ self.assertEqual(util.guess_scheme({'HTTPS':"yes"}), "https")
+ self.assertEqual(util.guess_scheme({'HTTPS':"1"}), "https")
+
+
+
+
+
+ def testAppURIs(self):
+ self.checkAppURI("http://127.0.0.1/")
+ self.checkAppURI("http://127.0.0.1/spam", SCRIPT_NAME="/spam")
+ self.checkAppURI("http://spam.example.com:2071/",
+ HTTP_HOST="spam.example.com:2071", SERVER_PORT="2071")
+ self.checkAppURI("http://spam.example.com/",
+ SERVER_NAME="spam.example.com")
+ self.checkAppURI("http://127.0.0.1/",
+ HTTP_HOST="127.0.0.1", SERVER_NAME="spam.example.com")
+ self.checkAppURI("https://127.0.0.1/", HTTPS="on")
+ self.checkAppURI("http://127.0.0.1:8000/", SERVER_PORT="8000",
+ HTTP_HOST=None)
+
+ def testReqURIs(self):
+ self.checkReqURI("http://127.0.0.1/")
+ self.checkReqURI("http://127.0.0.1/spam", SCRIPT_NAME="/spam")
+ self.checkReqURI("http://127.0.0.1/spammity/spam",
+ SCRIPT_NAME="/spammity", PATH_INFO="/spam")
+ self.checkReqURI("http://127.0.0.1/spammity/spam?say=ni",
+ SCRIPT_NAME="/spammity", PATH_INFO="/spam",QUERY_STRING="say=ni")
+ self.checkReqURI("http://127.0.0.1/spammity/spam", 0,
+ SCRIPT_NAME="/spammity", PATH_INFO="/spam",QUERY_STRING="say=ni")
+
+ def testFileWrapper(self):
+ self.checkFW("xyz"*50, 120, ["xyz"*40,"xyz"*10])
+
+ def testHopByHop(self):
+ for hop in (
+ "Connection Keep-Alive Proxy-Authenticate Proxy-Authorization "
+ "TE Trailers Transfer-Encoding Upgrade"
+ ).split():
+ for alt in hop, hop.title(), hop.upper(), hop.lower():
+ self.failUnless(util.is_hop_by_hop(alt))
+
+ # Not comprehensive, just a few random header names
+ for hop in (
+ "Accept Cache-Control Date Pragma Trailer Via Warning"
+ ).split():
+ for alt in hop, hop.title(), hop.upper(), hop.lower():
+ self.failIf(util.is_hop_by_hop(alt))
+
+class HeaderTests(TestCase):
+
+ def testMappingInterface(self):
+ test = [('x','y')]
+ self.assertEqual(len(Headers([])),0)
+ self.assertEqual(len(Headers(test[:])),1)
+ self.assertEqual(Headers(test[:]).keys(), ['x'])
+ self.assertEqual(Headers(test[:]).values(), ['y'])
+ self.assertEqual(Headers(test[:]).items(), test)
+ self.failIf(Headers(test).items() is test) # must be copy!
+
+ h=Headers([])
+ del h['foo'] # should not raise an error
+
+ h['Foo'] = 'bar'
+ for m in h.has_key, h.__contains__, h.get, h.get_all, h.__getitem__:
+ self.failUnless(m('foo'))
+ self.failUnless(m('Foo'))
+ self.failUnless(m('FOO'))
+ self.failIf(m('bar'))
+
+ self.assertEqual(h['foo'],'bar')
+ h['foo'] = 'baz'
+ self.assertEqual(h['FOO'],'baz')
+ self.assertEqual(h.get_all('foo'),['baz'])
+
+ self.assertEqual(h.get("foo","whee"), "baz")
+ self.assertEqual(h.get("zoo","whee"), "whee")
+ self.assertEqual(h.setdefault("foo","whee"), "baz")
+ self.assertEqual(h.setdefault("zoo","whee"), "whee")
+ self.assertEqual(h["foo"],"baz")
+ self.assertEqual(h["zoo"],"whee")
+
+ def testRequireList(self):
+ self.assertRaises(TypeError, Headers, "foo")
+
+
+ def testExtras(self):
+ h = Headers([])
+ self.assertEqual(str(h),'\r\n')
+
+ h.add_header('foo','bar',baz="spam")
+ self.assertEqual(h['foo'], 'bar; baz="spam"')
+ self.assertEqual(str(h),'foo: bar; baz="spam"\r\n\r\n')
+
+ h.add_header('Foo','bar',cheese=None)
+ self.assertEqual(h.get_all('foo'),
+ ['bar; baz="spam"', 'bar; cheese'])
+
+ self.assertEqual(str(h),
+ 'foo: bar; baz="spam"\r\n'
+ 'Foo: bar; cheese\r\n'
+ '\r\n'
+ )
+
+
+class ErrorHandler(BaseCGIHandler):
+ """Simple handler subclass for testing BaseHandler"""
+
+ def __init__(self,**kw):
+ setup_testing_defaults(kw)
+ BaseCGIHandler.__init__(
+ self, StringIO(''), StringIO(), StringIO(), kw,
+ multithread=True, multiprocess=True
+ )
+
+class TestHandler(ErrorHandler):
+ """Simple handler subclass for testing BaseHandler, w/error passthru"""
+
+ def handle_error(self):
+ raise # for testing, we want to see what's happening
+
+
+
+
+
+
+
+
+
+
+
+class HandlerTests(TestCase):
+
+ def checkEnvironAttrs(self, handler):
+ env = handler.environ
+ for attr in [
+ 'version','multithread','multiprocess','run_once','file_wrapper'
+ ]:
+ if attr=='file_wrapper' and handler.wsgi_file_wrapper is None:
+ continue
+ self.assertEqual(getattr(handler,'wsgi_'+attr),env['wsgi.'+attr])
+
+ def checkOSEnviron(self,handler):
+ empty = {}; setup_testing_defaults(empty)
+ env = handler.environ
+ from os import environ
+ for k,v in environ.items():
+ if not empty.has_key(k):
+ self.assertEqual(env[k],v)
+ for k,v in empty.items():
+ self.failUnless(env.has_key(k))
+
+ def testEnviron(self):
+ h = TestHandler(X="Y")
+ h.setup_environ()
+ self.checkEnvironAttrs(h)
+ self.checkOSEnviron(h)
+ self.assertEqual(h.environ["X"],"Y")
+
+ def testCGIEnviron(self):
+ h = BaseCGIHandler(None,None,None,{})
+ h.setup_environ()
+ for key in 'wsgi.url_scheme', 'wsgi.input', 'wsgi.errors':
+ self.assert_(h.environ.has_key(key))
+
+ def testScheme(self):
+ h=TestHandler(HTTPS="on"); h.setup_environ()
+ self.assertEqual(h.environ['wsgi.url_scheme'],'https')
+ h=TestHandler(); h.setup_environ()
+ self.assertEqual(h.environ['wsgi.url_scheme'],'http')
+
+
+ def testAbstractMethods(self):
+ h = BaseHandler()
+ for name in [
+ '_flush','get_stdin','get_stderr','add_cgi_vars'
+ ]:
+ self.assertRaises(NotImplementedError, getattr(h,name))
+ self.assertRaises(NotImplementedError, h._write, "test")
+
+
+ def testContentLength(self):
+ # Demo one reason iteration is better than write()... ;)
+
+ def trivial_app1(e,s):
+ s('200 OK',[])
+ return [e['wsgi.url_scheme']]
+
+ def trivial_app2(e,s):
+ s('200 OK',[])(e['wsgi.url_scheme'])
+ return []
+
+ h = TestHandler()
+ h.run(trivial_app1)
+ self.assertEqual(h.stdout.getvalue(),
+ "Status: 200 OK\r\n"
+ "Content-Length: 4\r\n"
+ "\r\n"
+ "http")
+
+ h = TestHandler()
+ h.run(trivial_app2)
+ self.assertEqual(h.stdout.getvalue(),
+ "Status: 200 OK\r\n"
+ "\r\n"
+ "http")
+
+
+
+
+
+
+
+ def testBasicErrorOutput(self):
+
+ def non_error_app(e,s):
+ s('200 OK',[])
+ return []
+
+ def error_app(e,s):
+ raise AssertionError("This should be caught by handler")
+
+ h = ErrorHandler()
+ h.run(non_error_app)
+ self.assertEqual(h.stdout.getvalue(),
+ "Status: 200 OK\r\n"
+ "Content-Length: 0\r\n"
+ "\r\n")
+ self.assertEqual(h.stderr.getvalue(),"")
+
+ h = ErrorHandler()
+ h.run(error_app)
+ self.assertEqual(h.stdout.getvalue(),
+ "Status: %s\r\n"
+ "Content-Type: text/plain\r\n"
+ "Content-Length: %d\r\n"
+ "\r\n%s" % (h.error_status,len(h.error_body),h.error_body))
+
+ self.failUnless(h.stderr.getvalue().find("AssertionError")<>-1)
+
+ def testErrorAfterOutput(self):
+ MSG = "Some output has been sent"
+ def error_app(e,s):
+ s("200 OK",[])(MSG)
+ raise AssertionError("This should be caught by handler")
+
+ h = ErrorHandler()
+ h.run(error_app)
+ self.assertEqual(h.stdout.getvalue(),
+ "Status: 200 OK\r\n"
+ "\r\n"+MSG)
+ self.failUnless(h.stderr.getvalue().find("AssertionError")<>-1)
+
+
+ def testHeaderFormats(self):
+
+ def non_error_app(e,s):
+ s('200 OK',[])
+ return []
+
+ stdpat = (
+ r"HTTP/%s 200 OK\r\n"
+ r"Date: \w{3}, [ 0123]\d \w{3} \d{4} \d\d:\d\d:\d\d GMT\r\n"
+ r"%s" r"Content-Length: 0\r\n" r"\r\n"
+ )
+ shortpat = (
+ "Status: 200 OK\r\n" "Content-Length: 0\r\n" "\r\n"
+ )
+
+ for ssw in "FooBar/1.0", None:
+ sw = ssw and "Server: %s\r\n" % ssw or ""
+
+ for version in "1.0", "1.1":
+ for proto in "HTTP/0.9", "HTTP/1.0", "HTTP/1.1":
+
+ h = TestHandler(SERVER_PROTOCOL=proto)
+ h.origin_server = False
+ h.http_version = version
+ h.server_software = ssw
+ h.run(non_error_app)
+ self.assertEqual(shortpat,h.stdout.getvalue())
+
+ h = TestHandler(SERVER_PROTOCOL=proto)
+ h.origin_server = True
+ h.http_version = version
+ h.server_software = ssw
+ h.run(non_error_app)
+ if proto=="HTTP/0.9":
+ self.assertEqual(h.stdout.getvalue(),"")
+ else:
+ self.failUnless(
+ re.match(stdpat%(version,sw), h.stdout.getvalue()),
+ (stdpat%(version,sw), h.stdout.getvalue())
+ )
+
+# This epilogue is needed for compatibility with the Python 2.5 regrtest module
+
+def test_main():
+ import unittest
+ from test.test_support import run_suite
+ run_suite(
+ unittest.defaultTestLoader.loadTestsFromModule(sys.modules[__name__])
+ )
+
+if __name__ == "__main__":
+ test_main()
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+# the above lines intentionally left blank
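The testContentLength case in the handler tests above demonstrates the point its comment makes: when an application hands the handler its whole body as a one-element iterable, the handler can measure it and emit a Content-Length header, whereas data pushed through write() is streamed before its total size is known. Below is a minimal sketch of that contrast using the same wsgiref pieces the tests exercise; the app and helper names (iterable_app, write_app, run) are illustrative and not part of the patch.

from StringIO import StringIO
from wsgiref.handlers import BaseCGIHandler
from wsgiref.util import setup_testing_defaults

def iterable_app(environ, start_response):
    # One string in a list: the handler can compute Content-Length itself.
    start_response('200 OK', [])
    return ['hello']

def write_app(environ, start_response):
    # write() streams data immediately, so no Content-Length is inferred.
    write = start_response('200 OK', [])
    write('hello')
    return []

def run(app):
    env = {}
    setup_testing_defaults(env)
    handler = BaseCGIHandler(StringIO(''), StringIO(), StringIO(), env)
    handler.run(app)
    return handler.stdout.getvalue()

print run(iterable_app)   # response includes "Content-Length: 5"
print run(write_app)      # no Content-Length header in the response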
diff --git a/Lib/test/test_xml_etree.py b/Lib/test/test_xml_etree.py
index 86052d7..78adb42 100644
--- a/Lib/test/test_xml_etree.py
+++ b/Lib/test/test_xml_etree.py
@@ -1,4 +1,4 @@
-# xmlcore.etree test. This file contains enough tests to make sure that
+# xml.etree test. This file contains enough tests to make sure that
# all included components work as they should. For a more extensive
# test suite, see the selftest script in the ElementTree distribution.
@@ -6,8 +6,6 @@ import doctest, sys
from test import test_support
-from xmlcore.etree import ElementTree as ET
-
SAMPLE_XML = """
<body>
<tag>text</tag>
@@ -32,9 +30,9 @@ def sanity():
"""
Import sanity.
- >>> from xmlcore.etree import ElementTree
- >>> from xmlcore.etree import ElementInclude
- >>> from xmlcore.etree import ElementPath
+ >>> from xml.etree import ElementTree
+ >>> from xml.etree import ElementInclude
+ >>> from xml.etree import ElementPath
"""
def check_method(method):
@@ -61,6 +59,8 @@ def interface():
"""
Test element tree interface.
+ >>> from xml.etree import ElementTree as ET
+
>>> element = ET.Element("tag", key="value")
>>> tree = ET.ElementTree(element)
@@ -108,6 +108,8 @@ def find():
"""
Test find methods (including xpath syntax).
+ >>> from xml.etree import ElementTree as ET
+
>>> elem = ET.XML(SAMPLE_XML)
>>> elem.find("tag").tag
'tag'
@@ -174,6 +176,8 @@ def find():
def parseliteral():
r"""
+ >>> from xml.etree import ElementTree as ET
+
>>> element = ET.XML("<html><body>text</body></html>")
>>> ET.ElementTree(element).write(sys.stdout)
<html><body>text</body></html>
@@ -195,18 +199,20 @@ def parseliteral():
'body'
"""
-def check_encoding(encoding):
+
+def check_encoding(ET, encoding):
"""
- >>> check_encoding("ascii")
- >>> check_encoding("us-ascii")
- >>> check_encoding("iso-8859-1")
- >>> check_encoding("iso-8859-15")
- >>> check_encoding("cp437")
- >>> check_encoding("mac-roman")
+ >>> from xml.etree import ElementTree as ET
+
+ >>> check_encoding(ET, "ascii")
+ >>> check_encoding(ET, "us-ascii")
+ >>> check_encoding(ET, "iso-8859-1")
+ >>> check_encoding(ET, "iso-8859-15")
+ >>> check_encoding(ET, "cp437")
+ >>> check_encoding(ET, "mac-roman")
"""
- ET.XML(
- "<?xml version='1.0' encoding='%s'?><xml />" % encoding
- )
+ ET.XML("<?xml version='1.0' encoding='%s'?><xml />" % encoding)
+
#
# xinclude tests (samples from appendix C of the xinclude specification)
@@ -282,14 +288,16 @@ def xinclude_loader(href, parse="xml", encoding=None):
except KeyError:
raise IOError("resource not found")
if parse == "xml":
- return ET.XML(data)
+ from xml.etree.ElementTree import XML
+ return XML(data)
return data
def xinclude():
r"""
Basic inclusion example (XInclude C.1)
- >>> from xmlcore.etree import ElementInclude
+ >>> from xml.etree import ElementTree as ET
+ >>> from xml.etree import ElementInclude
>>> document = xinclude_loader("C1.xml")
>>> ElementInclude.include(document, xinclude_loader)
diff --git a/Lib/test/test_xml_etree_c.py b/Lib/test/test_xml_etree_c.py
index 587ea99..56e7fed 100644
--- a/Lib/test/test_xml_etree_c.py
+++ b/Lib/test/test_xml_etree_c.py
@@ -1,10 +1,10 @@
-# xmlcore.etree test for cElementTree
+# xml.etree test for cElementTree
import doctest, sys
from test import test_support
-from xmlcore.etree import cElementTree as ET
+from xml.etree import cElementTree as ET
SAMPLE_XML = """
<body>
@@ -30,7 +30,7 @@ def sanity():
"""
Import sanity.
- >>> from xmlcore.etree import cElementTree
+ >>> from xml.etree import cElementTree
"""
def check_method(method):
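A recurring change in the test_xml_etree.py hunks above is that the ElementTree import moves out of module scope and into the individual docstrings (">>> from xml.etree import ElementTree as ET"), presumably so each doctest carries its own import instead of depending on module-level state. A small self-contained sketch of that doctest style; the module and function names here are illustrative.

import doctest

def interface():
    """
    The doctest imports what it needs, so it can run standalone.

    >>> from xml.etree import ElementTree as ET
    >>> ET.Element("tag").tag
    'tag'
    """

if __name__ == "__main__":
    doctest.testmod()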
diff --git a/Lib/test/test_zipfile.py b/Lib/test/test_zipfile.py
index 0241348..54684f3 100644
--- a/Lib/test/test_zipfile.py
+++ b/Lib/test/test_zipfile.py
@@ -4,7 +4,7 @@ try:
except ImportError:
zlib = None
-import zipfile, os, unittest
+import zipfile, os, unittest, sys, shutil
from StringIO import StringIO
from tempfile import TemporaryFile
@@ -28,14 +28,70 @@ class TestsWithSourceFile(unittest.TestCase):
zipfp = zipfile.ZipFile(f, "w", compression)
zipfp.write(TESTFN, "another"+os.extsep+"name")
zipfp.write(TESTFN, TESTFN)
+ zipfp.writestr("strfile", self.data)
zipfp.close()
# Read the ZIP archive
zipfp = zipfile.ZipFile(f, "r", compression)
self.assertEqual(zipfp.read(TESTFN), self.data)
self.assertEqual(zipfp.read("another"+os.extsep+"name"), self.data)
+ self.assertEqual(zipfp.read("strfile"), self.data)
+
+ # Print the ZIP directory
+ fp = StringIO()
+ stdout = sys.stdout
+ try:
+ sys.stdout = fp
+
+ zipfp.printdir()
+ finally:
+ sys.stdout = stdout
+
+ directory = fp.getvalue()
+ lines = directory.splitlines()
+ self.assertEquals(len(lines), 4) # Number of files + header
+
+ self.assert_('File Name' in lines[0])
+ self.assert_('Modified' in lines[0])
+ self.assert_('Size' in lines[0])
+
+ fn, date, time, size = lines[1].split()
+ self.assertEquals(fn, 'another.name')
+ # XXX: timestamp is not tested
+ self.assertEquals(size, str(len(self.data)))
+
+ # Check the namelist
+ names = zipfp.namelist()
+ self.assertEquals(len(names), 3)
+ self.assert_(TESTFN in names)
+ self.assert_("another"+os.extsep+"name" in names)
+ self.assert_("strfile" in names)
+
+ # Check infolist
+ infos = zipfp.infolist()
+ names = [ i.filename for i in infos ]
+ self.assertEquals(len(names), 3)
+ self.assert_(TESTFN in names)
+ self.assert_("another"+os.extsep+"name" in names)
+ self.assert_("strfile" in names)
+ for i in infos:
+ self.assertEquals(i.file_size, len(self.data))
+
+ # check getinfo
+ for nm in (TESTFN, "another"+os.extsep+"name", "strfile"):
+ info = zipfp.getinfo(nm)
+ self.assertEquals(info.filename, nm)
+ self.assertEquals(info.file_size, len(self.data))
+
+ # Check that testzip doesn't raise an exception
+ zipfp.testzip()
+
+
zipfp.close()
+
+
+
def testStored(self):
for f in (TESTFN2, TemporaryFile(), StringIO()):
self.zipTest(f, zipfile.ZIP_STORED)
@@ -59,6 +115,197 @@ class TestsWithSourceFile(unittest.TestCase):
os.remove(TESTFN)
os.remove(TESTFN2)
+class TestZip64InSmallFiles(unittest.TestCase):
+ # These tests exercise the ZIP64 functionality without using large files,
+ # see test_zipfile64 for proper tests.
+
+ def setUp(self):
+ self._limit = zipfile.ZIP64_LIMIT
+ zipfile.ZIP64_LIMIT = 5
+
+ line_gen = ("Test of zipfile line %d." % i for i in range(0, 1000))
+ self.data = '\n'.join(line_gen)
+
+ # Make a source file with some lines
+ fp = open(TESTFN, "wb")
+ fp.write(self.data)
+ fp.close()
+
+ def largeFileExceptionTest(self, f, compression):
+ zipfp = zipfile.ZipFile(f, "w", compression)
+ self.assertRaises(zipfile.LargeZipFile,
+ zipfp.write, TESTFN, "another"+os.extsep+"name")
+ zipfp.close()
+
+ def largeFileExceptionTest2(self, f, compression):
+ zipfp = zipfile.ZipFile(f, "w", compression)
+ self.assertRaises(zipfile.LargeZipFile,
+ zipfp.writestr, "another"+os.extsep+"name", self.data)
+ zipfp.close()
+
+ def testLargeFileException(self):
+ for f in (TESTFN2, TemporaryFile(), StringIO()):
+ self.largeFileExceptionTest(f, zipfile.ZIP_STORED)
+ self.largeFileExceptionTest2(f, zipfile.ZIP_STORED)
+
+ def zipTest(self, f, compression):
+ # Create the ZIP archive
+ zipfp = zipfile.ZipFile(f, "w", compression, allowZip64=True)
+ zipfp.write(TESTFN, "another"+os.extsep+"name")
+ zipfp.write(TESTFN, TESTFN)
+ zipfp.writestr("strfile", self.data)
+ zipfp.close()
+
+ # Read the ZIP archive
+ zipfp = zipfile.ZipFile(f, "r", compression)
+ self.assertEqual(zipfp.read(TESTFN), self.data)
+ self.assertEqual(zipfp.read("another"+os.extsep+"name"), self.data)
+ self.assertEqual(zipfp.read("strfile"), self.data)
+
+ # Print the ZIP directory
+ fp = StringIO()
+ stdout = sys.stdout
+ try:
+ sys.stdout = fp
+
+ zipfp.printdir()
+ finally:
+ sys.stdout = stdout
+
+ directory = fp.getvalue()
+ lines = directory.splitlines()
+ self.assertEquals(len(lines), 4) # Number of files + header
+
+ self.assert_('File Name' in lines[0])
+ self.assert_('Modified' in lines[0])
+ self.assert_('Size' in lines[0])
+
+ fn, date, time, size = lines[1].split()
+ self.assertEquals(fn, 'another.name')
+ # XXX: timestamp is not tested
+ self.assertEquals(size, str(len(self.data)))
+
+ # Check the namelist
+ names = zipfp.namelist()
+ self.assertEquals(len(names), 3)
+ self.assert_(TESTFN in names)
+ self.assert_("another"+os.extsep+"name" in names)
+ self.assert_("strfile" in names)
+
+ # Check infolist
+ infos = zipfp.infolist()
+ names = [ i.filename for i in infos ]
+ self.assertEquals(len(names), 3)
+ self.assert_(TESTFN in names)
+ self.assert_("another"+os.extsep+"name" in names)
+ self.assert_("strfile" in names)
+ for i in infos:
+ self.assertEquals(i.file_size, len(self.data))
+
+ # check getinfo
+ for nm in (TESTFN, "another"+os.extsep+"name", "strfile"):
+ info = zipfp.getinfo(nm)
+ self.assertEquals(info.filename, nm)
+ self.assertEquals(info.file_size, len(self.data))
+
+ # Check that testzip doesn't raise an exception
+ zipfp.testzip()
+
+
+ zipfp.close()
+
+ def testStored(self):
+ for f in (TESTFN2, TemporaryFile(), StringIO()):
+ self.zipTest(f, zipfile.ZIP_STORED)
+
+
+ if zlib:
+ def testDeflated(self):
+ for f in (TESTFN2, TemporaryFile(), StringIO()):
+ self.zipTest(f, zipfile.ZIP_DEFLATED)
+
+ def testAbsoluteArcnames(self):
+ zipfp = zipfile.ZipFile(TESTFN2, "w", zipfile.ZIP_STORED, allowZip64=True)
+ zipfp.write(TESTFN, "/absolute")
+ zipfp.close()
+
+ zipfp = zipfile.ZipFile(TESTFN2, "r", zipfile.ZIP_STORED)
+ self.assertEqual(zipfp.namelist(), ["absolute"])
+ zipfp.close()
+
+
+ def tearDown(self):
+ zipfile.ZIP64_LIMIT = self._limit
+ os.remove(TESTFN)
+ os.remove(TESTFN2)
+
+class PyZipFileTests(unittest.TestCase):
+ def testWritePyfile(self):
+ zipfp = zipfile.PyZipFile(TemporaryFile(), "w")
+ fn = __file__
+ if fn.endswith('.pyc') or fn.endswith('.pyo'):
+ fn = fn[:-1]
+
+ zipfp.writepy(fn)
+
+ bn = os.path.basename(fn)
+ self.assert_(bn not in zipfp.namelist())
+ self.assert_(bn + 'o' in zipfp.namelist() or bn + 'c' in zipfp.namelist())
+ zipfp.close()
+
+
+ zipfp = zipfile.PyZipFile(TemporaryFile(), "w")
+ fn = __file__
+ if fn.endswith('.pyc') or fn.endswith('.pyo'):
+ fn = fn[:-1]
+
+ zipfp.writepy(fn, "testpackage")
+
+ bn = "%s/%s"%("testpackage", os.path.basename(fn))
+ self.assert_(bn not in zipfp.namelist())
+ self.assert_(bn + 'o' in zipfp.namelist() or bn + 'c' in zipfp.namelist())
+ zipfp.close()
+
+ def testWritePythonPackage(self):
+ import email
+ packagedir = os.path.dirname(email.__file__)
+
+ zipfp = zipfile.PyZipFile(TemporaryFile(), "w")
+ zipfp.writepy(packagedir)
+
+ # Check for a couple of modules at different levels of the hierarchy
+ names = zipfp.namelist()
+ self.assert_('email/__init__.pyo' in names or 'email/__init__.pyc' in names)
+ self.assert_('email/mime/text.pyo' in names or 'email/mime/text.pyc' in names)
+
+ def testWritePythonDirectory(self):
+ os.mkdir(TESTFN2)
+ try:
+ fp = open(os.path.join(TESTFN2, "mod1.py"), "w")
+ fp.write("print 42\n")
+ fp.close()
+
+ fp = open(os.path.join(TESTFN2, "mod2.py"), "w")
+ fp.write("print 42 * 42\n")
+ fp.close()
+
+ fp = open(os.path.join(TESTFN2, "mod2.txt"), "w")
+ fp.write("bla bla bla\n")
+ fp.close()
+
+ zipfp = zipfile.PyZipFile(TemporaryFile(), "w")
+ zipfp.writepy(TESTFN2)
+
+ names = zipfp.namelist()
+ self.assert_('mod1.pyc' in names or 'mod1.pyo' in names)
+ self.assert_('mod2.pyc' in names or 'mod2.pyo' in names)
+ self.assert_('mod2.txt' not in names)
+
+ finally:
+ shutil.rmtree(TESTFN2)
+
+
+
class OtherTests(unittest.TestCase):
def testCloseErroneousFile(self):
# This test checks that the ZipFile constructor closes the file object
@@ -103,7 +350,8 @@ class OtherTests(unittest.TestCase):
self.assertRaises(RuntimeError, zipf.testzip)
def test_main():
- run_unittest(TestsWithSourceFile, OtherTests)
+ run_unittest(TestsWithSourceFile, TestZip64InSmallFiles, OtherTests, PyZipFileTests)
+ #run_unittest(TestZip64InSmallFiles)
if __name__ == "__main__":
test_main()
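TestZip64InSmallFiles above gets at the ZIP64 code paths without writing huge archives by dropping zipfile.ZIP64_LIMIT to 5 in setUp and restoring it in tearDown. A minimal sketch of that technique outside the test suite follows; ZIP64_LIMIT is a module-level constant rather than a public API, so treat this as an experiment only, and the member name and data are illustrative.

import zipfile
from StringIO import StringIO

saved = zipfile.ZIP64_LIMIT
zipfile.ZIP64_LIMIT = 5              # members beyond 5 bytes now take the ZIP64 path
try:
    buf = StringIO()
    zf = zipfile.ZipFile(buf, "w", zipfile.ZIP_STORED, allowZip64=True)
    zf.writestr("member", "more than five bytes of data")
    zf.close()

    zf = zipfile.ZipFile(buf, "r")
    print zf.read("member")          # round-trips through the ZIP64 structures
    zf.close()
finally:
    zipfile.ZIP64_LIMIT = saved      # always restore the real limit

Without allowZip64=True the same write raises zipfile.LargeZipFile, which is exactly what testLargeFileException checks above.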
diff --git a/Lib/test/test_zipfile64.py b/Lib/test/test_zipfile64.py
new file mode 100644
index 0000000..449cf39
--- /dev/null
+++ b/Lib/test/test_zipfile64.py
@@ -0,0 +1,101 @@
+# Tests of the full ZIP64 functionality of zipfile
+# The test_support.requires call is the only reason for keeping this separate
+# from test_zipfile
+from test import test_support
+# XXX(nnorwitz): disable this test by looking for the extra 'extralargefile' resource
+# which doesn't exist. This test takes over 30 minutes to run in general
+# and requires more disk space than most of the buildbots.
+test_support.requires(
+ 'extralargefile',
+ 'test requires loads of disk-space bytes and a long time to run'
+ )
+
+# We can test part of the module without zlib.
+try:
+ import zlib
+except ImportError:
+ zlib = None
+
+import zipfile, os, unittest
+import time
+import sys
+
+from StringIO import StringIO
+from tempfile import TemporaryFile
+
+from test.test_support import TESTFN, run_unittest
+
+TESTFN2 = TESTFN + "2"
+
+# How much time in seconds can pass before we print a 'Still working' message.
+_PRINT_WORKING_MSG_INTERVAL = 5 * 60
+
+class TestsWithSourceFile(unittest.TestCase):
+ def setUp(self):
+ # Create test data.
+ # xrange() is important here -- don't want to create immortal space
+ # for a million ints.
+ line_gen = ("Test of zipfile line %d." % i for i in xrange(1000000))
+ self.data = '\n'.join(line_gen)
+
+ # And write it to a file.
+ fp = open(TESTFN, "wb")
+ fp.write(self.data)
+ fp.close()
+
+ def zipTest(self, f, compression):
+ # Create the ZIP archive.
+ zipfp = zipfile.ZipFile(f, "w", compression, allowZip64=True)
+
+ # It will contain enough copies of self.data to reach about 6GB of
+ # raw data to store.
+ filecount = 6*1024**3 // len(self.data)
+
+ next_time = time.time() + _PRINT_WORKING_MSG_INTERVAL
+ for num in range(filecount):
+ zipfp.writestr("testfn%d" % num, self.data)
+ # Print still working message since this test can be really slow
+ if next_time <= time.time():
+ next_time = time.time() + _PRINT_WORKING_MSG_INTERVAL
+ print >>sys.__stdout__, (
+ ' zipTest still writing %d of %d, be patient...' %
+ (num, filecount))
+ sys.__stdout__.flush()
+ zipfp.close()
+
+ # Read the ZIP archive
+ zipfp = zipfile.ZipFile(f, "r", compression)
+ for num in range(filecount):
+ self.assertEqual(zipfp.read("testfn%d" % num), self.data)
+ # Print still working message since this test can be really slow
+ if next_time <= time.time():
+ next_time = time.time() + _PRINT_WORKING_MSG_INTERVAL
+ print >>sys.__stdout__, (
+ ' zipTest still reading %d of %d, be patient...' %
+ (num, filecount))
+ sys.__stdout__.flush()
+ zipfp.close()
+
+ def testStored(self):
+ # Try the temp file first. If we do TESTFN2 first, then it hogs
+ # gigabytes of disk space for the duration of the test.
+ for f in TemporaryFile(), TESTFN2:
+ self.zipTest(f, zipfile.ZIP_STORED)
+
+ if zlib:
+ def testDeflated(self):
+ # Try the temp file first. If we do TESTFN2 first, then it hogs
+ # gigabytes of disk space for the duration of the test.
+ for f in TemporaryFile(), TESTFN2:
+ self.zipTest(f, zipfile.ZIP_DEFLATED)
+
+ def tearDown(self):
+ for fname in TESTFN, TESTFN2:
+ if os.path.exists(fname):
+ os.remove(fname)
+
+def test_main():
+ run_unittest(TestsWithSourceFile)
+
+if __name__ == "__main__":
+ test_main()
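For a sense of scale, zipTest above aims at roughly 6GB of raw data: 6*1024**3 is 6,442,450,944 bytes, and self.data (a million short lines) comes to a little under 29MB, so a couple of hundred copies of it get written. A quick check of the numbers, simply re-doing setUp's computation:

# Re-derive the sizes used by TestsWithSourceFile.zipTest (Python 2 syntax).
data = '\n'.join("Test of zipfile line %d." % i for i in xrange(1000000))
print len(data)                   # 28888889 bytes, a little under 29MB
print 6 * 1024**3                 # 6442450944 bytes, the ~6GB target
print (6 * 1024**3) // len(data)  # 223 copies of self.data are written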
diff --git a/Lib/test/test_zlib.py b/Lib/test/test_zlib.py
index ccbc8fd..4440942 100644
--- a/Lib/test/test_zlib.py
+++ b/Lib/test/test_zlib.py
@@ -302,63 +302,65 @@ class CompressObjectTestCase(unittest.TestCase):
dco = zlib.decompressobj()
self.assertEqual(dco.flush(), "") # Returns nothing
- def test_compresscopy(self):
- # Test copying a compression object
- data0 = HAMLET_SCENE
- data1 = HAMLET_SCENE.swapcase()
- c0 = zlib.compressobj(zlib.Z_BEST_COMPRESSION)
- bufs0 = []
- bufs0.append(c0.compress(data0))
-
- c1 = c0.copy()
- bufs1 = bufs0[:]
-
- bufs0.append(c0.compress(data0))
- bufs0.append(c0.flush())
- s0 = ''.join(bufs0)
-
- bufs1.append(c1.compress(data1))
- bufs1.append(c1.flush())
- s1 = ''.join(bufs1)
-
- self.assertEqual(zlib.decompress(s0),data0+data0)
- self.assertEqual(zlib.decompress(s1),data0+data1)
-
- def test_badcompresscopy(self):
- # Test copying a compression object in an inconsistent state
- c = zlib.compressobj()
- c.compress(HAMLET_SCENE)
- c.flush()
- self.assertRaises(ValueError, c.copy)
-
- def test_decompresscopy(self):
- # Test copying a decompression object
- data = HAMLET_SCENE
- comp = zlib.compress(data)
-
- d0 = zlib.decompressobj()
- bufs0 = []
- bufs0.append(d0.decompress(comp[:32]))
-
- d1 = d0.copy()
- bufs1 = bufs0[:]
-
- bufs0.append(d0.decompress(comp[32:]))
- s0 = ''.join(bufs0)
-
- bufs1.append(d1.decompress(comp[32:]))
- s1 = ''.join(bufs1)
-
- self.assertEqual(s0,s1)
- self.assertEqual(s0,data)
-
- def test_baddecompresscopy(self):
- # Test copying a compression object in an inconsistent state
- data = zlib.compress(HAMLET_SCENE)
- d = zlib.decompressobj()
- d.decompress(data)
- d.flush()
- self.assertRaises(ValueError, d.copy)
+ if hasattr(zlib.compressobj(), "copy"):
+ def test_compresscopy(self):
+ # Test copying a compression object
+ data0 = HAMLET_SCENE
+ data1 = HAMLET_SCENE.swapcase()
+ c0 = zlib.compressobj(zlib.Z_BEST_COMPRESSION)
+ bufs0 = []
+ bufs0.append(c0.compress(data0))
+
+ c1 = c0.copy()
+ bufs1 = bufs0[:]
+
+ bufs0.append(c0.compress(data0))
+ bufs0.append(c0.flush())
+ s0 = ''.join(bufs0)
+
+ bufs1.append(c1.compress(data1))
+ bufs1.append(c1.flush())
+ s1 = ''.join(bufs1)
+
+ self.assertEqual(zlib.decompress(s0),data0+data0)
+ self.assertEqual(zlib.decompress(s1),data0+data1)
+
+ def test_badcompresscopy(self):
+ # Test copying a compression object in an inconsistent state
+ c = zlib.compressobj()
+ c.compress(HAMLET_SCENE)
+ c.flush()
+ self.assertRaises(ValueError, c.copy)
+
+ if hasattr(zlib.decompressobj(), "copy"):
+ def test_decompresscopy(self):
+ # Test copying a decompression object
+ data = HAMLET_SCENE
+ comp = zlib.compress(data)
+
+ d0 = zlib.decompressobj()
+ bufs0 = []
+ bufs0.append(d0.decompress(comp[:32]))
+
+ d1 = d0.copy()
+ bufs1 = bufs0[:]
+
+ bufs0.append(d0.decompress(comp[32:]))
+ s0 = ''.join(bufs0)
+
+ bufs1.append(d1.decompress(comp[32:]))
+ s1 = ''.join(bufs1)
+
+ self.assertEqual(s0,s1)
+ self.assertEqual(s0,data)
+
+ def test_baddecompresscopy(self):
+ # Test copying a compression object in an inconsistent state
+ data = zlib.compress(HAMLET_SCENE)
+ d = zlib.decompressobj()
+ d.decompress(data)
+ d.flush()
+ self.assertRaises(ValueError, d.copy)
def genblock(seed, length, step=1024, generator=random):
"""length-byte stream of random data from a seed (in step-byte blocks)."""
diff --git a/Lib/textwrap.py b/Lib/textwrap.py
index 7c68280..ccff2ab 100644
--- a/Lib/textwrap.py
+++ b/Lib/textwrap.py
@@ -317,41 +317,58 @@ def fill(text, width=70, **kwargs):
# -- Loosely related functionality -------------------------------------
-def dedent(text):
- """dedent(text : string) -> string
-
- Remove any whitespace than can be uniformly removed from the left
- of every line in `text`.
+_whitespace_only_re = re.compile('^[ \t]+$', re.MULTILINE)
+_leading_whitespace_re = re.compile('(^[ \t]*)(?:[^ \t\n])', re.MULTILINE)
- This can be used e.g. to make triple-quoted strings line up with
- the left edge of screen/whatever, while still presenting it in the
- source code in indented form.
+def dedent(text):
+ """Remove any common leading whitespace from every line in `text`.
- For example:
+ This can be used to make triple-quoted strings line up with the left
+ edge of the display, while still presenting them in the source code
+ in indented form.
- def test():
- # end first line with \ to avoid the empty line!
- s = '''\
- hello
- world
- '''
- print repr(s) # prints ' hello\n world\n '
- print repr(dedent(s)) # prints 'hello\n world\n'
+ Note that tabs and spaces are both treated as whitespace, but they
+ are not equal: the lines " hello" and "\thello" are
+ considered to have no common leading whitespace. (This behaviour is
+ new in Python 2.5; older versions of this module incorrectly
+ expanded tabs before searching for common leading whitespace.)
"""
- lines = text.expandtabs().split('\n')
+ # Look for the longest leading string of spaces and tabs common to
+ # all lines.
margin = None
- for line in lines:
- content = line.lstrip()
- if not content:
- continue
- indent = len(line) - len(content)
+ text = _whitespace_only_re.sub('', text)
+ indents = _leading_whitespace_re.findall(text)
+ for indent in indents:
if margin is None:
margin = indent
- else:
- margin = min(margin, indent)
- if margin is not None and margin > 0:
- for i in range(len(lines)):
- lines[i] = lines[i][margin:]
+ # Current line more deeply indented than previous winner:
+ # no change (previous winner is still on top).
+ elif indent.startswith(margin):
+ pass
- return '\n'.join(lines)
+ # Current line consistent with and no deeper than previous winner:
+ # it's the new winner.
+ elif margin.startswith(indent):
+ margin = indent
+
+ # Current line and previous winner have no common whitespace:
+ # there is no margin.
+ else:
+ margin = ""
+ break
+
+ # sanity check (testing/debugging only)
+ if 0 and margin:
+ for line in text.split("\n"):
+ assert not line or line.startswith(margin), \
+ "line = %r, margin = %r" % (line, margin)
+
+ if margin:
+ text = re.sub(r'(?m)^' + margin, '', text)
+ return text
+
+if __name__ == "__main__":
+ #print dedent("\tfoo\n\tbar")
+ #print dedent(" \thello there\n \t how are you?")
+ print dedent("Hello there.\n This is indented.")
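The rewritten dedent above computes the longest run of leading spaces and tabs common to every non-blank line (via _leading_whitespace_re) and strips exactly that margin, rather than expanding tabs first as the old implementation did. A short usage example consistent with the new docstring, with expected output shown in comments:

from textwrap import dedent

s = '''\
    hello
      world
    '''
print repr(dedent(s))                    # 'hello\n  world\n'

# Tabs and spaces are not treated as equivalent, so these two lines share
# no common leading whitespace and nothing is stripped:
print repr(dedent("  hello\n\thello"))   # '  hello\n\thello'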
diff --git a/Lib/threading.py b/Lib/threading.py