From 3f95292be69ac09ed173e4241d220d30b1b059ff Mon Sep 17 00:00:00 2001 From: Serhiy Storchaka Date: Tue, 27 Jan 2015 22:18:34 +0200 Subject: Issue #23055: Fixed a buffer overflow in PyUnicode_FromFormatV. Analysis and fix by Guido Vranken. --- Lib/test/test_unicode.py | 145 +++++++++++++++++++++++++++++++++++++++++------ Misc/NEWS | 12 ++++ Objects/unicodeobject.c | 37 ++++++------ 3 files changed, 161 insertions(+), 33 deletions(-) diff --git a/Lib/test/test_unicode.py b/Lib/test/test_unicode.py index 47af8b9..21005eb 100644 --- a/Lib/test/test_unicode.py +++ b/Lib/test/test_unicode.py @@ -1661,7 +1661,10 @@ class UnicodeTest(string_tests.CommonTest, # Test PyUnicode_FromFormat() def test_from_format(self): support.import_module('ctypes') - from ctypes import pythonapi, py_object, c_int + from ctypes import ( + pythonapi, py_object, sizeof, + c_int, c_long, c_longlong, c_ssize_t, + c_uint, c_ulong, c_ulonglong, c_size_t, c_void_p) if sys.maxunicode == 65535: name = "PyUnicodeUCS2_FromFormat" else: @@ -1675,9 +1678,13 @@ class UnicodeTest(string_tests.CommonTest, for arg in args) return _PyUnicode_FromFormat(format, *cargs) + def check_format(expected, format, *args): + text = PyUnicode_FromFormat(format, *args) + self.assertEqual(expected, text) + # ascii format, non-ascii argument - text = PyUnicode_FromFormat(b'ascii\x7f=%U', 'unicode\xe9') - self.assertEqual(text, 'ascii\x7f=unicode\xe9') + check_format('ascii\x7f=unicode\xe9', + b'ascii\x7f=%U', 'unicode\xe9') # non-ascii format, ascii argument: ensure that PyUnicode_FromFormatV() # raises an error @@ -1686,25 +1693,131 @@ class UnicodeTest(string_tests.CommonTest, 'string, got a non-ASCII byte: 0xe9$', PyUnicode_FromFormat, b'unicode\xe9=%s', 'ascii') - self.assertEqual(PyUnicode_FromFormat(b'%c', c_int(0xabcd)), '\uabcd') - self.assertEqual(PyUnicode_FromFormat(b'%c', c_int(0x10ffff)), '\U0010ffff') - - # other tests - text = PyUnicode_FromFormat(b'%%A:%A', 'abc\xe9\uabcd\U0010ffff') - self.assertEqual(text, r"%A:'abc\xe9\uabcd\U0010ffff'") - - text = PyUnicode_FromFormat(b'repr=%V', 'abc', b'xyz') - self.assertEqual(text, 'repr=abc') + # test "%c" + check_format('\uabcd', + b'%c', c_int(0xabcd)) + check_format('\U0010ffff', + b'%c', c_int(0x10ffff)) + with self.assertRaises(OverflowError): + PyUnicode_FromFormat(b'%c', c_int(0x110000)) + # Issue #18183 + check_format('\U00010000\U00100000', + b'%c%c', c_int(0x10000), c_int(0x100000)) + + # test "%" + check_format('%', + b'%') + check_format('%', + b'%%') + check_format('%s', + b'%%s') + check_format('[%]', + b'[%%]') + check_format('%abc', + b'%%%s', b'abc') + + # test %S + check_format("repr=\u20acABC", + b'repr=%S', '\u20acABC') + + # test %R + check_format("repr='\u20acABC'", + b'repr=%R', '\u20acABC') + + # test integer formats (%i, %d, %u) + check_format('010', + b'%03i', c_int(10)) + check_format('0010', + b'%0.4i', c_int(10)) + check_format('-123', + b'%i', c_int(-123)) + + check_format('-123', + b'%d', c_int(-123)) + check_format('-123', + b'%ld', c_long(-123)) + check_format('-123', + b'%lld', c_longlong(-123)) + check_format('-123', + b'%zd', c_ssize_t(-123)) + + check_format('123', + b'%u', c_uint(123)) + check_format('123', + b'%lu', c_ulong(123)) + check_format('123', + b'%llu', c_ulonglong(123)) + check_format('123', + b'%zu', c_size_t(123)) + + # test long output + min_longlong = -(2 ** (8 * sizeof(c_longlong) - 1)) + max_longlong = -min_longlong - 1 + check_format(str(min_longlong), + b'%lld', c_longlong(min_longlong)) + check_format(str(max_longlong), + b'%lld', 
c_longlong(max_longlong)) + max_ulonglong = 2 ** (8 * sizeof(c_ulonglong)) - 1 + check_format(str(max_ulonglong), + b'%llu', c_ulonglong(max_ulonglong)) + PyUnicode_FromFormat(b'%p', c_void_p(-1)) + + # test padding (width and/or precision) + check_format('123'.rjust(10, '0'), + b'%010i', c_int(123)) + check_format('123'.rjust(100), + b'%100i', c_int(123)) + check_format('123'.rjust(100, '0'), + b'%.100i', c_int(123)) + check_format('123'.rjust(80, '0').rjust(100), + b'%100.80i', c_int(123)) + + check_format('123'.rjust(10, '0'), + b'%010u', c_uint(123)) + check_format('123'.rjust(100), + b'%100u', c_uint(123)) + check_format('123'.rjust(100, '0'), + b'%.100u', c_uint(123)) + check_format('123'.rjust(80, '0').rjust(100), + b'%100.80u', c_uint(123)) + + check_format('123'.rjust(10, '0'), + b'%010x', c_int(0x123)) + check_format('123'.rjust(100), + b'%100x', c_int(0x123)) + check_format('123'.rjust(100, '0'), + b'%.100x', c_int(0x123)) + check_format('123'.rjust(80, '0').rjust(100), + b'%100.80x', c_int(0x123)) + + # test %A + check_format(r"%A:'abc\xe9\uabcd\U0010ffff'", + b'%%A:%A', 'abc\xe9\uabcd\U0010ffff') + + # test %V + check_format('repr=abc', + b'repr=%V', 'abc', b'xyz') # Test string decode from parameter of %s using utf-8. # b'\xe4\xba\xba\xe6\xb0\x91' is utf-8 encoded byte sequence of # '\u4eba\u6c11' - text = PyUnicode_FromFormat(b'repr=%V', None, b'\xe4\xba\xba\xe6\xb0\x91') - self.assertEqual(text, 'repr=\u4eba\u6c11') + check_format('repr=\u4eba\u6c11', + b'repr=%V', None, b'\xe4\xba\xba\xe6\xb0\x91') #Test replace error handler. - text = PyUnicode_FromFormat(b'repr=%V', None, b'abc\xff') - self.assertEqual(text, 'repr=abc\ufffd') + check_format('repr=abc\ufffd', + b'repr=%V', None, b'abc\xff') + + # not supported: copy the raw format string. these tests are just here + # to check for crashs and should not be considered as specifications + check_format('%s', + b'%1%s', b'abc') + check_format('%1abc', + b'%1abc') + check_format('%+i', + b'%+i', c_int(10)) + check_format('%s', + b'%.%s', b'abc') # Test PyUnicode_AsWideChar() def test_aswidechar(self): diff --git a/Misc/NEWS b/Misc/NEWS index e841862..b54f267 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -2,6 +2,18 @@ Python News +++++++++++ +What's New in Python 3.2.7? +============================ + +*Release date: XXXX-XX-XX* + +Core and Builtins +----------------- + +- Issue #23055: Fixed a buffer overflow in PyUnicode_FromFormatV. Analysis + and fix by Guido Vranken. + + What's New in Python 3.2.6? 
=========================== diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index b798850..1d1d531 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -759,15 +759,10 @@ PyUnicode_FromFormatV(const char *format, va_list vargs) * result in an array) */ for (f = format; *f; f++) { if (*f == '%') { - if (*(f+1)=='%') - continue; - if (*(f+1)=='S' || *(f+1)=='R' || *(f+1)=='A' || *(f+1) == 'V') - ++callcount; - while (Py_ISDIGIT((unsigned)*f)) - width = (width*10) + *f++ - '0'; - while (*++f && *f != '%' && !Py_ISALPHA((unsigned)*f)) - ; - if (*f == 's') + f++; + while (*f && *f != '%' && !Py_ISALPHA((unsigned)*f)) + f++; + if (*f == 's' || *f=='S' || *f=='R' || *f=='A' || *f=='V') ++callcount; } else if (128 <= (unsigned char)*f) { @@ -794,12 +789,16 @@ PyUnicode_FromFormatV(const char *format, va_list vargs) #ifdef HAVE_LONG_LONG int longlongflag = 0; #endif - const char* p = f; + const char* p = f++; width = 0; while (Py_ISDIGIT((unsigned)*f)) width = (width*10) + *f++ - '0'; - while (*++f && *f != '%' && !Py_ISALPHA((unsigned)*f)) - ; + precision = 0; + if (*f == '.') { + f++; + while (Py_ISDIGIT((unsigned)*f)) + precision = (precision*10) + *f++ - '0'; + } /* skip the 'l' or 'z' in {%ld, %zd, %lu, %zu} since * they don't affect the amount of space we reserve. @@ -823,16 +822,18 @@ PyUnicode_FromFormatV(const char *format, va_list vargs) switch (*f) { case 'c': { -#ifndef Py_UNICODE_WIDE int ordinal = va_arg(count, int); + if (ordinal < 0 || ordinal > 0x10ffff) { + PyErr_SetString(PyExc_OverflowError, + "%c arg not in range(0x110000)"); + goto fail; + } +#ifndef Py_UNICODE_WIDE if (ordinal > 0xffff) n += 2; else - n++; -#else - (void)va_arg(count, int); - n++; #endif + n++; break; } case '%': @@ -840,6 +841,8 @@ PyUnicode_FromFormatV(const char *format, va_list vargs) break; case 'd': case 'u': case 'i': case 'x': (void) va_arg(count, int); + if (width < precision) + width = precision; #ifdef HAVE_LONG_LONG if (longlongflag) { if (width < MAX_LONG_LONG_CHARS) -- cgit v0.12 From aed198426a3b1186633295cd4a6160ab4423e069 Mon Sep 17 00:00:00 2001 From: Serhiy Storchaka Date: Sat, 31 Jan 2015 01:15:48 +0200 Subject: Issue #23055: Fixed read-past-the-end error in PyUnicode_FromFormatV. --- Objects/unicodeobject.c | 2 ++ 1 file changed, 2 insertions(+) diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index 1d1d531..090cc1f 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -762,6 +762,8 @@ PyUnicode_FromFormatV(const char *format, va_list vargs) f++; while (*f && *f != '%' && !Py_ISALPHA((unsigned)*f)) f++; + if (!*f) + break; if (*f == 's' || *f=='S' || *f=='R' || *f=='A' || *f=='V') ++callcount; } -- cgit v0.12 From 8ce6806498be8aa8ae4bd3d3d83624766557ffad Mon Sep 17 00:00:00 2001 From: Benjamin Peterson Date: Mon, 9 Feb 2015 20:58:12 -0500 Subject: add overflow checking (closes #23361) --- Misc/NEWS | 2 ++ Modules/_winapi.c | 14 ++++++++++++-- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS index 5e1dbf0..7d1dfb8 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -16,6 +16,8 @@ Core and Builtins Library ------- +- Issue #23361: Fix possible overflow in Windows subprocess creation code. + - Issue #23363: Fix possible overflow in itertools.permutations. - Issue #23364: Fix possible overflow in itertools.product. 
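
The _winapi.c hunk that follows rejects additions that would overflow the Py_ssize_t running total before performing them. A minimal Python model of that check-before-add idiom (illustrative only, not CPython source; PY_SSIZE_T_MAX is assumed here to be the 64-bit value):

    PY_SSIZE_T_MAX = 2**63 - 1              # assumed LP64 platform, for illustration

    def environment_block_size(env, limit=PY_SSIZE_T_MAX):
        # Roughly mirrors getenvironment(): "key=value\0" per entry, trailing "\0".
        totalsize = 1                       # final terminating '\0'
        for key, value in env.items():
            for part in (key, value):
                # Test the limit before adding, as the C hunk below does.
                if totalsize > limit - len(part) - 1:
                    raise OverflowError("environment too long")
                totalsize += len(part) + 1  # '=' after the key, '\0' after the value
        return totalsize

    print(environment_block_size({"PATH": r"C:\Windows", "LANG": "C"}))

Checking the limit before the addition keeps the arithmetic in range; in C this also avoids relying on signed overflow, which is undefined behaviour.
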
diff --git a/Modules/_winapi.c b/Modules/_winapi.c index c53d55a..5257a1e 100644 --- a/Modules/_winapi.c +++ b/Modules/_winapi.c @@ -513,13 +513,23 @@ getenvironment(PyObject* environment) "environment can only contain strings"); goto error; } + if (totalsize > PY_SSIZE_T_MAX - PyUnicode_GET_LENGTH(key) - 1) { + PyErr_SetString(PyExc_OverflowError, "environment too long"); + goto error; + } totalsize += PyUnicode_GET_LENGTH(key) + 1; /* +1 for '=' */ + if (totalsize > PY_SSIZE_T_MAX - PyUnicode_GET_LENGTH(value) - 1) { + PyErr_SetString(PyExc_OverflowError, "environment too long"); + goto error; + } totalsize += PyUnicode_GET_LENGTH(value) + 1; /* +1 for '\0' */ } - buffer = PyMem_Malloc(totalsize * sizeof(Py_UCS4)); - if (! buffer) + buffer = PyMem_NEW(Py_UCS4, totalsize); + if (! buffer) { + PyErr_NoMemory(); goto error; + } p = buffer; end = buffer + totalsize; -- cgit v0.12 From 832dd5f0d65d3a0ebd7d7c7a3a4c80ab5170cd08 Mon Sep 17 00:00:00 2001 From: Serhiy Storchaka Date: Tue, 10 Feb 2015 08:45:53 +0200 Subject: Issue #23421: Fixed compression in tarfile CLI. Patch by wdv4758h. --- Lib/tarfile.py | 16 ++++++++-------- Lib/test/test_tarfile.py | 15 +++++++++++++++ Misc/NEWS | 3 +++ 3 files changed, 26 insertions(+), 8 deletions(-) diff --git a/Lib/tarfile.py b/Lib/tarfile.py index 9e291c2..37e4dcd 100755 --- a/Lib/tarfile.py +++ b/Lib/tarfile.py @@ -2491,16 +2491,16 @@ def main(): _, ext = os.path.splitext(tar_name) compressions = { # gz - 'gz': 'gz', - 'tgz': 'gz', + '.gz': 'gz', + '.tgz': 'gz', # xz - 'xz': 'xz', - 'txz': 'xz', + '.xz': 'xz', + '.txz': 'xz', # bz2 - 'bz2': 'bz2', - 'tbz': 'bz2', - 'tbz2': 'bz2', - 'tb2': 'bz2', + '.bz2': 'bz2', + '.tbz': 'bz2', + '.tbz2': 'bz2', + '.tb2': 'bz2', } tar_mode = 'w:' + compressions[ext] if ext in compressions else 'w' tar_files = args.create diff --git a/Lib/test/test_tarfile.py b/Lib/test/test_tarfile.py index e527e40..c135304 100644 --- a/Lib/test/test_tarfile.py +++ b/Lib/test/test_tarfile.py @@ -1994,6 +1994,21 @@ class CommandLineTest(unittest.TestCase): finally: support.unlink(tar_name) + def test_create_command_compressed(self): + files = [support.findfile('tokenize_tests.txt'), + support.findfile('tokenize_tests-no-coding-cookie-' + 'and-utf8-bom-sig-only.txt')] + for filetype in (GzipTest, Bz2Test, LzmaTest): + if not filetype.open: + continue + try: + tar_name = tmpname + '.' + filetype.suffix + out = self.tarfilecmd('-c', tar_name, *files) + with filetype.taropen(tar_name) as tar: + tar.getmembers() + finally: + support.unlink(tar_name) + def test_extract_command(self): self.make_simple_tarfile(tmpname) for opt in '-e', '--extract': diff --git a/Misc/NEWS b/Misc/NEWS index dc89959..b411abf 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -13,8 +13,11 @@ Core and Builtins Library ------- +- Issue #23421: Fixed compression in tarfile CLI. Patch by wdv4758h. + - Issue #23361: Fix possible overflow in Windows subprocess creation code. + What's New in Python 3.4.3rc1? 
============================== -- cgit v0.12 From 8e36812e27f70bd6e4b3b85c9e9e858b0ac0df5e Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Tue, 10 Feb 2015 14:49:32 +0100 Subject: asyncio: BaseSubprocessTransport.close() doesn't try to kill the process if it already finished --- Lib/asyncio/base_subprocess.py | 7 +++- Lib/test/test_asyncio/test_subprocess.py | 55 ++++++++++++++++++++++++++++++++ 2 files changed, 61 insertions(+), 1 deletion(-) diff --git a/Lib/asyncio/base_subprocess.py b/Lib/asyncio/base_subprocess.py index 02b9e89..5458ab1 100644 --- a/Lib/asyncio/base_subprocess.py +++ b/Lib/asyncio/base_subprocess.py @@ -93,7 +93,12 @@ class BaseSubprocessTransport(transports.SubprocessTransport): continue proto.pipe.close() - if self._proc is not None and self._returncode is None: + if (self._proc is not None + # the child process finished? + and self._returncode is None + # the child process finished but the transport was not notified yet? + and self._proc.poll() is None + ): if self._loop.get_debug(): logger.warning('Close running child process: kill %r', self) diff --git a/Lib/test/test_asyncio/test_subprocess.py b/Lib/test/test_asyncio/test_subprocess.py index b467b04..de0b08a 100644 --- a/Lib/test/test_asyncio/test_subprocess.py +++ b/Lib/test/test_asyncio/test_subprocess.py @@ -349,6 +349,61 @@ class SubprocessMixin: self.loop.run_until_complete(cancel_make_transport()) test_utils.run_briefly(self.loop) + def test_close_kill_running(self): + @asyncio.coroutine + def kill_running(): + create = self.loop.subprocess_exec(asyncio.SubprocessProtocol, + *PROGRAM_BLOCKED) + transport, protocol = yield from create + proc = transport.get_extra_info('subprocess') + proc.kill = mock.Mock() + returncode = transport.get_returncode() + transport.close() + return (returncode, proc.kill.called) + + # Ignore "Close running child process: kill ..." log + with test_utils.disable_logger(): + returncode, killed = self.loop.run_until_complete(kill_running()) + self.assertIsNone(returncode) + + # transport.close() must kill the process if it is still running + self.assertTrue(killed) + test_utils.run_briefly(self.loop) + + def test_close_dont_kill_finished(self): + @asyncio.coroutine + def kill_running(): + create = self.loop.subprocess_exec(asyncio.SubprocessProtocol, + *PROGRAM_BLOCKED) + transport, protocol = yield from create + proc = transport.get_extra_info('subprocess') + + # kill the process (but asyncio is not notified immediatly) + proc.kill() + proc.wait() + + proc.kill = mock.Mock() + proc_returncode = proc.poll() + transport_returncode = transport.get_returncode() + transport.close() + return (proc_returncode, transport_returncode, proc.kill.called) + + # Ignore "Unknown child process pid ..." log of SafeChildWatcher, + # emitted because the test already consumes the exit status: + # proc.wait() + with test_utils.disable_logger(): + result = self.loop.run_until_complete(kill_running()) + test_utils.run_briefly(self.loop) + + proc_returncode, transport_return_code, killed = result + + self.assertIsNotNone(proc_returncode) + self.assertIsNone(transport_return_code) + + # transport.close() must not kill the process if it finished, even if + # the transport was not notified yet + self.assertFalse(killed) + if sys.platform != 'win32': # Unix -- cgit v0.12 From 7a5567a92cf0e0bcaae7dca8975328c9151fe1c7 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Wed, 11 Feb 2015 14:23:35 +0100 Subject: Issue #23433: Fix faulthandler._stack_overflow() Fix undefined behaviour: don't compare pointers. 
Use Py_uintptr_t type instead of void*. It fixes test_faulthandler on Fedora 22 which now uses GCC 5. --- Modules/faulthandler.c | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/Modules/faulthandler.c b/Modules/faulthandler.c index c17ffd8..01e7beb 100644 --- a/Modules/faulthandler.c +++ b/Modules/faulthandler.c @@ -911,12 +911,12 @@ faulthandler_fatal_error_py(PyObject *self, PyObject *args) } #if defined(HAVE_SIGALTSTACK) && defined(HAVE_SIGACTION) -static void* -stack_overflow(void *min_sp, void *max_sp, size_t *depth) +static Py_uintptr_t +stack_overflow(Py_uintptr_t min_sp, Py_uintptr_t max_sp, size_t *depth) { /* allocate 4096 bytes on the stack at each call */ unsigned char buffer[4096]; - void *sp = &buffer; + Py_uintptr_t sp = (Py_uintptr_t)&buffer; *depth += 1; if (sp < min_sp || max_sp < sp) return sp; @@ -929,7 +929,8 @@ static PyObject * faulthandler_stack_overflow(PyObject *self) { size_t depth, size; - char *sp = (char *)&depth, *stop; + Py_uintptr_t sp = (Py_uintptr_t)&depth; + Py_uintptr_t stop; depth = 0; stop = stack_overflow(sp - STACK_OVERFLOW_MAX_SIZE, -- cgit v0.12 From 22fabe218d3d6d0198075bcd8d1e1ddf821102c1 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Wed, 11 Feb 2015 18:17:56 +0100 Subject: Fix typo: PyMem_Alloc => PyMem_Malloc --- Include/unicodeobject.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Include/unicodeobject.h b/Include/unicodeobject.h index 729f584..d7b2ace 100644 --- a/Include/unicodeobject.h +++ b/Include/unicodeobject.h @@ -1052,7 +1052,7 @@ PyAPI_FUNC(Py_ssize_t) PyUnicode_AsWideChar( always ends with a nul character. If size is not NULL, write the number of wide characters (excluding the null character) into *size. - Returns a buffer allocated by PyMem_Alloc() (use PyMem_Free() to free it) + Returns a buffer allocated by PyMem_Malloc() (use PyMem_Free() to free it) on success. On error, returns NULL, *size is undefined and raises a MemoryError. */ -- cgit v0.12 From 3d6c784371bccc2407048652bce50c5bccf9b1af Mon Sep 17 00:00:00 2001 From: Antoine Pitrou Date: Wed, 11 Feb 2015 19:39:16 +0100 Subject: Issue #23445: pydebug builds now use "gcc -Og" where possible, to make the resulting executable faster. --- Misc/NEWS | 6 ++++++ configure | 6 +++++- configure.ac | 6 +++++- 3 files changed, 16 insertions(+), 2 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS index b411abf..47e325d 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -17,6 +17,12 @@ Library - Issue #23361: Fix possible overflow in Windows subprocess creation code. +Build +----- + +- Issue #23445: pydebug builds now use "gcc -Og" where possible, to make + the resulting executable faster. + What's New in Python 3.4.3rc1? ============================== diff --git a/configure b/configure index 274af7e..29cbd36 100755 --- a/configure +++ b/configure @@ -6283,7 +6283,11 @@ then if test "$Py_DEBUG" = 'true' ; then # Optimization messes up debuggers, so turn it off for # debug builds. - OPT="-g -O0 -Wall $STRICT_PROTO" + if "$CC" -v --help 2>/dev/null |grep -- -Og > /dev/null; then + OPT="-g -Og -Wall $STRICT_PROTO" + else + OPT="-g -O0 -Wall $STRICT_PROTO" + fi else OPT="-g $WRAP -O3 -Wall $STRICT_PROTO" fi diff --git a/configure.ac b/configure.ac index a0d0afa..8969279 100644 --- a/configure.ac +++ b/configure.ac @@ -1119,7 +1119,11 @@ then if test "$Py_DEBUG" = 'true' ; then # Optimization messes up debuggers, so turn it off for # debug builds. 
- OPT="-g -O0 -Wall $STRICT_PROTO" + if "$CC" -v --help 2>/dev/null |grep -- -Og > /dev/null; then + OPT="-g -Og -Wall $STRICT_PROTO" + else + OPT="-g -O0 -Wall $STRICT_PROTO" + fi else OPT="-g $WRAP -O3 -Wall $STRICT_PROTO" fi -- cgit v0.12 From bdd574d09a834f4b9935d91fe9d16a0d4a56d846 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Thu, 12 Feb 2015 22:49:18 +0100 Subject: asyncio doc: annotate coroutine on coroutine functions and methods --- Doc/library/asyncio-eventloop.rst | 32 +++++++++++++++++--------------- Doc/library/asyncio-stream.rst | 16 ++++++++-------- Doc/library/asyncio-subprocess.rst | 12 ++++++------ Doc/library/asyncio-sync.rst | 20 ++++++++++---------- Doc/library/asyncio-task.rst | 8 +++++--- Doc/tools/extensions/pyspecific.py | 26 ++++++++++++++++++++++++++ 6 files changed, 72 insertions(+), 42 deletions(-) diff --git a/Doc/library/asyncio-eventloop.rst b/Doc/library/asyncio-eventloop.rst index 4f7fdfe..f2c8945 100644 --- a/Doc/library/asyncio-eventloop.rst +++ b/Doc/library/asyncio-eventloop.rst @@ -180,7 +180,7 @@ Coroutines Creating connections -------------------- -.. method:: BaseEventLoop.create_connection(protocol_factory, host=None, port=None, \*, ssl=None, family=0, proto=0, flags=0, sock=None, local_addr=None, server_hostname=None) +.. coroutinemethod:: BaseEventLoop.create_connection(protocol_factory, host=None, port=None, \*, ssl=None, family=0, proto=0, flags=0, sock=None, local_addr=None, server_hostname=None) Create a streaming transport connection to a given Internet *host* and *port*: socket family :py:data:`~socket.AF_INET` or @@ -253,7 +253,7 @@ Creating connections (:class:`StreamReader`, :class:`StreamWriter`) instead of a protocol. -.. method:: BaseEventLoop.create_datagram_endpoint(protocol_factory, local_addr=None, remote_addr=None, \*, family=0, proto=0, flags=0) +.. coroutinemethod:: BaseEventLoop.create_datagram_endpoint(protocol_factory, local_addr=None, remote_addr=None, \*, family=0, proto=0, flags=0) Create datagram connection: socket family :py:data:`~socket.AF_INET` or :py:data:`~socket.AF_INET6` depending on *host* (or *family* if specified), @@ -271,7 +271,7 @@ Creating connections :ref:`UDP echo server protocol ` examples. -.. method:: BaseEventLoop.create_unix_connection(protocol_factory, path, \*, ssl=None, sock=None, server_hostname=None) +.. coroutinemethod:: BaseEventLoop.create_unix_connection(protocol_factory, path, \*, ssl=None, sock=None, server_hostname=None) Create UNIX connection: socket family :py:data:`~socket.AF_UNIX`, socket type :py:data:`~socket.SOCK_STREAM`. The :py:data:`~socket.AF_UNIX` socket @@ -290,7 +290,7 @@ Creating connections Creating listening connections ------------------------------ -.. method:: BaseEventLoop.create_server(protocol_factory, host=None, port=None, \*, family=socket.AF_UNSPEC, flags=socket.AI_PASSIVE, sock=None, backlog=100, ssl=None, reuse_address=None) +.. coroutinemethod:: BaseEventLoop.create_server(protocol_factory, host=None, port=None, \*, family=socket.AF_UNSPEC, flags=socket.AI_PASSIVE, sock=None, backlog=100, ssl=None, reuse_address=None) Create a TCP server (socket type :data:`~socket.SOCK_STREAM`) bound to *host* and *port*. @@ -336,11 +336,13 @@ Creating listening connections :class:`StreamWriter`) pair and calls back a function with this pair. -.. method:: BaseEventLoop.create_unix_server(protocol_factory, path=None, \*, sock=None, backlog=100, ssl=None) +.. 
coroutinemethod:: BaseEventLoop.create_unix_server(protocol_factory, path=None, \*, sock=None, backlog=100, ssl=None) Similar to :meth:`BaseEventLoop.create_server`, but specific to the socket family :py:data:`~socket.AF_UNIX`. + This method is a :ref:`coroutine `. + Availability: UNIX. @@ -384,7 +386,7 @@ the file descriptor of a socket. Low-level socket operations --------------------------- -.. method:: BaseEventLoop.sock_recv(sock, nbytes) +.. coroutinemethod:: BaseEventLoop.sock_recv(sock, nbytes) Receive data from the socket. The return value is a bytes object representing the data received. The maximum amount of data to be received @@ -399,7 +401,7 @@ Low-level socket operations The :meth:`socket.socket.recv` method. -.. method:: BaseEventLoop.sock_sendall(sock, data) +.. coroutinemethod:: BaseEventLoop.sock_sendall(sock, data) Send data to the socket. The socket must be connected to a remote socket. This method continues to send data from *data* until either all data has @@ -416,7 +418,7 @@ Low-level socket operations The :meth:`socket.socket.sendall` method. -.. method:: BaseEventLoop.sock_connect(sock, address) +.. coroutinemethod:: BaseEventLoop.sock_connect(sock, address) Connect to a remote socket at *address*. @@ -438,7 +440,7 @@ Low-level socket operations method. -.. method:: BaseEventLoop.sock_accept(sock) +.. coroutinemethod:: BaseEventLoop.sock_accept(sock) Accept a connection. The socket must be bound to an address and listening for connections. The return value is a pair ``(conn, address)`` where *conn* @@ -459,12 +461,12 @@ Low-level socket operations Resolve host name ----------------- -.. method:: BaseEventLoop.getaddrinfo(host, port, \*, family=0, type=0, proto=0, flags=0) +.. coroutinemethod:: BaseEventLoop.getaddrinfo(host, port, \*, family=0, type=0, proto=0, flags=0) This method is a :ref:`coroutine `, similar to :meth:`socket.getaddrinfo` function but non-blocking. -.. method:: BaseEventLoop.getnameinfo(sockaddr, flags=0) +.. coroutinemethod:: BaseEventLoop.getnameinfo(sockaddr, flags=0) This method is a :ref:`coroutine `, similar to :meth:`socket.getnameinfo` function but non-blocking. @@ -476,7 +478,7 @@ Connect pipes On Windows with :class:`SelectorEventLoop`, these methods are not supported. Use :class:`ProactorEventLoop` to support pipes on Windows. -.. method:: BaseEventLoop.connect_read_pipe(protocol_factory, pipe) +.. coroutinemethod:: BaseEventLoop.connect_read_pipe(protocol_factory, pipe) Register read pipe in eventloop. @@ -490,7 +492,7 @@ Use :class:`ProactorEventLoop` to support pipes on Windows. This method is a :ref:`coroutine `. -.. method:: BaseEventLoop.connect_write_pipe(protocol_factory, pipe) +.. coroutinemethod:: BaseEventLoop.connect_write_pipe(protocol_factory, pipe) Register write pipe in eventloop. @@ -543,7 +545,7 @@ Call a function in an :class:`~concurrent.futures.Executor` (pool of threads or pool of processes). By default, an event loop uses a thread pool executor (:class:`~concurrent.futures.ThreadPoolExecutor`). -.. method:: BaseEventLoop.run_in_executor(executor, callback, \*args) +.. coroutinemethod:: BaseEventLoop.run_in_executor(executor, callback, \*args) Arrange for a callback to be called in the specified executor. @@ -654,7 +656,7 @@ Server The server is closed asynchonously, use the :meth:`wait_closed` coroutine to wait until the server is closed. - .. method:: wait_closed() + .. coroutinemethod:: wait_closed() Wait until the :meth:`close` method completes. 
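
For context, the close()/wait_closed() pair documented just above is used as follows in the generator-based coroutine style these docs target (standard asyncio API; a minimal sketch with an arbitrary local port):

    import asyncio

    @asyncio.coroutine
    def serve_briefly(loop):
        server = yield from asyncio.start_server(
            lambda reader, writer: None,    # per-connection callback (unused here)
            '127.0.0.1', 0, loop=loop)      # port 0: let the OS pick a free port
        server.close()                      # begin closing the listening sockets
        yield from server.wait_closed()     # coroutine: wait until close() completes

    loop = asyncio.get_event_loop()
    loop.run_until_complete(serve_briefly(loop))
    loop.close()
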
diff --git a/Doc/library/asyncio-stream.rst b/Doc/library/asyncio-stream.rst index 3809d94..22b7341 100644 --- a/Doc/library/asyncio-stream.rst +++ b/Doc/library/asyncio-stream.rst @@ -9,7 +9,7 @@ Streams (high-level API) Stream functions ================ -.. function:: open_connection(host=None, port=None, \*, loop=None, limit=None, **kwds) +.. coroutinefunction:: open_connection(host=None, port=None, \*, loop=None, limit=None, \*\*kwds) A wrapper for :meth:`~BaseEventLoop.create_connection()` returning a (reader, writer) pair. @@ -32,7 +32,7 @@ Stream functions This function is a :ref:`coroutine `. -.. function:: start_server(client_connected_cb, host=None, port=None, \*, loop=None, limit=None, **kwds) +.. coroutinefunction:: start_server(client_connected_cb, host=None, port=None, \*, loop=None, limit=None, \*\*kwds) Start a socket server, with a callback for each client connected. The return value is the same as :meth:`~BaseEventLoop.create_server()`. @@ -56,7 +56,7 @@ Stream functions This function is a :ref:`coroutine `. -.. function:: open_unix_connection(path=None, \*, loop=None, limit=None, **kwds) +.. coroutinefunction:: open_unix_connection(path=None, \*, loop=None, limit=None, **kwds) A wrapper for :meth:`~BaseEventLoop.create_unix_connection()` returning a (reader, writer) pair. @@ -68,7 +68,7 @@ Stream functions Availability: UNIX. -.. function:: start_unix_server(client_connected_cb, path=None, \*, loop=None, limit=None, **kwds) +.. coroutinefunction:: start_unix_server(client_connected_cb, path=None, \*, loop=None, limit=None, **kwds) Start a UNIX Domain Socket server, with a callback for each client connected. @@ -106,7 +106,7 @@ StreamReader Set the transport. - .. method:: read(n=-1) + .. coroutinemethod:: read(n=-1) Read up to *n* bytes. If *n* is not provided, or set to ``-1``, read until EOF and return all read bytes. @@ -116,7 +116,7 @@ StreamReader This method is a :ref:`coroutine `. - .. method:: readline() + .. coroutinemethod:: readline() Read one line, where "line" is a sequence of bytes ending with ``\n``. @@ -128,7 +128,7 @@ StreamReader This method is a :ref:`coroutine `. - .. method:: readexactly(n) + .. coroutinemethod:: readexactly(n) Read exactly *n* bytes. Raise an :exc:`IncompleteReadError` if the end of the stream is reached before *n* can be read, the @@ -168,7 +168,7 @@ StreamWriter Close the transport: see :meth:`BaseTransport.close`. - .. method:: drain() + .. coroutinemethod:: drain() Let the write buffer of the underlying transport a chance to be flushed. diff --git a/Doc/library/asyncio-subprocess.rst b/Doc/library/asyncio-subprocess.rst index 570107e..1334f5b 100644 --- a/Doc/library/asyncio-subprocess.rst +++ b/Doc/library/asyncio-subprocess.rst @@ -27,7 +27,7 @@ Example to use it on Windows:: Create a subprocess: high-level API using Process ------------------------------------------------- -.. function:: create_subprocess_exec(\*args, stdin=None, stdout=None, stderr=None, loop=None, limit=None, \*\*kwds) +.. coroutinefunction:: create_subprocess_exec(\*args, stdin=None, stdout=None, stderr=None, loop=None, limit=None, \*\*kwds) Create a subprocess. @@ -39,7 +39,7 @@ Create a subprocess: high-level API using Process This function is a :ref:`coroutine `. -.. function:: create_subprocess_shell(cmd, stdin=None, stdout=None, stderr=None, loop=None, limit=None, \*\*kwds) +.. coroutinefunction:: create_subprocess_shell(cmd, stdin=None, stdout=None, stderr=None, loop=None, limit=None, \*\*kwds) Run the shell command *cmd*. 
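
A similarly minimal sketch of create_subprocess_shell() as documented above (assumes a POSIX shell with echo on PATH):

    import asyncio
    from asyncio import subprocess

    @asyncio.coroutine
    def run_shell():
        proc = yield from asyncio.create_subprocess_shell(
            'echo hello', stdout=subprocess.PIPE)
        out, _ = yield from proc.communicate()   # also waits for the process to exit
        return out

    loop = asyncio.get_event_loop()
    print(loop.run_until_complete(run_shell()))  # expected: b'hello\n'
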
@@ -67,7 +67,7 @@ Create a subprocess: low-level API using subprocess.Popen Run subprocesses asynchronously using the :mod:`subprocess` module. -.. method:: BaseEventLoop.subprocess_exec(protocol_factory, \*args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, \*\*kwargs) +.. coroutinemethod:: BaseEventLoop.subprocess_exec(protocol_factory, \*args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, \*\*kwargs) Create a subprocess from one or more string arguments (character strings or bytes strings encoded to the :ref:`filesystem encoding @@ -116,7 +116,7 @@ Run subprocesses asynchronously using the :mod:`subprocess` module. See the constructor of the :class:`subprocess.Popen` class for parameters. -.. method:: BaseEventLoop.subprocess_shell(protocol_factory, cmd, \*, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, \*\*kwargs) +.. coroutinemethod:: BaseEventLoop.subprocess_shell(protocol_factory, cmd, \*, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, \*\*kwargs) Create a subprocess from *cmd*, which is a character string or a bytes string encoded to the :ref:`filesystem encoding `, @@ -193,7 +193,7 @@ Process :meth:`~subprocess.Popen.wait` method of the :class:`~subprocess.Popen` class is implemented as a busy loop. - .. method:: wait() + .. coroutinemethod:: wait() Wait for child process to terminate. Set and return :attr:`returncode` attribute. @@ -207,7 +207,7 @@ Process blocks waiting for the OS pipe buffer to accept more data. Use the :meth:`communicate` method when using pipes to avoid that. - .. method:: communicate(input=None) + .. coroutinemethod:: communicate(input=None) Interact with process: Send data to stdin. Read data from stdout and stderr, until end-of-file is reached. Wait for process to terminate. diff --git a/Doc/library/asyncio-sync.rst b/Doc/library/asyncio-sync.rst index 80974d9..f036bc8 100644 --- a/Doc/library/asyncio-sync.rst +++ b/Doc/library/asyncio-sync.rst @@ -89,7 +89,7 @@ Lock Return ``True`` if the lock is acquired. - .. method:: acquire() + .. coroutinemethod:: acquire() Acquire a lock. @@ -139,7 +139,7 @@ Event true are awakened. Coroutine that call :meth:`wait` once the flag is true will not block at all. - .. method:: wait() + .. coroutinemethod:: wait() Block until the internal flag is true. @@ -166,7 +166,7 @@ Condition object, and it is used as the underlying lock. Otherwise, a new :class:`Lock` object is created and used as the underlying lock. - .. method:: acquire() + .. coroutinemethod:: acquire() Acquire the underlying lock. @@ -213,7 +213,7 @@ Condition There is no return value. - .. method:: wait() + .. coroutinemethod:: wait() Wait until notified. @@ -227,7 +227,7 @@ Condition This method is a :ref:`coroutine `. - .. method:: wait_for(predicate) + .. coroutinemethod:: wait_for(predicate) Wait until a predicate becomes true. @@ -258,7 +258,7 @@ Semaphore defaults to ``1``. If the value given is less than ``0``, :exc:`ValueError` is raised. - .. method:: acquire() + .. coroutinemethod:: acquire() Acquire a semaphore. @@ -273,7 +273,7 @@ Semaphore Returns ``True`` if semaphore can not be acquired immediately. - .. method:: release() + .. coroutinemethod:: release() Release a semaphore, incrementing the internal counter by one. When it was zero on entry and another coroutine is waiting for it to become @@ -323,7 +323,7 @@ Queue If the Queue was initialized with ``maxsize=0`` (the default), then :meth:`full()` is never ``True``. - .. method:: get() + .. 
coroutinemethod:: get() Remove and return an item from the queue. If queue is empty, wait until an item is available. @@ -341,7 +341,7 @@ Queue Return an item if one is immediately available, else raise :exc:`QueueEmpty`. - .. method:: put(item) + .. coroutinemethod:: put(item) Put an item into the queue. If the queue is full, wait until a free slot is available before adding item. @@ -395,7 +395,7 @@ JoinableQueue A subclass of :class:`Queue` with :meth:`task_done` and :meth:`join` methods. - .. method:: join() + .. coroutinemethod:: join() Block until all items in the queue have been gotten and processed. diff --git a/Doc/library/asyncio-task.rst b/Doc/library/asyncio-task.rst index 3008c86..edc05c3 100644 --- a/Doc/library/asyncio-task.rst +++ b/Doc/library/asyncio-task.rst @@ -545,7 +545,7 @@ Task functions Return ``True`` if *func* is a decorated :ref:`coroutine function `. -.. function:: sleep(delay, result=None, \*, loop=None) +.. coroutinefunction:: sleep(delay, result=None, \*, loop=None) Create a :ref:`coroutine ` that completes after a given time (in seconds). If *result* is provided, it is produced to the caller @@ -554,6 +554,8 @@ Task functions The resolution of the sleep depends on the :ref:`granularity of the event loop `. + This function is a :ref:`coroutine `. + .. function:: shield(arg, \*, loop=None) Wait for a future, shielding it from cancellation. @@ -581,7 +583,7 @@ Task functions except CancelledError: res = None -.. function:: wait(futures, \*, loop=None, timeout=None, return_when=ALL_COMPLETED) +.. coroutinefunction:: wait(futures, \*, loop=None, timeout=None, return_when=ALL_COMPLETED) Wait for the Futures and coroutine objects given by the sequence *futures* to complete. Coroutines will be wrapped in Tasks. Returns two sets of @@ -626,7 +628,7 @@ Task functions when the timeout occurs are returned in the second set. -.. function:: wait_for(fut, timeout, \*, loop=None) +.. coroutinefunction:: wait_for(fut, timeout, \*, loop=None) Wait for the single :class:`Future` or :ref:`coroutine object ` to complete with timeout. 
If *timeout* is ``None``, block until the future diff --git a/Doc/tools/extensions/pyspecific.py b/Doc/tools/extensions/pyspecific.py index 7baacc4..17b3c82 100644 --- a/Doc/tools/extensions/pyspecific.py +++ b/Doc/tools/extensions/pyspecific.py @@ -145,6 +145,30 @@ class PyDecoratorMethod(PyDecoratorMixin, PyClassmember): return PyClassmember.run(self) +class PyCoroutineMixin(object): + def handle_signature(self, sig, signode): + ret = super(PyCoroutineMixin, self).handle_signature(sig, signode) +# signode.insert(0, addnodes.desc_addname('coroutine ', 'coroutine ')) + signode.insert(0, addnodes.desc_annotation('coroutine ', 'coroutine ')) + return ret + + def needs_arglist(self): + return False + + +class PyCoroutineFunction(PyCoroutineMixin, PyModulelevel): + def run(self): + # a decorator function is a function after all + self.name = 'py:function' + return PyModulelevel.run(self) + + +class PyCoroutineMethod(PyCoroutineMixin, PyClassmember): + def run(self): + self.name = 'py:method' + return PyClassmember.run(self) + + # Support for documenting version of removal in deprecations class DeprecatedRemoved(Directive): @@ -347,5 +371,7 @@ def setup(app): app.add_description_unit('2to3fixer', '2to3fixer', '%s (2to3 fixer)') app.add_directive_to_domain('py', 'decorator', PyDecoratorFunction) app.add_directive_to_domain('py', 'decoratormethod', PyDecoratorMethod) + app.add_directive_to_domain('py', 'coroutinefunction', PyCoroutineFunction) + app.add_directive_to_domain('py', 'coroutinemethod', PyCoroutineMethod) app.add_directive('miscnews', MiscNews) return {'version': '1.0', 'parallel_read_safe': True} -- cgit v0.12 From a25c542c476b84b30a7172d5b4f7e2c82330f6f4 Mon Sep 17 00:00:00 2001 From: Serhiy Storchaka Date: Fri, 13 Feb 2015 15:13:33 +0200 Subject: Issue #21849: Ported from 2.7 tests for non-ASCII data. --- Lib/test/_test_multiprocessing.py | 20 +++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/Lib/test/_test_multiprocessing.py b/Lib/test/_test_multiprocessing.py index 2d4395e..9466d4e 100644 --- a/Lib/test/_test_multiprocessing.py +++ b/Lib/test/_test_multiprocessing.py @@ -2020,6 +2020,12 @@ SERIALIZER = 'xmlrpclib' class _TestRemoteManager(BaseTestCase): ALLOWED_TYPES = ('manager',) + values = ['hello world', None, True, 2.25, + 'hall\xe5 v\xe4rlden', + '\u043f\u0440\u0438\u0432\u0456\u0442 \u0441\u0432\u0456\u0442', + b'hall\xe5 v\xe4rlden', + ] + result = values[:] @classmethod def _putter(cls, address, authkey): @@ -2028,7 +2034,8 @@ class _TestRemoteManager(BaseTestCase): ) manager.connect() queue = manager.get_queue() - queue.put(('hello world', None, True, 2.25)) + # Note that xmlrpclib will deserialize object as a list not a tuple + queue.put(tuple(cls.values)) def test_remote(self): authkey = os.urandom(32) @@ -2048,8 +2055,7 @@ class _TestRemoteManager(BaseTestCase): manager2.connect() queue = manager2.get_queue() - # Note that xmlrpclib will deserialize object as a list not a tuple - self.assertEqual(queue.get(), ['hello world', None, True, 2.25]) + self.assertEqual(queue.get(), self.result) # Because we are using xmlrpclib for serialization instead of # pickle this will cause a serialization error. 
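
The test comment above depends on xmlrpclib (xmlrpc.client in Python 3) returning marshalled tuples as lists; a quick standalone check of that behaviour, reusing the original test values:

    from xmlrpc.client import dumps, loads

    values = ('hello world', None, True, 2.25)
    payload = dumps((values,), allow_none=True)   # marshal one positional argument
    (result,), _method = loads(payload)
    print(result)   # expected: ['hello world', None, True, 2.25], a list, not a tuple
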
@@ -3405,12 +3411,12 @@ class TestNoForkBomb(unittest.TestCase): name = os.path.join(os.path.dirname(__file__), 'mp_fork_bomb.py') if sm != 'fork': rc, out, err = test.script_helper.assert_python_failure(name, sm) - self.assertEqual('', out.decode('ascii')) - self.assertIn('RuntimeError', err.decode('ascii')) + self.assertEqual(out, b'') + self.assertIn(b'RuntimeError', err) else: rc, out, err = test.script_helper.assert_python_ok(name, sm) - self.assertEqual('123', out.decode('ascii').rstrip()) - self.assertEqual('', err.decode('ascii')) + self.assertEqual(out.rstrip(), b'123') + self.assertEqual(err, b'') # # Issue #17555: ForkAwareThreadLock -- cgit v0.12 From 366c570d1f48df0f06707e34472b626bc97f03e3 Mon Sep 17 00:00:00 2001 From: Berker Peksag Date: Fri, 13 Feb 2015 20:48:15 +0200 Subject: Issue #23418: Add missing entries to http.server.__all__. Patch by Martin Panter. --- Lib/http/server.py | 5 ++++- Lib/test/test_httpservers.py | 14 ++++++++++++++ 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/Lib/http/server.py b/Lib/http/server.py index cfa29f4..a27890e 100644 --- a/Lib/http/server.py +++ b/Lib/http/server.py @@ -82,7 +82,10 @@ XXX To do: __version__ = "0.6" -__all__ = ["HTTPServer", "BaseHTTPRequestHandler"] +__all__ = [ + "HTTPServer", "BaseHTTPRequestHandler", + "SimpleHTTPRequestHandler", "CGIHTTPRequestHandler", +] import html import http.client diff --git a/Lib/test/test_httpservers.py b/Lib/test/test_httpservers.py index 569341d..67a4654 100644 --- a/Lib/test/test_httpservers.py +++ b/Lib/test/test_httpservers.py @@ -760,6 +760,19 @@ class SimpleHTTPRequestHandlerTestCase(unittest.TestCase): self.assertEqual(path, self.translated) +class MiscTestCase(unittest.TestCase): + def test_all(self): + expected = [] + blacklist = {'executable', 'nobody_uid', 'test'} + for name in dir(server): + if name.startswith('_') or name in blacklist: + continue + module_object = getattr(server, name) + if getattr(module_object, '__module__', None) == 'http.server': + expected.append(name) + self.assertCountEqual(server.__all__, expected) + + def test_main(verbose=None): cwd = os.getcwd() try: @@ -769,6 +782,7 @@ def test_main(verbose=None): SimpleHTTPServerTestCase, CGIHTTPServerTestCase, SimpleHTTPRequestHandlerTestCase, + MiscTestCase, ) finally: os.chdir(cwd) -- cgit v0.12 From 2476b98acfe303ecc3ed9b38107f75e05dab95c5 Mon Sep 17 00:00:00 2001 From: Benjamin Peterson Date: Sat, 14 Feb 2015 15:16:32 -0500 Subject: avoid reading unallocated memory when argc == 0 (closes #22633) --- Python/frozenmain.c | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/Python/frozenmain.c b/Python/frozenmain.c index 55d05fc..b05c94a 100644 --- a/Python/frozenmain.c +++ b/Python/frozenmain.c @@ -24,11 +24,13 @@ Py_FrozenMain(int argc, char **argv) /* We need a second copies, as Python might modify the first one. 
*/ wchar_t **argv_copy2 = NULL; - argv_copy = PyMem_RawMalloc(sizeof(wchar_t*) * argc); - argv_copy2 = PyMem_RawMalloc(sizeof(wchar_t*) * argc); - if (!argv_copy || !argv_copy2) { - fprintf(stderr, "out of memory\n"); - goto error; + if (argc > 0) { + argv_copy = PyMem_RawMalloc(sizeof(wchar_t*) * argc); + argv_copy2 = PyMem_RawMalloc(sizeof(wchar_t*) * argc); + if (!argv_copy || !argv_copy2) { + fprintf(stderr, "out of memory\n"); + goto error; + } } Py_FrozenFlag = 1; /* Suppress errors from getpath.c */ @@ -68,7 +70,8 @@ Py_FrozenMain(int argc, char **argv) #ifdef MS_WINDOWS PyInitFrozenExtensions(); #endif /* MS_WINDOWS */ - Py_SetProgramName(argv_copy[0]); + if (argc >= 1) + Py_SetProgramName(argv_copy[0]); Py_Initialize(); #ifdef MS_WINDOWS PyWinFreeze_ExeInit(); -- cgit v0.12 From 6b688d8162bc0629286644ba901dcd40c2d35303 Mon Sep 17 00:00:00 2001 From: Serhiy Storchaka Date: Sat, 14 Feb 2015 22:44:35 +0200 Subject: Issue #22844: Fized test_gdb failure on Debian Wheezy for Z. Patch by David Edelsohn. --- Lib/test/test_gdb.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/Lib/test/test_gdb.py b/Lib/test/test_gdb.py index aaa5c69..c57875c 100644 --- a/Lib/test/test_gdb.py +++ b/Lib/test/test_gdb.py @@ -190,6 +190,8 @@ class DebuggerTests(unittest.TestCase): 'linux-vdso.so', 'warning: Could not load shared library symbols for ' 'linux-gate.so', + 'warning: Could not load shared library symbols for ' + 'linux-vdso64.so', 'Do you need "set solib-search-path" or ' '"set sysroot"?', 'warning: Source file is more recent than executable.', -- cgit v0.12 From db724fe994b180d989fd07968aab44260fa5dea5 Mon Sep 17 00:00:00 2001 From: Serhiy Storchaka Date: Sat, 14 Feb 2015 23:04:35 +0200 Subject: Issue #17753: Skip test_zipfile tests which require write access to test and email.test. 
--- Lib/test/test_zipfile.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/Lib/test/test_zipfile.py b/Lib/test/test_zipfile.py index 76e32fb..4633fe6 100644 --- a/Lib/test/test_zipfile.py +++ b/Lib/test/test_zipfile.py @@ -648,7 +648,12 @@ class PyZipFileTests(unittest.TestCase): if name + 'o' not in namelist: self.assertIn(name + 'c', namelist) + def requiresWriteAccess(self, path): + if not os.access(path, os.W_OK, effective_ids=True): + self.skipTest('requires write access to the installed location') + def test_write_pyfile(self): + self.requiresWriteAccess(os.path.dirname(__file__)) with TemporaryFile() as t, zipfile.PyZipFile(t, "w") as zipfp: fn = __file__ if fn.endswith('.pyc') or fn.endswith('.pyo'): @@ -680,6 +685,7 @@ class PyZipFileTests(unittest.TestCase): def test_write_python_package(self): import email packagedir = os.path.dirname(email.__file__) + self.requiresWriteAccess(packagedir) with TemporaryFile() as t, zipfile.PyZipFile(t, "w") as zipfp: zipfp.writepy(packagedir) @@ -693,6 +699,7 @@ class PyZipFileTests(unittest.TestCase): def test_write_filtered_python_package(self): import test packagedir = os.path.dirname(test.__file__) + self.requiresWriteAccess(packagedir) with TemporaryFile() as t, zipfile.PyZipFile(t, "w") as zipfp: @@ -721,6 +728,7 @@ class PyZipFileTests(unittest.TestCase): def test_write_with_optimization(self): import email packagedir = os.path.dirname(email.__file__) + self.requiresWriteAccess(packagedir) # use .pyc if running test in optimization mode, # use .pyo if running test in debug mode optlevel = 1 if __debug__ else 0 -- cgit v0.12 From 50ef8a8e6707f8658c4eeab34cd68822aa80c4d2 Mon Sep 17 00:00:00 2001 From: Berker Peksag Date: Sun, 15 Feb 2015 00:05:42 +0200 Subject: Issue #13637: Remove outdated versionchanged directives. Patch by Martin Panter. --- Doc/library/binascii.rst | 6 ------ 1 file changed, 6 deletions(-) diff --git a/Doc/library/binascii.rst b/Doc/library/binascii.rst index c92a8e1..3f7df74 100644 --- a/Doc/library/binascii.rst +++ b/Doc/library/binascii.rst @@ -65,9 +65,6 @@ The :mod:`binascii` module defines the following functions: data. More than one line may be passed at a time. If the optional argument *header* is present and true, underscores will be decoded as spaces. - .. versionchanged:: 3.2 - Accept only bytestring or bytearray objects as input. - .. function:: b2a_qp(data, quotetabs=False, istext=True, header=False) @@ -156,9 +153,6 @@ The :mod:`binascii` module defines the following functions: of hexadecimal digits (which can be upper or lower case), otherwise a :exc:`TypeError` is raised. - .. versionchanged:: 3.2 - Accept only bytestring or bytearray objects as input. - .. exception:: Error -- cgit v0.12 From 3cd30c2ceeaeabe0486a2467fd11cdc008bf6f67 Mon Sep 17 00:00:00 2001 From: Berker Peksag Date: Sun, 15 Feb 2015 00:31:00 +0200 Subject: Issue #13637: Improve exception message of a2b_* functions. Patch by Vajrasky Kok. 
--- Modules/binascii.c | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Modules/binascii.c b/Modules/binascii.c index 86b63bb..4e6953b 100644 --- a/Modules/binascii.c +++ b/Modules/binascii.c @@ -228,13 +228,13 @@ ascii_buffer_converter(PyObject *arg, Py_buffer *buf) if (PyObject_GetBuffer(arg, buf, PyBUF_SIMPLE) != 0) { PyErr_Format(PyExc_TypeError, "argument should be bytes, buffer or ASCII string, " - "not %R", Py_TYPE(arg)); + "not '%.100s'", Py_TYPE(arg)->tp_name); return 0; } if (!PyBuffer_IsContiguous(buf, 'C')) { PyErr_Format(PyExc_TypeError, "argument should be a contiguous buffer, " - "not %R", Py_TYPE(arg)); + "not '%.100s'", Py_TYPE(arg)->tp_name); PyBuffer_Release(buf); return 0; } -- cgit v0.12 From 85c3033670d493d17e633261793e83226a6a0f61 Mon Sep 17 00:00:00 2001 From: Serhiy Storchaka Date: Sun, 15 Feb 2015 13:58:23 +0200 Subject: Use os.devnull instead of hardcoded '/dev/null'. --- Lib/test/test_asyncio/test_unix_events.py | 6 +++--- Lib/test/test_bz2.py | 10 +++++----- Lib/test/test_cgi.py | 4 ++-- Lib/test/test_subprocess.py | 4 ++-- 4 files changed, 12 insertions(+), 12 deletions(-) diff --git a/Lib/test/test_asyncio/test_unix_events.py b/Lib/test/test_asyncio/test_unix_events.py index 41249ff..dc0835c 100644 --- a/Lib/test/test_asyncio/test_unix_events.py +++ b/Lib/test/test_asyncio/test_unix_events.py @@ -295,7 +295,7 @@ class SelectorEventLoopUnixSocketTests(test_utils.TestCase): def test_create_unix_connection_path_sock(self): coro = self.loop.create_unix_connection( - lambda: None, '/dev/null', sock=object()) + lambda: None, os.devnull, sock=object()) with self.assertRaisesRegex(ValueError, 'path and sock can not be'): self.loop.run_until_complete(coro) @@ -308,14 +308,14 @@ class SelectorEventLoopUnixSocketTests(test_utils.TestCase): def test_create_unix_connection_nossl_serverhost(self): coro = self.loop.create_unix_connection( - lambda: None, '/dev/null', server_hostname='spam') + lambda: None, os.devnull, server_hostname='spam') with self.assertRaisesRegex(ValueError, 'server_hostname is only meaningful'): self.loop.run_until_complete(coro) def test_create_unix_connection_ssl_noserverhost(self): coro = self.loop.create_unix_connection( - lambda: None, '/dev/null', ssl=True) + lambda: None, os.devnull, ssl=True) with self.assertRaisesRegex( ValueError, 'you have to pass server_hostname when using ssl'): diff --git a/Lib/test/test_bz2.py b/Lib/test/test_bz2.py index ce012d6..1535e8e 100644 --- a/Lib/test/test_bz2.py +++ b/Lib/test/test_bz2.py @@ -87,11 +87,11 @@ class BZ2FileTest(BaseTest): def testBadArgs(self): self.assertRaises(TypeError, BZ2File, 123.456) - self.assertRaises(ValueError, BZ2File, "/dev/null", "z") - self.assertRaises(ValueError, BZ2File, "/dev/null", "rx") - self.assertRaises(ValueError, BZ2File, "/dev/null", "rbt") - self.assertRaises(ValueError, BZ2File, "/dev/null", compresslevel=0) - self.assertRaises(ValueError, BZ2File, "/dev/null", compresslevel=10) + self.assertRaises(ValueError, BZ2File, os.devnull, "z") + self.assertRaises(ValueError, BZ2File, os.devnull, "rx") + self.assertRaises(ValueError, BZ2File, os.devnull, "rbt") + self.assertRaises(ValueError, BZ2File, os.devnull, compresslevel=0) + self.assertRaises(ValueError, BZ2File, os.devnull, compresslevel=10) def testRead(self): self.createTempFile() diff --git a/Lib/test/test_cgi.py b/Lib/test/test_cgi.py index 86e1f3a..1127dd1 100644 --- a/Lib/test/test_cgi.py +++ b/Lib/test/test_cgi.py @@ -186,9 +186,9 @@ class CgiTests(unittest.TestCase): cgi.initlog("%s", 
"Testing initlog 1") cgi.log("%s", "Testing log 2") self.assertEqual(cgi.logfp.getvalue(), "Testing initlog 1\nTesting log 2\n") - if os.path.exists("/dev/null"): + if os.path.exists(os.devnull): cgi.logfp = None - cgi.logfile = "/dev/null" + cgi.logfile = os.devnull cgi.initlog("%s", "Testing log 3") self.addCleanup(cgi.closelog) cgi.log("Testing log 4") diff --git a/Lib/test/test_subprocess.py b/Lib/test/test_subprocess.py index 5381115..08af71f 100644 --- a/Lib/test/test_subprocess.py +++ b/Lib/test/test_subprocess.py @@ -1922,7 +1922,7 @@ class POSIXProcessTestCase(BaseTestCase): open_fds = set(fds) # add a bunch more fds for _ in range(9): - fd = os.open("/dev/null", os.O_RDONLY) + fd = os.open(os.devnull, os.O_RDONLY) self.addCleanup(os.close, fd) open_fds.add(fd) @@ -1984,7 +1984,7 @@ class POSIXProcessTestCase(BaseTestCase): open_fds = set() # Add a bunch more fds to pass down. for _ in range(40): - fd = os.open("/dev/null", os.O_RDONLY) + fd = os.open(os.devnull, os.O_RDONLY) open_fds.add(fd) # Leave a two pairs of low ones available for use by the -- cgit v0.12 From fa310ee3a950e32c94c378ba2aa790b517104fae Mon Sep 17 00:00:00 2001 From: Serhiy Storchaka Date: Sun, 15 Feb 2015 14:10:03 +0200 Subject: Use pickled data compatible with Python 2 for testing protocols 0-2. --- Lib/test/pickletester.py | 433 ++++++++++++++++++++++++----------------------- 1 file changed, 217 insertions(+), 216 deletions(-) diff --git a/Lib/test/pickletester.py b/Lib/test/pickletester.py index bdc7bad..55205d1 100644 --- a/Lib/test/pickletester.py +++ b/Lib/test/pickletester.py @@ -144,21 +144,22 @@ def create_dynamic_class(name, bases): # the object returned by create_data(). DATA0 = ( - b'(lp0\nL0L\naL1L\naF2.0\nac' - b'builtins\ncomplex\n' - b'p1\n(F3.0\nF0.0\ntp2\nRp' - b'3\naL1L\naL-1L\naL255L\naL-' - b'255L\naL-256L\naL65535L\na' - b'L-65535L\naL-65536L\naL2' - b'147483647L\naL-2147483' - b'647L\naL-2147483648L\na(' - b'Vabc\np4\ng4\nccopyreg' - b'\n_reconstructor\np5\n(' - b'c__main__\nC\np6\ncbu' - b'iltins\nobject\np7\nNt' - b'p8\nRp9\n(dp10\nVfoo\np1' - b'1\nL1L\nsVbar\np12\nL2L\nsb' - b'g9\ntp13\nag13\naL5L\na.' + b'(lp0\nL0L\naL1L\naF2.0\n' + b'ac__builtin__\ncomple' + b'x\np1\n(F3.0\nF0.0\ntp2\n' + b'Rp3\naL1L\naL-1L\naL255' + b'L\naL-255L\naL-256L\naL' + b'65535L\naL-65535L\naL-' + b'65536L\naL2147483647L' + b'\naL-2147483647L\naL-2' + b'147483648L\na(Vabc\np4' + b'\ng4\nccopy_reg\n_recon' + b'structor\np5\n(c__main' + b'__\nC\np6\nc__builtin__' + b'\nobject\np7\nNtp8\nRp9\n' + b'(dp10\nVfoo\np11\nL1L\ns' + b'Vbar\np12\nL2L\nsbg9\ntp' + b'13\nag13\naL5L\na.' 
) # Disassembly of DATA0 @@ -172,88 +173,88 @@ DATA0_DIS = """\ 14: a APPEND 15: F FLOAT 2.0 20: a APPEND - 21: c GLOBAL 'builtins complex' - 39: p PUT 1 - 42: ( MARK - 43: F FLOAT 3.0 - 48: F FLOAT 0.0 - 53: t TUPLE (MARK at 42) - 54: p PUT 2 - 57: R REDUCE - 58: p PUT 3 - 61: a APPEND - 62: L LONG 1 - 66: a APPEND - 67: L LONG -1 - 72: a APPEND - 73: L LONG 255 - 79: a APPEND - 80: L LONG -255 - 87: a APPEND - 88: L LONG -256 - 95: a APPEND - 96: L LONG 65535 - 104: a APPEND - 105: L LONG -65535 - 114: a APPEND - 115: L LONG -65536 - 124: a APPEND - 125: L LONG 2147483647 - 138: a APPEND - 139: L LONG -2147483647 - 153: a APPEND - 154: L LONG -2147483648 - 168: a APPEND - 169: ( MARK - 170: V UNICODE 'abc' - 175: p PUT 4 - 178: g GET 4 - 181: c GLOBAL 'copyreg _reconstructor' - 205: p PUT 5 - 208: ( MARK - 209: c GLOBAL '__main__ C' - 221: p PUT 6 - 224: c GLOBAL 'builtins object' - 241: p PUT 7 - 244: N NONE - 245: t TUPLE (MARK at 208) - 246: p PUT 8 - 249: R REDUCE - 250: p PUT 9 - 253: ( MARK - 254: d DICT (MARK at 253) - 255: p PUT 10 - 259: V UNICODE 'foo' - 264: p PUT 11 - 268: L LONG 1 - 272: s SETITEM - 273: V UNICODE 'bar' - 278: p PUT 12 - 282: L LONG 2 - 286: s SETITEM - 287: b BUILD - 288: g GET 9 - 291: t TUPLE (MARK at 169) - 292: p PUT 13 - 296: a APPEND - 297: g GET 13 - 301: a APPEND - 302: L LONG 5 - 306: a APPEND - 307: . STOP + 21: c GLOBAL '__builtin__ complex' + 42: p PUT 1 + 45: ( MARK + 46: F FLOAT 3.0 + 51: F FLOAT 0.0 + 56: t TUPLE (MARK at 45) + 57: p PUT 2 + 60: R REDUCE + 61: p PUT 3 + 64: a APPEND + 65: L LONG 1 + 69: a APPEND + 70: L LONG -1 + 75: a APPEND + 76: L LONG 255 + 82: a APPEND + 83: L LONG -255 + 90: a APPEND + 91: L LONG -256 + 98: a APPEND + 99: L LONG 65535 + 107: a APPEND + 108: L LONG -65535 + 117: a APPEND + 118: L LONG -65536 + 127: a APPEND + 128: L LONG 2147483647 + 141: a APPEND + 142: L LONG -2147483647 + 156: a APPEND + 157: L LONG -2147483648 + 171: a APPEND + 172: ( MARK + 173: V UNICODE 'abc' + 178: p PUT 4 + 181: g GET 4 + 184: c GLOBAL 'copy_reg _reconstructor' + 209: p PUT 5 + 212: ( MARK + 213: c GLOBAL '__main__ C' + 225: p PUT 6 + 228: c GLOBAL '__builtin__ object' + 248: p PUT 7 + 251: N NONE + 252: t TUPLE (MARK at 212) + 253: p PUT 8 + 256: R REDUCE + 257: p PUT 9 + 260: ( MARK + 261: d DICT (MARK at 260) + 262: p PUT 10 + 266: V UNICODE 'foo' + 271: p PUT 11 + 275: L LONG 1 + 279: s SETITEM + 280: V UNICODE 'bar' + 285: p PUT 12 + 289: L LONG 2 + 293: s SETITEM + 294: b BUILD + 295: g GET 9 + 298: t TUPLE (MARK at 172) + 299: p PUT 13 + 303: a APPEND + 304: g GET 13 + 308: a APPEND + 309: L LONG 5 + 313: a APPEND + 314: . STOP highest protocol among opcodes = 0 """ DATA1 = ( - b']q\x00(K\x00K\x01G@\x00\x00\x00\x00\x00\x00\x00c' - b'builtins\ncomplex\nq\x01' + b']q\x00(K\x00K\x01G@\x00\x00\x00\x00\x00\x00\x00c__' + b'builtin__\ncomplex\nq\x01' b'(G@\x08\x00\x00\x00\x00\x00\x00G\x00\x00\x00\x00\x00\x00\x00\x00t' b'q\x02Rq\x03K\x01J\xff\xff\xff\xffK\xffJ\x01\xff\xff\xffJ' b'\x00\xff\xff\xffM\xff\xffJ\x01\x00\xff\xffJ\x00\x00\xff\xffJ\xff\xff' b'\xff\x7fJ\x01\x00\x00\x80J\x00\x00\x00\x80(X\x03\x00\x00\x00ab' - b'cq\x04h\x04ccopyreg\n_reco' + b'cq\x04h\x04ccopy_reg\n_reco' b'nstructor\nq\x05(c__main' - b'__\nC\nq\x06cbuiltins\n' + b'__\nC\nq\x06c__builtin__\n' b'object\nq\x07Ntq\x08Rq\t}q\n(' b'X\x03\x00\x00\x00fooq\x0bK\x01X\x03\x00\x00\x00bar' b'q\x0cK\x02ubh\ttq\rh\rK\x05e.' 
@@ -267,66 +268,66 @@ DATA1_DIS = """\ 4: K BININT1 0 6: K BININT1 1 8: G BINFLOAT 2.0 - 17: c GLOBAL 'builtins complex' - 35: q BINPUT 1 - 37: ( MARK - 38: G BINFLOAT 3.0 - 47: G BINFLOAT 0.0 - 56: t TUPLE (MARK at 37) - 57: q BINPUT 2 - 59: R REDUCE - 60: q BINPUT 3 - 62: K BININT1 1 - 64: J BININT -1 - 69: K BININT1 255 - 71: J BININT -255 - 76: J BININT -256 - 81: M BININT2 65535 - 84: J BININT -65535 - 89: J BININT -65536 - 94: J BININT 2147483647 - 99: J BININT -2147483647 - 104: J BININT -2147483648 - 109: ( MARK - 110: X BINUNICODE 'abc' - 118: q BINPUT 4 - 120: h BINGET 4 - 122: c GLOBAL 'copyreg _reconstructor' - 146: q BINPUT 5 - 148: ( MARK - 149: c GLOBAL '__main__ C' - 161: q BINPUT 6 - 163: c GLOBAL 'builtins object' - 180: q BINPUT 7 - 182: N NONE - 183: t TUPLE (MARK at 148) - 184: q BINPUT 8 - 186: R REDUCE - 187: q BINPUT 9 - 189: } EMPTY_DICT - 190: q BINPUT 10 - 192: ( MARK - 193: X BINUNICODE 'foo' - 201: q BINPUT 11 - 203: K BININT1 1 - 205: X BINUNICODE 'bar' - 213: q BINPUT 12 - 215: K BININT1 2 - 217: u SETITEMS (MARK at 192) - 218: b BUILD - 219: h BINGET 9 - 221: t TUPLE (MARK at 109) - 222: q BINPUT 13 - 224: h BINGET 13 - 226: K BININT1 5 - 228: e APPENDS (MARK at 3) - 229: . STOP + 17: c GLOBAL '__builtin__ complex' + 38: q BINPUT 1 + 40: ( MARK + 41: G BINFLOAT 3.0 + 50: G BINFLOAT 0.0 + 59: t TUPLE (MARK at 40) + 60: q BINPUT 2 + 62: R REDUCE + 63: q BINPUT 3 + 65: K BININT1 1 + 67: J BININT -1 + 72: K BININT1 255 + 74: J BININT -255 + 79: J BININT -256 + 84: M BININT2 65535 + 87: J BININT -65535 + 92: J BININT -65536 + 97: J BININT 2147483647 + 102: J BININT -2147483647 + 107: J BININT -2147483648 + 112: ( MARK + 113: X BINUNICODE 'abc' + 121: q BINPUT 4 + 123: h BINGET 4 + 125: c GLOBAL 'copy_reg _reconstructor' + 150: q BINPUT 5 + 152: ( MARK + 153: c GLOBAL '__main__ C' + 165: q BINPUT 6 + 167: c GLOBAL '__builtin__ object' + 187: q BINPUT 7 + 189: N NONE + 190: t TUPLE (MARK at 152) + 191: q BINPUT 8 + 193: R REDUCE + 194: q BINPUT 9 + 196: } EMPTY_DICT + 197: q BINPUT 10 + 199: ( MARK + 200: X BINUNICODE 'foo' + 208: q BINPUT 11 + 210: K BININT1 1 + 212: X BINUNICODE 'bar' + 220: q BINPUT 12 + 222: K BININT1 2 + 224: u SETITEMS (MARK at 199) + 225: b BUILD + 226: h BINGET 9 + 228: t TUPLE (MARK at 112) + 229: q BINPUT 13 + 231: h BINGET 13 + 233: K BININT1 5 + 235: e APPENDS (MARK at 3) + 236: . 
STOP highest protocol among opcodes = 1 """ DATA2 = ( b'\x80\x02]q\x00(K\x00K\x01G@\x00\x00\x00\x00\x00\x00\x00c' - b'builtins\ncomplex\n' + b'__builtin__\ncomplex\n' b'q\x01G@\x08\x00\x00\x00\x00\x00\x00G\x00\x00\x00\x00\x00\x00\x00\x00' b'\x86q\x02Rq\x03K\x01J\xff\xff\xff\xffK\xffJ\x01\xff\xff\xff' b'J\x00\xff\xff\xffM\xff\xffJ\x01\x00\xff\xffJ\x00\x00\xff\xffJ\xff' @@ -346,52 +347,52 @@ DATA2_DIS = """\ 6: K BININT1 0 8: K BININT1 1 10: G BINFLOAT 2.0 - 19: c GLOBAL 'builtins complex' - 37: q BINPUT 1 - 39: G BINFLOAT 3.0 - 48: G BINFLOAT 0.0 - 57: \x86 TUPLE2 - 58: q BINPUT 2 - 60: R REDUCE - 61: q BINPUT 3 - 63: K BININT1 1 - 65: J BININT -1 - 70: K BININT1 255 - 72: J BININT -255 - 77: J BININT -256 - 82: M BININT2 65535 - 85: J BININT -65535 - 90: J BININT -65536 - 95: J BININT 2147483647 - 100: J BININT -2147483647 - 105: J BININT -2147483648 - 110: ( MARK - 111: X BINUNICODE 'abc' - 119: q BINPUT 4 - 121: h BINGET 4 - 123: c GLOBAL '__main__ C' - 135: q BINPUT 5 - 137: ) EMPTY_TUPLE - 138: \x81 NEWOBJ - 139: q BINPUT 6 - 141: } EMPTY_DICT - 142: q BINPUT 7 - 144: ( MARK - 145: X BINUNICODE 'foo' - 153: q BINPUT 8 - 155: K BININT1 1 - 157: X BINUNICODE 'bar' - 165: q BINPUT 9 - 167: K BININT1 2 - 169: u SETITEMS (MARK at 144) - 170: b BUILD - 171: h BINGET 6 - 173: t TUPLE (MARK at 110) - 174: q BINPUT 10 - 176: h BINGET 10 - 178: K BININT1 5 - 180: e APPENDS (MARK at 5) - 181: . STOP + 19: c GLOBAL '__builtin__ complex' + 40: q BINPUT 1 + 42: G BINFLOAT 3.0 + 51: G BINFLOAT 0.0 + 60: \x86 TUPLE2 + 61: q BINPUT 2 + 63: R REDUCE + 64: q BINPUT 3 + 66: K BININT1 1 + 68: J BININT -1 + 73: K BININT1 255 + 75: J BININT -255 + 80: J BININT -256 + 85: M BININT2 65535 + 88: J BININT -65535 + 93: J BININT -65536 + 98: J BININT 2147483647 + 103: J BININT -2147483647 + 108: J BININT -2147483648 + 113: ( MARK + 114: X BINUNICODE 'abc' + 122: q BINPUT 4 + 124: h BINGET 4 + 126: c GLOBAL '__main__ C' + 138: q BINPUT 5 + 140: ) EMPTY_TUPLE + 141: \x81 NEWOBJ + 142: q BINPUT 6 + 144: } EMPTY_DICT + 145: q BINPUT 7 + 147: ( MARK + 148: X BINUNICODE 'foo' + 156: q BINPUT 8 + 158: K BININT1 1 + 160: X BINUNICODE 'bar' + 168: q BINPUT 9 + 170: K BININT1 2 + 172: u SETITEMS (MARK at 147) + 173: b BUILD + 174: h BINGET 6 + 176: t TUPLE (MARK at 113) + 177: q BINPUT 10 + 179: h BINGET 10 + 181: K BININT1 5 + 183: e APPENDS (MARK at 5) + 184: . STOP highest protocol among opcodes = 2 """ @@ -570,14 +571,14 @@ class AbstractPickleTests(unittest.TestCase): xname = X.__name__.encode('ascii') # Protocol 0 (text mode pickle): """ - 0: ( MARK - 1: i INST '__main__ X' (MARK at 0) - 15: p PUT 0 - 18: ( MARK - 19: d DICT (MARK at 18) - 20: p PUT 1 - 23: b BUILD - 24: . STOP + 0: ( MARK + 1: i INST '__main__ X' (MARK at 0) + 13: p PUT 0 + 16: ( MARK + 17: d DICT (MARK at 16) + 18: p PUT 1 + 21: b BUILD + 22: . STOP """ pickle0 = (b"(i__main__\n" b"X\n" @@ -587,15 +588,15 @@ class AbstractPickleTests(unittest.TestCase): # Protocol 1 (binary mode pickle) """ - 0: ( MARK - 1: c GLOBAL '__main__ X' - 15: q BINPUT 0 - 17: o OBJ (MARK at 0) - 18: q BINPUT 1 - 20: } EMPTY_DICT - 21: q BINPUT 2 - 23: b BUILD - 24: . STOP + 0: ( MARK + 1: c GLOBAL '__main__ X' + 13: q BINPUT 0 + 15: o OBJ (MARK at 0) + 16: q BINPUT 1 + 18: } EMPTY_DICT + 19: q BINPUT 2 + 21: b BUILD + 22: . 
STOP """ pickle1 = (b'(c__main__\n' b'X\n' @@ -604,16 +605,16 @@ class AbstractPickleTests(unittest.TestCase): # Protocol 2 (pickle2 = b'\x80\x02' + pickle1) """ - 0: \x80 PROTO 2 - 2: ( MARK - 3: c GLOBAL '__main__ X' - 17: q BINPUT 0 - 19: o OBJ (MARK at 2) - 20: q BINPUT 1 - 22: } EMPTY_DICT - 23: q BINPUT 2 - 25: b BUILD - 26: . STOP + 0: \x80 PROTO 2 + 2: ( MARK + 3: c GLOBAL '__main__ X' + 15: q BINPUT 0 + 17: o OBJ (MARK at 2) + 18: q BINPUT 1 + 20: } EMPTY_DICT + 21: q BINPUT 2 + 23: b BUILD + 24: . STOP """ pickle2 = (b'\x80\x02(c__main__\n' b'X\n' -- cgit v0.12 From 0aa74e10bb7697db00514d62269ff9cae7bd7af9 Mon Sep 17 00:00:00 2001 From: Serhiy Storchaka Date: Sun, 15 Feb 2015 16:20:47 +0200 Subject: Issue #19681: Test the repr of partial with more than one keyword argument. --- Lib/test/test_functools.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/Lib/test/test_functools.py b/Lib/test/test_functools.py index 1012053..0375601c 100644 --- a/Lib/test/test_functools.py +++ b/Lib/test/test_functools.py @@ -155,9 +155,9 @@ class TestPartialC(TestPartial, unittest.TestCase): def test_repr(self): args = (object(), object()) args_repr = ', '.join(repr(a) for a in args) - #kwargs = {'a': object(), 'b': object()} - kwargs = {'a': object()} - kwargs_repr = ', '.join("%s=%r" % (k, v) for k, v in kwargs.items()) + kwargs = {'a': object(), 'b': object()} + kwargs_reprs = ['a={a!r}, b={b!r}'.format_map(kwargs), + 'b={b!r}, a={a!r}'.format_map(kwargs)] if self.partial is c_functools.partial: name = 'functools.partial' else: @@ -172,12 +172,14 @@ class TestPartialC(TestPartial, unittest.TestCase): repr(f)) f = self.partial(capture, **kwargs) - self.assertEqual('{}({!r}, {})'.format(name, capture, kwargs_repr), - repr(f)) + self.assertIn(repr(f), + ['{}({!r}, {})'.format(name, capture, kwargs_repr) + for kwargs_repr in kwargs_reprs]) f = self.partial(capture, *args, **kwargs) - self.assertEqual('{}({!r}, {}, {})'.format(name, capture, args_repr, kwargs_repr), - repr(f)) + self.assertIn(repr(f), + ['{}({!r}, {}, {})'.format(name, capture, args_repr, kwargs_repr) + for kwargs_repr in kwargs_reprs]) def test_pickle(self): f = self.partial(signature, 'asdf', bar=True) -- cgit v0.12 From 57fffd6f99d55ccd623b381622b989410a695b99 Mon Sep 17 00:00:00 2001 From: Antoine Pitrou Date: Sun, 15 Feb 2015 18:03:59 +0100 Subject: Issue #23146: Fix mishandling of absolute Windows paths with forward slashes in pathlib. Detected and fixed by Serhiy. --- Lib/pathlib.py | 4 ++++ Lib/test/test_pathlib.py | 36 ++++++++++++++++++++---------------- Misc/NEWS | 3 +++ 3 files changed, 27 insertions(+), 16 deletions(-) diff --git a/Lib/pathlib.py b/Lib/pathlib.py index 73fd432..918ac8d 100644 --- a/Lib/pathlib.py +++ b/Lib/pathlib.py @@ -73,6 +73,10 @@ class _Flavour(object): # parts. This makes the result of parsing e.g. # ("C:", "/", "a") reasonably intuitive. 
for part in it: + if not part: + continue + if altsep: + part = part.replace(altsep, sep) drv = self.splitroot(part)[0] if drv: break diff --git a/Lib/test/test_pathlib.py b/Lib/test/test_pathlib.py index ab88c34..11420e2 100644 --- a/Lib/test/test_pathlib.py +++ b/Lib/test/test_pathlib.py @@ -105,31 +105,35 @@ class NTFlavourTest(_BaseFlavourTest, unittest.TestCase): check = self._check_parse_parts # First part is anchored check(['c:'], ('c:', '', ['c:'])) - check(['c:\\'], ('c:', '\\', ['c:\\'])) - check(['\\'], ('', '\\', ['\\'])) + check(['c:/'], ('c:', '\\', ['c:\\'])) + check(['/'], ('', '\\', ['\\'])) check(['c:a'], ('c:', '', ['c:', 'a'])) - check(['c:\\a'], ('c:', '\\', ['c:\\', 'a'])) - check(['\\a'], ('', '\\', ['\\', 'a'])) + check(['c:/a'], ('c:', '\\', ['c:\\', 'a'])) + check(['/a'], ('', '\\', ['\\', 'a'])) # UNC paths - check(['\\\\a\\b'], ('\\\\a\\b', '\\', ['\\\\a\\b\\'])) - check(['\\\\a\\b\\'], ('\\\\a\\b', '\\', ['\\\\a\\b\\'])) - check(['\\\\a\\b\\c'], ('\\\\a\\b', '\\', ['\\\\a\\b\\', 'c'])) + check(['//a/b'], ('\\\\a\\b', '\\', ['\\\\a\\b\\'])) + check(['//a/b/'], ('\\\\a\\b', '\\', ['\\\\a\\b\\'])) + check(['//a/b/c'], ('\\\\a\\b', '\\', ['\\\\a\\b\\', 'c'])) # Second part is anchored, so that the first part is ignored check(['a', 'Z:b', 'c'], ('Z:', '', ['Z:', 'b', 'c'])) - check(['a', 'Z:\\b', 'c'], ('Z:', '\\', ['Z:\\', 'b', 'c'])) - check(['a', '\\b', 'c'], ('', '\\', ['\\', 'b', 'c'])) + check(['a', 'Z:/b', 'c'], ('Z:', '\\', ['Z:\\', 'b', 'c'])) # UNC paths - check(['a', '\\\\b\\c', 'd'], ('\\\\b\\c', '\\', ['\\\\b\\c\\', 'd'])) + check(['a', '//b/c', 'd'], ('\\\\b\\c', '\\', ['\\\\b\\c\\', 'd'])) # Collapsing and stripping excess slashes - check(['a', 'Z:\\\\b\\\\c\\', 'd\\'], ('Z:', '\\', ['Z:\\', 'b', 'c', 'd'])) + check(['a', 'Z://b//c/', 'd/'], ('Z:', '\\', ['Z:\\', 'b', 'c', 'd'])) # UNC paths - check(['a', '\\\\b\\c\\\\', 'd'], ('\\\\b\\c', '\\', ['\\\\b\\c\\', 'd'])) + check(['a', '//b/c//', 'd'], ('\\\\b\\c', '\\', ['\\\\b\\c\\', 'd'])) # Extended paths - check(['\\\\?\\c:\\'], ('\\\\?\\c:', '\\', ['\\\\?\\c:\\'])) - check(['\\\\?\\c:\\a'], ('\\\\?\\c:', '\\', ['\\\\?\\c:\\', 'a'])) + check(['//?/c:/'], ('\\\\?\\c:', '\\', ['\\\\?\\c:\\'])) + check(['//?/c:/a'], ('\\\\?\\c:', '\\', ['\\\\?\\c:\\', 'a'])) + check(['//?/c:/a', '/b'], ('\\\\?\\c:', '\\', ['\\\\?\\c:\\', 'b'])) # Extended UNC paths (format is "\\?\UNC\server\share") - check(['\\\\?\\UNC\\b\\c'], ('\\\\?\\UNC\\b\\c', '\\', ['\\\\?\\UNC\\b\\c\\'])) - check(['\\\\?\\UNC\\b\\c\\d'], ('\\\\?\\UNC\\b\\c', '\\', ['\\\\?\\UNC\\b\\c\\', 'd'])) + check(['//?/UNC/b/c'], ('\\\\?\\UNC\\b\\c', '\\', ['\\\\?\\UNC\\b\\c\\'])) + check(['//?/UNC/b/c/d'], ('\\\\?\\UNC\\b\\c', '\\', ['\\\\?\\UNC\\b\\c\\', 'd'])) + # Second part has a root but not drive + check(['a', '/b', 'c'], ('', '\\', ['\\', 'b', 'c'])) + check(['Z:/a', '/b', 'c'], ('Z:', '\\', ['Z:\\', 'b', 'c'])) + check(['//?/Z:/a', '/b', 'c'], ('\\\\?\\Z:', '\\', ['\\\\?\\Z:\\', 'b', 'c'])) def test_splitroot(self): f = self.flavour.splitroot diff --git a/Misc/NEWS b/Misc/NEWS index 47e325d..ca4685c 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -13,6 +13,9 @@ Core and Builtins Library ------- +- Issue #23146: Fix mishandling of absolute Windows paths with forward + slashes in pathlib. + - Issue #23421: Fixed compression in tarfile CLI. Patch by wdv4758h. - Issue #23361: Fix possible overflow in Windows subprocess creation code. 
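As an aside on the pathlib change above: a minimal sketch of the behaviour the
updated NTFlavourTest cases expect once '/' (altsep) is normalized to '\\'
(sep) while parsing. The snippet is illustrative only and is not part of the
patch; the commented output simply restates the expected tuples from
test_pathlib.py.

    from pathlib import PureWindowsPath

    # Forward slashes are now accepted anywhere a backslash is, including
    # drive-absolute, UNC and extended paths.
    p = PureWindowsPath('c:/a')
    print(p.drive, p.root, p.parts)   # c: \ ('c:\\', 'a')

    p = PureWindowsPath('//a/b/c')
    print(p.drive, p.root, p.parts)   # \\a\b \ ('\\\\a\\b\\', 'c')

    # A later argument with a root but no drive keeps the earlier drive,
    # matching check(['Z:/a', '/b', 'c'], ('Z:', '\\', ['Z:\\', 'b', 'c'])).
    print(PureWindowsPath('Z:/a', '/b', 'c'))   # Z:\b\c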
-- cgit v0.12 From 74eb8b2d1a1db905cffc4efcd1cefaf1f725cd81 Mon Sep 17 00:00:00 2001 From: Serhiy Storchaka Date: Mon, 16 Feb 2015 00:30:43 +0200 Subject: Issue #22885: Fixed arbitrary code execution vulnerability in the dbm.dumb module. Original patch by Claudiu Popa. --- Lib/dbm/dumb.py | 3 ++- Lib/test/test_dbm_dumb.py | 9 +++++++++ Misc/NEWS | 3 +++ 3 files changed, 14 insertions(+), 1 deletion(-) diff --git a/Lib/dbm/dumb.py b/Lib/dbm/dumb.py index 8f48aad..a9ead68 100644 --- a/Lib/dbm/dumb.py +++ b/Lib/dbm/dumb.py @@ -21,6 +21,7 @@ is read when the database is opened, and some updates rewrite the whole index) """ +import ast as _ast import io as _io import os as _os import collections @@ -85,7 +86,7 @@ class _Database(collections.MutableMapping): with f: for line in f: line = line.rstrip() - key, pos_and_siz_pair = eval(line) + key, pos_and_siz_pair = _ast.literal_eval(line) key = key.encode('Latin-1') self._index[key] = pos_and_siz_pair diff --git a/Lib/test/test_dbm_dumb.py b/Lib/test/test_dbm_dumb.py index 29f48a3..dc88ca6 100644 --- a/Lib/test/test_dbm_dumb.py +++ b/Lib/test/test_dbm_dumb.py @@ -217,6 +217,15 @@ class DumbDBMTestCase(unittest.TestCase): self.assertEqual(str(cm.exception), "DBM object has already been closed") + def test_eval(self): + with open(_fname + '.dir', 'w') as stream: + stream.write("str(print('Hacked!')), 0\n") + with support.captured_stdout() as stdout: + with self.assertRaises(ValueError): + with dumbdbm.open(_fname) as f: + pass + self.assertEqual(stdout.getvalue(), '') + def tearDown(self): _delete_files() diff --git a/Misc/NEWS b/Misc/NEWS index ca4685c..b453fe4 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -13,6 +13,9 @@ Core and Builtins Library ------- +- Issue #22885: Fixed arbitrary code execution vulnerability in the dbm.dumb + module. Original patch by Claudiu Popa. + - Issue #23146: Fix mishandling of absolute Windows paths with forward slashes in pathlib. -- cgit v0.12 From 4d6c6063bf619ce8c31cf9458209110338db386f Mon Sep 17 00:00:00 2001 From: Berker Peksag Date: Mon, 16 Feb 2015 03:36:10 +0200 Subject: Fix typo in os.supports_effective_ids documentation. --- Doc/library/os.rst | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Doc/library/os.rst b/Doc/library/os.rst index f217a36..98ee842 100644 --- a/Doc/library/os.rst +++ b/Doc/library/os.rst @@ -2159,7 +2159,8 @@ features: contain :func:`os.access`, otherwise it will be empty. To check whether you can use the *effective_ids* parameter for - :func:`os.access`, use the ``in`` operator on ``supports_dir_fd``, like so:: + :func:`os.access`, use the ``in`` operator on ``supports_effective_ids``, + like so:: os.access in os.supports_effective_ids -- cgit v0.12 From e1efc07a30f4c17723c707ad761bfad538982b0c Mon Sep 17 00:00:00 2001 From: Berker Peksag Date: Mon, 16 Feb 2015 04:36:18 +0200 Subject: Issue #17753: effective_ids unavailable on Windows. 
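Both the os documentation fix above and the test_zipfile change that follows
rely on the same feature test. A small illustrative sketch, not part of either
patch; the path used here is just an example:

    import os

    # os.access() only honours effective_ids where the platform supports it;
    # on Windows the supports_effective_ids set is empty, so fall back to a
    # real-ids check instead of passing effective_ids=True unconditionally.
    can_use_effective = os.access in os.supports_effective_ids
    print(os.access(os.getcwd(), os.W_OK, effective_ids=can_use_effective))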
--- Lib/test/test_zipfile.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/Lib/test/test_zipfile.py b/Lib/test/test_zipfile.py index 4633fe6..3d8f9bc 100644 --- a/Lib/test/test_zipfile.py +++ b/Lib/test/test_zipfile.py @@ -649,7 +649,9 @@ class PyZipFileTests(unittest.TestCase): self.assertIn(name + 'c', namelist) def requiresWriteAccess(self, path): - if not os.access(path, os.W_OK, effective_ids=True): + # effective_ids unavailable on windows + if not os.access(path, os.W_OK, + effective_ids=os.access in os.supports_effective_ids): self.skipTest('requires write access to the installed location') def test_write_pyfile(self): -- cgit v0.12 From 1a1ff29659f068659dea07f1bd67b8fd4331071c Mon Sep 17 00:00:00 2001 From: Serhiy Storchaka Date: Mon, 16 Feb 2015 13:28:22 +0200 Subject: Issue #23446: Use PyMem_New instead of PyMem_Malloc to avoid possible integer overflows. Added few missed PyErr_NoMemory(). --- Modules/_ctypes/_ctypes.c | 9 ++++++--- Modules/_ctypes/stgdict.c | 12 ++++++++---- Modules/_localemodule.c | 2 +- Modules/_ssl.c | 7 ++++--- Modules/_testbuffer.c | 2 +- Modules/_testcapimodule.c | 2 +- Modules/getpath.c | 2 +- Modules/posixmodule.c | 18 +++++++++--------- Modules/pyexpat.c | 4 ++-- Modules/socketmodule.c | 6 ++++-- Modules/unicodedata.c | 4 ++-- Modules/zipimport.c | 2 +- Objects/unicodeobject.c | 22 +++++----------------- PC/winreg.c | 6 +++--- Python/peephole.c | 4 ++-- 15 files changed, 50 insertions(+), 52 deletions(-) diff --git a/Modules/_ctypes/_ctypes.c b/Modules/_ctypes/_ctypes.c index c2889d2..14ec4ce 100644 --- a/Modules/_ctypes/_ctypes.c +++ b/Modules/_ctypes/_ctypes.c @@ -4305,8 +4305,11 @@ Array_subscript(PyObject *myself, PyObject *item) slicelen); } - dest = (wchar_t *)PyMem_Malloc( - slicelen * sizeof(wchar_t)); + dest = PyMem_New(wchar_t, slicelen); + if (dest == NULL) { + PyErr_NoMemory(); + return NULL; + } for (cur = start, i = 0; i < slicelen; cur += step, i++) { @@ -4986,7 +4989,7 @@ Pointer_subscript(PyObject *myself, PyObject *item) return PyUnicode_FromWideChar(ptr + start, len); } - dest = (wchar_t *)PyMem_Malloc(len * sizeof(wchar_t)); + dest = PyMem_New(wchar_t, len); if (dest == NULL) return PyErr_NoMemory(); for (cur = start, i = 0; i < len; cur += step, i++) { diff --git a/Modules/_ctypes/stgdict.c b/Modules/_ctypes/stgdict.c index 728f751..879afb8 100644 --- a/Modules/_ctypes/stgdict.c +++ b/Modules/_ctypes/stgdict.c @@ -76,14 +76,18 @@ PyCStgDict_clone(StgDictObject *dst, StgDictObject *src) if (src->format) { dst->format = PyMem_Malloc(strlen(src->format) + 1); - if (dst->format == NULL) + if (dst->format == NULL) { + PyErr_NoMemory(); return -1; + } strcpy(dst->format, src->format); } if (src->shape) { dst->shape = PyMem_Malloc(sizeof(Py_ssize_t) * src->ndim); - if (dst->shape == NULL) + if (dst->shape == NULL) { + PyErr_NoMemory(); return -1; + } memcpy(dst->shape, src->shape, sizeof(Py_ssize_t) * src->ndim); } @@ -380,7 +384,7 @@ PyCStructUnionType_update_stgdict(PyObject *type, PyObject *fields, int isStruct union_size = 0; total_align = align ? 
align : 1; stgdict->ffi_type_pointer.type = FFI_TYPE_STRUCT; - stgdict->ffi_type_pointer.elements = PyMem_Malloc(sizeof(ffi_type *) * (basedict->length + len + 1)); + stgdict->ffi_type_pointer.elements = PyMem_New(ffi_type *, basedict->length + len + 1); if (stgdict->ffi_type_pointer.elements == NULL) { PyErr_NoMemory(); return -1; @@ -398,7 +402,7 @@ PyCStructUnionType_update_stgdict(PyObject *type, PyObject *fields, int isStruct union_size = 0; total_align = 1; stgdict->ffi_type_pointer.type = FFI_TYPE_STRUCT; - stgdict->ffi_type_pointer.elements = PyMem_Malloc(sizeof(ffi_type *) * (len + 1)); + stgdict->ffi_type_pointer.elements = PyMem_New(ffi_type *, len + 1); if (stgdict->ffi_type_pointer.elements == NULL) { PyErr_NoMemory(); return -1; diff --git a/Modules/_localemodule.c b/Modules/_localemodule.c index 400c344..b1d6add 100644 --- a/Modules/_localemodule.c +++ b/Modules/_localemodule.c @@ -254,7 +254,7 @@ PyLocale_strxfrm(PyObject* self, PyObject* args) /* assume no change in size, first */ n1 = n1 + 1; - buf = PyMem_Malloc(n1 * sizeof(wchar_t)); + buf = PyMem_New(wchar_t, n1); if (!buf) { PyErr_NoMemory(); goto exit; diff --git a/Modules/_ssl.c b/Modules/_ssl.c index 914d5aa..9539710 100644 --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -3838,10 +3838,11 @@ static int _setup_ssl_threads(void) { if (_ssl_locks == NULL) { _ssl_locks_count = CRYPTO_num_locks(); - _ssl_locks = (PyThread_type_lock *) - PyMem_Malloc(sizeof(PyThread_type_lock) * _ssl_locks_count); - if (_ssl_locks == NULL) + _ssl_locks = PyMem_New(PyThread_type_lock, _ssl_locks_count); + if (_ssl_locks == NULL) { + PyErr_NoMemory(); return 0; + } memset(_ssl_locks, 0, sizeof(PyThread_type_lock) * _ssl_locks_count); for (i = 0; i < _ssl_locks_count; i++) { diff --git a/Modules/_testbuffer.c b/Modules/_testbuffer.c index 0c6ef16..176df7c 100644 --- a/Modules/_testbuffer.c +++ b/Modules/_testbuffer.c @@ -850,7 +850,7 @@ seq_as_ssize_array(PyObject *seq, Py_ssize_t len, int is_shape) Py_ssize_t *dest; Py_ssize_t x, i; - dest = PyMem_Malloc(len * (sizeof *dest)); + dest = PyMem_New(Py_ssize_t, len); if (dest == NULL) { PyErr_NoMemory(); return NULL; diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c index 625409e..cf4b0e1 100644 --- a/Modules/_testcapimodule.c +++ b/Modules/_testcapimodule.c @@ -1516,7 +1516,7 @@ unicode_aswidechar(PyObject *self, PyObject *args) if (!PyArg_ParseTuple(args, "Un", &unicode, &buflen)) return NULL; - buffer = PyMem_Malloc(buflen * sizeof(wchar_t)); + buffer = PyMem_New(wchar_t, buflen); if (buffer == NULL) return PyErr_NoMemory(); diff --git a/Modules/getpath.c b/Modules/getpath.c index c057737..13e3817 100644 --- a/Modules/getpath.c +++ b/Modules/getpath.c @@ -735,7 +735,7 @@ calculate_path(void) bufsz += wcslen(zip_path) + 1; bufsz += wcslen(exec_prefix) + 1; - buf = (wchar_t *)PyMem_Malloc(bufsz * sizeof(wchar_t)); + buf = PyMem_New(wchar_t, bufsz); if (buf == NULL) { Py_FatalError( "Not enough memory for dynamic PYTHONPATH"); diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c index 628dec2..d45f59e 100644 --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -1638,7 +1638,7 @@ get_target_path(HANDLE hdl, wchar_t **target_path) if(!buf_size) return FALSE; - buf = (wchar_t *)PyMem_Malloc((buf_size+1)*sizeof(wchar_t)); + buf = PyMem_New(wchar_t, buf_size+1); if (!buf) { SetLastError(ERROR_OUTOFMEMORY); return FALSE; @@ -3627,7 +3627,7 @@ _listdir_windows_no_opendir(path_t *path, PyObject *list) len = wcslen(path->wide); } /* The +5 is so we can append "\\*.*\0" */ - 
wnamebuf = PyMem_Malloc((len + 5) * sizeof(wchar_t)); + wnamebuf = PyMem_New(wchar_t, len + 5); if (!wnamebuf) { PyErr_NoMemory(); goto exit; @@ -3917,7 +3917,7 @@ posix__getfullpathname(PyObject *self, PyObject *args) Py_ARRAY_LENGTH(woutbuf), woutbuf, &wtemp); if (result > Py_ARRAY_LENGTH(woutbuf)) { - woutbufp = PyMem_Malloc(result * sizeof(wchar_t)); + woutbufp = PyMem_New(wchar_t, result); if (!woutbufp) return PyErr_NoMemory(); result = GetFullPathNameW(wpath, result, woutbufp, &wtemp); @@ -3997,7 +3997,7 @@ posix__getfinalpathname(PyObject *self, PyObject *args) if(!buf_size) return win32_error_object("GetFinalPathNameByHandle", po); - target_path = (wchar_t *)PyMem_Malloc((buf_size+1)*sizeof(wchar_t)); + target_path = PyMem_New(wchar_t, buf_size+1); if(!target_path) return PyErr_NoMemory(); @@ -4082,7 +4082,7 @@ posix__getvolumepathname(PyObject *self, PyObject *args) return NULL; } - mountpath = (wchar_t *)PyMem_Malloc(buflen * sizeof(wchar_t)); + mountpath = PyMem_New(wchar_t, buflen); if (mountpath == NULL) return PyErr_NoMemory(); @@ -6213,9 +6213,9 @@ posix_getgrouplist(PyObject *self, PyObject *args) #endif #ifdef __APPLE__ - groups = PyMem_Malloc(ngroups * sizeof(int)); + groups = PyMem_New(int, ngroups); #else - groups = PyMem_Malloc(ngroups * sizeof(gid_t)); + groups = PyMem_New(gid_t, ngroups); #endif if (groups == NULL) return PyErr_NoMemory(); @@ -6293,7 +6293,7 @@ posix_getgroups(PyObject *self, PyObject *noargs) /* groups will fit in existing array */ alt_grouplist = grouplist; } else { - alt_grouplist = PyMem_Malloc(n * sizeof(gid_t)); + alt_grouplist = PyMem_New(gid_t, n); if (alt_grouplist == NULL) { errno = EINVAL; return posix_error(); @@ -6319,7 +6319,7 @@ posix_getgroups(PyObject *self, PyObject *noargs) /* Avoid malloc(0) */ alt_grouplist = grouplist; } else { - alt_grouplist = PyMem_Malloc(n * sizeof(gid_t)); + alt_grouplist = PyMem_New(gid_t, n); if (alt_grouplist == NULL) { errno = EINVAL; return posix_error(); diff --git a/Modules/pyexpat.c b/Modules/pyexpat.c index 4ced53b..19be0c7 100644 --- a/Modules/pyexpat.c +++ b/Modules/pyexpat.c @@ -928,7 +928,7 @@ xmlparse_ExternalEntityParserCreate(xmlparseobject *self, PyObject *args) for (i = 0; handler_info[i].name != NULL; i++) /* do nothing */; - new_parser->handlers = PyMem_Malloc(sizeof(PyObject *) * i); + new_parser->handlers = PyMem_New(PyObject *, i); if (!new_parser->handlers) { Py_DECREF(new_parser); return PyErr_NoMemory(); @@ -1121,7 +1121,7 @@ newxmlparseobject(char *encoding, char *namespace_separator, PyObject *intern) for (i = 0; handler_info[i].name != NULL; i++) /* do nothing */; - self->handlers = PyMem_Malloc(sizeof(PyObject *) * i); + self->handlers = PyMem_New(PyObject *, i); if (!self->handlers) { Py_DECREF(self); return PyErr_NoMemory(); diff --git a/Modules/socketmodule.c b/Modules/socketmodule.c index cb44d05..e9feba3 100644 --- a/Modules/socketmodule.c +++ b/Modules/socketmodule.c @@ -4126,9 +4126,11 @@ socket_gethostname(PyObject *self, PyObject *unused) /* MSDN says ERROR_MORE_DATA may occur because DNS allows longer names */ - name = PyMem_Malloc(size * sizeof(wchar_t)); - if (!name) + name = PyMem_New(wchar_t, size); + if (!name) { + PyErr_NoMemory(); return NULL; + } if (!GetComputerNameExW(ComputerNamePhysicalDnsHostname, name, &size)) diff --git a/Modules/unicodedata.c b/Modules/unicodedata.c index ec70e7a..47d2937 100644 --- a/Modules/unicodedata.c +++ b/Modules/unicodedata.c @@ -556,7 +556,7 @@ nfd_nfkd(PyObject *self, PyObject *input, int k) /* Overallocate at most 10 
characters. */ space = (isize > 10 ? 10 : isize) + isize; osize = space; - output = PyMem_Malloc(space * sizeof(Py_UCS4)); + output = PyMem_New(Py_UCS4, space); if (!output) { PyErr_NoMemory(); return NULL; @@ -703,7 +703,7 @@ nfc_nfkc(PyObject *self, PyObject *input, int k) /* We allocate a buffer for the output. If we find that we made no changes, we still return the NFD result. */ - output = PyMem_Malloc(len * sizeof(Py_UCS4)); + output = PyMem_New(Py_UCS4, len); if (!output) { PyErr_NoMemory(); Py_DECREF(result); diff --git a/Modules/zipimport.c b/Modules/zipimport.c index 8fe9195..f2cc245 100644 --- a/Modules/zipimport.c +++ b/Modules/zipimport.c @@ -233,7 +233,7 @@ make_filename(PyObject *prefix, PyObject *name) Py_ssize_t len; len = PyUnicode_GET_LENGTH(prefix) + PyUnicode_GET_LENGTH(name) + 1; - p = buf = PyMem_Malloc(sizeof(Py_UCS4) * len); + p = buf = PyMem_New(Py_UCS4, len); if (buf == NULL) { PyErr_NoMemory(); return NULL; diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index 84ab6a1..2ffa55b 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -2186,7 +2186,7 @@ _PyUnicode_AsKind(PyObject *s, unsigned int kind) } switch (kind) { case PyUnicode_2BYTE_KIND: - result = PyMem_Malloc(len * sizeof(Py_UCS2)); + result = PyMem_New(Py_UCS2, len); if (!result) return PyErr_NoMemory(); assert(skind == PyUnicode_1BYTE_KIND); @@ -2197,7 +2197,7 @@ _PyUnicode_AsKind(PyObject *s, unsigned int kind) result); return result; case PyUnicode_4BYTE_KIND: - result = PyMem_Malloc(len * sizeof(Py_UCS4)); + result = PyMem_New(Py_UCS4, len); if (!result) return PyErr_NoMemory(); if (skind == PyUnicode_2BYTE_KIND) { @@ -2239,11 +2239,7 @@ as_ucs4(PyObject *string, Py_UCS4 *target, Py_ssize_t targetsize, if (copy_null) targetlen++; if (!target) { - if (PY_SSIZE_T_MAX / sizeof(Py_UCS4) < targetlen) { - PyErr_NoMemory(); - return NULL; - } - target = PyMem_Malloc(targetlen * sizeof(Py_UCS4)); + target = PyMem_New(Py_UCS4, targetlen); if (!target) { PyErr_NoMemory(); return NULL; @@ -2852,12 +2848,7 @@ PyUnicode_AsWideCharString(PyObject *unicode, buflen = unicode_aswidechar(unicode, NULL, 0); if (buflen == -1) return NULL; - if (PY_SSIZE_T_MAX / sizeof(wchar_t) < buflen) { - PyErr_NoMemory(); - return NULL; - } - - buffer = PyMem_MALLOC(buflen * sizeof(wchar_t)); + buffer = PyMem_NEW(wchar_t, buflen); if (buffer == NULL) { PyErr_NoMemory(); return NULL; @@ -3550,10 +3541,7 @@ PyUnicode_DecodeLocaleAndSize(const char *str, Py_ssize_t len, wstr = smallbuf; } else { - if (wlen > PY_SSIZE_T_MAX / sizeof(wchar_t) - 1) - return PyErr_NoMemory(); - - wstr = PyMem_Malloc((wlen+1) * sizeof(wchar_t)); + wstr = PyMem_New(wchar_t, wlen+1); if (!wstr) return PyErr_NoMemory(); } diff --git a/PC/winreg.c b/PC/winreg.c index 63c437e..19d5a70 100644 --- a/PC/winreg.c +++ b/PC/winreg.c @@ -939,7 +939,7 @@ Reg2Py(BYTE *retDataBuf, DWORD retDataSize, DWORD typ) wchar_t *data = (wchar_t *)retDataBuf; int len = retDataSize / 2; int s = countStrings(data, len); - wchar_t **str = (wchar_t **)PyMem_Malloc(sizeof(wchar_t *)*s); + wchar_t **str = PyMem_New(wchar_t *, s); if (str == NULL) return PyErr_NoMemory(); @@ -1206,7 +1206,7 @@ PyEnumValue(PyObject *self, PyObject *args) ++retDataSize; bufDataSize = retDataSize; bufValueSize = retValueSize; - retValueBuf = (wchar_t *)PyMem_Malloc(sizeof(wchar_t) * retValueSize); + retValueBuf = PyMem_New(wchar_t, retValueSize); if (retValueBuf == NULL) return PyErr_NoMemory(); retDataBuf = (BYTE *)PyMem_Malloc(retDataSize); @@ -1277,7 +1277,7 @@ 
PyExpandEnvironmentStrings(PyObject *self, PyObject *args) return PyErr_SetFromWindowsErrWithFunction(retValueSize, "ExpandEnvironmentStrings"); } - retValue = (wchar_t *)PyMem_Malloc(retValueSize * sizeof(wchar_t)); + retValue = PyMem_New(wchar_t, retValueSize); if (retValue == NULL) { return PyErr_NoMemory(); } diff --git a/Python/peephole.c b/Python/peephole.c index 4185462..c56c8fc 100644 --- a/Python/peephole.c +++ b/Python/peephole.c @@ -290,7 +290,7 @@ fold_unaryops_on_constants(unsigned char *codestr, PyObject *consts, PyObject *v static unsigned int * markblocks(unsigned char *code, Py_ssize_t len) { - unsigned int *blocks = (unsigned int *)PyMem_Malloc(len*sizeof(int)); + unsigned int *blocks = PyMem_New(unsigned int, len); int i,j, opcode, blockcnt = 0; if (blocks == NULL) { @@ -398,7 +398,7 @@ PyCode_Optimize(PyObject *code, PyObject* consts, PyObject *names, goto exitUnchanged; /* Mapping to new jump targets after NOPs are removed */ - addrmap = (int *)PyMem_Malloc(codelen * sizeof(int)); + addrmap = PyMem_New(int, codelen); if (addrmap == NULL) { PyErr_NoMemory(); goto exitError; -- cgit v0.12 From 3584056ca58957a6adca060419e48a9488852550 Mon Sep 17 00:00:00 2001 From: Serhiy Storchaka Date: Mon, 16 Feb 2015 20:54:03 +0200 Subject: Shoould be Py_MIN, not Py_MAX. --- Modules/_tkinter.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Modules/_tkinter.c b/Modules/_tkinter.c index 6d777d3..f5bade0 100644 --- a/Modules/_tkinter.c +++ b/Modules/_tkinter.c @@ -34,7 +34,7 @@ Copyright (C) 1994 Steen Lumholt. #endif #define CHECK_SIZE(size, elemsize) \ - ((size_t)(size) <= Py_MAX((size_t)INT_MAX, UINT_MAX / (size_t)(elemsize))) + ((size_t)(size) <= Py_MIN((size_t)INT_MAX, UINT_MAX / (size_t)(elemsize))) /* Starting with Tcl 8.4, many APIs offer const-correctness. Unfortunately, making _tkinter correct for this API means to break earlier -- cgit v0.12 From 54237f9feaefd209c2aaa5b4003810e69f6714f3 Mon Sep 17 00:00:00 2001 From: Benjamin Peterson Date: Mon, 16 Feb 2015 19:45:01 -0500 Subject: fix pydoc.apropos and pydoc.synopsis on modules with empty docstrings (#21548) Patch by Yuyang Guo and Berker Peksag. --- Lib/pydoc.py | 4 ++-- Lib/test/test_pydoc.py | 32 ++++++++++++++++++++++++++++++++ Misc/ACKS | 1 + Misc/NEWS | 3 +++ 4 files changed, 38 insertions(+), 2 deletions(-) diff --git a/Lib/pydoc.py b/Lib/pydoc.py index d53a1b4..d37ebf1 100755 --- a/Lib/pydoc.py +++ b/Lib/pydoc.py @@ -270,7 +270,7 @@ def synopsis(filename, cache={}): except: return None del sys.modules['__temp__'] - result = (module.__doc__ or '').splitlines()[0] + result = module.__doc__.splitlines()[0] if module.__doc__ else None # Cache the result. 
cache[filename] = (mtime, result) return result @@ -2075,7 +2075,7 @@ class ModuleScanner: if onerror: onerror(modname) continue - desc = (module.__doc__ or '').splitlines()[0] + desc = module.__doc__.splitlines()[0] if module.__doc__ else '' path = getattr(module,'__file__',None) name = modname + ' - ' + desc if name.lower().find(key) >= 0: diff --git a/Lib/test/test_pydoc.py b/Lib/test/test_pydoc.py index 8bf9b20..1427c77 100644 --- a/Lib/test/test_pydoc.py +++ b/Lib/test/test_pydoc.py @@ -3,12 +3,15 @@ import sys import builtins import contextlib import difflib +import importlib.util import inspect import pydoc +import py_compile import keyword import _pickle import pkgutil import re +import stat import string import test.support import time @@ -557,6 +560,18 @@ class PydocDocTest(unittest.TestCase): self.assertEqual(synopsis, expected) + def test_synopsis_sourceless_empty_doc(self): + with test.support.temp_cwd() as test_dir: + init_path = os.path.join(test_dir, 'foomod42.py') + cached_path = importlib.util.cache_from_source(init_path) + with open(init_path, 'w') as fobj: + fobj.write("foo = 1") + py_compile.compile(init_path) + synopsis = pydoc.synopsis(init_path, {}) + self.assertIsNone(synopsis) + synopsis_cached = pydoc.synopsis(cached_path, {}) + self.assertIsNone(synopsis_cached) + def test_splitdoc_with_description(self): example_string = "I Am A Doc\n\n\nHere is my description" self.assertEqual(pydoc.splitdoc(example_string), @@ -612,6 +627,7 @@ class PydocImportTest(PydocBaseTest): def setUp(self): self.test_dir = os.mkdir(TESTFN) self.addCleanup(rmtree, TESTFN) + importlib.invalidate_caches() def test_badimport(self): # This tests the fix for issue 5230, where if pydoc found the module @@ -670,6 +686,22 @@ class PydocImportTest(PydocBaseTest): self.assertEqual(out.getvalue(), '') self.assertEqual(err.getvalue(), '') + def test_apropos_empty_doc(self): + pkgdir = os.path.join(TESTFN, 'walkpkg') + os.mkdir(pkgdir) + self.addCleanup(rmtree, pkgdir) + init_path = os.path.join(pkgdir, '__init__.py') + with open(init_path, 'w') as fobj: + fobj.write("foo = 1") + current_mode = stat.S_IMODE(os.stat(pkgdir).st_mode) + try: + os.chmod(pkgdir, current_mode & ~stat.S_IEXEC) + with self.restrict_walk_packages(path=[TESTFN]), captured_stdout() as stdout: + pydoc.apropos('') + self.assertIn('walkpkg', stdout.getvalue()) + finally: + os.chmod(pkgdir, current_mode) + @unittest.skip('causes undesireable side-effects (#20128)') def test_modules(self): # See Helper.listmodules(). diff --git a/Misc/ACKS b/Misc/ACKS index 65518db..71f4048 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -506,6 +506,7 @@ Eric Groo Dag Gruneau Filip Gruszczyński Thomas Guettler +Yuyang Guo Anuj Gupta Michael Guravage Lars Gustäbel diff --git a/Misc/NEWS b/Misc/NEWS index b453fe4..3f50594 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -13,6 +13,9 @@ Core and Builtins Library ------- +- Issue #21548: Fix pydoc.synopsis() and pydoc.apropos() on modules with empty + docstrings. + - Issue #22885: Fixed arbitrary code execution vulnerability in the dbm.dumb module. Original patch by Claudiu Popa. -- cgit v0.12 From df0db49b1b8b08e7a17dd1358ad4f532a0f3151f Mon Sep 17 00:00:00 2001 From: Serhiy Storchaka Date: Tue, 17 Feb 2015 10:18:44 +0200 Subject: Issue #22883: Update PyInt to PyLong in C API example. 
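A short note on why the pydoc change above was needed: str.splitlines() on an
empty string returns an empty list, so the old expression raised IndexError
for modules whose docstring was missing or empty. Illustrative sketch only,
not part of the patch:

    # A module compiled from just "foo = 1" has __doc__ == None.
    doc = None

    # Old form: (doc or '').splitlines()[0]
    #   -> ''.splitlines() == [], so indexing [0] raises IndexError.
    # Patched form guards before indexing:
    synopsis = doc.splitlines()[0] if doc else None
    print(synopsis)   # None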
--- Doc/extending/newtypes.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Doc/extending/newtypes.rst b/Doc/extending/newtypes.rst index 45b5721..d520702 100644 --- a/Doc/extending/newtypes.rst +++ b/Doc/extending/newtypes.rst @@ -1205,7 +1205,7 @@ Here is an example:: { if (strcmp(name, "data") == 0) { - return PyInt_FromLong(obj->data); + return PyLong_FromLong(obj->data); } PyErr_Format(PyExc_AttributeError, -- cgit v0.12 From 4e82fb99a0b5128abefaf433a56ab1fa48b9a848 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Tue, 17 Feb 2015 22:50:33 +0100 Subject: asyncio: BaseSubprocessTransport: repr() mentions when the child process is running --- Lib/asyncio/base_subprocess.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/Lib/asyncio/base_subprocess.py b/Lib/asyncio/base_subprocess.py index 5458ab1..f56873f 100644 --- a/Lib/asyncio/base_subprocess.py +++ b/Lib/asyncio/base_subprocess.py @@ -57,6 +57,8 @@ class BaseSubprocessTransport(transports.SubprocessTransport): info.append('pid=%s' % self._pid) if self._returncode is not None: info.append('returncode=%s' % self._returncode) + else: + info.append('running') stdin = self._pipes.get(0) if stdin is not None: -- cgit v0.12 From 4cb814c7e1840b9e4d479cc43e65b6fff451ae90 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Tue, 17 Feb 2015 22:53:28 +0100 Subject: asyncio, Tulip issue 220: Merge JoinableQueue with Queue. Merge JoinableQueue with Queue. To more closely match the standard Queue, asyncio.Queue has "join" and "task_done". JoinableQueue is deleted. Docstring for Queue.join shouldn't mention threads. Restore JoinableQueue as a deprecated alias for Queue. To more closely match the standard Queue, asyncio.Queue has "join" and "task_done". JoinableQueue remains as a deprecated alias for Queue to avoid needlessly breaking too much code that depended on it. Patch written by A. Jesse Jiryu Davis . --- Lib/asyncio/queues.py | 102 +++++++++++++++-------------------- Lib/test/test_asyncio/test_queues.py | 10 ++-- 2 files changed, 48 insertions(+), 64 deletions(-) diff --git a/Lib/asyncio/queues.py b/Lib/asyncio/queues.py index 4aeb6c4..84cdabc 100644 --- a/Lib/asyncio/queues.py +++ b/Lib/asyncio/queues.py @@ -1,7 +1,7 @@ """Queues""" -__all__ = ['Queue', 'PriorityQueue', 'LifoQueue', 'JoinableQueue', - 'QueueFull', 'QueueEmpty'] +__all__ = ['Queue', 'PriorityQueue', 'LifoQueue', 'QueueFull', 'QueueEmpty', + 'JoinableQueue'] import collections import heapq @@ -49,6 +49,9 @@ class Queue: self._getters = collections.deque() # Pairs of (item, Future). self._putters = collections.deque() + self._unfinished_tasks = 0 + self._finished = locks.Event(loop=self._loop) + self._finished.set() self._init(maxsize) def _init(self, maxsize): @@ -59,6 +62,8 @@ class Queue: def _put(self, item): self._queue.append(item) + self._unfinished_tasks += 1 + self._finished.clear() def __repr__(self): return '<{} at {:#x} {}>'.format( @@ -75,6 +80,8 @@ class Queue: result += ' _getters[{}]'.format(len(self._getters)) if self._putters: result += ' _putters[{}]'.format(len(self._putters)) + if self._unfinished_tasks: + result += ' tasks={}'.format(self._unfinished_tasks) return result def _consume_done_getters(self): @@ -126,9 +133,6 @@ class Queue: 'queue non-empty, why are getters waiting?') getter = self._getters.popleft() - - # Use _put and _get instead of passing item straight to getter, in - # case a subclass has logic that must run (e.g. JoinableQueue). 
self._put(item) # getter cannot be cancelled, we just removed done getters @@ -154,9 +158,6 @@ class Queue: 'queue non-empty, why are getters waiting?') getter = self._getters.popleft() - - # Use _put and _get instead of passing item straight to getter, in - # case a subclass has logic that must run (e.g. JoinableQueue). self._put(item) # getter cannot be cancelled, we just removed done getters @@ -219,6 +220,38 @@ class Queue: else: raise QueueEmpty + def task_done(self): + """Indicate that a formerly enqueued task is complete. + + Used by queue consumers. For each get() used to fetch a task, + a subsequent call to task_done() tells the queue that the processing + on the task is complete. + + If a join() is currently blocking, it will resume when all items have + been processed (meaning that a task_done() call was received for every + item that had been put() into the queue). + + Raises ValueError if called more times than there were items placed in + the queue. + """ + if self._unfinished_tasks <= 0: + raise ValueError('task_done() called too many times') + self._unfinished_tasks -= 1 + if self._unfinished_tasks == 0: + self._finished.set() + + @coroutine + def join(self): + """Block until all items in the queue have been gotten and processed. + + The count of unfinished tasks goes up whenever an item is added to the + queue. The count goes down whenever a consumer calls task_done() to + indicate that the item was retrieved and all work on it is complete. + When the count of unfinished tasks drops to zero, join() unblocks. + """ + if self._unfinished_tasks > 0: + yield from self._finished.wait() + class PriorityQueue(Queue): """A subclass of Queue; retrieves entries in priority order (lowest first). @@ -249,54 +282,5 @@ class LifoQueue(Queue): return self._queue.pop() -class JoinableQueue(Queue): - """A subclass of Queue with task_done() and join() methods.""" - - def __init__(self, maxsize=0, *, loop=None): - super().__init__(maxsize=maxsize, loop=loop) - self._unfinished_tasks = 0 - self._finished = locks.Event(loop=self._loop) - self._finished.set() - - def _format(self): - result = Queue._format(self) - if self._unfinished_tasks: - result += ' tasks={}'.format(self._unfinished_tasks) - return result - - def _put(self, item): - super()._put(item) - self._unfinished_tasks += 1 - self._finished.clear() - - def task_done(self): - """Indicate that a formerly enqueued task is complete. - - Used by queue consumers. For each get() used to fetch a task, - a subsequent call to task_done() tells the queue that the processing - on the task is complete. - - If a join() is currently blocking, it will resume when all items have - been processed (meaning that a task_done() call was received for every - item that had been put() into the queue). - - Raises ValueError if called more times than there were items placed in - the queue. - """ - if self._unfinished_tasks <= 0: - raise ValueError('task_done() called too many times') - self._unfinished_tasks -= 1 - if self._unfinished_tasks == 0: - self._finished.set() - - @coroutine - def join(self): - """Block until all items in the queue have been gotten and processed. - - The count of unfinished tasks goes up whenever an item is added to the - queue. The count goes down whenever a consumer thread calls task_done() - to indicate that the item was retrieved and all work on it is complete. - When the count of unfinished tasks drops to zero, join() unblocks. 
- """ - if self._unfinished_tasks > 0: - yield from self._finished.wait() +JoinableQueue = Queue +"""Deprecated alias for Queue.""" diff --git a/Lib/test/test_asyncio/test_queues.py b/Lib/test/test_asyncio/test_queues.py index 3d4ac51..a73539d 100644 --- a/Lib/test/test_asyncio/test_queues.py +++ b/Lib/test/test_asyncio/test_queues.py @@ -408,14 +408,14 @@ class PriorityQueueTests(_QueueTestBase): self.assertEqual([1, 2, 3], items) -class JoinableQueueTests(_QueueTestBase): +class QueueJoinTests(_QueueTestBase): def test_task_done_underflow(self): - q = asyncio.JoinableQueue(loop=self.loop) + q = asyncio.Queue(loop=self.loop) self.assertRaises(ValueError, q.task_done) def test_task_done(self): - q = asyncio.JoinableQueue(loop=self.loop) + q = asyncio.Queue(loop=self.loop) for i in range(100): q.put_nowait(i) @@ -452,7 +452,7 @@ class JoinableQueueTests(_QueueTestBase): self.loop.run_until_complete(asyncio.wait(tasks, loop=self.loop)) def test_join_empty_queue(self): - q = asyncio.JoinableQueue(loop=self.loop) + q = asyncio.Queue(loop=self.loop) # Test that a queue join()s successfully, and before anything else # (done twice for insurance). @@ -465,7 +465,7 @@ class JoinableQueueTests(_QueueTestBase): self.loop.run_until_complete(join()) def test_format(self): - q = asyncio.JoinableQueue(loop=self.loop) + q = asyncio.Queue(loop=self.loop) self.assertEqual(q._format(), 'maxsize=0') q._unfinished_tasks = 2 -- cgit v0.12 From 4088ad9dcef0d7bbe26dc4a2527d4220ac558f53 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Tue, 17 Feb 2015 22:54:11 +0100 Subject: Issue #23475, asyncio: Fix test_close_kill_running() Really kill the child process, don't mock completly the Popen.kill() method. This change fix memory leaks and reference leaks. --- Lib/test/test_asyncio/test_subprocess.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/Lib/test/test_asyncio/test_subprocess.py b/Lib/test/test_asyncio/test_subprocess.py index de0b08a..92bf1b4 100644 --- a/Lib/test/test_asyncio/test_subprocess.py +++ b/Lib/test/test_asyncio/test_subprocess.py @@ -355,11 +355,19 @@ class SubprocessMixin: create = self.loop.subprocess_exec(asyncio.SubprocessProtocol, *PROGRAM_BLOCKED) transport, protocol = yield from create + + kill_called = False + def kill(): + nonlocal kill_called + kill_called = True + orig_kill() + proc = transport.get_extra_info('subprocess') - proc.kill = mock.Mock() + orig_kill = proc.kill + proc.kill = kill returncode = transport.get_returncode() transport.close() - return (returncode, proc.kill.called) + return (returncode, kill_called) # Ignore "Close running child process: kill ..." 
log with test_utils.disable_logger(): -- cgit v0.12 From e170ed27c61fb4c0fbb9db2e9c4c5de4965b4f46 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Tue, 17 Feb 2015 23:08:35 +0100 Subject: asyncio doc: fix the sphinx extension for coroutine commands --- Doc/tools/extensions/pyspecific.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/Doc/tools/extensions/pyspecific.py b/Doc/tools/extensions/pyspecific.py index 17b3c82..64a5665 100644 --- a/Doc/tools/extensions/pyspecific.py +++ b/Doc/tools/extensions/pyspecific.py @@ -148,17 +148,12 @@ class PyDecoratorMethod(PyDecoratorMixin, PyClassmember): class PyCoroutineMixin(object): def handle_signature(self, sig, signode): ret = super(PyCoroutineMixin, self).handle_signature(sig, signode) -# signode.insert(0, addnodes.desc_addname('coroutine ', 'coroutine ')) signode.insert(0, addnodes.desc_annotation('coroutine ', 'coroutine ')) return ret - def needs_arglist(self): - return False - class PyCoroutineFunction(PyCoroutineMixin, PyModulelevel): def run(self): - # a decorator function is a function after all self.name = 'py:function' return PyModulelevel.run(self) -- cgit v0.12 From f91d845797255240ea42297d520727f93fcab44c Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Tue, 17 Feb 2015 23:09:52 +0100 Subject: asyncio, Tulip issue 220: Update doc of asyncio.Queue, add join and task_done methods --- Doc/library/asyncio-sync.rst | 64 +++++++++++++++++++++++++------------------- 1 file changed, 36 insertions(+), 28 deletions(-) diff --git a/Doc/library/asyncio-sync.rst b/Doc/library/asyncio-sync.rst index f036bc8..e3d82b0 100644 --- a/Doc/library/asyncio-sync.rst +++ b/Doc/library/asyncio-sync.rst @@ -310,6 +310,9 @@ Queue be interrupted between calling :meth:`qsize` and doing an operation on the Queue. + .. versionchanged:: 3.4.3 + New :meth:`join` and :meth:`task_done` methods. + .. method:: empty() Return ``True`` if the queue is empty, ``False`` otherwise. @@ -341,6 +344,20 @@ Queue Return an item if one is immediately available, else raise :exc:`QueueEmpty`. + .. coroutinemethod:: join() + + Block until all items in the queue have been gotten and processed. + + The count of unfinished tasks goes up whenever an item is added to the + queue. The count goes down whenever a consumer thread calls + :meth:`task_done` to indicate that the item was retrieved and all work on + it is complete. When the count of unfinished tasks drops to zero, + :meth:`join` unblocks. + + This method is a :ref:`coroutine `. + + .. versionadded:: 3.4.3 + .. coroutinemethod:: put(item) Put an item into the queue. If the queue is full, wait until a free slot @@ -362,6 +379,23 @@ Queue Number of items in the queue. + .. method:: task_done() + + Indicate that a formerly enqueued task is complete. + + Used by queue consumers. For each :meth:`~Queue.get` used to fetch a task, a + subsequent call to :meth:`task_done` tells the queue that the processing + on the task is complete. + + If a :meth:`join` is currently blocking, it will resume when all items + have been processed (meaning that a :meth:`task_done` call was received + for every item that had been :meth:`~Queue.put` into the queue). + + Raises :exc:`ValueError` if called more times than there were items + placed in the queue. + + .. versionadded:: 3.4.3 + .. attribute:: maxsize Number of items allowed in the queue. @@ -392,35 +426,9 @@ JoinableQueue .. class:: JoinableQueue - A subclass of :class:`Queue` with :meth:`task_done` and :meth:`join` - methods. - - .. 
coroutinemethod:: join() - - Block until all items in the queue have been gotten and processed. - - The count of unfinished tasks goes up whenever an item is added to the - queue. The count goes down whenever a consumer thread calls - :meth:`task_done` to indicate that the item was retrieved and all work on - it is complete. When the count of unfinished tasks drops to zero, - :meth:`join` unblocks. - - This method is a :ref:`coroutine `. - - .. method:: task_done() - - Indicate that a formerly enqueued task is complete. - - Used by queue consumers. For each :meth:`~Queue.get` used to fetch a task, a - subsequent call to :meth:`task_done` tells the queue that the processing - on the task is complete. - - If a :meth:`join` is currently blocking, it will resume when all items - have been processed (meaning that a :meth:`task_done` call was received - for every item that had been :meth:`~Queue.put` into the queue). + Deprecated alias for :class:`Queue`. - Raises :exc:`ValueError` if called more times than there were items - placed in the queue. + .. deprecated:: 3.4.3 Exceptions -- cgit v0.12 From e7a2f64435d795712a766a088541b43dc7cee5b7 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Tue, 17 Feb 2015 23:36:02 +0100 Subject: asyncio: Fix warning in test_close_kill_running() Read process exit status to avoid the "Caught subprocess termination from unknown pid" message. --- Lib/test/test_asyncio/test_subprocess.py | 1 + 1 file changed, 1 insertion(+) diff --git a/Lib/test/test_asyncio/test_subprocess.py b/Lib/test/test_asyncio/test_subprocess.py index 92bf1b4..5ccdafb 100644 --- a/Lib/test/test_asyncio/test_subprocess.py +++ b/Lib/test/test_asyncio/test_subprocess.py @@ -367,6 +367,7 @@ class SubprocessMixin: proc.kill = kill returncode = transport.get_returncode() transport.close() + yield from transport._wait() return (returncode, kill_called) # Ignore "Close running child process: kill ..." log -- cgit v0.12 From 70e2847347f9b4fd579e23cf2e2f329aab1faa5d Mon Sep 17 00:00:00 2001 From: Benjamin Peterson Date: Tue, 17 Feb 2015 21:11:10 -0500 Subject: document the requestline and close_connection attributes, use real booleans, and add tests (closes #23410) Patch by Martin Panter. --- Doc/library/http.server.rst | 12 ++++++++++++ Lib/http/server.py | 20 ++++++++++---------- Lib/test/test_httpservers.py | 39 +++++++++++++++++++++++++++++++++++++++ 3 files changed, 61 insertions(+), 10 deletions(-) diff --git a/Doc/library/http.server.rst b/Doc/library/http.server.rst index 0d8e7fe..a750155 100644 --- a/Doc/library/http.server.rst +++ b/Doc/library/http.server.rst @@ -64,6 +64,18 @@ of which this module provides three different variants: Contains the server instance. + .. attribute:: close_connection + + Boolean that should be set before :meth:`handle_one_request` returns, + indicating if another request may be expected, or if the connection should + be shut down. + + .. attribute:: requestline + + Contains the string representation of the HTTP request line. The + terminating CRLF is stripped. This attribute should be set by + :meth:`handle_one_request`. If no valid request line was processed, it + should be set to the empty string. .. 
attribute:: command diff --git a/Lib/http/server.py b/Lib/http/server.py index a27890e..47655e7 100644 --- a/Lib/http/server.py +++ b/Lib/http/server.py @@ -273,7 +273,7 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): """ self.command = None # set in case of error on the first line self.request_version = version = self.default_request_version - self.close_connection = 1 + self.close_connection = True requestline = str(self.raw_requestline, 'iso-8859-1') requestline = requestline.rstrip('\r\n') self.requestline = requestline @@ -299,14 +299,14 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): self.send_error(400, "Bad request version (%r)" % version) return False if version_number >= (1, 1) and self.protocol_version >= "HTTP/1.1": - self.close_connection = 0 + self.close_connection = False if version_number >= (2, 0): self.send_error(505, "Invalid HTTP Version (%s)" % base_version_number) return False elif len(words) == 2: command, path = words - self.close_connection = 1 + self.close_connection = True if command != 'GET': self.send_error(400, "Bad HTTP/0.9 request type (%r)" % command) @@ -328,10 +328,10 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): conntype = self.headers.get('Connection', "") if conntype.lower() == 'close': - self.close_connection = 1 + self.close_connection = True elif (conntype.lower() == 'keep-alive' and self.protocol_version >= "HTTP/1.1"): - self.close_connection = 0 + self.close_connection = False # Examine the headers and look for an Expect directive expect = self.headers.get('Expect', "") if (expect.lower() == "100-continue" and @@ -376,7 +376,7 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): self.send_error(414) return if not self.raw_requestline: - self.close_connection = 1 + self.close_connection = True return if not self.parse_request(): # An error code has been sent, just exit @@ -391,12 +391,12 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): except socket.timeout as e: #a read or a write timed out. 
Discard this connection self.log_error("Request timed out: %r", e) - self.close_connection = 1 + self.close_connection = True return def handle(self): """Handle multiple requests if necessary.""" - self.close_connection = 1 + self.close_connection = True self.handle_one_request() while not self.close_connection: @@ -478,9 +478,9 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): if keyword.lower() == 'connection': if value.lower() == 'close': - self.close_connection = 1 + self.close_connection = True elif value.lower() == 'keep-alive': - self.close_connection = 0 + self.close_connection = False def end_headers(self): """Send the blank line ending the MIME headers.""" diff --git a/Lib/test/test_httpservers.py b/Lib/test/test_httpservers.py index 67a4654..74e0714 100644 --- a/Lib/test/test_httpservers.py +++ b/Lib/test/test_httpservers.py @@ -616,6 +616,11 @@ class BaseHTTPRequestHandlerTestCase(unittest.TestCase): self.verify_expected_headers(result[1:-1]) self.verify_get_called() self.assertEqual(result[-1], b'Data\r\n') + self.assertEqual(self.handler.requestline, 'GET / HTTP/1.1') + self.assertEqual(self.handler.command, 'GET') + self.assertEqual(self.handler.path, '/') + self.assertEqual(self.handler.request_version, 'HTTP/1.1') + self.assertSequenceEqual(self.handler.headers.items(), ()) def test_http_1_0(self): result = self.send_typical_request(b'GET / HTTP/1.0\r\n\r\n') @@ -623,6 +628,11 @@ class BaseHTTPRequestHandlerTestCase(unittest.TestCase): self.verify_expected_headers(result[1:-1]) self.verify_get_called() self.assertEqual(result[-1], b'Data\r\n') + self.assertEqual(self.handler.requestline, 'GET / HTTP/1.0') + self.assertEqual(self.handler.command, 'GET') + self.assertEqual(self.handler.path, '/') + self.assertEqual(self.handler.request_version, 'HTTP/1.0') + self.assertSequenceEqual(self.handler.headers.items(), ()) def test_http_0_9(self): result = self.send_typical_request(b'GET / HTTP/0.9\r\n\r\n') @@ -636,6 +646,12 @@ class BaseHTTPRequestHandlerTestCase(unittest.TestCase): self.verify_expected_headers(result[1:-1]) self.verify_get_called() self.assertEqual(result[-1], b'Data\r\n') + self.assertEqual(self.handler.requestline, 'GET / HTTP/1.0') + self.assertEqual(self.handler.command, 'GET') + self.assertEqual(self.handler.path, '/') + self.assertEqual(self.handler.request_version, 'HTTP/1.0') + headers = (("Expect", "100-continue"),) + self.assertSequenceEqual(self.handler.headers.items(), headers) def test_with_continue_1_1(self): result = self.send_typical_request(b'GET / HTTP/1.1\r\nExpect: 100-continue\r\n\r\n') @@ -645,6 +661,12 @@ class BaseHTTPRequestHandlerTestCase(unittest.TestCase): self.verify_expected_headers(result[2:-1]) self.verify_get_called() self.assertEqual(result[-1], b'Data\r\n') + self.assertEqual(self.handler.requestline, 'GET / HTTP/1.1') + self.assertEqual(self.handler.command, 'GET') + self.assertEqual(self.handler.path, '/') + self.assertEqual(self.handler.request_version, 'HTTP/1.1') + headers = (("Expect", "100-continue"),) + self.assertSequenceEqual(self.handler.headers.items(), headers) def test_header_buffering_of_send_error(self): @@ -730,6 +752,7 @@ class BaseHTTPRequestHandlerTestCase(unittest.TestCase): result = self.send_typical_request(b'GET ' + b'x' * 65537) self.assertEqual(result[0], b'HTTP/1.1 414 Request-URI Too Long\r\n') self.assertFalse(self.handler.get_called) + self.assertIsInstance(self.handler.requestline, str) def test_header_length(self): # Issue #6791: same for headers @@ -737,6 +760,22 @@ class 
BaseHTTPRequestHandlerTestCase(unittest.TestCase): b'GET / HTTP/1.1\r\nX-Foo: bar' + b'r' * 65537 + b'\r\n\r\n') self.assertEqual(result[0], b'HTTP/1.1 400 Line too long\r\n') self.assertFalse(self.handler.get_called) + self.assertEqual(self.handler.requestline, 'GET / HTTP/1.1') + + def test_close_connection(self): + # handle_one_request() should be repeatedly called until + # it sets close_connection + def handle_one_request(): + self.handler.close_connection = next(close_values) + self.handler.handle_one_request = handle_one_request + + close_values = iter((True,)) + self.handler.handle() + self.assertRaises(StopIteration, next, close_values) + + close_values = iter((False, False, True)) + self.handler.handle() + self.assertRaises(StopIteration, next, close_values) class SimpleHTTPRequestHandlerTestCase(unittest.TestCase): """ Test url parsing """ -- cgit v0.12 From 095458596d51bc5c078c0fd5679b8fcd82071419 Mon Sep 17 00:00:00 2001 From: Serhiy Storchaka Date: Wed, 18 Feb 2015 08:04:37 +0200 Subject: Issue #23474: Enhanced locale testing. --- Lib/test/test__locale.py | 99 +++++++++++++++++++++++++++++------------------- Lib/test/test_locale.py | 2 +- 2 files changed, 61 insertions(+), 40 deletions(-) diff --git a/Lib/test/test__locale.py b/Lib/test/test__locale.py index 4231f37..8d1c8db 100644 --- a/Lib/test/test__locale.py +++ b/Lib/test/test__locale.py @@ -9,7 +9,6 @@ import locale import sys import unittest from platform import uname -from test.support import run_unittest if uname().system == "Darwin": maj, min, mic = [int(part) for part in uname().release.split(".")] @@ -24,45 +23,52 @@ candidate_locales = ['es_UY', 'fr_FR', 'fi_FI', 'es_CO', 'pt_PT', 'it_IT', 'da_DK', 'nn_NO', 'cs_CZ', 'de_LU', 'es_BO', 'sq_AL', 'sk_SK', 'fr_CH', 'de_DE', 'sr_YU', 'br_FR', 'nl_BE', 'sv_FI', 'pl_PL', 'fr_CA', 'fo_FO', 'bs_BA', 'fr_LU', 'kl_GL', 'fa_IR', 'de_BE', 'sv_SE', 'it_CH', 'uk_UA', - 'eu_ES', 'vi_VN', 'af_ZA', 'nb_NO', 'en_DK', 'tg_TJ', 'en_US', + 'eu_ES', 'vi_VN', 'af_ZA', 'nb_NO', 'en_DK', 'tg_TJ', 'ps_AF', 'en_US', 'es_ES.ISO8859-1', 'fr_FR.ISO8859-15', 'ru_RU.KOI8-R', 'ko_KR.eucKR'] -# Issue #13441: Skip some locales (e.g. cs_CZ and hu_HU) on Solaris to -# workaround a mbstowcs() bug. For example, on Solaris, the hu_HU locale uses -# the locale encoding ISO-8859-2, the thousauds separator is b'\xA0' and it is -# decoded as U+30000020 (an invalid character) by mbstowcs(). -if sys.platform == 'sunos5': - old_locale = locale.setlocale(locale.LC_ALL) - try: - locales = [] - for loc in candidate_locales: - try: - locale.setlocale(locale.LC_ALL, loc) - except Error: - continue - encoding = locale.getpreferredencoding(False) - try: - localeconv() - except Exception as err: - print("WARNING: Skip locale %s (encoding %s): [%s] %s" - % (loc, encoding, type(err), err)) - else: - locales.append(loc) - candidate_locales = locales - finally: - locale.setlocale(locale.LC_ALL, old_locale) - -# Workaround for MSVC6(debug) crash bug -if "MSC v.1200" in sys.version: - def accept(loc): - a = loc.split(".") - return not(len(a) == 2 and len(a[-1]) >= 9) - candidate_locales = [loc for loc in candidate_locales if accept(loc)] +def setUpModule(): + global candidate_locales + # Issue #13441: Skip some locales (e.g. cs_CZ and hu_HU) on Solaris to + # workaround a mbstowcs() bug. For example, on Solaris, the hu_HU locale uses + # the locale encoding ISO-8859-2, the thousauds separator is b'\xA0' and it is + # decoded as U+30000020 (an invalid character) by mbstowcs(). 
+ if sys.platform == 'sunos5': + old_locale = locale.setlocale(locale.LC_ALL) + try: + locales = [] + for loc in candidate_locales: + try: + locale.setlocale(locale.LC_ALL, loc) + except Error: + continue + encoding = locale.getpreferredencoding(False) + try: + localeconv() + except Exception as err: + print("WARNING: Skip locale %s (encoding %s): [%s] %s" + % (loc, encoding, type(err), err)) + else: + locales.append(loc) + candidate_locales = locales + finally: + locale.setlocale(locale.LC_ALL, old_locale) + + # Workaround for MSVC6(debug) crash bug + if "MSC v.1200" in sys.version: + def accept(loc): + a = loc.split(".") + return not(len(a) == 2 and len(a[-1]) >= 9) + candidate_locales = [loc for loc in candidate_locales if accept(loc)] # List known locale values to test against when available. # Dict formatted as `` : (, )``. If a # value is not known, use '' . -known_numerics = {'fr_FR' : (',', ''), 'en_US':('.', ',')} +known_numerics = { + 'en_US': ('.', ','), + 'fr_FR' : (',', ' '), + 'de_DE' : (',', '.'), + 'ps_AF': ('\u066b', '\u066c'), +} class _LocaleTests(unittest.TestCase): @@ -91,10 +97,12 @@ class _LocaleTests(unittest.TestCase): calc_value, known_value, calc_type, data_type, set_locale, used_locale)) + return True @unittest.skipUnless(nl_langinfo, "nl_langinfo is not available") def test_lc_numeric_nl_langinfo(self): # Test nl_langinfo against known values + tested = False for loc in candidate_locales: try: setlocale(LC_NUMERIC, loc) @@ -103,10 +111,14 @@ class _LocaleTests(unittest.TestCase): continue for li, lc in ((RADIXCHAR, "decimal_point"), (THOUSEP, "thousands_sep")): - self.numeric_tester('nl_langinfo', nl_langinfo(li), lc, loc) + if self.numeric_tester('nl_langinfo', nl_langinfo(li), lc, loc): + tested = True + if not tested: + self.skipTest('no suitable locales') def test_lc_numeric_localeconv(self): # Test localeconv against known values + tested = False for loc in candidate_locales: try: setlocale(LC_NUMERIC, loc) @@ -116,11 +128,15 @@ class _LocaleTests(unittest.TestCase): formatting = localeconv() for lc in ("decimal_point", "thousands_sep"): - self.numeric_tester('localeconv', formatting[lc], lc, loc) + if self.numeric_tester('localeconv', formatting[lc], lc, loc): + tested = True + if not tested: + self.skipTest('no suitable locales') @unittest.skipUnless(nl_langinfo, "nl_langinfo is not available") def test_lc_numeric_basic(self): # Test nl_langinfo against localeconv + tested = False for loc in candidate_locales: try: setlocale(LC_NUMERIC, loc) @@ -140,10 +156,14 @@ class _LocaleTests(unittest.TestCase): "(set to %s, using %s)" % ( nl_radixchar, li_radixchar, loc, set_locale)) + tested = True + if not tested: + self.skipTest('no suitable locales') def test_float_parsing(self): # Bug #1391872: Test whether float parsing is okay on European # locales. 
+ tested = False for loc in candidate_locales: try: setlocale(LC_NUMERIC, loc) @@ -162,9 +182,10 @@ class _LocaleTests(unittest.TestCase): if localeconv()['decimal_point'] != '.': self.assertRaises(ValueError, float, localeconv()['decimal_point'].join(['1', '23'])) + tested = True + if not tested: + self.skipTest('no suitable locales') -def test_main(): - run_unittest(_LocaleTests) if __name__ == '__main__': - test_main() + unittest.main() diff --git a/Lib/test/test_locale.py b/Lib/test/test_locale.py index e979753..9369a25 100644 --- a/Lib/test/test_locale.py +++ b/Lib/test/test_locale.py @@ -511,7 +511,7 @@ class TestMiscellaneous(unittest.TestCase): self.skipTest('test needs Turkish locale') loc = locale.getlocale(locale.LC_CTYPE) if verbose: - print('got locale %a' % (loc,)) + print('testing with %a' % (loc,), end=' ', flush=True) locale.setlocale(locale.LC_CTYPE, loc) self.assertEqual(loc, locale.getlocale(locale.LC_CTYPE)) -- cgit v0.12 From 893cce921cf99bac08f14fb92426bd8e191e9c79 Mon Sep 17 00:00:00 2001 From: Benjamin Peterson Date: Wed, 18 Feb 2015 08:52:46 -0500 Subject: remove RPM, since it's unused and unmaintained --- Misc/README | 1 - Misc/RPM/README | 33 ---- Misc/RPM/python-3.2.spec | 390 ----------------------------------------------- 3 files changed, 424 deletions(-) delete mode 100644 Misc/RPM/README delete mode 100644 Misc/RPM/python-3.2.spec diff --git a/Misc/README b/Misc/README index b239cee..7de627a 100644 --- a/Misc/README +++ b/Misc/README @@ -20,7 +20,6 @@ python.pc.in Package configuration info template for pkg-config python-wing*.wpr Wing IDE project file README The file you're reading now README.valgrind Information for Valgrind users, see valgrind-python.supp -RPM (Old) tools to build RPMs SpecialBuilds.txt Describes extra symbols you can set for debug builds TextMate A TextMate bundle for Python development valgrind-python.supp Valgrind suppression file, see README.valgrind diff --git a/Misc/RPM/README b/Misc/RPM/README deleted file mode 100644 index d883c95..0000000 --- a/Misc/RPM/README +++ /dev/null @@ -1,33 +0,0 @@ -This directory contains support file used to build RPM releases of -Python. Its contents are maintained by Sean Reifschneider -. - -If you wish to build RPMs from the base Python release tar-file, note -that you will have to download the -"doc//html-.tar.bz2" -file from python.org and place it into your "SOURCES" directory for -the build to complete. This is the same directory that you place the -Python-2.3.1 release tar-file in. You can then use the ".spec" file in -this directory to build RPMs. - -You may also wish to pursue RPMs provided by distribution makers to see if -they have one suitable for your uses. If, for example, you just want a -slightly newer version of Python than what the distro provides, you could -pick up the closest SRPM your distro provides, and then modify it to -the newer version, and build that. It may be as simple as just changing -the "version" information in the spec file (or it may require fixing -patches). - -NOTE: I am *NOT* recommending just using the binary RPM, and never do an -install with "--force" or "--nodeps". - -Also worth pursuing may be newer versions provided by similar distros. For -example, a Python 3 SRPM from Fedora may be a good baseline to try building -on CentOS. - -Many newer SRPMs won't install on older distros because of format changes. 
-You can manually extract these SRPMS with: - - mkdir foo - cd foo - rpm2cpio <../python3-*.src.rpm | cpio -ivd diff --git a/Misc/RPM/python-3.2.spec b/Misc/RPM/python-3.2.spec deleted file mode 100644 index e0f94c5..0000000 --- a/Misc/RPM/python-3.2.spec +++ /dev/null @@ -1,390 +0,0 @@ -########################## -# User-modifiable configs -########################## - -# Is the resulting package and the installed binary named "python" or -# "python2"? -#WARNING: Commenting out doesn't work. Last line is what's used. -%define config_binsuffix none -%define config_binsuffix 2.6 - -# Build tkinter? "auto" enables it if /usr/bin/wish exists. -#WARNING: Commenting out doesn't work. Last line is what's used. -%define config_tkinter no -%define config_tkinter yes -%define config_tkinter auto - -# Use pymalloc? The last line (commented or not) determines wether -# pymalloc is used. -#WARNING: Commenting out doesn't work. Last line is what's used. -%define config_pymalloc no -%define config_pymalloc yes - -# Enable IPV6? -#WARNING: Commenting out doesn't work. Last line is what's used. -%define config_ipv6 yes -%define config_ipv6 no - -# Build shared libraries or .a library? -#WARNING: Commenting out doesn't work. Last line is what's used. -%define config_sharedlib no -%define config_sharedlib yes - -# Location of the HTML directory. -%define config_htmldir /var/www/html/python - -################################# -# End of user-modifiable configs -################################# - -%define name python -#--start constants-- -%define version 3.2.6 -%define libvers 3.2 -#--end constants-- -%define release 1pydotorg -%define __prefix /usr - -# kludge to get around rpm define weirdness -%define ipv6 %(if [ "%{config_ipv6}" = yes ]; then echo --enable-ipv6; else echo --disable-ipv6; fi) -%define pymalloc %(if [ "%{config_pymalloc}" = yes ]; then echo --with-pymalloc; else echo --without-pymalloc; fi) -%define binsuffix %(if [ "%{config_binsuffix}" = none ]; then echo ; else echo "%{config_binsuffix}"; fi) -%define include_tkinter %(if [ \\( "%{config_tkinter}" = auto -a -f /usr/bin/wish \\) -o "%{config_tkinter}" = yes ]; then echo 1; else echo 0; fi) -%define libdirname %(( uname -m | egrep -q '_64$' && [ -d /usr/lib64 ] && echo lib64 ) || echo lib) -%define sharedlib %(if [ "%{config_sharedlib}" = yes ]; then echo --enable-shared; else echo ; fi) -%define include_sharedlib %(if [ "%{config_sharedlib}" = yes ]; then echo 1; else echo 0; fi) - -# detect if documentation is available -%define include_docs %(if [ -f "%{_sourcedir}/html-%{version}.tar.bz2" ]; then echo 1; else echo 0; fi) - -Summary: An interpreted, interactive, object-oriented programming language. -Name: %{name}%{binsuffix} -Version: %{version} -Release: %{release} -License: PSF -Group: Development/Languages -Source: Python-%{version}.tar.bz2 -%if %{include_docs} -Source1: html-%{version}.tar.bz2 -%endif -BuildRoot: %{_tmppath}/%{name}-%{version}-root -BuildPrereq: expat-devel -BuildPrereq: db4-devel -BuildPrereq: gdbm-devel -BuildPrereq: sqlite-devel -Prefix: %{__prefix} -Packager: Sean Reifschneider - -%description -Python is an interpreted, interactive, object-oriented programming -language. It incorporates modules, exceptions, dynamic typing, very high -level dynamic data types, and classes. Python combines remarkable power -with very clear syntax. It has interfaces to many system calls and -libraries, as well as to various window systems, and is extensible in C or -C++. 
It is also usable as an extension language for applications that need -a programmable interface. Finally, Python is portable: it runs on many -brands of UNIX, on PCs under Windows, MS-DOS, and OS/2, and on the -Mac. - -%package devel -Summary: The libraries and header files needed for Python extension development. -Prereq: python%{binsuffix} = %{PACKAGE_VERSION} -Group: Development/Libraries - -%description devel -The Python programming language's interpreter can be extended with -dynamically loaded extensions and can be embedded in other programs. -This package contains the header files and libraries needed to do -these types of tasks. - -Install python-devel if you want to develop Python extensions. The -python package will also need to be installed. You'll probably also -want to install the python-docs package, which contains Python -documentation. - -%if %{include_tkinter} -%package tkinter -Summary: A graphical user interface for the Python scripting language. -Group: Development/Languages -Prereq: python%{binsuffix} = %{PACKAGE_VERSION}-%{release} - -%description tkinter -The Tkinter (Tk interface) program is an graphical user interface for -the Python scripting language. - -You should install the tkinter package if you'd like to use a graphical -user interface for Python programming. -%endif - -%package tools -Summary: A collection of development tools included with Python. -Group: Development/Tools -Prereq: python%{binsuffix} = %{PACKAGE_VERSION}-%{release} - -%description tools -The Python package includes several development tools that are used -to build python programs. This package contains a selection of those -tools, including the IDLE Python IDE. - -Install python-tools if you want to use these tools to develop -Python programs. You will also need to install the python and -tkinter packages. - -%if %{include_docs} -%package docs -Summary: Python-related documentation. -Group: Development/Documentation - -%description docs -Documentation relating to the Python programming language in HTML and info -formats. -%endif - -%changelog -* Mon Dec 20 2004 Sean Reifschneider [2.4-2pydotorg] -- Changing the idle wrapper so that it passes arguments to idle. - -* Tue Oct 19 2004 Sean Reifschneider [2.4b1-1pydotorg] -- Updating to 2.4. - -* Thu Jul 22 2004 Sean Reifschneider [2.3.4-3pydotorg] -- Paul Tiemann fixes for %{prefix}. -- Adding permission changes for directory as suggested by reimeika.ca -- Adding code to detect when it should be using lib64. -- Adding a define for the location of /var/www/html for docs. - -* Thu May 27 2004 Sean Reifschneider [2.3.4-2pydotorg] -- Including changes from Ian Holsman to build under Red Hat 7.3. -- Fixing some problems with the /usr/local path change. - -* Sat Mar 27 2004 Sean Reifschneider [2.3.2-3pydotorg] -- Being more agressive about finding the paths to fix for - #!/usr/local/bin/python. - -* Sat Feb 07 2004 Sean Reifschneider [2.3.3-2pydotorg] -- Adding code to remove "#!/usr/local/bin/python" from particular files and - causing the RPM build to terminate if there are any unexpected files - which have that line in them. - -* Mon Oct 13 2003 Sean Reifschneider [2.3.2-1pydotorg] -- Adding code to detect wether documentation is available to build. - -* Fri Sep 19 2003 Sean Reifschneider [2.3.1-1pydotorg] -- Updating to the 2.3.1 release. - -* Mon Feb 24 2003 Sean Reifschneider [2.3b1-1pydotorg] -- Updating to 2.3b1 release. - -* Mon Feb 17 2003 Sean Reifschneider [2.3a1-1] -- Updating to 2.3 release. 
- -* Sun Dec 23 2001 Sean Reifschneider -[Release 2.2-2] -- Added -docs package. -- Added "auto" config_tkinter setting which only enables tk if - /usr/bin/wish exists. - -* Sat Dec 22 2001 Sean Reifschneider -[Release 2.2-1] -- Updated to 2.2. -- Changed the extension to "2" from "2.2". - -* Tue Nov 18 2001 Sean Reifschneider -[Release 2.2c1-1] -- Updated to 2.2c1. - -* Thu Nov 1 2001 Sean Reifschneider -[Release 2.2b1-3] -- Changed the way the sed for fixing the #! in pydoc works. - -* Wed Oct 24 2001 Sean Reifschneider -[Release 2.2b1-2] -- Fixed missing "email" package, thanks to anonymous report on sourceforge. -- Fixed missing "compiler" package. - -* Mon Oct 22 2001 Sean Reifschneider -[Release 2.2b1-1] -- Updated to 2.2b1. - -* Mon Oct 9 2001 Sean Reifschneider -[Release 2.2a4-4] -- otto@balinor.mat.unimi.it mentioned that the license file is missing. - -* Sun Sep 30 2001 Sean Reifschneider -[Release 2.2a4-3] -- Ignacio Vazquez-Abrams pointed out that I had a spruious double-quote in - the spec files. Thanks. - -* Wed Jul 25 2001 Sean Reifschneider -[Release 2.2a1-1] -- Updated to 2.2a1 release. -- Changed idle and pydoc to use binsuffix macro - -####### -# PREP -####### -%prep -%setup -n Python-%{version} - -######## -# BUILD -######## -%build -echo "Setting for ipv6: %{ipv6}" -echo "Setting for pymalloc: %{pymalloc}" -echo "Setting for binsuffix: %{binsuffix}" -echo "Setting for include_tkinter: %{include_tkinter}" -echo "Setting for libdirname: %{libdirname}" -echo "Setting for sharedlib: %{sharedlib}" -echo "Setting for include_sharedlib: %{include_sharedlib}" -./configure --enable-unicode=ucs4 %{sharedlib} %{ipv6} %{pymalloc} --prefix=%{__prefix} -make - -########## -# INSTALL -########## -%install -# set the install path -echo '[install_scripts]' >setup.cfg -echo 'install_dir='"${RPM_BUILD_ROOT}%{__prefix}/bin" >>setup.cfg - -[ -d "$RPM_BUILD_ROOT" -a "$RPM_BUILD_ROOT" != "/" ] && rm -rf $RPM_BUILD_ROOT -mkdir -p $RPM_BUILD_ROOT%{__prefix}/%{libdirname}/python%{libvers}/lib-dynload -make prefix=$RPM_BUILD_ROOT%{__prefix} install - -# REPLACE PATH IN PYDOC -if [ ! -z "%{binsuffix}" ] -then - ( - cd $RPM_BUILD_ROOT%{__prefix}/bin - mv pydoc pydoc.old - sed 's|#!.*|#!%{__prefix}/bin/env python'%{binsuffix}'|' \ - pydoc.old >pydoc - chmod 755 pydoc - rm -f pydoc.old - ) -fi - -# add the binsuffix -if [ ! 
-z "%{binsuffix}" ] -then - rm -f $RPM_BUILD_ROOT%{__prefix}/bin/python[0-9a-zA-Z]* - ( cd $RPM_BUILD_ROOT%{__prefix}/bin; - for file in *; do mv "$file" "$file"%{binsuffix}; done ) - ( cd $RPM_BUILD_ROOT%{_mandir}/man1; mv python.1 python%{binsuffix}.1 ) -fi - -######## -# Tools -echo '#!%{__prefix}/bin/env python%{binsuffix}' >${RPM_BUILD_ROOT}%{__prefix}/bin/idle%{binsuffix} -echo 'import os, sys' >>${RPM_BUILD_ROOT}%{__prefix}/bin/idle%{binsuffix} -echo 'os.execvp("%{__prefix}/bin/python%{binsuffix}", ["%{__prefix}/bin/python%{binsuffix}", "%{__prefix}/lib/python%{libvers}/idlelib/idle.py"] + sys.argv[1:])' >>${RPM_BUILD_ROOT}%{__prefix}/bin/idle%{binsuffix} -echo 'print "Failed to exec Idle"' >>${RPM_BUILD_ROOT}%{__prefix}/bin/idle%{binsuffix} -echo 'sys.exit(1)' >>${RPM_BUILD_ROOT}%{__prefix}/bin/idle%{binsuffix} -chmod 755 $RPM_BUILD_ROOT%{__prefix}/bin/idle%{binsuffix} -cp -a Tools $RPM_BUILD_ROOT%{__prefix}/%{libdirname}/python%{libvers} - -# MAKE FILE LISTS -rm -f mainpkg.files -find "$RPM_BUILD_ROOT""%{__prefix}"/%{libdirname}/python%{libvers} -type f | - sed "s|^${RPM_BUILD_ROOT}|/|" | - grep -v -e '/python%{libvers}/config$' -e '_tkinter.so$' >mainpkg.files -find "$RPM_BUILD_ROOT""%{__prefix}"/bin -type f -o -type l | - sed "s|^${RPM_BUILD_ROOT}|/|" | - grep -v -e '/bin/2to3%{binsuffix}$' | - grep -v -e '/bin/pydoc%{binsuffix}$' | - grep -v -e '/bin/smtpd.py%{binsuffix}$' | - grep -v -e '/bin/idle%{binsuffix}$' >>mainpkg.files - -rm -f tools.files -find "$RPM_BUILD_ROOT""%{__prefix}"/%{libdirname}/python%{libvers}/idlelib \ - "$RPM_BUILD_ROOT""%{__prefix}"/%{libdirname}/python%{libvers}/Tools -type f | - sed "s|^${RPM_BUILD_ROOT}|/|" >tools.files -echo "%{__prefix}"/bin/2to3%{binsuffix} >>tools.files -echo "%{__prefix}"/bin/pydoc%{binsuffix} >>tools.files -echo "%{__prefix}"/bin/smtpd.py%{binsuffix} >>tools.files -echo "%{__prefix}"/bin/idle%{binsuffix} >>tools.files - -###### -# Docs -%if %{include_docs} -mkdir -p "$RPM_BUILD_ROOT"%{config_htmldir} -( - cd "$RPM_BUILD_ROOT"%{config_htmldir} - bunzip2 < %{SOURCE1} | tar x -) -%endif - -# fix the #! line in installed files -find "$RPM_BUILD_ROOT" -type f -print0 | - xargs -0 grep -l /usr/local/bin/python | while read file -do - FIXFILE="$file" - sed 's|^#!.*python|#!%{__prefix}/bin/env python'"%{binsuffix}"'|' \ - "$FIXFILE" >/tmp/fix-python-path.$$ - cat /tmp/fix-python-path.$$ >"$FIXFILE" - rm -f /tmp/fix-python-path.$$ -done - -# check to see if there are any straggling #! lines -find "$RPM_BUILD_ROOT" -type f | xargs egrep -n '^#! */usr/local/bin/python' \ - | grep ':1:#!' >/tmp/python-rpm-files.$$ || true -if [ -s /tmp/python-rpm-files.$$ ] -then - echo '*****************************************************' - cat /tmp/python-rpm-files.$$ - cat <<@EOF - ***************************************************** - There are still files referencing /usr/local/bin/python in the - install directory. They are listed above. Please fix the .spec - file and try again. If you are an end-user, you probably want - to report this to jafo-rpms@tummy.com as well. 
- ***************************************************** -@EOF - rm -f /tmp/python-rpm-files.$$ - exit 1 -fi -rm -f /tmp/python-rpm-files.$$ - -######## -# CLEAN -######## -%clean -[ -n "$RPM_BUILD_ROOT" -a "$RPM_BUILD_ROOT" != / ] && rm -rf $RPM_BUILD_ROOT -rm -f mainpkg.files tools.files - -######## -# FILES -######## -%files -f mainpkg.files -%defattr(-,root,root) -%doc Misc/README Misc/cheatsheet Misc/Porting -%doc LICENSE Misc/ACKS Misc/HISTORY Misc/NEWS -%{_mandir}/man1/python%{binsuffix}.1* - -%attr(755,root,root) %dir %{__prefix}/include/python%{libvers} -%attr(755,root,root) %dir %{__prefix}/%{libdirname}/python%{libvers}/ -%if %{include_sharedlib} -%{__prefix}/%{libdirname}/libpython* -%endif - -%files devel -%defattr(-,root,root) -%{__prefix}/include/python%{libvers}/*.h -%{__prefix}/%{libdirname}/python%{libvers}/config - -%files -f tools.files tools -%defattr(-,root,root) - -%if %{include_tkinter} -%files tkinter -%defattr(-,root,root) -%{__prefix}/%{libdirname}/python%{libvers}/tkinter -%{__prefix}/%{libdirname}/python%{libvers}/lib-dynload/_tkinter.so* -%endif - -%if %{include_docs} -%files docs -%defattr(-,root,root) -%{config_htmldir}/* -%endif -- cgit v0.12 From 500af332f407d9a83a0232e4018e10cb49a76a79 Mon Sep 17 00:00:00 2001 From: Benjamin Peterson Date: Thu, 19 Feb 2015 17:57:08 -0500 Subject: remove rc4 from the default client ciphers (closes #23481) --- Lib/ssl.py | 6 ++---- Misc/NEWS | 2 ++ 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Lib/ssl.py b/Lib/ssl.py index 8c75f00..72115e4 100644 --- a/Lib/ssl.py +++ b/Lib/ssl.py @@ -170,14 +170,12 @@ else: # * Prefer any AES-GCM over any AES-CBC for better performance and security # * Then Use HIGH cipher suites as a fallback # * Then Use 3DES as fallback which is secure but slow -# * Finally use RC4 as a fallback which is problematic but needed for -# compatibility some times. # * Disable NULL authentication, NULL encryption, and MD5 MACs for security # reasons _DEFAULT_CIPHERS = ( 'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+HIGH:' - 'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:ECDH+RC4:' - 'DH+RC4:RSA+RC4:!aNULL:!eNULL:!MD5' + 'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:!aNULL:' + '!eNULL:!MD5' ) # Restricted and more secure ciphers for the server side diff --git a/Misc/NEWS b/Misc/NEWS index 3f50594..5e69549 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -13,6 +13,8 @@ Core and Builtins Library ------- +- Issue #23481: Remove RC4 from the SSL module's default cipher list. + - Issue #21548: Fix pydoc.synopsis() and pydoc.apropos() on modules with empty docstrings. -- cgit v0.12 From 57c616f1e43f8c65a48fb6f1e85d040e8f929779 Mon Sep 17 00:00:00 2001 From: Zachary Ware Date: Thu, 19 Feb 2015 22:15:36 -0600 Subject: Fix a typo pointed out on docs@ --- Doc/reference/expressions.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Doc/reference/expressions.rst b/Doc/reference/expressions.rst index d0682ca..1a5088a 100644 --- a/Doc/reference/expressions.rst +++ b/Doc/reference/expressions.rst @@ -1067,7 +1067,7 @@ Comparison of objects of the same type depends on the type: * Numbers are compared arithmetically. * The values :const:`float('NaN')` and :const:`Decimal('NaN')` are special. - The are identical to themselves, ``x is x`` but are not equal to themselves, + They are identical to themselves, ``x is x`` but are not equal to themselves, ``x != x``. Additionally, comparing any value to a not-a-number value will return ``False``. 
For example, both ``3 < float('NaN')`` and ``float('NaN') < 3`` will return ``False``. -- cgit v0.12 From 56dee1e463d2a9962aca0c577a5782a89cce3dbb Mon Sep 17 00:00:00 2001 From: Zachary Ware Date: Thu, 19 Feb 2015 22:30:15 -0600 Subject: Fix typo pointed out on docs@ by Yaniv Sayeh --- Doc/library/copyreg.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Doc/library/copyreg.rst b/Doc/library/copyreg.rst index 50d5879..18306c7 100644 --- a/Doc/library/copyreg.rst +++ b/Doc/library/copyreg.rst @@ -9,7 +9,7 @@ module: pickle module: copy -The :mod:`copyreg` module offers a way to define fuctions used while pickling +The :mod:`copyreg` module offers a way to define functions used while pickling specific objects. The :mod:`pickle` and :mod:`copy` modules use those functions when pickling/copying those objects. The module provides configuration information about object constructors which are not classes. -- cgit v0.12 From babc688180ac9214fcc217ef906b8d11c1babe36 Mon Sep 17 00:00:00 2001 From: Berker Peksag Date: Fri, 20 Feb 2015 09:39:38 +0200 Subject: Issue #23439: Add missing entries to http.client.__all__. Also, document the LineTooLong exception since it can be raised by the members of public API (e.g. http.client.HTTPResponse). Patch by Martin Panter. --- Doc/library/http.client.rst | 6 ++++++ Lib/http/client.py | 4 +++- Lib/test/test_httplib.py | 15 +++++++++++++++ 3 files changed, 24 insertions(+), 1 deletion(-) diff --git a/Doc/library/http.client.rst b/Doc/library/http.client.rst index a3f2e35..b6e78b5 100644 --- a/Doc/library/http.client.rst +++ b/Doc/library/http.client.rst @@ -169,6 +169,12 @@ The following exceptions are raised as appropriate: status code that we don't understand. +.. exception:: LineTooLong + + A subclass of :exc:`HTTPException`. Raised if an excessively long line + is received in the HTTP protocol from the server. + + The constants defined in this module are: .. 
data:: HTTP_PORT diff --git a/Lib/http/client.py b/Lib/http/client.py index 6de4b0e..d3d9b30 100644 --- a/Lib/http/client.py +++ b/Lib/http/client.py @@ -74,12 +74,14 @@ import socket import collections from urllib.parse import urlsplit +# HTTPMessage, parse_headers(), and the HTTP status code constants are +# intentionally omitted for simplicity __all__ = ["HTTPResponse", "HTTPConnection", "HTTPException", "NotConnected", "UnknownProtocol", "UnknownTransferEncoding", "UnimplementedFileMode", "IncompleteRead", "InvalidURL", "ImproperConnectionState", "CannotSendRequest", "CannotSendHeader", "ResponseNotReady", - "BadStatusLine", "error", "responses"] + "BadStatusLine", "LineTooLong", "error", "responses"] HTTP_PORT = 80 HTTPS_PORT = 443 diff --git a/Lib/test/test_httplib.py b/Lib/test/test_httplib.py index 3fc3466..d0a0e8d 100644 --- a/Lib/test/test_httplib.py +++ b/Lib/test/test_httplib.py @@ -708,7 +708,22 @@ class BasicTest(TestCase): self.assertTrue(response.closed) self.assertTrue(conn.sock.file_closed) + class OfflineTest(TestCase): + def test_all(self): + # Documented objects defined in the module should be in __all__ + expected = {"responses"} # White-list documented dict() object + # HTTPMessage, parse_headers(), and the HTTP status code constants are + # intentionally omitted for simplicity + blacklist = {"HTTPMessage", "parse_headers"} + for name in dir(client): + if name in blacklist: + continue + module_object = getattr(client, name) + if getattr(module_object, "__module__", None) == "http.client": + expected.add(name) + self.assertCountEqual(client.__all__, expected) + def test_responses(self): self.assertEqual(client.responses[client.NOT_FOUND], "Not Found") -- cgit v0.12 From e55181f517bbfc875065ce86ed3e05cf0e0246fa Mon Sep 17 00:00:00 2001 From: Serhiy Storchaka Date: Fri, 20 Feb 2015 21:34:06 +0200 Subject: Issue #23490: Fixed possible crashes related to interoperability between old-style and new API for string with 2**30-1 characters. --- Objects/unicodeobject.c | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index 2ffa55b..d8f1ce1 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -1535,6 +1535,10 @@ _PyUnicode_Ready(PyObject *unicode) /* in case the native representation is 2-bytes, we need to allocate a new normalized 4-byte version. */ length_wo_surrogates = _PyUnicode_WSTR_LENGTH(unicode) - num_surrogates; + if (length_wo_surrogates > PY_SSIZE_T_MAX / 4 - 1) { + PyErr_NoMemory(); + return -1; + } _PyUnicode_DATA_ANY(unicode) = PyObject_MALLOC(4 * (length_wo_surrogates + 1)); if (!_PyUnicode_DATA_ANY(unicode)) { PyErr_NoMemory(); @@ -3846,6 +3850,11 @@ PyUnicode_AsUnicodeAndSize(PyObject *unicode, Py_ssize_t *size) #endif } else { + if ((size_t)_PyUnicode_LENGTH(unicode) > + PY_SSIZE_T_MAX / sizeof(wchar_t) - 1) { + PyErr_NoMemory(); + return NULL; + } _PyUnicode_WSTR(unicode) = (wchar_t *) PyObject_MALLOC(sizeof(wchar_t) * (_PyUnicode_LENGTH(unicode) + 1)); if (!_PyUnicode_WSTR(unicode)) { -- cgit v0.12 From 5e3d7a401d8568d3308b8928b146a9c306bc3ca8 Mon Sep 17 00:00:00 2001 From: Serhiy Storchaka Date: Fri, 20 Feb 2015 23:46:06 +0200 Subject: Issue #23374: Fixed pydoc failure with non-ASCII files when stdout encoding differs from file system encoding (e.g. on Mac OS). 
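
The key idea of the change (visible in the pydoc.py hunk below) is to escape characters that sys.stdout cannot encode, rather than always assuming the file system encoding. A rough, self-contained sketch of that escaping step follows; it mirrors the patch's helper but is simplified for illustration, and the example call is not part of the patch:

    import sys

    def _escape_stdout(text):
        # Replace characters the output encoding cannot represent with
        # backslash escapes, so printing or paging never raises
        # UnicodeEncodeError.
        encoding = getattr(sys.stdout, 'encoding', None) or 'utf-8'
        return text.encode(encoding, 'backslashreplace').decode(encoding)

    # With an ASCII stdout this prints the escaped form instead of failing:
    print(_escape_stdout('Це не латиниця'))

With PYTHONIOENCODING=ascii the call prints the backslash-escaped text, which is exactly what the new test_not_ascii test checks.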
--- Lib/pydoc.py | 22 +++++++++++++--------- Lib/test/test_pydoc.py | 9 +++++++++ Misc/NEWS | 3 +++ 3 files changed, 25 insertions(+), 9 deletions(-) diff --git a/Lib/pydoc.py b/Lib/pydoc.py index d37ebf1..8bbebc9 100755 --- a/Lib/pydoc.py +++ b/Lib/pydoc.py @@ -1407,9 +1407,6 @@ class _PlainTextDoc(TextDoc): def pager(text): """The first time this is called, determine what kind of pager to use.""" global pager - # Escape non-encodable characters to avoid encoding errors later - encoding = sys.getfilesystemencoding() - text = text.encode(encoding, 'backslashreplace').decode(encoding) pager = getpager() pager(text) @@ -1452,10 +1449,12 @@ def plain(text): def pipepager(text, cmd): """Page through text by feeding it to another program.""" - pipe = os.popen(cmd, 'w') + import subprocess + proc = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE) try: - pipe.write(text) - pipe.close() + with proc: + with io.TextIOWrapper(proc.stdin, errors='backslashreplace') as pipe: + pipe.write(text) except OSError: pass # Ignore broken pipes caused by quitting the pager program. @@ -1463,16 +1462,21 @@ def tempfilepager(text, cmd): """Page through text by invoking a program on a temporary file.""" import tempfile filename = tempfile.mktemp() - with open(filename, 'w') as file: + with open(filename, 'w', errors='backslashreplace') as file: file.write(text) try: os.system(cmd + ' "' + filename + '"') finally: os.unlink(filename) +def _escape_stdout(text): + # Escape non-encodable characters to avoid encoding errors later + encoding = getattr(sys.stdout, 'encoding', None) or 'utf-8' + return text.encode(encoding, 'backslashreplace').decode(encoding) + def ttypager(text): """Page through text on a text terminal.""" - lines = plain(text).split('\n') + lines = plain(_escape_stdout(text)).split('\n') try: import tty fd = sys.stdin.fileno() @@ -1516,7 +1520,7 @@ def ttypager(text): def plainpager(text): """Simply print unformatted text. This is the ultimate fallback.""" - sys.stdout.write(plain(text)) + sys.stdout.write(plain(_escape_stdout(text))) def describe(thing): """Produce a short description of the given thing.""" diff --git a/Lib/test/test_pydoc.py b/Lib/test/test_pydoc.py index 1427c77..6a44c22 100644 --- a/Lib/test/test_pydoc.py +++ b/Lib/test/test_pydoc.py @@ -35,6 +35,10 @@ try: except ImportError: threading = None +class nonascii: + 'Це не латиниця' + pass + if test.support.HAVE_DOCSTRINGS: expected_data_docstrings = ( 'dictionary for instance variables (if defined)', @@ -474,6 +478,11 @@ class PydocDocTest(unittest.TestCase): self.assertEqual(expected, result, "documentation for missing module found") + def test_not_ascii(self): + result = run_pydoc('test.test_pydoc.nonascii', PYTHONIOENCODING='ascii') + encoded = nonascii.__doc__.encode('ascii', 'backslashreplace') + self.assertIn(encoded, result) + def test_input_strip(self): missing_module = " test.i_am_not_here " result = str(run_pydoc(missing_module), 'ascii') diff --git a/Misc/NEWS b/Misc/NEWS index 5e69549..83518d24 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -13,6 +13,9 @@ Core and Builtins Library ------- +- Issue #23374: Fixed pydoc failure with non-ASCII files when stdout encoding + differs from file system encoding (e.g. on Mac OS). + - Issue #23481: Remove RC4 from the SSL module's default cipher list. 
- Issue #21548: Fix pydoc.synopsis() and pydoc.apropos() on modules with empty -- cgit v0.12 From a3712a9a6c9a05de287d2403cdb5aecbc417ce93 Mon Sep 17 00:00:00 2001 From: Serhiy Storchaka Date: Sat, 21 Feb 2015 00:35:09 +0200 Subject: Issue #5700: io.FileIO() called flush() after closing the file. flush() was not called in close() if closefd=False. --- Lib/test/test_io.py | 50 ++++++++++++++++++++++++++++++++++++++++++++++++-- Misc/NEWS | 3 +++ Modules/_io/fileio.c | 21 ++++++++++++++------- 3 files changed, 65 insertions(+), 9 deletions(-) diff --git a/Lib/test/test_io.py b/Lib/test/test_io.py index a424f76..79cd87b 100644 --- a/Lib/test/test_io.py +++ b/Lib/test/test_io.py @@ -593,13 +593,43 @@ class IOTest(unittest.TestCase): with self.open(zero, "r") as f: self.assertRaises(OverflowError, f.read) - def test_flush_error_on_close(self): - f = self.open(support.TESTFN, "wb", buffering=0) + def check_flush_error_on_close(self, *args, **kwargs): + # Test that the file is closed despite failed flush + # and that flush() is called before file closed. + f = self.open(*args, **kwargs) + closed = [] def bad_flush(): + closed[:] = [f.closed] raise OSError() f.flush = bad_flush self.assertRaises(OSError, f.close) # exception not swallowed self.assertTrue(f.closed) + self.assertTrue(closed) # flush() called + self.assertFalse(closed[0]) # flush() called before file closed + + def test_flush_error_on_close(self): + # raw file + # Issue #5700: io.FileIO calls flush() after file closed + self.check_flush_error_on_close(support.TESTFN, 'wb', buffering=0) + fd = os.open(support.TESTFN, os.O_WRONLY|os.O_CREAT) + self.check_flush_error_on_close(fd, 'wb', buffering=0) + fd = os.open(support.TESTFN, os.O_WRONLY|os.O_CREAT) + self.check_flush_error_on_close(fd, 'wb', buffering=0, closefd=False) + os.close(fd) + # buffered io + self.check_flush_error_on_close(support.TESTFN, 'wb') + fd = os.open(support.TESTFN, os.O_WRONLY|os.O_CREAT) + self.check_flush_error_on_close(fd, 'wb') + fd = os.open(support.TESTFN, os.O_WRONLY|os.O_CREAT) + self.check_flush_error_on_close(fd, 'wb', closefd=False) + os.close(fd) + # text io + self.check_flush_error_on_close(support.TESTFN, 'w') + fd = os.open(support.TESTFN, os.O_WRONLY|os.O_CREAT) + self.check_flush_error_on_close(fd, 'w') + fd = os.open(support.TESTFN, os.O_WRONLY|os.O_CREAT) + self.check_flush_error_on_close(fd, 'w', closefd=False) + os.close(fd) def test_multi_close(self): f = self.open(support.TESTFN, "wb", buffering=0) @@ -788,13 +818,21 @@ class CommonBufferedTests: self.assertEqual(repr(b), "<%s name=b'dummy'>" % clsname) def test_flush_error_on_close(self): + # Test that buffered file is closed despite failed flush + # and that flush() is called before file closed. raw = self.MockRawIO() + closed = [] def bad_flush(): + closed[:] = [b.closed, raw.closed] raise OSError() raw.flush = bad_flush b = self.tp(raw) self.assertRaises(OSError, b.close) # exception not swallowed self.assertTrue(b.closed) + self.assertTrue(raw.closed) + self.assertTrue(closed) # flush() called + self.assertFalse(closed[0]) # flush() called before file closed + self.assertFalse(closed[1]) def test_close_error_on_close(self): raw = self.MockRawIO() @@ -2618,12 +2656,20 @@ class TextIOWrapperTest(unittest.TestCase): self.assertEqual(content.count("Thread%03d\n" % n), 1) def test_flush_error_on_close(self): + # Test that text file is closed despite failed flush + # and that flush() is called before file closed. 
txt = self.TextIOWrapper(self.BytesIO(self.testdata), encoding="ascii") + closed = [] def bad_flush(): + closed[:] = [txt.closed, txt.buffer.closed] raise OSError() txt.flush = bad_flush self.assertRaises(OSError, txt.close) # exception not swallowed self.assertTrue(txt.closed) + self.assertTrue(txt.buffer.closed) + self.assertTrue(closed) # flush() called + self.assertFalse(closed[0]) # flush() called before file closed + self.assertFalse(closed[1]) def test_close_error_on_close(self): buffer = self.BytesIO(self.testdata) diff --git a/Misc/NEWS b/Misc/NEWS index 83518d24..bcabefa 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -13,6 +13,9 @@ Core and Builtins Library ------- +- Issue #5700: io.FileIO() called flush() after closing the file. + flush() was not called in close() if closefd=False. + - Issue #23374: Fixed pydoc failure with non-ASCII files when stdout encoding differs from file system encoding (e.g. on Mac OS). diff --git a/Modules/_io/fileio.c b/Modules/_io/fileio.c index a2b253b..80ca99c 100644 --- a/Modules/_io/fileio.c +++ b/Modules/_io/fileio.c @@ -126,11 +126,18 @@ internal_close(fileio *self) static PyObject * fileio_close(fileio *self) { + PyObject *res; + PyObject *exc, *val, *tb; + int rc; _Py_IDENTIFIER(close); + res = _PyObject_CallMethodId((PyObject*)&PyRawIOBase_Type, + &PyId_close, "O", self); if (!self->closefd) { self->fd = -1; - Py_RETURN_NONE; + return res; } + if (res == NULL) + PyErr_Fetch(&exc, &val, &tb); if (self->finalizing) { PyObject *r = fileio_dealloc_warn(self, (PyObject *) self); if (r) @@ -138,12 +145,12 @@ fileio_close(fileio *self) else PyErr_Clear(); } - errno = internal_close(self); - if (errno < 0) - return NULL; - - return _PyObject_CallMethodId((PyObject*)&PyRawIOBase_Type, - &PyId_close, "O", self); + rc = internal_close(self); + if (res == NULL) + _PyErr_ChainExceptions(exc, val, tb); + if (rc < 0) + Py_CLEAR(res); + return res; } static PyObject * -- cgit v0.12 From a1543cdcd65d9a2be302be0da0cfb9c53c17f806 Mon Sep 17 00:00:00 2001 From: Serhiy Storchaka Date: Sat, 21 Feb 2015 01:19:58 +0200 Subject: Issue #23215: Multibyte codecs with custom error handlers that ignores errors consumed too much memory and raised SystemError or MemoryError. Original patch by Aleksi Torhamo. --- Lib/test/test_multibytecodec.py | 7 +++++++ Misc/NEWS | 4 ++++ Modules/cjkcodecs/multibytecodec.c | 19 +++++++++++-------- 3 files changed, 22 insertions(+), 8 deletions(-) diff --git a/Lib/test/test_multibytecodec.py b/Lib/test/test_multibytecodec.py index ce267dd..2929f98 100644 --- a/Lib/test/test_multibytecodec.py +++ b/Lib/test/test_multibytecodec.py @@ -44,6 +44,13 @@ class Test_MultibyteCodec(unittest.TestCase): self.assertRaises(IndexError, dec, b'apple\x92ham\x93spam', 'test.cjktest') + def test_errorcallback_custom_ignore(self): + # Issue #23215: MemoryError with custom error handlers and multibyte codecs + data = 100 * "\udc00" + codecs.register_error("test.ignore", codecs.ignore_errors) + for enc in ALL_CJKENCODINGS: + self.assertEqual(data.encode(enc, "test.ignore"), b'') + def test_codingspec(self): try: for enc in ALL_CJKENCODINGS: diff --git a/Misc/NEWS b/Misc/NEWS index bcabefa..f1426d2 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -13,6 +13,10 @@ Core and Builtins Library ------- +- Issue #23215: Multibyte codecs with custom error handlers that ignores errors + consumed too much memory and raised SystemError or MemoryError. + Original patch by Aleksi Torhamo. + - Issue #5700: io.FileIO() called flush() after closing the file. 
flush() was not called in close() if closefd=False. diff --git a/Modules/cjkcodecs/multibytecodec.c b/Modules/cjkcodecs/multibytecodec.c index 087ae9b..435529f 100644 --- a/Modules/cjkcodecs/multibytecodec.c +++ b/Modules/cjkcodecs/multibytecodec.c @@ -182,8 +182,10 @@ expand_encodebuffer(MultibyteEncodeBuffer *buf, Py_ssize_t esize) orgsize = PyBytes_GET_SIZE(buf->outobj); incsize = (esize < (orgsize >> 1) ? (orgsize >> 1) | 1 : esize); - if (orgsize > PY_SSIZE_T_MAX - incsize) + if (orgsize > PY_SSIZE_T_MAX - incsize) { + PyErr_NoMemory(); return -1; + } if (_PyBytes_Resize(&buf->outobj, orgsize + incsize) == -1) return -1; @@ -194,11 +196,11 @@ expand_encodebuffer(MultibyteEncodeBuffer *buf, Py_ssize_t esize) return 0; } -#define REQUIRE_ENCODEBUFFER(buf, s) { \ - if ((s) < 1 || (buf)->outbuf + (s) > (buf)->outbuf_end) \ +#define REQUIRE_ENCODEBUFFER(buf, s) do { \ + if ((s) < 0 || (s) > (buf)->outbuf_end - (buf)->outbuf) \ if (expand_encodebuffer(buf, s) == -1) \ goto errorexit; \ -} +} while(0) /** @@ -332,10 +334,11 @@ multibytecodec_encerror(MultibyteCodec *codec, assert(PyBytes_Check(retstr)); retstrsize = PyBytes_GET_SIZE(retstr); - REQUIRE_ENCODEBUFFER(buf, retstrsize); - - memcpy(buf->outbuf, PyBytes_AS_STRING(retstr), retstrsize); - buf->outbuf += retstrsize; + if (retstrsize > 0) { + REQUIRE_ENCODEBUFFER(buf, retstrsize); + memcpy(buf->outbuf, PyBytes_AS_STRING(retstr), retstrsize); + buf->outbuf += retstrsize; + } newpos = PyLong_AsSsize_t(PyTuple_GET_ITEM(retobj, 1)); if (newpos < 0 && !PyErr_Occurred()) -- cgit v0.12 From a3369a524c6f862936c1d8a24e23c796aa91f2c7 Mon Sep 17 00:00:00 2001 From: Serhiy Storchaka Date: Sat, 21 Feb 2015 12:08:52 +0200 Subject: Issues #814253, #9179: Warnings now are raised when group references and conditional group references are used in lookbehind assertions in regular expressions. --- Doc/library/re.rst | 7 +++++-- Lib/sre_parse.py | 20 ++++++++++++++++++++ Lib/test/test_re.py | 33 ++++++++++++++++++++++++++++++++- Misc/NEWS | 4 ++++ 4 files changed, 61 insertions(+), 3 deletions(-) diff --git a/Doc/library/re.rst b/Doc/library/re.rst index dfc25ea..c3c8b65 100644 --- a/Doc/library/re.rst +++ b/Doc/library/re.rst @@ -281,7 +281,9 @@ The special characters are: assertion`. ``(?<=abc)def`` will find a match in ``abcdef``, since the lookbehind will back up 3 characters and check if the contained pattern matches. The contained pattern must only match strings of some fixed length, meaning that - ``abc`` or ``a|b`` are allowed, but ``a*`` and ``a{3,4}`` are not. Note that + ``abc`` or ``a|b`` are allowed, but ``a*`` and ``a{3,4}`` are not. Group + references are not supported even if they match strings of some fixed length. + Note that patterns which start with positive lookbehind assertions will not match at the beginning of the string being searched; you will most likely want to use the :func:`search` function rather than the :func:`match` function: @@ -301,7 +303,8 @@ The special characters are: Matches if the current position in the string is not preceded by a match for ``...``. This is called a :dfn:`negative lookbehind assertion`. Similar to positive lookbehind assertions, the contained pattern must only match strings of - some fixed length. Patterns which start with negative lookbehind assertions may + some fixed length and shouldn't contain group references. + Patterns which start with negative lookbehind assertions may match at the beginning of the string being searched. 
``(?(id/name)yes-pattern|no-pattern)`` diff --git a/Lib/sre_parse.py b/Lib/sre_parse.py index b56d437..df1e643 100644 --- a/Lib/sre_parse.py +++ b/Lib/sre_parse.py @@ -69,6 +69,8 @@ class Pattern: self.open = [] self.groups = 1 self.groupdict = {} + self.lookbehind = 0 + def opengroup(self, name=None): gid = self.groups self.groups = gid + 1 @@ -352,6 +354,11 @@ def _escape(source, escape, state): if group < state.groups: if not state.checkgroup(group): raise error("cannot refer to open group") + if state.lookbehind: + import warnings + warnings.warn('group references in lookbehind ' + 'assertions are not supported', + RuntimeWarning) return GROUPREF, group raise ValueError if len(escape) == 2: @@ -630,6 +637,11 @@ def _parse(source, state): if gid is None: msg = "unknown group name: {0!r}".format(name) raise error(msg) + if state.lookbehind: + import warnings + warnings.warn('group references in lookbehind ' + 'assertions are not supported', + RuntimeWarning) subpatternappend((GROUPREF, gid)) continue else: @@ -658,7 +670,10 @@ def _parse(source, state): raise error("syntax error") dir = -1 # lookbehind char = sourceget() + state.lookbehind += 1 p = _parse_sub(source, state) + if dir < 0: + state.lookbehind -= 1 if not sourcematch(")"): raise error("unbalanced parenthesis") if char == "=": @@ -689,6 +704,11 @@ def _parse(source, state): condgroup = int(condname) except ValueError: raise error("bad character in group name") + if state.lookbehind: + import warnings + warnings.warn('group references in lookbehind ' + 'assertions are not supported', + RuntimeWarning) else: # flags if not source.next in FLAGS: diff --git a/Lib/test/test_re.py b/Lib/test/test_re.py index d2547d4..7348af3 100644 --- a/Lib/test/test_re.py +++ b/Lib/test/test_re.py @@ -557,7 +557,7 @@ class ReTests(unittest.TestCase): self.assertEqual(re.match("a.*b", "a\n\nb", re.DOTALL).group(0), "a\n\nb") - def test_non_consuming(self): + def test_lookahead(self): self.assertEqual(re.match("(a(?=\s[^a]))", "a b").group(1), "a") self.assertEqual(re.match("(a(?=\s[^a]*))", "a b").group(1), "a") self.assertEqual(re.match("(a(?=\s[abc]))", "a b").group(1), "a") @@ -571,6 +571,37 @@ class ReTests(unittest.TestCase): self.assertEqual(re.match(r"(a)(?!\s\1)", "a b").group(1), "a") self.assertEqual(re.match(r"(a)(?!\s(abc|a))", "a b").group(1), "a") + # Group reference. + self.assertTrue(re.match(r'(a)b(?=\1)a', 'aba')) + self.assertIsNone(re.match(r'(a)b(?=\1)c', 'abac')) + # Named group reference. + self.assertTrue(re.match(r'(?Pa)b(?=(?P=g))a', 'aba')) + self.assertIsNone(re.match(r'(?Pa)b(?=(?P=g))c', 'abac')) + # Conditional group reference. + self.assertTrue(re.match(r'(?:(a)|(x))b(?=(?(2)x|c))c', 'abc')) + self.assertIsNone(re.match(r'(?:(a)|(x))b(?=(?(2)c|x))c', 'abc')) + self.assertTrue(re.match(r'(?:(a)|(x))b(?=(?(2)x|c))c', 'abc')) + self.assertIsNone(re.match(r'(?:(a)|(x))b(?=(?(1)b|x))c', 'abc')) + self.assertTrue(re.match(r'(?:(a)|(x))b(?=(?(1)c|x))c', 'abc')) + # Group used before defined. + self.assertTrue(re.match(r'(a)b(?=(?(2)x|c))(c)', 'abc')) + self.assertIsNone(re.match(r'(a)b(?=(?(2)b|x))(c)', 'abc')) + self.assertTrue(re.match(r'(a)b(?=(?(1)c|x))(c)', 'abc')) + + def test_lookbehind(self): + self.assertTrue(re.match(r'ab(?<=b)c', 'abc')) + self.assertIsNone(re.match(r'ab(?<=c)c', 'abc')) + self.assertIsNone(re.match(r'ab(?a)a(?<=(?P=g))c') + # Conditional group reference. + self.assertWarns(RuntimeWarning, re.compile, r'(a)b(?<=(?(1)b|x))c') + # Group used before defined. 
+ self.assertWarns(RuntimeWarning, re.compile, r'(a)b(?<=(?(2)b|x))(c)') + def test_ignore_case(self): self.assertEqual(re.match("abc", "ABC", re.I).group(0), "ABC") self.assertEqual(re.match(b"abc", b"ABC", re.I).group(0), b"ABC") diff --git a/Misc/NEWS b/Misc/NEWS index f1426d2..37ff72e 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -13,6 +13,10 @@ Core and Builtins Library ------- +- Issues #814253, #9179: Warnings now are raised when group references and + conditional group references are used in lookbehind assertions in regular + expressions. + - Issue #23215: Multibyte codecs with custom error handlers that ignores errors consumed too much memory and raised SystemError or MemoryError. Original patch by Aleksi Torhamo. -- cgit v0.12 From 80a1803193bcaef7a50324974117c3aa3fb0e48f Mon Sep 17 00:00:00 2001 From: Serhiy Storchaka Date: Sun, 22 Feb 2015 17:25:33 +0200 Subject: Issue #6639: Module-level turtle functions no longer raise TclError after closing the window. --- Lib/turtle.py | 67 +++++++++++++++++++++++----------------------- Lib/turtledemo/__main__.py | 4 +++ Misc/NEWS | 3 +++ 3 files changed, 40 insertions(+), 34 deletions(-) diff --git a/Lib/turtle.py b/Lib/turtle.py index f4400c9..cbd4f47 100644 --- a/Lib/turtle.py +++ b/Lib/turtle.py @@ -1288,7 +1288,7 @@ class TurtleScreen(TurtleScreenBase): def _incrementudc(self): """Increment update counter.""" if not TurtleScreen._RUNNING: - TurtleScreen._RUNNNING = True + TurtleScreen._RUNNING = True raise Terminator if self._tracing > 0: self._updatecounter += 1 @@ -3754,7 +3754,7 @@ class _Screen(TurtleScreen): Turtle._screen = None _Screen._root = None _Screen._canvas = None - TurtleScreen._RUNNING = True + TurtleScreen._RUNNING = False root.destroy() def bye(self): @@ -3795,7 +3795,6 @@ class _Screen(TurtleScreen): except AttributeError: exit(0) - class Turtle(RawTurtle): """RawTurtle auto-creating (scrolled) canvas. @@ -3818,18 +3817,6 @@ class Turtle(RawTurtle): Pen = Turtle -def _getpen(): - """Create the 'anonymous' turtle if not already present.""" - if Turtle._pen is None: - Turtle._pen = Turtle() - return Turtle._pen - -def _getscreen(): - """Create a TurtleScreen if not already present.""" - if Turtle._screen is None: - Turtle._screen = Screen() - return Turtle._screen - def write_docstringdict(filename="turtle_docstringdict"): """Create and write docstring-dictionary to file. @@ -3952,26 +3939,38 @@ def _screen_docrevise(docstr): ## as functions. So we can enhance, change, add, delete methods to these ## classes and do not need to change anything here. +__func_body = """\ +def {name}{paramslist}: + if {obj} is None: + if not TurtleScreen._RUNNING: + TurtleScreen._RUNNING = True + raise Terminator + {obj} = {init} + try: + return {obj}.{name}{argslist} + except TK.TclError: + if not TurtleScreen._RUNNING: + TurtleScreen._RUNNING = True + raise Terminator + raise +""" -for methodname in _tg_screen_functions: - pl1, pl2 = getmethparlist(eval('_Screen.' + methodname)) - if pl1 == "": - print(">>>>>>", pl1, pl2) - continue - defstr = ("def %(key)s%(pl1)s: return _getscreen().%(key)s%(pl2)s" % - {'key':methodname, 'pl1':pl1, 'pl2':pl2}) - exec(defstr) - eval(methodname).__doc__ = _screen_docrevise(eval('_Screen.'+methodname).__doc__) - -for methodname in _tg_turtle_functions: - pl1, pl2 = getmethparlist(eval('Turtle.' 
+ methodname)) - if pl1 == "": - print(">>>>>>", pl1, pl2) - continue - defstr = ("def %(key)s%(pl1)s: return _getpen().%(key)s%(pl2)s" % - {'key':methodname, 'pl1':pl1, 'pl2':pl2}) - exec(defstr) - eval(methodname).__doc__ = _turtle_docrevise(eval('Turtle.'+methodname).__doc__) +def _make_global_funcs(functions, cls, obj, init, docrevise): + for methodname in functions: + method = getattr(cls, methodname) + pl1, pl2 = getmethparlist(method) + if pl1 == "": + print(">>>>>>", pl1, pl2) + continue + defstr = __func_body.format(obj=obj, init=init, name=methodname, + paramslist=pl1, argslist=pl2) + exec(defstr, globals()) + globals()[methodname].__doc__ = docrevise(method.__doc__) + +_make_global_funcs(_tg_screen_functions, _Screen, + 'Turtle._screen', 'Screen()', _screen_docrevise) +_make_global_funcs(_tg_turtle_functions, Turtle, + 'Turtle._pen', 'Turtle()', _turtle_docrevise) done = mainloop diff --git a/Lib/turtledemo/__main__.py b/Lib/turtledemo/__main__.py index 6280c84..106d058 100755 --- a/Lib/turtledemo/__main__.py +++ b/Lib/turtledemo/__main__.py @@ -344,6 +344,8 @@ class DemoWindow(object): else: self.state = DONE except turtle.Terminator: + if self.root is None: + return self.state = DONE result = "stopped!" if self.state == DONE: @@ -369,7 +371,9 @@ class DemoWindow(object): turtle.TurtleScreen._RUNNING = False def _destroy(self): + turtle.TurtleScreen._RUNNING = False self.root.destroy() + self.root = None def main(): diff --git a/Misc/NEWS b/Misc/NEWS index 37ff72e..826eab1 100644 --- a/Misc/NEWS +++ b/Misc/NEWS @@ -13,6 +13,9 @@ Core and Builtins Library ------- +- Issue #6639: Module-level turtle functions no longer raise TclError after + closing the window. + - Issues #814253, #9179: Warnings now are raised when group references and conditional group references are used in lookbehind assertions in regular expressions. -- cgit v0.12 From c26a1a490fd290fb40bf00a8014834484aeafdd4 Mon Sep 17 00:00:00 2001 From: Serhiy Storchaka Date: Mon, 23 Feb 2015 00:28:38 +0200 Subject: Broke reference loops in tests added in issue #5700. 
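
The loops come from the tests assigning a closure to the file object (f.flush = bad_flush) while bad_flush itself captures f, so the pair can only be reclaimed by the cyclic garbage collector. Resetting flush to a plain lambda breaks the cycle. A minimal illustration of the pattern, using a stand-in class rather than the real file objects from the tests, and assuming CPython's reference counting:

    import gc, weakref

    class FakeFile:                  # stand-in for the test's file object
        def flush(self):
            pass

    def make(break_loop):
        f = FakeFile()
        def bad_flush():
            return f                 # the closure captures f ...
        f.flush = bad_flush          # ... and f now refers back to the closure
        if break_loop:
            f.flush = lambda: None   # break reference loop
        return weakref.ref(f)

    gc.disable()
    print(make(False)() is None)     # False: the cycle keeps f alive
    print(make(True)() is None)      # True: freed immediately by refcounting
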
--- Lib/test/test_io.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/Lib/test/test_io.py b/Lib/test/test_io.py index 79cd87b..668b023 100644 --- a/Lib/test/test_io.py +++ b/Lib/test/test_io.py @@ -606,6 +606,7 @@ class IOTest(unittest.TestCase): self.assertTrue(f.closed) self.assertTrue(closed) # flush() called self.assertFalse(closed[0]) # flush() called before file closed + f.flush = lambda: None # break reference loop def test_flush_error_on_close(self): # raw file @@ -833,6 +834,7 @@ class CommonBufferedTests: self.assertTrue(closed) # flush() called self.assertFalse(closed[0]) # flush() called before file closed self.assertFalse(closed[1]) + raw.flush = lambda: None # break reference loop def test_close_error_on_close(self): raw = self.MockRawIO() @@ -2670,6 +2672,7 @@ class TextIOWrapperTest(unittest.TestCase): self.assertTrue(closed) # flush() called self.assertFalse(closed[0]) # flush() called before file closed self.assertFalse(closed[1]) + txt.flush = lambda: None # break reference loop def test_close_error_on_close(self): buffer = self.BytesIO(self.testdata) -- cgit v0.12 From 75790938a46784476389d3c7fbe14970f560d7bd Mon Sep 17 00:00:00 2001 From: Ned Deily Date: Sun, 22 Feb 2015 16:14:32 -0800 Subject: Issue #23499: Fix grammar error noticed by SilentGhost --- Doc/library/mimetypes.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Doc/library/mimetypes.rst b/Doc/library/mimetypes.rst index f836243..8739ea3 100644 --- a/Doc/library/mimetypes.rst +++ b/Doc/library/mimetypes.rst @@ -106,8 +106,8 @@ behavior of the module. extension is already known, the new type will replace the old one. When the type is already known the extension will be added to the list of known extensions. - When *strict* is ``True`` (the default), the mapping will added to the official MIME - types, otherwise to the non-standard ones. + When *strict* is ``True`` (the default), the mapping will be added to the + official MIME types, otherwise to the non-standard ones. .. data:: inited -- cgit v0.12 From b8064a8a16b63a929f94af300624bb239671ad82 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Mon, 23 Feb 2015 11:41:56 +0100 Subject: asyncio doc: close explicitly event loops --- Doc/library/asyncio-dev.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/Doc/library/asyncio-dev.rst b/Doc/library/asyncio-dev.rst index bf77a8f..d7f474e 100644 --- a/Doc/library/asyncio-dev.rst +++ b/Doc/library/asyncio-dev.rst @@ -212,6 +212,7 @@ Example of unhandled exception:: loop = asyncio.get_event_loop() asyncio.async(bug()) loop.run_forever() + loop.close() Output:: @@ -258,6 +259,7 @@ coroutine in another coroutine and use classic try/except:: loop = asyncio.get_event_loop() asyncio.async(handle_exception()) loop.run_forever() + loop.close() Another option is to use the :meth:`BaseEventLoop.run_until_complete` function:: -- cgit v0.12 From 7e91af3a91ad6fee49c2ed75309552a6b5afea52 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Wed, 25 Feb 2015 13:05:53 +0100 Subject: Remove unsupported (in latex) character from the source. 
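
The character being dropped is the literal U+FFFD glyph that this paragraph of codecs.rst used to illustrate the 'replace' error handler; only the glyph is removed, the documented behaviour is unchanged. For reference, a quick check of that behaviour (illustrative only, not part of the patch):

    # Encoding errors become '?', decoding errors become U+FFFD.
    print('caf\xe9'.encode('ascii', 'replace'))          # b'caf?'
    print(ascii(b'caf\xff'.decode('utf-8', 'replace')))  # 'caf\ufffd'
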
--- Doc/library/codecs.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Doc/library/codecs.rst b/Doc/library/codecs.rst index dd44cb2..19d7192 100644 --- a/Doc/library/codecs.rst +++ b/Doc/library/codecs.rst @@ -393,7 +393,7 @@ functions: Implements the ``'replace'`` error handling (for :term:`text encodings ` only): substitutes ``'?'`` for encoding errors (to be encoded by the codec), and ``'\ufffd'`` (the Unicode replacement - character, ``'�'``) for decoding errors. + character) for decoding errors. .. function:: ignore_errors(exception) -- cgit v0.12 From 615a58eaabde8df2db51b435d712733bc9ce5a86 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Wed, 25 Feb 2015 13:55:43 +0100 Subject: asyncio doc: move queues to a new page --- Doc/library/asyncio-queue.rst | 167 ++++++++++++++++++++++++++++++++++++++++ Doc/library/asyncio-sync.rst | 172 ++---------------------------------------- Doc/library/asyncio.rst | 1 + 3 files changed, 174 insertions(+), 166 deletions(-) create mode 100644 Doc/library/asyncio-queue.rst diff --git a/Doc/library/asyncio-queue.rst b/Doc/library/asyncio-queue.rst new file mode 100644 index 0000000..19957a0 --- /dev/null +++ b/Doc/library/asyncio-queue.rst @@ -0,0 +1,167 @@ +.. currentmodule:: asyncio + +Queues +====== + +Queues: + +* :class:`Queue` +* :class:`PriorityQueue` +* :class:`LifoQueue` +* :class:`JoinableQueue` + +asyncio queue API was designed to be close to classes of the :mod:`queue` +module (:class:`~queue.Queue`, :class:`~queue.PriorityQueue`, +:class:`~queue.LifoQueue`), but it has no *timeout* parameter. The +:func:`asyncio.wait_for` function can be used to cancel a task after a timeout. + +Queue +----- + +.. class:: Queue(maxsize=0, \*, loop=None) + + A queue, useful for coordinating producer and consumer coroutines. + + If *maxsize* is less than or equal to zero, the queue size is infinite. If + it is an integer greater than ``0``, then ``yield from put()`` will block + when the queue reaches *maxsize*, until an item is removed by :meth:`get`. + + Unlike the standard library :mod:`queue`, you can reliably know this Queue's + size with :meth:`qsize`, since your single-threaded asyncio application won't + be interrupted between calling :meth:`qsize` and doing an operation on the + Queue. + + .. versionchanged:: 3.4.3 + New :meth:`join` and :meth:`task_done` methods. + + .. method:: empty() + + Return ``True`` if the queue is empty, ``False`` otherwise. + + .. method:: full() + + Return ``True`` if there are :attr:`maxsize` items in the queue. + + .. note:: + + If the Queue was initialized with ``maxsize=0`` (the default), then + :meth:`full()` is never ``True``. + + .. coroutinemethod:: get() + + Remove and return an item from the queue. If queue is empty, wait until + an item is available. + + This method is a :ref:`coroutine `. + + .. seealso:: + + The :meth:`empty` method. + + .. method:: get_nowait() + + Remove and return an item from the queue. + + Return an item if one is immediately available, else raise + :exc:`QueueEmpty`. + + .. coroutinemethod:: join() + + Block until all items in the queue have been gotten and processed. + + The count of unfinished tasks goes up whenever an item is added to the + queue. The count goes down whenever a consumer thread calls + :meth:`task_done` to indicate that the item was retrieved and all work on + it is complete. When the count of unfinished tasks drops to zero, + :meth:`join` unblocks. + + This method is a :ref:`coroutine `. + + .. versionadded:: 3.4.3 + + .. 
coroutinemethod:: put(item) + + Put an item into the queue. If the queue is full, wait until a free slot + is available before adding item. + + This method is a :ref:`coroutine `. + + .. seealso:: + + The :meth:`full` method. + + .. method:: put_nowait(item) + + Put an item into the queue without blocking. + + If no free slot is immediately available, raise :exc:`QueueFull`. + + .. method:: qsize() + + Number of items in the queue. + + .. method:: task_done() + + Indicate that a formerly enqueued task is complete. + + Used by queue consumers. For each :meth:`~Queue.get` used to fetch a task, a + subsequent call to :meth:`task_done` tells the queue that the processing + on the task is complete. + + If a :meth:`join` is currently blocking, it will resume when all items + have been processed (meaning that a :meth:`task_done` call was received + for every item that had been :meth:`~Queue.put` into the queue). + + Raises :exc:`ValueError` if called more times than there were items + placed in the queue. + + .. versionadded:: 3.4.3 + + .. attribute:: maxsize + + Number of items allowed in the queue. + + +PriorityQueue +------------- + +.. class:: PriorityQueue + + A subclass of :class:`Queue`; retrieves entries in priority order (lowest + first). + + Entries are typically tuples of the form: (priority number, data). + + +LifoQueue +--------- + +.. class:: LifoQueue + + A subclass of :class:`Queue` that retrieves most recently added entries + first. + + +JoinableQueue +^^^^^^^^^^^^^ + +.. class:: JoinableQueue + + Deprecated alias for :class:`Queue`. + + .. deprecated:: 3.4.3 + + +Exceptions +^^^^^^^^^^ + +.. exception:: QueueEmpty + + Exception raised when the :meth:`~Queue.get_nowait` method is called on a + :class:`Queue` object which is empty. + + +.. exception:: QueueFull + + Exception raised when the :meth:`~Queue.put_nowait` method is called on a + :class:`Queue` object which is full. diff --git a/Doc/library/asyncio-sync.rst b/Doc/library/asyncio-sync.rst index e3d82b0..f53c7d0 100644 --- a/Doc/library/asyncio-sync.rst +++ b/Doc/library/asyncio-sync.rst @@ -9,22 +9,16 @@ Locks: * :class:`Lock` * :class:`Event` * :class:`Condition` -* :class:`Semaphore` -* :class:`BoundedSemaphore` -Queues: +Semaphores: -* :class:`Queue` -* :class:`PriorityQueue` -* :class:`LifoQueue` -* :class:`JoinableQueue` +* :class:`Semaphore` +* :class:`BoundedSemaphore` -asyncio locks and queues API were designed to be close to classes of the -:mod:`threading` module (:class:`~threading.Lock`, :class:`~threading.Event`, +asyncio lock API was designed to be close to classes of the :mod:`threading` +module (:class:`~threading.Lock`, :class:`~threading.Event`, :class:`~threading.Condition`, :class:`~threading.Semaphore`, -:class:`~threading.BoundedSemaphore`) and the :mod:`queue` module -(:class:`~queue.Queue`, :class:`~queue.PriorityQueue`, -:class:`~queue.LifoQueue`), but they have no *timeout* parameter. The +:class:`~threading.BoundedSemaphore`), but it has no *timeout* parameter. The :func:`asyncio.wait_for` function can be used to cancel a task after a timeout. Locks @@ -290,157 +284,3 @@ BoundedSemaphore This raises :exc:`ValueError` in :meth:`~Semaphore.release` if it would increase the value above the initial value. - -Queues ------- - -Queue -^^^^^ - -.. class:: Queue(maxsize=0, \*, loop=None) - - A queue, useful for coordinating producer and consumer coroutines. - - If *maxsize* is less than or equal to zero, the queue size is infinite. 
If - it is an integer greater than ``0``, then ``yield from put()`` will block - when the queue reaches *maxsize*, until an item is removed by :meth:`get`. - - Unlike the standard library :mod:`queue`, you can reliably know this Queue's - size with :meth:`qsize`, since your single-threaded asyncio application won't - be interrupted between calling :meth:`qsize` and doing an operation on the - Queue. - - .. versionchanged:: 3.4.3 - New :meth:`join` and :meth:`task_done` methods. - - .. method:: empty() - - Return ``True`` if the queue is empty, ``False`` otherwise. - - .. method:: full() - - Return ``True`` if there are :attr:`maxsize` items in the queue. - - .. note:: - - If the Queue was initialized with ``maxsize=0`` (the default), then - :meth:`full()` is never ``True``. - - .. coroutinemethod:: get() - - Remove and return an item from the queue. If queue is empty, wait until - an item is available. - - This method is a :ref:`coroutine `. - - .. seealso:: - - The :meth:`empty` method. - - .. method:: get_nowait() - - Remove and return an item from the queue. - - Return an item if one is immediately available, else raise - :exc:`QueueEmpty`. - - .. coroutinemethod:: join() - - Block until all items in the queue have been gotten and processed. - - The count of unfinished tasks goes up whenever an item is added to the - queue. The count goes down whenever a consumer thread calls - :meth:`task_done` to indicate that the item was retrieved and all work on - it is complete. When the count of unfinished tasks drops to zero, - :meth:`join` unblocks. - - This method is a :ref:`coroutine `. - - .. versionadded:: 3.4.3 - - .. coroutinemethod:: put(item) - - Put an item into the queue. If the queue is full, wait until a free slot - is available before adding item. - - This method is a :ref:`coroutine `. - - .. seealso:: - - The :meth:`full` method. - - .. method:: put_nowait(item) - - Put an item into the queue without blocking. - - If no free slot is immediately available, raise :exc:`QueueFull`. - - .. method:: qsize() - - Number of items in the queue. - - .. method:: task_done() - - Indicate that a formerly enqueued task is complete. - - Used by queue consumers. For each :meth:`~Queue.get` used to fetch a task, a - subsequent call to :meth:`task_done` tells the queue that the processing - on the task is complete. - - If a :meth:`join` is currently blocking, it will resume when all items - have been processed (meaning that a :meth:`task_done` call was received - for every item that had been :meth:`~Queue.put` into the queue). - - Raises :exc:`ValueError` if called more times than there were items - placed in the queue. - - .. versionadded:: 3.4.3 - - .. attribute:: maxsize - - Number of items allowed in the queue. - - -PriorityQueue -^^^^^^^^^^^^^ - -.. class:: PriorityQueue - - A subclass of :class:`Queue`; retrieves entries in priority order (lowest - first). - - Entries are typically tuples of the form: (priority number, data). - - -LifoQueue -^^^^^^^^^ - -.. class:: LifoQueue - - A subclass of :class:`Queue` that retrieves most recently added entries - first. - - -JoinableQueue -^^^^^^^^^^^^^ - -.. class:: JoinableQueue - - Deprecated alias for :class:`Queue`. - - .. deprecated:: 3.4.3 - - -Exceptions -^^^^^^^^^^ - -.. exception:: QueueEmpty - - Exception raised when the :meth:`~Queue.get_nowait` method is called on a - :class:`Queue` object which is empty. - - -.. 
exception:: QueueFull - - Exception raised when the :meth:`~Queue.put_nowait` method is called on a - :class:`Queue` object which is full. diff --git a/Doc/library/asyncio.rst b/Doc/library/asyncio.rst index 6900198..117721c 100644 --- a/Doc/library/asyncio.rst +++ b/Doc/library/asyncio.rst @@ -58,6 +58,7 @@ Table of contents: asyncio-stream.rst asyncio-subprocess.rst asyncio-sync.rst + asyncio-queue.rst asyncio-dev.rst .. seealso:: -- cgit v0.12 From 532c69a4280185de3faf198776b6d5d75bdec095 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Wed, 25 Feb 2015 14:23:51 +0100 Subject: asyncio doc: begin with warnings on asyncio traps --- Doc/library/asyncio.rst | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/Doc/library/asyncio.rst b/Doc/library/asyncio.rst index 117721c..9b4d65e 100644 --- a/Doc/library/asyncio.rst +++ b/Doc/library/asyncio.rst @@ -46,6 +46,11 @@ Here is a more detailed list of the package contents: you absolutely, positively have to use a library that makes blocking I/O calls. +Asynchronous programming is more complex than classical "sequential" +programming: see the :ref:`Develop with asyncio ` page which lists +common traps and explains how to avoid them. :ref:`Enable the debug mode +` during development to detect common issues. + Table of contents: .. toctree:: -- cgit v0.12 From 83704963c0d4e7b1474d6102ed6287a7ae4907a8 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Wed, 25 Feb 2015 14:24:15 +0100 Subject: asyncio: add a note about (non) thread safety in each class --- Doc/library/asyncio-eventloop.rst | 5 +++++ Doc/library/asyncio-protocol.rst | 2 ++ Doc/library/asyncio-queue.rst | 2 ++ Doc/library/asyncio-stream.rst | 4 ++++ Doc/library/asyncio-subprocess.rst | 5 +++++ Doc/library/asyncio-task.rst | 4 ++++ 6 files changed, 22 insertions(+) diff --git a/Doc/library/asyncio-eventloop.rst b/Doc/library/asyncio-eventloop.rst index f2c8945..d27eb4b 100644 --- a/Doc/library/asyncio-eventloop.rst +++ b/Doc/library/asyncio-eventloop.rst @@ -22,6 +22,8 @@ It provides multiple facilities, amongst which: Base class of event loops. + This class is :ref:`not thread safe `. + Run an event loop ----------------- @@ -104,6 +106,9 @@ keywords to your callback, use :func:`functools.partial`. For example, Like :meth:`call_soon`, but thread safe. + See the :ref:`concurrency and multithreading ` + section of the documentation. + .. _asyncio-delayed-calls: diff --git a/Doc/library/asyncio-protocol.rst b/Doc/library/asyncio-protocol.rst index b6fcc48..2e671e8 100644 --- a/Doc/library/asyncio-protocol.rst +++ b/Doc/library/asyncio-protocol.rst @@ -23,6 +23,8 @@ then call the transport's methods for various purposes. subprocess pipes. The methods available on a transport depend on the transport's kind. +The transport classes are :ref:`not thread safe `. + BaseTransport ------------- diff --git a/Doc/library/asyncio-queue.rst b/Doc/library/asyncio-queue.rst index 19957a0..c82e08b 100644 --- a/Doc/library/asyncio-queue.rst +++ b/Doc/library/asyncio-queue.rst @@ -31,6 +31,8 @@ Queue be interrupted between calling :meth:`qsize` and doing an operation on the Queue. + This class is :ref:`not thread safe `. + .. versionchanged:: 3.4.3 New :meth:`join` and :meth:`task_done` methods. diff --git a/Doc/library/asyncio-stream.rst b/Doc/library/asyncio-stream.rst index 22b7341..41b24ac 100644 --- a/Doc/library/asyncio-stream.rst +++ b/Doc/library/asyncio-stream.rst @@ -85,6 +85,8 @@ StreamReader .. class:: StreamReader(limit=None, loop=None) + This class is :ref:`not thread safe `. + .. 
method:: exception() Get the exception. @@ -155,6 +157,8 @@ StreamWriter wait for flow control. It also adds a transport attribute which references the :class:`Transport` directly. + This class is :ref:`not thread safe `. + .. attribute:: transport Transport. diff --git a/Doc/library/asyncio-subprocess.rst b/Doc/library/asyncio-subprocess.rst index 1334f5b..1b82030 100644 --- a/Doc/library/asyncio-subprocess.rst +++ b/Doc/library/asyncio-subprocess.rst @@ -193,6 +193,9 @@ Process :meth:`~subprocess.Popen.wait` method of the :class:`~subprocess.Popen` class is implemented as a busy loop. + This class is :ref:`not thread safe `. See also the + :ref:`Subprocess and threads ` section. + .. coroutinemethod:: wait() Wait for child process to terminate. Set and return :attr:`returncode` @@ -310,6 +313,8 @@ are limits: subprocesses from other threads. Call the :func:`get_child_watcher` function in the main thread to instantiate the child watcher. +The :class:`asyncio.subprocess.Process` class is not thread safe. + .. seealso:: The :ref:`Concurrency and multithreading in asyncio diff --git a/Doc/library/asyncio-task.rst b/Doc/library/asyncio-task.rst index edc05c3..158a0d8 100644 --- a/Doc/library/asyncio-task.rst +++ b/Doc/library/asyncio-task.rst @@ -209,6 +209,8 @@ Future :func:`~concurrent.futures.as_completed` functions in the :mod:`concurrent.futures` package. + This class is :ref:`not thread safe `. + .. method:: cancel() Cancel the future and schedule callbacks. @@ -375,6 +377,8 @@ Task Don't directly create :class:`Task` instances: use the :func:`async` function or the :meth:`BaseEventLoop.create_task` method. + This class is :ref:`not thread safe `. + .. classmethod:: all_tasks(loop=None) Return a set of all tasks for an event loop. -- cgit v0.12 From b808d590a24066bc03d21b55ed5e890a012477a8 Mon Sep 17 00:00:00 2001 From: Benjamin Peterson Date: Wed, 25 Feb 2015 10:12:26 -0500 Subject: fix merge_collapse to actually maintain the invariant it purports to (closes #23515) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See de Gouw, Stijn and Rot, Jurriaan and de Boer, Frank S and Bubel, Richard and Hähnle, Reiner "OpenJDK’s java.utils.Collection.sort() is broken: The good, the bad and the worst case" --- Objects/listobject.c | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Objects/listobject.c b/Objects/listobject.c index fd5a72a..b6c1d78 100644 --- a/Objects/listobject.c +++ b/Objects/listobject.c @@ -1832,7 +1832,8 @@ merge_collapse(MergeState *ms) assert(ms); while (ms->n > 1) { Py_ssize_t n = ms->n - 2; - if (n > 0 && p[n-1].len <= p[n].len + p[n+1].len) { + if ((n > 0 && p[n-1].len <= p[n].len + p[n+1].len) || + (n > 1 && p[n-2].len <= p[n-1].len + p[n].len)) { if (p[n-1].len < p[n+1].len) --n; if (merge_at(ms, n) < 0) -- cgit v0.12 From f9e3cf1f9f6eaefba19593783e1cdf7681d1fa65 Mon Sep 17 00:00:00 2001 From: Berker Peksag Date: Wed, 25 Feb 2015 18:14:09 +0200 Subject: Issue #23511: Port email-simple.py to Python 3. Also, update email examples to use the context manager version of open(). Patch by Baptiste Mispelon. 
--- Doc/includes/email-headers.py | 5 +++-- Doc/includes/email-mime.py | 5 ++--- Doc/includes/email-read-alternative-new-api.py | 3 ++- Doc/includes/email-simple.py | 7 +++---- 4 files changed, 10 insertions(+), 10 deletions(-) diff --git a/Doc/includes/email-headers.py b/Doc/includes/email-headers.py index a53317d..89c8f3a 100644 --- a/Doc/includes/email-headers.py +++ b/Doc/includes/email-headers.py @@ -1,8 +1,9 @@ # Import the email modules we'll need from email.parser import Parser -# If the e-mail headers are in a file, uncomment this line: -#headers = Parser().parse(open(messagefile, 'r')) +# If the e-mail headers are in a file, uncomment these two lines: +# with open(messagefile) as fp: +# headers = Parser().parse(fp) # Or for parsing headers in a string, use: headers = Parser().parsestr('From: \n' diff --git a/Doc/includes/email-mime.py b/Doc/includes/email-mime.py index a90edc1..61d0830 100644 --- a/Doc/includes/email-mime.py +++ b/Doc/includes/email-mime.py @@ -20,9 +20,8 @@ msg.preamble = 'Our family reunion' for file in pngfiles: # Open the files in binary mode. Let the MIMEImage class automatically # guess the specific image type. - fp = open(file, 'rb') - img = MIMEImage(fp.read()) - fp.close() + with open(file, 'rb') as fp: + img = MIMEImage(fp.read()) msg.attach(img) # Send the email via our own SMTP server. diff --git a/Doc/includes/email-read-alternative-new-api.py b/Doc/includes/email-read-alternative-new-api.py index 8ab4e9f..3f5ab24 100644 --- a/Doc/includes/email-read-alternative-new-api.py +++ b/Doc/includes/email-read-alternative-new-api.py @@ -12,7 +12,8 @@ from email.parser import BytesParser from imaginary import magic_html_parser # In a real program you'd get the filename from the arguments. -msg = BytesParser(policy=policy.default).parse(open('outgoing.msg', 'rb')) +with open('outgoing.msg', 'rb') as fp: + msg = BytesParser(policy=policy.default).parse(fp) # Now the header items can be accessed as a dictionary, and any non-ASCII will # be converted to unicode: diff --git a/Doc/includes/email-simple.py b/Doc/includes/email-simple.py index 077568d..b9b8b41 100644 --- a/Doc/includes/email-simple.py +++ b/Doc/includes/email-simple.py @@ -6,10 +6,9 @@ from email.mime.text import MIMEText # Open a plain text file for reading. For this example, assume that # the text file contains only ASCII characters. 
-fp = open(textfile, 'rb') -# Create a text/plain message -msg = MIMEText(fp.read()) -fp.close() +with open(textfile) as fp: + # Create a text/plain message + msg = MIMEText(fp.read()) # me == the sender's email address # you == the recipient's email address -- cgit v0.12 From ed135f46ec3dc41413402b84cedd4fd39f24fe1a Mon Sep 17 00:00:00 2001 From: Benjamin Peterson Date: Wed, 25 Feb 2015 16:47:14 -0500 Subject: remove unused import --- Lib/test/test_statistics.py | 1 - 1 file changed, 1 deletion(-) diff --git a/Lib/test/test_statistics.py b/Lib/test/test_statistics.py index f1da21e..758a481 100644 --- a/Lib/test/test_statistics.py +++ b/Lib/test/test_statistics.py @@ -9,7 +9,6 @@ import doctest import math import random import sys -import types import unittest from decimal import Decimal -- cgit v0.12 From 7b2c3c6840052ea6f8b41253faf38b9e24f9a453 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Thu, 26 Feb 2015 10:39:16 +0100 Subject: asyncio doc: lock classes are not thread safe --- Doc/library/asyncio-sync.rst | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/Doc/library/asyncio-sync.rst b/Doc/library/asyncio-sync.rst index f53c7d0..622ff5fd 100644 --- a/Doc/library/asyncio-sync.rst +++ b/Doc/library/asyncio-sync.rst @@ -54,6 +54,8 @@ Lock Locks also support the context management protocol. ``(yield from lock)`` should be used as context manager expression. + This class is :ref:`not thread safe `. + Usage:: lock = Lock() @@ -117,6 +119,8 @@ Event method. The :meth:`wait` method blocks until the flag is true. The flag is initially false. + This class is :ref:`not thread safe `. + .. method:: clear() Reset the internal flag to false. Subsequently, coroutines calling @@ -160,6 +164,8 @@ Condition object, and it is used as the underlying lock. Otherwise, a new :class:`Lock` object is created and used as the underlying lock. + This class is :ref:`not thread safe `. + .. coroutinemethod:: acquire() Acquire the underlying lock. @@ -252,6 +258,8 @@ Semaphore defaults to ``1``. If the value given is less than ``0``, :exc:`ValueError` is raised. + This class is :ref:`not thread safe `. + .. coroutinemethod:: acquire() Acquire a semaphore. @@ -279,8 +287,8 @@ BoundedSemaphore .. class:: BoundedSemaphore(value=1, \*, loop=None) - A bounded semaphore implementation. Inherit from :class:`Semaphore`. + A bounded semaphore implementation. Inherit from :class:`Semaphore`. - This raises :exc:`ValueError` in :meth:`~Semaphore.release` if it would - increase the value above the initial value. + This raises :exc:`ValueError` in :meth:`~Semaphore.release` if it would + increase the value above the initial value. -- cgit v0.12
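The patches above mark asyncio's Lock, Event, Condition, Semaphore, BoundedSemaphore and Queue classes as *not thread safe*, and asyncio-sync.rst documents ``(yield from lock)`` as the context-manager expression for Lock. A minimal sketch of that pattern, using the 3.4-era generator-based coroutine syntax that the patched documentation assumes; the names here (``worker``, ``shared``) are illustrative only and do not appear in any patch in this series::

    import asyncio

    @asyncio.coroutine
    def worker(name, lock, shared):
        # "(yield from lock)" acquires the lock and releases it when the
        # "with" block exits, as described in asyncio-sync.rst.
        with (yield from lock):
            shared.append(name)
            yield from asyncio.sleep(0.01)

    loop = asyncio.get_event_loop()
    lock = asyncio.Lock(loop=loop)
    shared = []

    # Everything here runs in the single thread that owns the event loop,
    # which is why these classes can be documented as "not thread safe".
    # Code running in another thread should hand work to the loop through
    # loop.call_soon_threadsafe() instead of touching the lock directly.
    loop.run_until_complete(asyncio.gather(
        worker('a', lock, shared), worker('b', lock, shared), loop=loop))
    loop.close()
    print(shared)

The same single-thread caveat underlies the new asyncio-queue.rst page: ``qsize()`` is reliable precisely because no other thread can mutate the queue between the size check and the following operation, and work submitted from other threads should go through ``call_soon_threadsafe()`` as noted in asyncio-eventloop.rst.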